Columns: message (string, length 13 to 484), diff (string, length 38 to 4.63k)
Add flip commands Closes
@@ -134,6 +134,10 @@ class Bsp(Layout): Key([mod, "shift"], "k", lazy.layout.shuffle_up()), Key([mod, "shift"], "h", lazy.layout.shuffle_left()), Key([mod, "shift"], "l", lazy.layout.shuffle_right()), + Key([mod, "mod1"], "j", lazy.layout.flip_down()), + Key([mod, "mod1"], "k", lazy.layout.flip_up()), + Key([mod, "mod1"], "h", lazy.layout.flip_left()), + Key([mod, "mod1"], "l", lazy.layout.flip_right()), Key([mod, "control"], "j", lazy.layout.grow_down()), Key([mod, "control"], "k", lazy.layout.grow_up()), Key([mod, "control"], "h", lazy.layout.grow_left()), @@ -451,6 +455,50 @@ class Bsp(Layout): child = parent parent = child.parent + def cmd_flip_left(self): + child = self.current + parent = child.parent + while parent: + if parent.split_horizontal and child is parent.children[1]: + parent.children = parent.children[::-1] + self.group.layoutAll() + break + child = parent + parent = child.parent + + def cmd_flip_right(self): + child = self.current + parent = child.parent + while parent: + if parent.split_horizontal and child is parent.children[0]: + parent.children = parent.children[::-1] + self.group.layoutAll() + break + child = parent + parent = child.parent + + def cmd_flip_up(self): + child = self.current + parent = child.parent + while parent: + if not parent.split_horizontal and child is parent.children[1]: + parent.children = parent.children[::-1] + self.group.layoutAll() + break + child = parent + parent = child.parent + + def cmd_flip_down(self): + child = self.current + parent = child.parent + while parent: + if not parent.split_horizontal and child is parent.children[0]: + parent.children = parent.children[::-1] + self.group.layoutAll() + break + child = parent + parent = child.parent + def cmd_normalize(self): distribute = True for node in self.root:
VHD transformer accepts parameters of the platform. This makes the configuration of the VHD transformer simpler.
@@ -65,7 +65,7 @@ class VhdTransformerSchema(schema.Transformer): public_port: int = 22 username: str = constants.DEFAULT_USER_NAME password: str = "" - private_key_file: str = field(default="", metadata=schema.metadata(required=True)) + private_key_file: str = "" # values for exported vhd. storage_account_name is optional, because it can # be the default storage of LISA. @@ -122,6 +122,15 @@ class VhdTransformer(Transformer): platform, virtual_machine ) + platform_runbook: schema.Platform = platform.runbook + + if not runbook.username: + runbook.username = platform_runbook.admin_username + if not runbook.password: + runbook.password = platform_runbook.admin_password + if not runbook.private_key_file: + runbook.private_key_file = platform_runbook.admin_private_key_file + node_runbook = schema.RemoteNode( name=runbook.vm_name, public_address=runbook.public_address,
Mention gitter cirqdev in readme Fixes:
@@ -77,6 +77,7 @@ We use `Github issues <https://github.com/quantumlib/Cirq/issues>`__ for tracking requests and bugs. Please post questions to the `Quantum Computing Stack Exchange <https://quantumcomputing.stackexchange.com/>`__ with a 'cirq' tag. +For informal discussions about Cirq, join our `cirqdev <https://gitter.im/cirqdev>`__ Gitter channel. See Also --------
use correct batching util in custom_vjp_call_jaxpr fixes
@@ -654,7 +654,7 @@ def _custom_vjp_call_jaxpr_vmap( fwd_args_batched = [0 if b else not_mapped for b in args_batched] fwd_out_dims = lambda: out_dims2[0] - batched_bwd = batching.batch(bwd, axis_name, axis_size, fwd_out_dims, + batched_bwd = batching.batch_custom_vjp_bwd(bwd, axis_name, axis_size, fwd_out_dims, fwd_args_batched) batched_outs = custom_vjp_call_jaxpr_p.bind(
Grammar Changes Some simple grammatical changes.
@@ -50,7 +50,7 @@ Installation * `pip install pyspider` * run command `pyspider`, visit [http://localhost:5000/](http://localhost:5000/) -**WARNING:** WebUI is opened to public by default, it can be used to execute any command which may harm to you system. Please use it in internal network or [enable `need-auth` for webui](http://docs.pyspider.org/en/latest/Command-Line/#-config). +**WARNING:** WebUI is open to the public by default, it can be used to execute any command which may harm your system. Please use it in an internal network or [enable `need-auth` for webui](http://docs.pyspider.org/en/latest/Command-Line/#-config). Quickstart: [http://docs.pyspider.org/en/latest/Quickstart/](http://docs.pyspider.org/en/latest/Quickstart/)
Add / improve CircuitOperation memoizing Small change to cache control keys in circuit operations
@@ -96,6 +96,9 @@ class CircuitOperation(ops.Operation): _cached_measurement_key_objs: Optional[AbstractSet['cirq.MeasurementKey']] = dataclasses.field( default=None, init=False ) + _cached_control_keys: Optional[AbstractSet['cirq.MeasurementKey']] = dataclasses.field( + default=None, init=False + ) circuit: 'cirq.FrozenCircuit' repetitions: int = 1 @@ -208,9 +211,14 @@ class CircuitOperation(ops.Operation): return {str(key) for key in self._measurement_key_objs_()} def _control_keys_(self) -> AbstractSet['cirq.MeasurementKey']: - if not protocols.control_keys(self.circuit): - return frozenset() - return protocols.control_keys(self.mapped_circuit()) + if self._cached_control_keys is None: + keys = ( + frozenset() + if not protocols.control_keys(self.circuit) + else protocols.control_keys(self.mapped_circuit()) + ) + object.__setattr__(self, '_cached_control_keys', keys) + return self._cached_control_keys # type: ignore def _parameter_names_(self) -> AbstractSet[str]: return {
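The memoization above writes a computed value back onto a frozen dataclass via `object.__setattr__`. A minimal sketch of that pattern in isolation (the `Op` class and its `keys()` computation are made-up stand-ins, not Cirq code):

```python
from dataclasses import dataclass, field
from typing import FrozenSet, Optional


@dataclass(frozen=True)
class Op:
    payload: str
    # Cache slot: excluded from __init__ and comparisons, filled lazily on first access.
    _cached_keys: Optional[FrozenSet[str]] = field(default=None, init=False, compare=False)

    def keys(self) -> FrozenSet[str]:
        if self._cached_keys is None:
            keys = frozenset(self.payload.split())  # stand-in for the expensive computation
            # frozen=True blocks normal assignment, so bypass it deliberately.
            object.__setattr__(self, "_cached_keys", keys)
        return self._cached_keys


op = Op("a b a")
assert op.keys() == frozenset({"a", "b"})  # later calls return the cached frozenset
```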
Fix GitHub repo link constant Previous version was pointing to Python, not SeasonalBot
@@ -85,7 +85,7 @@ class Client(NamedTuple): token = environ.get("SEASONALBOT_TOKEN") sentry_dsn = environ.get("SEASONALBOT_SENTRY_DSN") debug = environ.get("SEASONALBOT_DEBUG", "").lower() == "true" - github_bot_repo = "https://github.com/python-discord/bot" + github_bot_repo = "https://github.com/python-discord/seasonalbot" # Override seasonal locks: 1 (January) to 12 (December) month_override = int(environ["MONTH_OVERRIDE"]) if "MONTH_OVERRIDE" in environ else None
Metadata API: improve module documentation Clarify the purpose of the metadata API: it's a low-level API and as such it doesn't use concepts like "repository" or "trusted collection of metadata" and doesn't implement the repository logic or client updater workflow.
"""TUF role metadata model. -This module provides container classes for TUF role metadata, including methods -to read and write from and to file, perform TUF-compliant metadata updates, and -create and verify signatures. +This module contains low-level API through container classes for TUF role +metadata. The API aims to provide: + +* Safe de/serialization of metadata to and from files. +* Access to and modification of signed metadata content. +* Signing metadata and verifying signatures. + +Each of the top level metadata roles is an instance of the Metadata[T] class +where the "signed" portion of each of the roles (or the "T") is an instance +of one of the classes Root, Timestamp, Snapshot or Targets. +For example, Metadata[Root] represents the TUF root role and that in practice +means that this is a Metadata object with a signed attribute of type Root. + +Additionally, there are helper classes providing abstractions over the complex +metadata fields inside the four top level classes - Root, Timestamp, Snapshot +and Targets. + +Note: the metadata module provides a low-level API and as such it doesn't use +concepts like "repository" or "trusted collection of metadata". +In this file there is no implementation of the repository-side logic or client +update workflows, but instead it provides solid base for other components to do +so. The metadata model supports any custom serialization format, defaulting to JSON as wireline format and Canonical JSON for reproducible signature creation and
Update DatabaseConnector.py Updated to be able to execute in the virtual environment
@@ -13,9 +13,8 @@ database.addListener("publishDocument","python","onDocument") database.setIdField("actor_id") database.setSql("select actor_id, first_name, last_name from actor") +if ('virtual' in globals() and virtual) # start crawling database.startCrawling() - sleep(5) - database.stopCrawling()
C API: fix signature of array method arguments in header This reverts commit (which was wrong) and renames the C structure used to bind arrays. The renaming will hopefully remove the confusion that led to the above commit. TN:
<%def name="incomplete_decl(cls)"> <% type_name = cls.c_type(capi).name %> -typedef struct ${type_name} *${type_name}; +typedef struct ${type_name}_record *${type_name}; </%def> <%def name="decl(cls)"> @@ -10,23 +10,23 @@ typedef struct ${type_name} *${type_name}; <% type_name = cls.c_type(capi).name %> ${c_doc(cls)} -struct ${type_name} { +struct ${type_name}_record { int n; int ref_count; ${cls.element_type.c_type(capi).name} items[1]; }; /* Create a length-sized array. */ -extern ${type_name} * +extern ${type_name} ${cls.c_create(capi)}(int length); /* Increment the ref-count for "a". */ extern void -${cls.c_inc_ref(capi)}(${type_name} *a); +${cls.c_inc_ref(capi)}(${type_name} a); /* Decrement the ref-count for "a". This deallocates it if the ref-count drops to 0. */ extern void -${cls.c_dec_ref(capi)}(${type_name} *a); +${cls.c_dec_ref(capi)}(${type_name} a); </%def>
Apply suggestions from code review Many thanks for the corrections!
@@ -49,7 +49,7 @@ Aazra and Rui are teammates competing in a pirate-themed treasure hunt. <br> But things are a bit disorganized: Azara's coordinates appear to be formatted and sorted differently from Rui's, and they have to keep looking from one list to the other to figure out which treasures go with which locations. - Being budding pythonistas, they've come to you for help in writing a small program (a set of functions, really) to better organize their hunt information. + Being budding pythonistas, they have come to you for help in writing a small program (a set of functions, really) to better organize their hunt information. ## 1. Extract coordinates @@ -87,7 +87,7 @@ True ## 4. Combine matched records -Implement the `create_record()` function that takes a `(treasure, coordinate)` pair from Azaras list and a `(location, coordinate, quadrant)` record from Ruis list and returns `(treasure, coordinate, location, coordinate, quadrant)` **if the coordinates match**. If the coordinates _do not_ match, return the string **"not a match"** +Implement the `create_record()` function that takes a `(treasure, coordinate)` pair from Azaras list and a `(location, coordinate, quadrant)` record from Ruis' list and returns `(treasure, coordinate, location, coordinate, quadrant)` **if the coordinates match**. If the coordinates _do not_ match, return the string **"not a match"** Re-format the coordinate as needed for accurate comparison.
add example for intersection of a single list This is to foresee in the future via doctests.
@@ -716,6 +716,9 @@ def intersection(array, *others): >>> intersection([1, 2, 3], [1, 2, 3, 4, 5], [2, 3]) [2, 3] + >>> intersection([1, 2, 3]) + [1, 2, 3] + .. versionadded:: 1.0.0 .. versionchanged:: 4.0.0
[IMPR] Improvements for askForHints (4) combine checks
@@ -1167,16 +1167,13 @@ class Subject(interwiki_graph.Subject): def askForHints(self, counter): """Ask for hints to other sites.""" - if not self.workonme: # we don't work on it anyway - return - - if not ( - (self.untranslated or self.conf.askhints) - and not self.hintsAsked - and self.originPage - and self.originPage.exists() - and not self.originPage.isRedirectPage() - and not self.originPage.isCategoryRedirect()): + if (not self.workonme # we don't work on it anyway + or not self.untranslated and not self.conf.askhints + or self.hintsAsked + or not self.originPage + or not self.originPage.exists() + or self.originPage.isRedirectPage() + or self.originPage.isCategoryRedirect()): return self.hintsAsked = True @@ -1189,8 +1186,10 @@ class Subject(interwiki_graph.Subject): while True: newhint = pywikibot.input('Give a hint (? to see pagetext):') + if not newhint: break + if newhint == '?': t += self.conf.showtextlinkadd pywikibot.output(self.originPage.get()[:t])
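The refactor above collapses `if not A: return` plus `if not (B and C): return` into one OR-chain of negated terms, which is just De Morgan's law. A quick equivalence check on a simplified four-flag version of the condition (the real method also checks the origin page's existence and redirect status):

```python
from itertools import product


def original(workonme, untranslated, askhints, hints_asked):
    # old shape: two early returns
    if not workonme:
        return "skip"
    if not ((untranslated or askhints) and not hints_asked):
        return "skip"
    return "ask"


def combined(workonme, untranslated, askhints, hints_asked):
    # new shape: one OR-chain of negated conditions
    if (not workonme
            or not untranslated and not askhints
            or hints_asked):
        return "skip"
    return "ask"


for flags in product([False, True], repeat=4):
    assert original(*flags) == combined(*flags)
```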
delete_in_topic: Name unused variable as ignored. sub isn't used, so let's just call it ignored_sub to be explicit about that intent.
@@ -857,7 +857,7 @@ def delete_in_topic( stream_id: int = REQ(converter=to_non_negative_int, path_only=True), topic_name: str = REQ("topic_name"), ) -> HttpResponse: - (stream, sub) = access_stream_by_id(user_profile, stream_id) + stream, ignored_sub = access_stream_by_id(user_profile, stream_id) messages = messages_for_topic(assert_is_not_none(stream.recipient_id), topic_name) if not stream.is_history_public_to_subscribers():
Fix Editor examples We hadn't updated them for the scenario config changes.
@@ -135,12 +135,41 @@ def world_command_examples(): def editor_example(): """This editor example shows how to interact with holodeck worlds while they are being built - in the Unreal Engine. Most people that use holodeck will not need this. + in the Unreal Engine Editor. Most people that use holodeck will not need this. + + This example uses a custom scenario, see + https://holodeck.readthedocs.io/en/latest/usage/examples/custom-scenarios.html + + Note: When launching Holodeck from the editor, press the down arrow next to "Play" and select + "Standalone Game", otherwise the editor will lock up when the client stops ticking it. """ - agent_sensors = [sensors.RGBCamera, sensors.LocationSensor, sensors.VelocitySensor] - agent = AgentDefinition("uav0", agents.UavAgent, agent_sensors) - env = HolodeckEnvironment([agent], start_world=False) - env.agents["uav0"].set_control_scheme(1) + + config = { + "name": "test", + "world": "TestWorld", + "main_agent": "uav0", + "agents": [ + { + "agent_name": "uav0", + "agent_type": "UavAgent", + "sensors": [ + { + "sensor_type": "LocationSensor", + }, + { + "sensor_type": "VelocitySensor" + }, + { + "sensor_type": "RGBCamera" + } + ], + "control_scheme": 1, + "location": [0, 0, 1] + } + ] + } + + env = HolodeckEnvironment(scenario=config, start_world=False) command = [0, 0, 10, 50] for i in range(10): @@ -152,12 +181,35 @@ def editor_example(): def editor_multi_agent_example(): """This editor example shows how to interact with holodeck worlds that have multiple agents. This is specifically for when working with UE4 directly and not a prebuilt binary. + + Note: When launching Holodeck from the editor, press the down arrow next to "Play" and select + "Standalone Game", otherwise the editor will lock up when the client stops ticking it. """ - agent_definitions = [ - AgentDefinition("uav0", agents.UavAgent, [sensors.RGBCamera, sensors.LocationSensor]), - AgentDefinition("uav1", agents.UavAgent, [sensors.LocationSensor, sensors.VelocitySensor]) + config = { + "name": "test_handagent", + "world": "TestWorld", + "main_agent": "hand0", + "agents": [ + { + "agent_name": "uav0", + "agent_type": "UavAgent", + "sensors": [ + ], + "control_scheme": 1, + "location": [0, 0, 1] + }, + { + "agent_name": "uav1", + "agent_type": "UavAgent", + "sensors": [ + ], + "control_scheme": 1, + "location": [0, 0, 5] + } ] - env = HolodeckEnvironment(agent_definitions, start_world=False) + } + + env = HolodeckEnvironment(scenario=config, start_world=False) cmd0 = np.array([0, 0, -2, 10]) cmd1 = np.array([0, 0, 5, 10]) @@ -169,7 +221,3 @@ def editor_multi_agent_example(): for _ in range(1000): states = env.tick() - -if __name__ == "__main__": - - uav_example()
Update capella_opendata.yaml Updated license
@@ -26,7 +26,7 @@ Tags: - computer vision - synthetic aperture radar License: | - [Capella EULA](https://www.capellaspace.com/wp-content/uploads/2021/09/EULA-Single-Org-Open-Data-License-Ver.-1.0-September-2021-Final.pdf) + [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/) Resources: - Description: Capella Space Open Data in COG format ARN: arn:aws:s3:::capella-open-data/data/
Update test_partial_integration.py Fix unittest hack
@@ -68,5 +68,5 @@ def test_partial_integral(): integral_x_true = integral_x(y=y, lowerx=lowerx, upperx=upperx) * ratio integral_y_true = integral_y(x=x, lowery=lowery, uppery=uppery) * ratio - np.testing.assert_allclose(integral_x_true, integral_x_np, atol=1e-3) - np.testing.assert_allclose(integral_y_true, integral_y_np, atol=2e-3) + np.testing.assert_allclose(integral_x_true, integral_x_np, atol=2.5e-3) + np.testing.assert_allclose(integral_y_true, integral_y_np, atol=2.5e-3)
Ensure JDK is propagated into `experimental_run_in_sandbox` execution environment Using JVM targets as runnables was not working, because we didn't propagate the JDK immutable input into the execution environment. Now we do.
@@ -90,6 +90,7 @@ class ShellCommandProcessRequest: timeout: int | None tools: tuple[str, ...] input_digest: Digest + immutable_input_digests: FrozenDict[str, Digest] | None append_only_caches: FrozenDict[str, str] | None output_files: tuple[str, ...] output_directories: tuple[str, ...] @@ -135,6 +136,7 @@ async def _prepare_process_request_from_target(shell_command: Target) -> ShellCo fetch_env_vars=shell_command.get(ShellCommandExtraEnvVarsField).value or (), append_only_caches=None, supplied_env_var_values=None, + immutable_input_digests=None, ) @@ -353,6 +355,7 @@ async def run_in_sandbox_request( timeout=None, tools=(), input_digest=input_digest, + immutable_input_digests=FrozenDict(run_request.immutable_input_digests or {}), append_only_caches=FrozenDict(run_request.append_only_caches or {}), output_files=output_files, output_directories=output_directories, @@ -433,6 +436,7 @@ async def prepare_shell_command_process( fetch_env_vars = shell_command.fetch_env_vars supplied_env_vars = shell_command.supplied_env_var_values or FrozenDict() append_only_caches = shell_command.append_only_caches or FrozenDict() + immutable_input_digests = shell_command.immutable_input_digests if interactive: command_env = { @@ -490,6 +494,7 @@ async def prepare_shell_command_process( timeout_seconds=timeout, working_directory=working_directory, append_only_caches=append_only_caches, + immutable_input_digests=immutable_input_digests, ) if not interactive:
SpreadsheetUI : Improve section ordering logic Don't put brand new sections after the "Other" section (if it's the last one). Deal with the situation where a new section is created at the same time an old section is destroyed.
@@ -1389,17 +1389,25 @@ class _SectionChooser( GafferUI.Widget ) : def setSection( cls, cellPlug, sectionName ) : rowsPlug = cellPlug.ancestor( Gaffer.Spreadsheet.RowsPlug ) - sectionNames = cls.sectionNames( rowsPlug ) + oldSectionNames = cls.sectionNames( rowsPlug ) cls.__registerSectionMetadata( cellPlug, sectionName ) - if sectionName not in sectionNames : - # New section created. Make sure it goes at the end. - cls.__assignSectionOrder( rowsPlug, sectionNames + [ sectionName ] ) + # We may have made a new section and/or destroyed + # an old one (by removing its last item). Reassign order + # to put new sections where we want them, and to remove + # gaps and old metadata. + newSectionNames = cls.sectionNames( rowsPlug ) + if sectionName not in oldSectionNames : + # New section created. Make sure it goes at the end, unless "Other" + # is at the end, in which case put it in front of that. + newSectionNames.remove( sectionName ) + if len( newSectionNames ) and newSectionNames[-1] == "Other" : + newSectionNames.insert( -1, sectionName ) else : - # May have moved the last column out of a section. - # Reassign order to remove gaps and delete unneeded metadata. - cls.__assignSectionOrder( rowsPlug, cls.sectionNames( rowsPlug ) ) + newSectionNames.append( sectionName ) + + cls.__assignSectionOrder( rowsPlug, newSectionNames ) @classmethod def getSection( cls, cellPlug ) :
Improved the assign analyst to incident script: 1. Fixed description 2. Added the ability to specify a username to assign
@@ -4,9 +4,10 @@ commonfields: name: AssignAnalystToIncident system: true script: | - var userToAssign = null; + var userToAssign = args.username; assignBy = args.assignBy || 'random'; + if (!userToAssign) { switch(assignBy) { case 'random': var usersRes = executeCommand('getUsers', { roles: args.roles }); @@ -27,6 +28,7 @@ script: | break; } } + } if (userToAssign) { executeCommand("setOwner", { owner: userToAssign }); @@ -58,5 +60,7 @@ args: - top-user - less-busy-user description: '(default: random) You can pick how to assign the owner - by random, - machine learning, top owner or the less busy user.' + machine-learning, top-user or less-busy-user.' +- name: username + description: If specify, the provided user will be set as an owner (optional). scripttarget: 0
m1n1.proxy: Default to /dev/m1n1 We have udev rules, let's just default to a pretty device name to avoid conflicts with other devices.
@@ -135,7 +135,7 @@ class UartInterface(Reloadable): self.debug = debug self.devpath = None if device is None: - device = os.environ.get("M1N1DEVICE", "/dev/ttyACM0:115200") + device = os.environ.get("M1N1DEVICE", "/dev/m1n1:115200") if isinstance(device, str): baud = 115200 if ":" in device: @@ -1076,7 +1076,7 @@ __all__.extend(k for k, v in globals().items() if __name__ == "__main__": import serial - uartdev = os.environ.get("M1N1DEVICE", "/dev/ttyACM0") + uartdev = os.environ.get("M1N1DEVICE", "/dev/m1n1") usbuart = serial.Serial(uartdev, 115200) uartif = UartInterface(usbuart, debug=True) print("Sending NOP...", end=' ')
Remove equivalent destinations when cleaning certificates Remove equivalent destinations when cleaning certificates. This will prevent Lemur from attempting to re-upload a certificate after it has been cleaned.
@@ -58,6 +58,13 @@ def execute_clean(plugin, certificate, source): try: plugin.clean(certificate, source.options) certificate.sources.remove(source) + + # If we want to remove the source from the certificate, we also need to clear any equivalent destinations to + # prevent Lemur from re-uploading the certificate. + for destination in certificate.destinations: + if destination.label == source.label: + certificate.destinations.remove(destination) + certificate_service.database.update(certificate) return SUCCESS_METRIC_STATUS except Exception as e:
Remove note on usage of pip unzip fix
@@ -3,13 +3,6 @@ FAQ Frequently asked questions: -* **start up of khal and ikhal is very slow** - In some case the pytz (python timezone) is only available as a zip file, - as pytz accesses several parts during initialization this takes some - time. If `time python -c "import pytz; pytz.timezone('Europe/Berlin')"` - takes nearly as much time as running khal, uncompressing that file via - pytz via `(sudo) pip unzip pytz` might help. - * **Installation stops with an error: source/str_util.c:25:20: fatal error: Python.h: No such file or directory** You do not have the Python development headers installed, on Debian based Distributions you can install them via *aptitude install python-dev*.
Fix bug: a field value that is not valid UTF-8 will raise a UnicodeDecodeError Test added. Related:
@@ -715,6 +715,15 @@ class TestFieldDeserialization: field.deserialize('invalid') assert 'Bad value.' in str(excinfo) + def test_field_deserialization_with_non_utf8_value(self): + non_utf8_char = '\xc8' + field = fields.String() + # This exception only happens in Python version <= 2.7 + if isinstance(non_utf8_char, bytes): + with pytest.raises(ValidationError) as excinfo: + field.deserialize(non_utf8_char) + assert excinfo.value.args[0] == 'Not a valid utf-8 string.' + # No custom deserialization behavior, so a dict is returned class SimpleUserSchema(Schema): name = fields.String()
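The failure being guarded against is an ordinary `UnicodeDecodeError` when a non-UTF-8 byte sequence gets decoded; per the added test, the fix reports it as a "Not a valid utf-8 string." validation error instead. A minimal, library-independent illustration:

```python
non_utf8 = b"\xc8"  # 0xC8 starts a multi-byte UTF-8 sequence that never completes
try:
    non_utf8.decode("utf-8")
except UnicodeDecodeError as exc:
    print(type(exc).__name__)  # UnicodeDecodeError
```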
launcher: add test for version requirements Make sure the modules stay in sync in case one is updated but we forgot to update the other. Tested-by: Mike Frysinger
@@ -26,6 +26,7 @@ import tempfile import unittest import git_command +import main import platform_utils from pyversion import is_python3 import wrapper @@ -83,6 +84,16 @@ class RepoWrapperUnitTest(RepoWrapperTestCase): self.assertEqual('', stderr.getvalue()) self.assertIn('repo launcher version', stdout.getvalue()) + def test_python_constraints(self): + """The launcher should never require newer than main.py.""" + self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD, + wrapper.MIN_PYTHON_VERSION_HARD) + self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT, + wrapper.MIN_PYTHON_VERSION_SOFT) + # Make sure the versions are themselves in sync. + self.assertGreaterEqual(wrapper.MIN_PYTHON_VERSION_SOFT, + wrapper.MIN_PYTHON_VERSION_HARD) + def test_init_parser(self): """Make sure 'init' GetParser works.""" parser = self.wrapper.GetParser(gitc_init=False)
Fix off-by-one error While trying to use the `update` command for a custom format, I found that my update_to_2 was not called, because the range excludes the end version when it should include it.
@@ -836,7 +836,7 @@ def update_json(cls, path, api_version): "No version specified in {0}.".format(path)) if d['version'] < api_version: - for x in six.moves.xrange(d['version'] + 1, api_version): + for x in six.moves.xrange(d['version'] + 1, api_version + 1): d = getattr(cls, 'update_to_{0}'.format(x), lambda x: x)(d) write_json(path, d, api_version) elif d['version'] > api_version:
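The reasoning in the message is just Python's half-open `range`: without the `+ 1`, the updater for the target version itself is never looked up. A tiny stand-alone check:

```python
current_version, api_version = 1, 2

# old: excludes api_version, so update_to_2 is never called
print(list(range(current_version + 1, api_version)))      # []

# fixed: includes api_version
print(list(range(current_version + 1, api_version + 1)))  # [2]
```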
cabana: remove extra frame border in logs remove extra frame border
@@ -195,8 +195,13 @@ void HeaderView::paintSection(QPainter *painter, const QRect &rect, int logicalI LogsWidget::LogsWidget(QWidget *parent) : QWidget(parent) { QVBoxLayout *main_layout = new QVBoxLayout(this); + main_layout->setContentsMargins(0, 0, 0, 0); + main_layout->setSpacing(0); + + QWidget *toolbar = new QWidget(this); + toolbar->setAutoFillBackground(true); + QHBoxLayout *h = new QHBoxLayout(toolbar); - QHBoxLayout *h = new QHBoxLayout(); filters_widget = new QWidget(this); QHBoxLayout *filter_layout = new QHBoxLayout(filters_widget); filter_layout->setContentsMargins(0, 0, 0, 0); @@ -215,7 +220,11 @@ LogsWidget::LogsWidget(QWidget *parent) : QWidget(parent) { dynamic_mode->setChecked(true); dynamic_mode->setEnabled(!can->liveStreaming()); - main_layout->addLayout(h); + main_layout->addWidget(toolbar); + QFrame *line = new QFrame(this); + line->setFrameStyle(QFrame::HLine | QFrame::Sunken); + main_layout->addWidget(line);; + main_layout->addWidget(logs = new QTableView(this)); logs->setModel(model = new HistoryLogModel(this)); logs->setItemDelegateForColumn(1, new MessageBytesDelegate(this)); @@ -223,6 +232,7 @@ LogsWidget::LogsWidget(QWidget *parent) : QWidget(parent) { logs->horizontalHeader()->setDefaultAlignment(Qt::AlignLeft | (Qt::Alignment)Qt::TextWordWrap); logs->horizontalHeader()->setSectionResizeMode(QHeaderView::ResizeToContents); logs->verticalHeader()->setVisible(false); + logs->setFrameShape(QFrame::NoFrame); QObject::connect(display_type_cb, SIGNAL(activated(int)), model, SLOT(setDisplayType(int))); QObject::connect(dynamic_mode, &QCheckBox::stateChanged, model, &HistoryLogModel::setDynamicMode);
Fixed import error in gym env. A missing optional pybullet environment made the gym environment crash; added a flag to avoid the crash.
@@ -3,8 +3,9 @@ import gym try: import pybullet_envs import time + pybullet_found = True except ImportError: - pass + pybullet_found = False from gym import spaces as gym_spaces from mushroom_rl.environments import Environment, MDPInfo @@ -30,7 +31,7 @@ class Gym(Environment): """ # MDP creation self._close_at_stop = True - if '- ' + name in pybullet_envs.getList(): + if pybullet_found and '- ' + name in pybullet_envs.getList(): import pybullet pybullet.connect(pybullet.DIRECT) self._close_at_stop = False
Flash, erase, reset subcommands block by default. This matches the --no-wait argument's default. Added error log message if no session is returned when not blocking.
@@ -517,9 +517,10 @@ class PyOCDTool(object): unique_id=self._args.unique_id, target_override=self._args.target_override, frequency=self._args.frequency, - blocking=False, + blocking=(not self._args.no_wait), options=convert_session_options(self._args.options)) if session is None: + LOG.error("No device available to flash") sys.exit(1) with session: programmer = FileProgrammer(session, @@ -551,9 +552,10 @@ class PyOCDTool(object): unique_id=self._args.unique_id, target_override=self._args.target_override, frequency=self._args.frequency, - blocking=False, + blocking=(not self._args.no_wait), options=convert_session_options(self._args.options)) if session is None: + LOG.error("No device available to erase") sys.exit(1) with session: mode = self._args.erase_mode or FlashEraser.Mode.SECTOR @@ -582,9 +584,10 @@ class PyOCDTool(object): unique_id=self._args.unique_id, target_override=self._args.target_override, frequency=self._args.frequency, - blocking=False, + blocking=(not self._args.no_wait), options=convert_session_options(self._args.options)) if session is None: + LOG.error("No device available to reset") sys.exit(1) try: # Handle hw reset specially using the probe, so we don't need a valid connection
Add Tyk API Add Tyk API to Development
@@ -367,6 +367,7 @@ API | Description | Auth | HTTPS | CORS | | [StackExchange](https://api.stackexchange.com/) | Q&A forum for developers | `OAuth` | Yes | Unknown | | [Statically](https://statically.io/) | A free CDN for developers | No | Yes | Yes | | [Trending-Github](https://docs.trending-github.com) | Discover what is currently trending on github | No | Yes | Yes | +| [Tyk](https://tyk.io/open-source/) | Api and service management platform | `apiKey` | Yes | Yes | | [userstack](https://userstack.com/) | Secure User-Agent String Lookup JSON API | `OAuth` | Yes | Unknown | | [WebScraping.AI](https://webscraping.ai/) | Web Scraping API with built-in proxies and JS rendering | `apiKey` | Yes | Yes |
fix: fix misconfigured HA url overriding url input The get_url command would throw an exception even when hass_url was provided through the form. This would end up ignoring the hass_url input and result in a form loop. Thanks to for testing and discovering this. closes
@@ -244,12 +244,14 @@ class AlexaMediaFlowHandler(config_entries.ConfigFlow): errors={"base": "2fa_key_invalid"}, description_placeholders={"message": ""}, ) + hass_url: str = user_input.get(CONF_HASS_URL) + if hass_url is None: try: - hass_url: str = user_input.get( - CONF_HASS_URL, get_url(self.hass, prefer_external=True) - ) + hass_url = get_url(self.hass, prefer_external=True) except NoURLAvailableError: - _LOGGER.debug("No Home Assistant URL found in config or detected; forcing user form") + _LOGGER.debug( + "No Home Assistant URL found in config or detected; forcing user form" + ) return self.async_show_form( step_id="user", data_schema=vol.Schema(self.proxy_schema),
sql: add ADMIN CHECK TABLE description * sql: add ADMIN CHECK TABLE description Via: PTAL * address morgan's comment * improve the language
@@ -128,13 +128,14 @@ mysql> show master status; ## `ADMIN` statement -This statement is a TiDB extension syntax, used to view the status of TiDB. +This statement is a TiDB extension syntax, used to view the status of TiDB and check the data of tables in TiDB. ```sql ADMIN SHOW DDL ADMIN SHOW DDL JOBS ADMIN SHOW DDL JOB QUERIES job_id [, job_id] ... ADMIN CANCEL DDL JOBS job_id [, job_id] ... +ADMIN CHECK TABLE tbl_name [, tbl_name] ... ``` - `ADMIN SHOW DDL`: To view the currently running DDL jobs. @@ -181,6 +182,7 @@ ADMIN CANCEL DDL JOBS job_id [, job_id] ... - `ADMIN SHOW DDL JOB QUERIES job_id [, job_id] ...`: To view the original SQL statement of the DDL task corresponding to the `job_id`; the `job_id` only searches the running DDL job and the last ten results in the DDL history job queue - `ADMIN CANCEL DDL JOBS job_id [, job_id] ...`: To cancel the currently running DDL jobs and return whether the corresponding jobs are successfully cancelled. If the operation fails to cancel the jobs, specific reasons are displayed. +- `ADMIN CHECK TABLE tbl_name [, tbl_name] ...`: To check the consistency of all the data in the specified table and corresponding indexes. If the check is passed, an empty result will be returned. On failure, an error message will indicate that data is inconsistent. > **Note**: >
add alternative lldp local interface key On an the key is 'Local Interface' instead of 'Local Intf' when issuing 'show lldp neighbors detail'. Search for both options.
@@ -8,7 +8,7 @@ Value REMOTE_SYSTEM_CAPAB (.*) Value REMOTE_SYSTEM_ENABLE_CAPAB (.*) Start - ^Local Intf\s*?[:-]\s+${LOCAL_INTERFACE} + ^Local Int(?:er)?f(?:ace)?\s*?[:-]\s+${LOCAL_INTERFACE} ^Chassis id\s*?[:-]\s+${REMOTE_CHASSIS_ID} ^Port id\s*?[:-]\s+${REMOTE_PORT} ^Port Description\s*?[:-]\s+${REMOTE_PORT_DESCRIPTION}
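The amended TextFSM line boils down to optional groups in the regex. A plain `re` check that the new pattern accepts both spellings (the interface names are made up):

```python
import re

pattern = re.compile(r"^Local Int(?:er)?f(?:ace)?\s*?[:-]\s+(\S+)")

for line in ("Local Intf: Gi1/0/1", "Local Interface: Ethernet1/1"):
    print(pattern.match(line).group(1))  # Gi1/0/1, then Ethernet1/1
```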
raop: Harmonize protocol string in requests Relates to
@@ -18,6 +18,7 @@ _LOGGER = logging.getLogger(__name__) FRAMES_PER_PACKET = 352 USER_AGENT = "AirPlay/540.31" +HTTP_PROTOCOL = "HTTP/1.1" ANNOUNCE_PAYLOAD = ( "v=0\r\n" @@ -95,7 +96,9 @@ class RtspSession: async def info(self) -> Dict[str, object]: """Return device information.""" - device_info = await self.exchange("GET", "/info", allow_error=True) + device_info = await self.exchange( + "GET", "/info", allow_error=True, protocol=HTTP_PROTOCOL + ) # If not supported, just return an empty dict if device_info.code != 200: @@ -119,6 +122,7 @@ class RtspSession: "/auth-setup", content_type="application/octet-stream", body=body, + protocol=HTTP_PROTOCOL, ) # This method is only used by AirPlay 1 and is very specific (e.g. does not support @@ -224,7 +228,7 @@ class RtspSession: """Send TEARDOWN message.""" return await self.exchange("TEARDOWN", headers={"Session": rtsp_session}) - async def exchange( + async def exchange( # pylint: disable=too-many-locals self, method: str, uri: Optional[str] = None, @@ -232,6 +236,7 @@ class RtspSession: headers: Mapping[str, object] = None, body: Union[str, bytes] = None, allow_error: bool = False, + protocol: str = "RTSP/1.0", ) -> HttpResponse: """Send a RTSP message and return response.""" cseq = self.cseq @@ -258,7 +263,7 @@ class RtspSession: resp = await self.connection.send_and_receive( method, uri or self.uri, - protocol="RTSP/1.0", + protocol=protocol, user_agent=USER_AGENT, content_type=content_type, headers=hdrs,
Fix wrong import of service models from core, such as in A-CORD
@@ -13,13 +13,16 @@ from header import * {% if file_exists(m.name|lower+'_top.py') -%}{{ include_file(m.name|lower+'_top.py') }} {% endif %} {%- for l in m.links -%}{% set peer_name=l.peer.name %} + {% if peer_name not in proto.message_names -%} from core.models import {{ peer_name }} {%- endif -%} {%- endfor -%} {%- for b in m.bases -%} {%- if b.name!='XOSBase' and 'Mixin' not in b.name %} +{% if b.name not in proto.message_names %} from core.models import {{ b.name }} +{% endif %} {%- endif -%} {% endfor %}
wrstsegments: fix sync issues Fixes:
@@ -282,31 +282,32 @@ class subtitle(object): if n: # don't get the empty lines. itmes.append(n) - itemsn = 0 several_items = False + skip = False sub = [] for x in range(len(itmes)): - item = itmes[itemsn] - if strdate(item) and len(subs) > 0 and itmes[itemsn + 1] == subs[-1][1]: + item = itmes[x] + if strdate(item) and len(subs) > 0 and itmes[x + 1] == subs[-1][1]: ha = strdate(subs[-1][0]) ha3 = strdate(item) second = str2sec(ha3.group(2)) + time subs[-1][0] = "{} --> {}".format(ha.group(1), sec2str(second)) + skip = True continue - else: has_date = strdate(item) if has_date: if several_items: subs.append(sub) sub = [] + skip = False first = str2sec(has_date.group(1)) + time second = str2sec(has_date.group(2)) + time sub.append("{} --> {}".format(sec2str(first), sec2str(second))) several_items = True - elif has_date is None: + elif has_date is None and skip is False: sub.append(item) - itemsn += 1 + if sub: subs.append(sub) string = ""
Accept custom provided cfg in WriteInferenceGraph. When provided, WriteInferenceGraph will use the provided cfg instead of getting it from model_registry.
@@ -1493,10 +1493,12 @@ class RunnerManager: """Sets the model name.""" self._model_name = model_name - def WriteInferenceGraph(self, prune_graph=True): + def WriteInferenceGraph(self, cfg=None, prune_graph=True): """Generates the inference graphs for a given model. Args: + cfg: Full `~.hyperparams.Params` for the model class. If present, + this cfg will be used instead of retrieving from model_registry. prune_graph: If true, prune the graph to just the parts we need. Returns: @@ -1506,8 +1508,10 @@ class RunnerManager: tf.io.gfile.makedirs(inference_graph_dir) tf.logging.info('Writing inference graphs to dir: %s', inference_graph_dir) + if not cfg: cfg = self.model_registry.GetParams(self._model_name, FLAGS.inference_dataset_name) + task_names = [FLAGS.model_task_name] if (issubclass(cfg.cls, base_model.MultiTaskModel) and not FLAGS.model_task_name):
ebd/ebuild-daemon-lib.bash: drop old read -N fallback EAPI 6 and up requires at least bash-4.2 so we can depend on that being available.
@@ -17,24 +17,13 @@ __ebd_read_line() { die "coms error in ${PKGCORE_EBD_PID}, read_line $@ failed w/ ${ret}: backing out of daemon." } -# are we running a version of bash (4.1 or so) that does -N? -if echo 'y' | read -N 1 &> /dev/null; then - __ebd_read_size() - { - read -u ${PKGCORE_EBD_READ_FD} -r -N $1 $2 - local ret=$? - [[ ${ret} -ne 0 ]] && \ - die "coms error in ${PKGCORE_EBD_PID}, read_size $@ failed w/ ${ret}: backing out of daemon." - } -else - # fallback to a *icky icky* but working alternative. +# read -N usage requires bash-4.1 or so (EAPI 6 requires >= 4.2) __ebd_read_size() { - eval "${2}=\$(dd bs=1 count=$1 <&${PKGCORE_EBD_READ_FD} 2> /dev/null)" + read -u ${PKGCORE_EBD_READ_FD} -r -N $1 $2 local ret=$? [[ ${ret} -ne 0 ]] && \ die "coms error in ${PKGCORE_EBD_PID}, read_size $@ failed w/ ${ret}: backing out of daemon." } -fi __ebd_read_cat_size() { dd bs=$1 count=1 <&${PKGCORE_EBD_READ_FD}
[hexagon][tests] re-enable maxpool hardware test Re-enable test_max_pool2d_slice.py when run on Hexagon hardware (as opposed to hexagon-sim). This is now safe because has been fixed.
@@ -330,9 +330,6 @@ class TestmaxPool2dSlice: expected_output_np, hexagon_session: Session, ): - if hexagon_session._launcher._serial_number != "simulator": - pytest.skip(msg="Due to https://github.com/apache/tvm/issues/11928") - target_hexagon = tvm.target.hexagon("v69") A = te.placeholder(input_shape_padded, name="A", dtype=dtype)
Error "ipynb more recent than text file" is HTTP 400 To make sure it is displayed in Jupyter
@@ -300,15 +300,15 @@ def build_jupytext_contents_manager_class(base_contents_manager_class): + timedelta(seconds=config.outdated_text_notebook_margin) ): raise HTTPError( - 500, + 400, """{out} (last modified {out_last}) seems more recent than {src} (last modified {src_last}) Please either: - open {src} in a text editor, make sure it is up to date, and save it, - or delete {src} if not up to date, - or increase check margin by adding, say, - c.ContentsManager.outdated_text_notebook_margin = 5 # in seconds # or float("inf") - to your .jupyter/jupyter_notebook_config.py file + outdated_text_notebook_margin = 5 # default is 1 (second) + to your jupytext.toml file """.format( src=inputs.path, src_last=inputs.timestamp,
Add kwarg support for `embedding_fn`. The `neural_structured_learning` package passes kwargs to the embedding function, so ensure that its use in the docs/examples includes it.
"\n", "# This function will be used to generate the embeddings for samples and their\n", "# corresponding neighbors, which will then be used for graph regularization.\n", - "def embedding_fn(features, mode):\n", + "def embedding_fn(features, mode, **params):\n", " \"\"\"Returns the embedding corresponding to the given features.\n", "\n", " Args:\n",
Fix `do_outdated()` when `pip freeze` is blank If no packages are installed, `results` contains a single empty string. This causes problems in the subsequent call to `convert_deps_from_pip`. Therefore filter out empty strings from `results` to avoid this. Fixes
@@ -1701,6 +1701,7 @@ def do_py(system=False): def do_outdated(): packages = {} results = delegator.run('{0} freeze'.format(which('pip'))).out.strip().split('\n') + results = filter(bool, results) for result in results: packages.update(convert_deps_from_pip(result))
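The failure mode is easy to reproduce without pipenv: splitting an empty string still yields one (empty) element, and `filter(bool, ...)` drops it:

```python
empty_freeze_output = ""
results = empty_freeze_output.strip().split("\n")
print(results)                       # [''] - one bogus "requirement"
print(list(filter(bool, results)))   # []   - nothing left to parse
```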
[Bugfix] Fix primary key lookup The current primary key lookup is broken for tables that contain primary keys with more than 5 columns. indkey is an int2vector column, and this seems to be the only way to look up whether attnum exists in the vector type, because Redshift doesn't support the int2vector type.
@@ -299,12 +299,7 @@ WHERE AND att.attrelid = cl.oid and cl.relnamespace = pgn.oid and pgn.nspname = '%s' - and (ind.indkey[0] = att.attnum or - ind.indkey[1] = att.attnum or - ind.indkey[2] = att.attnum or - ind.indkey[3] = att.attnum or - ind.indkey[4] = att.attnum - ) + and att.attnum = ANY(string_to_array(textin(int2vectorout(ind.indkey)), ' ')) and attnum > 0 AND ind.indisprimary order by att.attnum;
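A plain-Python analogy of why the old lookup misses wide keys (the real fix is SQL; the column numbers below are invented):

```python
indkey = [2, 4, 6, 8, 10, 12]  # a 6-column primary key, as string_to_array would expose it
attnum = 12

print(attnum in indkey[:5])  # False - the old query only compared indkey[0]..indkey[4]
print(attnum in indkey)      # True  - membership over the whole vector, like ANY(...)
```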
settings_users: Refactor and extract function for last active. This is just done to improve code readability and remove some code too.
@@ -170,29 +170,29 @@ function populate_users(realm_people_data) { }, }).init(); - var $users_table = $("#admin_users_table"); - list_render.create($users_table, active_users, { - name: "users_table_list", - modifier: function (item) { - var activity_rendered; + function get_rendered_last_activity(item) { var today = new XDate(); if (item.last_active === LAST_ACTIVE_UNKNOWN) { - activity_rendered = $("<span></span>").text(i18n.t("Unknown")); - } else if (item.last_active === LAST_ACTIVE_NEVER) { - activity_rendered = $("<span></span>").text(i18n.t("Never")); - } else { - activity_rendered = timerender.render_date( + return $("<span></span>").text(i18n.t("Unknown")); + } + if (item.last_active === LAST_ACTIVE_NEVER) { + return $("<span></span>").text(i18n.t("Never")); + } + return timerender.render_date( new XDate(item.last_active * 1000), undefined, today); } + var $users_table = $("#admin_users_table"); + list_render.create($users_table, active_users, { + name: "users_table_list", + modifier: function (item) { var $row = $(render_admin_user_list({ can_modify: page_params.is_admin, is_current_user: people.is_my_user_id(item.user_id), show_email: settings_org.show_email(), user: item, })); - $row.find(".last_active").append(activity_rendered); - + $row.find(".last_active").append(get_rendered_last_activity(item)); return $row; }, filter: {
Fix const-cast lint error in process_group_agent.cpp Summary: Pull Request resolved: Test Plan: Imported from OSS
@@ -437,6 +437,7 @@ void ProcessGroupAgent::handleSend(const SendWork& work) { std::vector<std::shared_ptr<c10d::ProcessGroup::Work>> pendingSends; const auto dst = work.to_.id_; + // NOLINTNEXTLINE(cppcoreguidelines-pro-type-const-cast) auto serializedPayloadData = const_cast<char*>(serializedPayload->data()); auto serializedPayloadSize = serializedPayload->size(); std::string* deleteWhenDone = serializedPayload.release();
[interpolatable] Compare all masters to first master Reduces number of errors reported.
@@ -237,14 +237,15 @@ def test(glyphsets, glyphs=None, names=None): if b == bits: isomorphisms.append(_rot_list ([complex(*pt) for pt,bl in mirrored], i)) - # Check each master against the next one in the list. - for i, (m0, m1) in enumerate(zip(allNodeTypes[:-1], allNodeTypes[1:])): + # Check each master against the first on in the list. + m0 = allNodeTypes[0] + for i,m1 in enumerate(allNodeTypes[1:]): if len(m0) != len(m1): add_problem( glyph_name, { "type": "path_count", - "master_1": names[i], + "master_1": names[0], "master_2": names[i + 1], "value_1": len(m0), "value_2": len(m1), @@ -261,7 +262,7 @@ def test(glyphsets, glyphs=None, names=None): { "type": "node_count", "path": pathIx, - "master_1": names[i], + "master_1": names[0], "master_2": names[i + 1], "value_1": len(nodes1), "value_2": len(nodes2), @@ -276,7 +277,7 @@ def test(glyphsets, glyphs=None, names=None): "type": "node_incompatibility", "path": pathIx, "node": nodeIx, - "master_1": names[i], + "master_1": names[0], "master_2": names[i + 1], "value_1": n1, "value_2": n2, @@ -284,7 +285,8 @@ def test(glyphsets, glyphs=None, names=None): ) continue - for i, (m0, m1) in enumerate(zip(allVectors[:-1], allVectors[1:])): + m0 = allVectors[0] + for i, m1 in enumerate(allVectors[1:]): if len(m0) != len(m1): # We already reported this continue @@ -299,7 +301,7 @@ def test(glyphsets, glyphs=None, names=None): glyph_name, { "type": "contour_order", - "master_1": names[i], + "master_1": names[0], "master_2": names[i + 1], "value_1": list(range(len(m0))), "value_2": matching, @@ -307,7 +309,8 @@ def test(glyphsets, glyphs=None, names=None): ) break - for i, (m0, m1) in enumerate(zip(allContourIsomorphisms[:-1], allContourIsomorphisms[1:])): + m0 = allContourIsomorphisms[0] + for i, m1 in enumerate(allContourIsomorphisms[1:]): if len(m0) != len(m1): # We already reported this continue @@ -324,7 +327,7 @@ def test(glyphsets, glyphs=None, names=None): { "type": "wrong_start_point", "contour": ix, - "master_1": names[i], + "master_1": names[0], "master_2": names[i + 1], }, )
Lexical env: give Env_Rebindings' Ref_Count field a constant offset TN:
@@ -377,8 +377,8 @@ private end record; type Env_Rebindings_Type (Size : Natural) is record - Rebindings : Env_Rebindings_Array (1 .. Size); Ref_Count : Natural := 1; + Rebindings : Env_Rebindings_Array (1 .. Size); end record; No_Env_Getter : constant Env_Getter := (False, null);
tests: Check all cases in check_has_permission_policies. This commit adds tests for POLICY_EVERYONE and POLICY_NOBODY in the check_has_permission_policies test. The original code used these values but they were not covered by the test.
@@ -1261,6 +1261,14 @@ Output: ) member_user.save() + do_set_realm_property(realm, policy, Realm.POLICY_NOBODY, acting_user=None) + self.assertFalse(validation_func(owner_user)) + self.assertFalse(validation_func(admin_user)) + self.assertFalse(validation_func(moderator_user)) + self.assertFalse(validation_func(member_user)) + self.assertFalse(validation_func(new_member_user)) + self.assertFalse(validation_func(guest_user)) + do_set_realm_property(realm, policy, Realm.POLICY_ADMINS_ONLY, acting_user=None) self.assertTrue(validation_func(owner_user)) self.assertTrue(validation_func(admin_user)) @@ -1293,6 +1301,14 @@ Output: self.assertTrue(validation_func(new_member_user)) self.assertFalse(validation_func(guest_user)) + do_set_realm_property(realm, policy, Realm.POLICY_EVERYONE, acting_user=None) + self.assertTrue(validation_func(owner_user)) + self.assertTrue(validation_func(admin_user)) + self.assertTrue(validation_func(moderator_user)) + self.assertTrue(validation_func(member_user)) + self.assertTrue(validation_func(new_member_user)) + self.assertTrue(validation_func(guest_user)) + def subscribe_realm_to_manual_license_management_plan( self, realm: Realm, licenses: int, licenses_at_next_renewal: int, billing_schedule: int ) -> Tuple[CustomerPlan, LicenseLedger]:
Update apt_c23.txt Add domain + generic trail. Since domains from ```micropsia.txt``` are also present in ```apt_c23.txt```, ```micropsia.txt``` should be merged with ```apt_c23.txt```.
@@ -265,3 +265,13 @@ joycebyers.club harvey-ross.info davina-claire.xyz arthursaito.club + +# Reference: https://twitter.com/ClearskySec/status/1067109104492134400 +# Reference: https://blog.radware.com/security/2018/07/micropsia-malware/ + +samwinchester.club + +# Generic (callback) path + +/api/hazard/oneo +/api/white_walkers/
Jenkins fixes from review comments Jenkinsfile fixes to address comments from Thanks!
@@ -41,6 +41,8 @@ pipeline { condaInstallDevito() runCondaTests() runExamples() + runCodecov() + buildDocs() } } stage('Build and test gcc-5 container') { @@ -56,6 +58,8 @@ pipeline { condaInstallDevito() runCondaTests() runExamples() + runCodecov() + buildDocs() } } stage('Build and test gcc-7 container') { @@ -73,6 +77,8 @@ pipeline { condaInstallDevito() installYask() runCondaTests() + runCodecov() + buildDocs() } } } @@ -134,7 +140,7 @@ def runExamples () { } def runCodecov() { - sh 'codecov' + sh 'source activate devito; codecov' } def buildDocs() {
settings_users: Remove /json/users calls. As part of a refactoring, we are now able to remove the /json/users calls and get all the information needed from people.js. To do this, populate_users now uses the people API to get all the active and non-active human users.
@@ -127,31 +127,26 @@ function get_status_field() { } } -function failed_listing_users(xhr) { +function failed_listing_users() { loading.destroy_indicator($('#subs_page_loading_indicator')); const status = get_status_field(); - ui_report.error(i18n.t("Error listing users"), xhr, status); + const user_id = people.my_current_user_id(); + blueslip.error('Error while listing users for user_id ' + user_id, status); } -function populate_users(realm_people_data) { - let active_users = []; - let deactivated_users = []; +function populate_users() { + const active_user_ids = people.get_active_human_ids(); + let active_users = active_user_ids.map(user_id => people.get_by_user_id(user_id)); + active_users = _.sortBy(active_users, 'full_name'); - for (const user of realm_people_data.members) { - if (user.is_bot) { - continue; - } + const deactivated_user_ids = people.get_non_active_human_ids(); + let deactivated_users = deactivated_user_ids.map(user_id => people.get_by_user_id(user_id)); + deactivated_users = _.sortBy(deactivated_users, 'full_name'); - if (user.is_active) { - active_users.push(user); - } else { - deactivated_users.push(user); - } + if (active_user_ids.length === 0 && deactivated_user_ids.length === 0) { + failed_listing_users(); } - active_users = _.sortBy(active_users, 'full_name'); - deactivated_users = _.sortBy(deactivated_users, 'full_name'); - section.active.create_table(active_users); section.deactivated.create_table(deactivated_users); } @@ -375,14 +370,7 @@ function start_data_load() { $("#admin_deactivated_users_table").hide(); $("#admin_users_table").hide(); - // Populate users and bots tables - channel.get({ - url: '/json/users', - idempotent: true, - timeout: 10 * 1000, - success: populate_users, - error: failed_listing_users, - }); + populate_users(); } function open_human_form(person) {
Update models.py Fix the handling of shared IPs (VIP, VRRP, etc.) when unique IP space enforcement is set. Add parentheses around the logical OR statement to make the evaluation valid. Fixes:
@@ -596,11 +596,11 @@ class IPAddress(ChangeLoggedModel, CustomFieldModel): if self.address: # Enforce unique IP space (if applicable) - if self.role not in IPADDRESS_ROLES_NONUNIQUE and ( + if self.role not in IPADDRESS_ROLES_NONUNIQUE and (( self.vrf is None and settings.ENFORCE_GLOBAL_UNIQUE ) or ( self.vrf and self.vrf.enforce_unique - ): + )): duplicate_ips = self.get_duplicates() if duplicate_ips: raise ValidationError({
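The bug comes down to `and` binding tighter than `or` in Python, so the VRF clause escaped the role check. A small demonstration with stand-in booleans (names invented for illustration):

```python
role_requires_unique = False  # a shared VRRP/VIP address, so uniqueness should not apply
global_unique = False
vrf_enforces_unique = True

old = role_requires_unique and global_unique or vrf_enforces_unique
new = role_requires_unique and (global_unique or vrf_enforces_unique)

print(old)  # True  - uniqueness wrongly enforced for the shared address
print(new)  # False - the shared address is allowed to duplicate
```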
Update 00_intro.rst Deleted definition for R
@@ -21,11 +21,8 @@ Glossary ICU **I**\ nternational **C**\ omponents for **U**\ nicode. ICU is an open-source project of mature C/C++ and Java libraries for Unicode support, software internationalization, and software globalization. `Learn More <http://site.icu-project.org/>`_. - R - **R** is a free software environment for statistical computing and graphics. It compiles and runs on a wide variety of UNIX platforms, Windows and MacOS. `Learn More <https://r-project.org/>`_. - CRAN - **C**\ omprehensive **R** **A**\ rchive **N**\ etwork. CRAN is a network of FTP and web servers around the world that store identical, up-to-date, versions of code and documentation for :term:`R`. `Learn More <https://cran.r-project.org/>`_. + **C**\ omprehensive **R** **A**\ rchive **N**\ etwork. CRAN is a network of FTP and web servers around the world that store identical, up-to-date, versions of code and documentation for R. `Learn More <https://cran.r-project.org/>`_. TODO list
Reduce insights handler cache timer Fixes
@@ -165,12 +165,12 @@ class MainCompetitionseasonHandler(CacheableHandler): class MainInsightsHandler(CacheableHandler): - CACHE_VERSION = 2 + CACHE_VERSION = 3 CACHE_KEY_FORMAT = "main_insights" def __init__(self, *args, **kw): super(MainInsightsHandler, self).__init__(*args, **kw) - self._cache_expiration = 60 * 60 * 24 + self._cache_expiration = 60 * 5 def _render(self, *args, **kw): week_events = EventHelper.getWeekEvents()
Important bug fix for UHFQC detectors Solves the problem that looks like missing triggers.
@@ -1384,11 +1384,13 @@ class UHFQC_input_average_detector(Hard_Detector): print(nr_samples) def get_values(self): + self.UHFQC.quex_rl_readout(0) # resets UHFQC internal readout counters self.UHFQC.awgs_0_enable(1) try: temp = self.UHFQC.awgs_0_enable() except: temp = self.UHFQC.awgs_0_enable() + del temp if self.AWG is not None: self.AWG.start() while self.UHFQC.awgs_0_enable() == 1: @@ -1397,11 +1399,7 @@ class UHFQC_input_average_detector(Hard_Detector): for i, channel in enumerate(self.channels): dataset = eval("self.UHFQC.quex_iavg_data_{}()".format(channel)) data[i] = dataset[0]['vector'] - # data = self.UHFQC.single_acquisition(self.nr_sweep_points, - # self.poll_time, timeout=0, - # channels=set(self.channels), - # mode='iavg') - # data = np.array([data[key] for key in data.keys()]) + return data def prepare(self, sweep_points): @@ -1449,8 +1447,11 @@ class UHFQC_integrated_average_detector(Hard_Detector): self.cross_talk_suppression = cross_talk_suppression def get_values(self): + self.AWG.stop() + self.UHFQC.quex_rl_readout(0) # resets UHFQC internal readout counters self.UHFQC.awgs_0_enable(1) # probing the values to be sure communication is finished before + # this way of checking the UHFQC should be OK according to Niels H try: temp = self.UHFQC.awgs_0_enable() except: @@ -1464,6 +1465,7 @@ class UHFQC_integrated_average_detector(Hard_Detector): time.sleep(0.01) data = ['']*len(self.channels) for i, channel in enumerate(self.channels): + # FIXME: better to use dataset = self.UHFQC.get('quex_rl_data_{}'.format(channel)) dataset = eval("self.UHFQC.quex_rl_data_{}()".format(channel)) data[i] = dataset[0]['vector']/self.nr_averages if self.cross_talk_suppression: @@ -1558,6 +1560,7 @@ class UHFQC_integration_logging_det(Hard_Detector): self.cross_talk_suppression = cross_talk_suppression def get_values(self): + self.UHFQC.quex_rl_readout(0) # resets UHFQC internal readout counters self.UHFQC.awgs_0_enable(1) # probing the values to be sure communication is finished before try: @@ -1568,10 +1571,6 @@ class UHFQC_integration_logging_det(Hard_Detector): # starting AWG if self.AWG is not None: self.AWG.start() - # data = self.UHFQC.single_acquisition(self.nr_shots, - # self.poll_time, timeout=0, - # channels=set(self.channels)) - # data = np.array([data[key] for key in data.keys()]) while self.UHFQC.awgs_0_enable() == 1: time.sleep(0.01)
StandardNodeGadget : Fix bookmark texture mag filter GL_LINEAR_MIPMAP_LINEAR is not a valid magnification filter - see
@@ -161,7 +161,7 @@ static IECoreGL::Texture *bookmarkTexture() IECoreGL::Texture::ScopedBinding binding( *bookmarkTexture ); glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR_MIPMAP_LINEAR ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER ); glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER ); }
Add 'Sign In' link to navigation. Refs
{% if user_authenticated %} <li class="avatar"><a href="#"><img src="{{ request.user.picture_url }}" /></a></li> {% else %} - <li class="show-on-desktop"><a href="#">{{ _('Sign In') }}</a></li> + <li class="show-on-desktop"><a href="{{ login_link() }}">{{ _('Sign In') }}</a></li> {% endif %} </ul> </div>
short-circuit error page when unit testing to avoid stack overflow. A rendering error on the error page itself just creates a huge stack trace, making it hard to debug.
@@ -153,6 +153,9 @@ def server_error(request, template_name='500.html'): traceback_key = uuid.uuid4().hex cache.cache.set(traceback_key, traceback_text, 60*60) + if settings.UNIT_TESTING: + return HttpResponse(status=500) + return HttpResponseServerError(t.render( context={ 'MEDIA_URL': settings.MEDIA_URL,
change: rename .api._load._maybe_schema Rename .api._load._maybe_schema to try_to_load_schema to make its purpose clearer.
@@ -30,7 +30,7 @@ MappingT = typing.Dict[str, typing.Any] MaybeParserOrIdOrTypeT = typing.Optional[typing.Union[str, ParserT]] -def _maybe_schema(**options) -> typing.Optional[InDataT]: +def try_to_load_schema(**options) -> typing.Optional[InDataT]: """Try to load a schema object for validation. :param options: Optional keyword arguments such as @@ -145,8 +145,9 @@ def single_load(input_: ioinfo.PathOrIOInfoT, ioi = ioinfo.make(input_) cnf = _single_load(ioi, ac_parser=ac_parser, ac_template=ac_template, ac_context=ac_context, **options) - schema = _maybe_schema(ac_template=ac_template, ac_context=ac_context, - **options) + schema = try_to_load_schema( + ac_template=ac_template, ac_context=ac_context, **options + ) if schema and not is_valid(cnf, schema, **options): return None @@ -211,8 +212,9 @@ def multi_load(inputs: typing.Union[typing.Iterable[ioinfo.PathOrIOInfoT], :return: Mapping object or any query result might be primitive objects :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ - schema = _maybe_schema(ac_template=ac_template, ac_context=ac_context, - **options) + schema = try_to_load_schema( + ac_template=ac_template, ac_context=ac_context, **options + ) options['ac_schema'] = None # Avoid to load schema more than twice. iois = ioinfo.makes(inputs)
Update formbook.txt C2 addresses from: [0] [1]
@@ -25,3 +25,14 @@ www.n01.tech www.ourcrazyveterans.com www.sy-adm.com www.yinuxw.info + +# Reference: https://twitter.com/dms1899/status/1038276577254146049 +# Reference: https://pastebin.com/4pDsDuxn + +http://5.101.78.222/ +http://5.255.94.75/saite/gate.php +http://0day4today.com +http://www.commercekorea.net/hx289/ +http://www.magnagrecia.net/h319/ +http://npromo.eu/index.php +http://www.southsidenewhomes.com/hx341/
GDB helpers: materialize Entity rather than Self when appropriate TN:
@@ -26,11 +26,13 @@ is ## that we can use to dispatch on other properties and all. Self : ${Self.type.name} := ${Self.type.name} (${property.self_arg_name}); - ${gdb_bind('self', 'Self')} % if property._has_self_entity: Ent : ${Self.type.entity.name} := ${Self.type.entity.name}'(Node => Self, Info => E_Info); + ${gdb_bind('entity', 'Ent')} + % else: + ${gdb_bind('self', 'Self')} % endif % for arg in property.arguments:
Update go language binary to version 1.17.2 Go client libraries from cloud.google.com/go may require an updated Go version. For instance, github.com/google/go-github requires Go version 1.13 or greater.
@@ -19,13 +19,13 @@ PACKAGE_NAME = 'go_lang' # Download go language release binary. When the binary need to be updated to # to a new version, please update the value of GO_TAR. -GO_TAR = 'go1.12.9.linux-amd64.tar.gz' +GO_TAR = 'go1.17.2.linux-amd64.tar.gz' GO_URL = 'https://dl.google.com/go/' + GO_TAR PREPROVISIONED_DATA = { - GO_TAR: 'ac2a6efcc1f5ec8bdc0db0a988bb1d301d64b6d61b7e8d9e42f662fbb75a2b9b' + GO_TAR: 'f242a9db6a0ad1846de7b6d94d507915d14062660616a61ef7c808a76e4f1676' } PACKAGE_DATA_URL = {GO_TAR: GO_URL} -GO_VERSION = '1.12.9' +GO_VERSION = '1.17.2' GO_DIR = '%s/go-%s' % (linux_packages.INSTALL_DIR, GO_VERSION) GO_BIN = '/usr/local/go/bin/go'
Update sso-saml-okta.rst Add SAML FAQ
@@ -100,6 +100,8 @@ It is also recommended to post an announcement about how the migration will work You may also configure SAML for Okta by editing ``config.json`` to enable SAML based on :ref:`SAML configuration settings <saml-enterprise>`. You must restart the Mattermost server for the changes to take effect. +.. include:: saml-faq.rst + .. include:: sso-saml-ldapsync.rst .. include:: sso-saml-troubleshooting.rst
Remove ssl_verify property Only used when debugging, and in that case, the functionality could be implemented using private APIs.
@@ -49,19 +49,6 @@ class Client: Used when creating an external event loop to determine when to stop listening. """ - @property - def ssl_verify(self): - """Verify SSL certificate. - - Set to False to allow debugging with a proxy. - """ - # TODO: Deprecate this - return self._state._session.verify - - @ssl_verify.setter - def ssl_verify(self, value): - self._state._session.verify = value - @property def uid(self): """The ID of the client.
Temporarily limit setuptools version See
[build-system] requires = [ - "setuptools >= 41.0.0", + "setuptools == 41.0.0", # See https://github.com/ansible/molecule/issues/2350 "setuptools_scm >= 1.15.0", "setuptools_scm_git_archive >= 1.0", "wheel",
correct lineplot documentation for err_kws The variable err_band isn't used by lineplot. This seems to be a typo for err_kws.
@@ -1136,7 +1136,7 @@ lineplot.__doc__ = dedent("""\ err_style : "band" or "bars", optional Whether to draw the confidence intervals with translucent error bands or discrete error bars. - err_band : dict of keyword arguments + err_kws : dict of keyword arguments Additional paramters to control the aesthetics of the error bars. The kwargs are passed either to ``ax.fill_between`` or ``ax.errorbar``, depending on the ``err_style``.
Change column output order on `get-identities` Closes
@@ -60,7 +60,7 @@ def get_identities_command(values, lookup_style): else: ids = res['identities'] - print_table(ids, [('ID', 'id'), ('Full Name', 'name'), - ('Username', 'username'), + print_table(ids, [('ID', 'id'), ('Username', 'username'), + ('Full Name', 'name'), ('Organization', 'organization'), ('Email Address', 'email')])
test: convert to pytest test_noop.py Split the tests and add verification for access to mount when necessary.
# # Runtime Tests for No-op Pipelines # - import json -import unittest import tempfile +import pytest from .. import test - -NOOP_V2 = { [email protected](name="jsondata", scope="module") +def jsondata_fixture(): + return json.dumps({ "version": "2", "pipelines": [ { @@ -31,14 +31,19 @@ NOOP_V2 = { ] } ] -} + }) [email protected](name="tmpdir", scope="module") +def tmpdir_fixture(): + with tempfile.TemporaryDirectory() as tmp: + yield tmp -class TestNoop(unittest.TestCase): - def setUp(self): - self.osbuild = test.OSBuild() - def test_noop(self): [email protected](name="osb", scope="module") +def osbuild_fixture(): + with test.OSBuild() as osb: + yield osb + # # Run a noop Pipeline. Run twice to verify the cache does not affect # the operation (we do not have checkpoints, nor any stages that could @@ -47,16 +52,14 @@ class TestNoop(unittest.TestCase): # Then run the entire thing again, to verify our own `osbuild` executor # tears things down properly and allows to be executed multiple times. # - - with self.osbuild as osb: +def test_noop(osb): osb.compile("{}") osb.compile("{}") - with self.osbuild as osb: +def test_noop2(osb): osb.compile("{}") osb.compile("{}") - def test_noop_v2(self): - with tempfile.TemporaryDirectory() as tmp: - with self.osbuild as osb: - osb.compile(json.dumps(NOOP_V2), output_dir=tmp) [email protected](not test.TestBase.can_bind_mount(), reason="root-only") +def test_noop_v2(osb, tmpdir, jsondata): + osb.compile(jsondata, output_dir=tmpdir)
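The fixture pattern the conversion relies on, reduced to a minimal standalone sketch; the fixture and test names below are made up for illustration and are not part of the osbuild suite:

import pytest

@pytest.fixture(name="resource", scope="module")
def resource_fixture():
    value = object()  # set up once per module, shared by every test that requests "resource"
    yield value
    # code after the yield runs once, after the module's last test, as teardown

def test_uses_resource(resource):
    assert resource is not None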
[tests] improve output see
@@ -67,7 +67,7 @@ class TestConfig(unittest.TestCase): cfg = Config(["-l", "modules"]) result = self.stdout.getvalue() for module in all_modules(): - self.assertTrue(module["name"] in result) + self.assertTrue(module["name"] in result, "module {} missing in result".format(module["name"])) def test_invalid_list(self): with self.assertRaises(SystemExit):
Reduce filter functions to a single function Removed duplicate filter functions to promote consistency between data provider classes' outputs.
@@ -162,7 +162,6 @@ class Momentum(Scanner): async def run(self, back_time: datetime = None) -> List[str]: if not back_time: trade_able_symbols = await self._get_trade_able_symbols() - if isinstance(self.data_loader.data_api, PolygonData): filter_func = lambda ticket_snapshot: ( ticket_snapshot["ticker"] in trade_able_symbols # type: ignore and self.max_share_price @@ -176,24 +175,7 @@ class Momentum(Scanner): and ticket_snapshot["day"]["v"] > self.min_volume # type: ignore ) sort_key = lambda ticker: float(ticker["day"]["v"]) - tlog('applying momentum filter on market snapshots from Polygon API') - elif isinstance(self.data_loader.data_api, AlpacaData): - filter_func = lambda ticket_snapshot: ( - ticket_snapshot["ticker"] in trade_able_symbols # type: ignore - and self.max_share_price - >= ticket_snapshot["latest_trade"]["p"] # type: ignore - >= self.min_share_price # type: ignore - and float(ticket_snapshot["prev_daily_bar"]["v"]) # type: ignore - * float(ticket_snapshot["latest_trade"]["p"]) # type: ignore - > self.min_last_dv # type: ignore - and (((ticket_snapshot["daily_bar"]["o"] - ticket_snapshot["prev_daily_bar"]["c"]) - / ticket_snapshot["prev_daily_bar"]["c"])*100) >= self.today_change_percent # type: ignore - and ticket_snapshot["daily_bar"]["v"] > self.min_volume # type: ignore - ) - sort_key = lambda ticker: float(ticker["daily_bar"]["v"]) - tlog('applying momentum filter on market snapshots from Alpaca API') - else: - raise ValueError(f"Invalid data API: {type(self.data_loader.data_api)}") + tlog(f'applying momentum filter on market snapshots from {self.data_loader.data_api}') return await self.apply_filter_on_market_snapshot(sort_key, filter_func) rows = await self.load_from_db(back_time)
add support for displaying image messages with utime=0. Some publishers don't fill in the utime field. In that case, we will auto-increment the utime as a sequential counter, since the app reads the utime field to know that a new image has arrived.
@@ -29,6 +29,7 @@ bool ddBotImageQueue::initCameraData(const QString& cameraName, CameraData* came { cameraData->mName = cameraName.toAscii().data(); cameraData->mHasCalibration = true; + cameraData->mImageMessage.utime = 0; cameraData->mCamTrans = bot_param_get_new_camtrans(mBotParam, cameraName.toAscii().data()); if (!cameraData->mCamTrans) @@ -415,11 +416,17 @@ void ddBotImageQueue::onImageMessage(const QByteArray& data, const QString& chan CameraData* cameraData = this->getCameraData(cameraName); + int64_t prevTimestamp = cameraData->mImageMessage.utime; QMutexLocker locker(&cameraData->mMutex); cameraData->mImageMessage.decode(data.data(), 0, data.size()); cameraData->mImageBuffer.clear(); + if (cameraData->mImageMessage.utime == 0) + { + cameraData->mImageMessage.utime = prevTimestamp + 1; + } + if (cameraData->mHasCalibration) { this->getTransform("local", cameraData->mCoordFrame, cameraData->mLocalToCamera, cameraData->mImageMessage.utime);
library/radosgw_user.py: fix user update Removes the case when display_name was defined previously but was not provided when modifying. Without this change the module will change display_name to name even if display_name was not name originally. See
@@ -265,8 +265,6 @@ def modify_user(module, container_image=None): cluster = module.params.get('cluster') name = module.params.get('name') display_name = module.params.get('display_name') - if not display_name: - display_name = name email = module.params.get('email', None) access_key = module.params.get('access_key', None) secret_key = module.params.get('secret_key', None)
Changed the policy about cached Nones Now we use them for any cache TTL
@@ -85,8 +85,7 @@ class QueryCache(object): if self.ttl_min < 0 or (self.ttl_min > 0 and dif_minutes <= self.ttl_min): with open(file, "rb") as fh: result = pickle.loads(fh.read()) - # Valid load if we got a valid result or we have a persistent cache - return result, result is not None or self.ttl_min < 0 + return result, True # Cache expired return None, False
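A minimal sketch of the value/hit convention that lets a pickled None count as a cache hit; the file name and helper name here are illustrative only:

import pickle

def read_cached(path):
    # Return (value, hit). A pickled None is a valid hit, so callers can
    # distinguish "cached None" from "no cache entry at all".
    try:
        with open(path, "rb") as fh:
            return pickle.loads(fh.read()), True
    except FileNotFoundError:
        return None, False

value, hit = read_cached("query.cache")
if not hit:
    value = None  # recompute in real code; placeholder here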
fix Subscribers on slots graph HG-- branch : feature/microservices
{ "key": "object", "operator": "=", - "value": "bng1a.mo" + "value": "$device" } ], "refId": "B", { "key": "object", "operator": "=", - "value": "bng1a.mo" + "value": "$device" } ], "refId": "A",
fix search for testdata Don't look for a specific repo name, but look for the testdata directory directly. This should fix the deploy of delphi-epidata, which currently fails because unit tests are unable to find test data.
@@ -21,7 +21,7 @@ class TestUtils: def __init__(self, abs_path_to_caller): # navigate to the root of the delphi-epidata repo path_to_repo = Path(abs_path_to_caller) - while path_to_repo.name != 'delphi-epidata': + while not (path_to_repo / 'testdata').exists(): if not path_to_repo.name: raise Exception('unable to determine path to delphi-epidata repo') path_to_repo = path_to_repo.parent
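The same parent-directory walk, sketched with an explicit filesystem-root check (path equals its own parent only at the root); the function name is illustrative and not part of the repo:

from pathlib import Path

def find_repo_root(start):
    path = Path(start).resolve()
    while not (path / "testdata").exists():
        if path == path.parent:  # reached the filesystem root, give up
            raise RuntimeError("unable to locate a 'testdata' directory")
        path = path.parent
    return path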
StandardLightVisualiser : Conform spotlightCone wireframe weight Now we have reduced line thickness in general, and previously removed inner cone spokes, having the outer cone be thinner makes soft spots seem a lot fainter than ones with only a single cone being drawn. This feels like a better trade-off.
@@ -739,34 +739,18 @@ IECoreGL::ConstRenderablePtr StandardLightVisualiser::spotlightCone( float inner addCone( innerAngle, lensRadius, vertsPerCurve->writable(), p->writable(), length, !drawSecondaryCone ); - IECoreGL::CurvesPrimitivePtr curves = new IECoreGL::CurvesPrimitive( IECore::CubicBasisf::linear(), false, vertsPerCurve ); - curves->addPrimitiveVariable( "P", IECoreScene::PrimitiveVariable( IECoreScene::PrimitiveVariable::Vertex, p ) ); - - const Color3fDataPtr color = new Color3fData( lineWidthScale < 1.0f ? Color3f( 0.627f, 0.580f, 0.352f ) : g_lightWireframeColor ); - curves->addPrimitiveVariable( "Cs", IECoreScene::PrimitiveVariable( IECoreScene::PrimitiveVariable::Constant, color ) ); - - group->addChild( curves ); - if( drawSecondaryCone ) { - IECoreGL::GroupPtr outerGroup = new Group; - // Make the outer wireframe slightly thinner, as - // inner cone is where we reach full light output. - addWireframeCurveState( outerGroup.get(), 0.5f * lineWidthScale ); - - IntVectorDataPtr vertsPerCurve = new IntVectorData; - V3fVectorDataPtr p = new V3fVectorData; addCone( outerAngle, lensRadius, vertsPerCurve->writable(), p->writable(), length, true ); + } IECoreGL::CurvesPrimitivePtr curves = new IECoreGL::CurvesPrimitive( IECore::CubicBasisf::linear(), false, vertsPerCurve ); curves->addPrimitiveVariable( "P", IECoreScene::PrimitiveVariable( IECoreScene::PrimitiveVariable::Vertex, p ) ); + const Color3fDataPtr color = new Color3fData( lineWidthScale < 1.0f ? Color3f( 0.627f, 0.580f, 0.352f ) : g_lightWireframeColor ); curves->addPrimitiveVariable( "Cs", IECoreScene::PrimitiveVariable( IECoreScene::PrimitiveVariable::Constant, color ) ); - outerGroup->addChild( curves ); - - group->addChild( outerGroup ); - } + group->addChild( curves ); return group; }
Except OSError with errno.WSAEACCES when connecting "OSError: [WinError 10013] An attempt was made to access a socket in a way forbidden by its access permissions."
@@ -73,7 +73,8 @@ class TcpClient: # There are some errors that we know how to handle, and # the loop will allow us to retry if e.errno in (errno.EBADF, errno.ENOTSOCK, errno.EINVAL, - errno.ECONNREFUSED): + errno.ECONNREFUSED, # Windows-specific follow + getattr(errno, 'WSAEACCES', None)): # Bad file descriptor, i.e. socket was closed, set it # to none to recreate it on the next iteration self._socket = None
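A small standalone sketch of the getattr pattern that keeps the Windows-only constant optional; names are illustrative, and the explicit None guard keeps a missing errno from ever matching:

import errno

# WSAEACCES exists in the errno module only on Windows builds of Python;
# getattr with a default keeps this tuple usable everywhere.
RETRYABLE_ERRNOS = (errno.ECONNREFUSED, getattr(errno, 'WSAEACCES', None))

def should_retry(exc):
    code = getattr(exc, 'errno', None)
    return code is not None and code in RETRYABLE_ERRNOS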
Update main.tf adding back teamcity based outputs
@@ -181,3 +181,15 @@ output "public_agent_ips" { description = "These are the IP addresses of all public agents" value = "${join(",", module.dcos.infrastructure.public_agents.private_ips)}" } + +output "masters-ips" { + value = "${module.dcos.masters-ips}" +} + +output "cluster-address" { + value = "${module.dcos.masters-loadbalancer}" +} + +output "public-agents-loadbalancer" { + value = "${module.dcos.public-agents-loadbalancer}" +} \ No newline at end of file
config.py: Add support for source containers Adding support for the application/vnd.oci.source.image.config.v1+json mime type.
@@ -757,6 +757,9 @@ class DefaultConfig(ImmutableConfig): "application/tar+gzip", "application/vnd.cncf.helm.chart.content.v1.tar+gzip", ], + "application/vnd.oci.source.image.config.v1+json": [ + "application/vnd.oci.image.layer.v1.tar+gzip" + ], } # Feature Flag: Whether to allow Helm OCI content types.
Add get_flink_metadata This will be used to expose the metadata in the flink paasta API object.
@@ -183,6 +183,22 @@ def get_flink_status( raise +def get_flink_metadata( + kube_client: KubeClient, service: str, instance: str +) -> Optional[Mapping[str, Any]]: + try: + co = kube_client.custom.get_namespaced_custom_object( + **flink_custom_object_id(service, instance) + ) + metadata = co.get("metadata") + return metadata + except ApiException as e: + if e.status == 404: + return None + else: + raise + + def set_flink_desired_state( kube_client: KubeClient, service: str, instance: str, desired_state: str ) -> str:
[Small] Minor optimization for ImageScaleTransformer Image observation can take a lot of memory. So we try to make the memory footprint as small as possible. 1. uint8 can be directly multiplied with a float number without converting it first. 2. Do not add self._min if possible.
@@ -337,7 +337,6 @@ class FrameStacker(DataTransformer): def _stack_frame(obs, i): prev_obs = replay_buffer.get_field(self._exp_fields[i], env_ids, prev_positions) - prev_obs = convert_device(prev_obs) stacked_shape = alf.nest.get_field( self._transformed_observation_spec, self._fields[i]).shape # [batch_size, mini_batch_length + stack_size - 1, ...] @@ -447,8 +446,10 @@ class ImageScaleTransformer(SimpleDataTransformer): assert isinstance(obs, torch.Tensor), str(type(obs)) + ' is not Tensor' assert obs.dtype == torch.uint8, "Image must have dtype uint8!" - obs = obs.type(torch.float32) - return self._scale * obs + self._min + obs = self._scale * obs + if self._min != 0: + obs.add_(self._min) + return obs observation = timestep_or_exp.observation for field in self._fields:
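A quick standalone check of the type-promotion behaviour the first point relies on, assuming PyTorch is available; the shape and scale below are arbitrary:

import torch

obs = torch.randint(0, 256, (2, 3, 8, 8), dtype=torch.uint8)

# Multiplying a uint8 tensor by a float scalar promotes the result to
# float32 directly, so no separate .type(torch.float32) copy is needed.
scaled = obs * (1.0 / 255.0)
assert scaled.dtype == torch.float32

# The in-place shift is only needed when the target range does not start at 0.
minimum = 0.0
if minimum != 0.0:
    scaled.add_(minimum)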
Fixed wrong behaviour of release license caused by UpNext and by supplemental media type videoids
@@ -32,11 +32,10 @@ except NameError: # Python 3 class MSLHandler(object): """Handles session management and crypto for license, manifest and event requests""" - last_license_session_id = '' last_license_url = '' - last_license_release_url = '' - last_drm_context = '' - last_playback_context = '' + licenses_session_id = [] + licenses_xid = [] + licenses_release_url = [] def __init__(self): super(MSLHandler, self).__init__() @@ -200,10 +199,12 @@ class MSLHandler(object): params, 'sessionId'), g.get_esn()) - # This xid must be used for any future request, until playback stops + # This xid must be used also for each future Event request, until playback stops g.LOCAL_DB.set_value('xid', xid, TABLE_SESSION) - self.last_license_session_id = sid - self.last_license_release_url = response[0]['links']['releaseLicense']['href'] + + self.licenses_xid.insert(0, xid) + self.licenses_session_id.insert(0, sid) + self.licenses_release_url.insert(0, response[0]['links']['releaseLicense']['href']) if self.msl_requests.msl_switch_requested: self.msl_requests.msl_switch_requested = False @@ -226,13 +227,19 @@ class MSLHandler(object): @common.time_execution(immediate=True) def release_license(self, data=None): # pylint: disable=unused-argument """Release the server license""" - common.debug('Requesting releasing license') + try: + # When UpNext is used a new video is loaded while another one is running and not yet released, + # so you need to take the right data of first added license + url = self.licenses_release_url.pop() + sid = self.licenses_session_id.pop() + xid = self.licenses_xid.pop() + common.debug('Requesting releasing license') params = [{ - 'url': self.last_license_release_url, + 'url': url, 'params': { - 'sessionId': self.last_license_session_id, - 'xid': g.LOCAL_DB.get_value('xid', table=TABLE_SESSION) + 'sessionId': sid, + 'xid': xid }, 'echo': 'sessionId' }] @@ -241,6 +248,9 @@ class MSLHandler(object): self.msl_requests.build_request_data('/bundle', params), g.get_esn()) common.debug('License release response: {}', response) + except IndexError: + # Example the supplemental media type have no license + common.debug('No license to release') def clear_user_id_tokens(self, data=None): # pylint: disable=unused-argument """Clear all user id tokens""" @@ -249,8 +259,6 @@ class MSLHandler(object): @common.time_execution(immediate=True) def __tranform_to_dash(self, manifest): self.last_license_url = manifest['links']['license']['href'] - self.last_playback_context = manifest['playbackContextId'] - self.last_drm_context = manifest['drmContextId'] return convert_to_dash(manifest)
docker: use version 5.4 of tpm2-tools We need the latest version of tpm2_eventlog to parse logs with newer Shims correctly.
@@ -2,6 +2,14 @@ FROM fedora:37 AS keylime_base LABEL version="_version_" description="Keylime Base - Only used as an base image for derived packages" MAINTAINER Keylime Team <[email protected]> +RUN dnf -y install dnf-plugins-core git && dnf -y builddep tpm2-tools +RUN git clone -b 5.4 https://github.com/tpm2-software/tpm2-tools.git && \ + cd tpm2-tools && \ + ./bootstrap && \ + ./configure && \ + make && make install && \ + cd .. && rm -rf tpm2-tools + ENV GOPATH=/root/go RUN --mount=target=/keylime,type=bind,source=.,rw \ cd /keylime && ./installer.sh -o && \
Slightly simplify BinaryReader There was no need for the BufferedReader, since everything is already in memory. Further, the stream parameter was never used, so it was also unnecessary. The check for None when reading length was also unnecessary, since we could just pass -1 to begin with.
This module contains the BinaryReader utility class. """ import os +import time from datetime import datetime, timezone, timedelta -from io import BufferedReader, BytesIO +from io import BytesIO from struct import unpack -import time from ..errors import TypeNotFoundError from ..tl.alltlobjects import tlobjects @@ -18,18 +18,10 @@ _EPOCH = _EPOCH_NAIVE.replace(tzinfo=timezone.utc) class BinaryReader: """ Small utility class to read binary data. - Also creates a "Memory Stream" if necessary """ - def __init__(self, data=None, stream=None): - if data: + def __init__(self, data): self.stream = BytesIO(data) - elif stream: - self.stream = stream - else: - raise ValueError('Either bytes or a stream must be provided') - - self.reader = BufferedReader(self.stream) self._last = None # Should come in handy to spot -404 errors # region Reading @@ -61,13 +53,10 @@ class BinaryReader: return int.from_bytes( self.read(bits // 8), byteorder='little', signed=signed) - def read(self, length=None): - """Read the given amount of bytes.""" - if length is None: - return self.reader.read() - - result = self.reader.read(length) - if len(result) != length: + def read(self, length=-1): + """Read the given amount of bytes, or -1 to read all remaining.""" + result = self.stream.read(length) + if (length >= 0) and (len(result) != length): raise BufferError( 'No more data left to read (need {}, got {}: {}); last read {}' .format(length, len(result), repr(result), repr(self._last)) @@ -164,24 +153,24 @@ class BinaryReader: def close(self): """Closes the reader, freeing the BytesIO stream.""" - self.reader.close() + self.stream.close() # region Position related def tell_position(self): """Tells the current position on the stream.""" - return self.reader.tell() + return self.stream.tell() def set_position(self, position): """Sets the current position on the stream.""" - self.reader.seek(position) + self.stream.seek(position) def seek(self, offset): """ Seeks the stream position given an offset from the current position. The offset may be negative. """ - self.reader.seek(offset, os.SEEK_CUR) + self.stream.seek(offset, os.SEEK_CUR) # endregion
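For reference, the BytesIO behaviour that makes -1 a safe default for read(), shown as a minimal runnable check:

from io import BytesIO

stream = BytesIO(b"abcdef")
assert stream.read(2) == b"ab"     # explicit length
assert stream.read(-1) == b"cdef"  # -1 (the new default) means "read the rest"
assert stream.read(-1) == b""      # nothing left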
Changed _default_manager in favor of _base_manager This fixes a bug where models with overwritten default managers that filter out some instances do not get added to revisions properly.
@@ -218,6 +218,7 @@ def add_to_revision(obj, model_db=None): def _save_revision(versions, user=None, comment="", meta=(), date_created=None, using=None): from reversion.models import Revision # Only save versions that exist in the database. + # Use _base_manager so we don't have problems when _default_manager is overriden model_db_pks = defaultdict(lambda: defaultdict(set)) for version in versions: model_db_pks[version._model][version.db].add(version.object_id) @@ -225,7 +226,7 @@ def _save_revision(versions, user=None, comment="", meta=(), date_created=None, model: { db: frozenset(map( force_text, - model._default_manager.using(db).filter(pk__in=pks).values_list("pk", flat=True), + model._base_manager.using(db).filter(pk__in=pks).values_list("pk", flat=True), )) for db, pks in db_pks.items() } @@ -258,7 +259,7 @@ def _save_revision(versions, user=None, comment="", meta=(), date_created=None, version.save(using=using) # Save the meta information. for meta_model, meta_fields in meta: - meta_model._default_manager.db_manager(using=using).create( + meta_model._base_manager.db_manager(using=using).create( revision=revision, **meta_fields )
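A minimal sketch of why a filtering default manager can hide instances, assuming Django is installed; the model, manager, and app label are illustrative only:

import django
from django.conf import settings

settings.configure()  # minimal settings, enough to define a model with an explicit app_label
django.setup()

from django.db import models

class PublishedManager(models.Manager):
    def get_queryset(self):
        # a filtering default manager silently hides some rows
        return super().get_queryset().filter(published=True)

class Article(models.Model):
    published = models.BooleanField(default=False)
    objects = PublishedManager()  # first manager declared -> _default_manager

    class Meta:
        app_label = "demo"

# Article._default_manager.filter(pk__in=pks) would miss unpublished rows,
# while Article._base_manager.filter(pk__in=pks) sees every instance.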
Add subtest support (https://github.com/CleanCut/green/issues/111) Great start! That really helped get things going. There are some corner cases we need to take care of and some tests that need to be included. I spent a few hours researching it last night. I'll see if I can finish polishing it up now.
@@ -48,9 +48,13 @@ class ProtoTest(): """ def __init__(self, test=None): if test: + method_parts = str(test).split(None, 2) + if hasattr(test, 'test_case'): + test = test.test_case self.module = test.__module__ self.class_name = test.__class__.__name__ - self.method_name = str(test).split()[0] + self.method_name = method_parts[0] if len(method_parts) < 3 \ + else ' '.join((method_parts[0], method_parts[2])) # docstr_part strips initial whitespace, then combines all lines # into one string until the first completely blank line in the # docstring @@ -182,6 +186,7 @@ class ProtoTestResult(BaseTestResult): 'stdout_output', 'unexpectedSuccesses', ] + self.failfast = False self.reinitialize() def reinitialize(self): @@ -281,6 +286,13 @@ class ProtoTestResult(BaseTestResult): """ self.unexpectedSuccesses.append(proto_test(test)) + def addSubTest(self, test, subtest, err): + if err is not None: + if issubclass(err[0], test.failureException): + self.addFailure(subtest, err) + else: + self.addError(subtest, err) + class GreenTestResult(BaseTestResult): """
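For context, the subtest naming that the parsing above has to cope with, shown with a plain unittest example; the class and method names are made up:

import unittest

class Numbers(unittest.TestCase):
    def test_even(self):
        for i in range(4):
            with self.subTest(i=i):
                # each failing parameter set is reported separately, with the
                # parameters appended to the test id, e.g. something like
                # "test_even (__main__.Numbers) (i=1)"
                self.assertEqual(i % 2, 0)

if __name__ == "__main__":
    unittest.main()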
Add pattern_syntax error to Raisecom.ROS profile HG-- branch : feature/microservices
@@ -22,6 +22,7 @@ class Profile(BaseProfile): pattern_prompt = r"^\S+?#" command_more = " " command_exit = "exit" + pattern_syntax_error = r"% \".+\" (?:Unknown command.)" rogue_chars = [re.compile(r"\x08+\s+\x08+"), "\r"] rx_ver = re.compile(
[modules/pomodoro] Add note about command chaining to doc fixes
@@ -10,7 +10,10 @@ Parameters: * pomodoro.format: Timer display format with "%m" and "%s" for minutes and seconds (defaults to "%m:%s") Examples: "%m min %s sec", "%mm", "", "timer" * pomodoro.notify: Notification command to run when timer ends/starts (defaults to nothing) - Example: 'notify-send "Time up!"' + Example: 'notify-send "Time up!"'. If you want to chain multiple commands, + please use an external wrapper script and invoke that. The module itself does + not support command chaining (see https://github.com/tobi-wan-kenobi/bumblebee-status/issues/532 + for a detailled explanation) """ from __future__ import absolute_import
Update Redis exporter to 1.12.0 Update Redis exporter to 1.12.0 and fix license.
@@ -58,11 +58,10 @@ packages: context: static: <<: *default_static_context - version: 1.9.0 - license: ASL 2.0 - release: 2 + version: 1.12.0 + license: MIT summary: Prometheus exporter for Redis server metrics. - description: Prometheus Exporter for Redis Metrics. Supports Redis 2.x, 3.x, 4.x, and 5.x + description: Prometheus Exporter for Redis Metrics. Supports Redis 2.x, 3.x, 4.x, 5.x and 6.x package: '%{name}-v%{version}.linux-amd64' URL: https://github.com/oliver006/redis_exporter dynamic:
replace.py: allow to edit modified text Allow to edit the latest version (i.e., with modifications) of the text. This allows to make changes without further replacements being applied. It is useful to make changes that should not be caught by replacements or to amend unwanted fixes in complex replacement cases.
@@ -733,13 +733,13 @@ class ReplaceRobot(Bot): continue applied = set() new_text = original_text + last_text = None while True: if self.isTextExcepted(new_text): pywikibot.output(u'Skipping %s because it contains text ' u'that is on the exceptions list.' % page.title(asLink=True)) break - last_text = None while new_text != last_text: last_text = new_text new_text = self.apply_replacements(last_text, applied, @@ -768,7 +768,7 @@ class ReplaceRobot(Bot): break choice = pywikibot.input_choice( u'Do you want to accept these changes?', - [('Yes', 'y'), ('No', 'n'), ('Edit', 'e'), + [('Yes', 'y'), ('No', 'n'), ('Edit original', 'e'), ('edit Latest', 'l'), ('open in Browser', 'b'), ('all', 'a')], default='N') if choice == 'e': @@ -778,6 +778,14 @@ class ReplaceRobot(Bot): if as_edited and as_edited != new_text: new_text = as_edited continue + if choice == 'l': + editor = editarticle.TextEditor() + as_edited = editor.edit(new_text) + # if user didn't press Cancel + if as_edited and as_edited != new_text: + new_text = as_edited + last_text = new_text # prevent changes from being applied again + continue if choice == 'b': pywikibot.bot.open_webbrowser(page) try: @@ -787,6 +795,7 @@ class ReplaceRobot(Bot): % page.title()) break new_text = original_text + last_text = None continue if choice == 'a': self.options['always'] = True
filter: Show stream and topic title for near link narrows. Updates the `filter.get_title` logic to return the stream name for narrows that include the stream, topic and near operators. That way the browser/tab title remains the same for these views, which have a particular scroll offset.
@@ -662,7 +662,10 @@ export class Filter { get_title() { // Nice explanatory titles for common views. const term_types = this.sorted_term_types(); - if (term_types.length === 2 && _.isEqual(term_types, ["stream", "topic"])) { + if ( + (term_types.length === 3 && _.isEqual(term_types, ["stream", "topic", "near"])) || + (term_types.length === 2 && _.isEqual(term_types, ["stream", "topic"])) + ) { if (!this._sub) { const search_text = this.operands("stream")[0]; return $t({defaultMessage: "Unknown stream #{search_text}"}, {search_text});
expressen: they started to use https for their stuff now fixes
@@ -21,7 +21,7 @@ class Expressen(Service): yield ServiceError("Excluding video") return - match = re.search('="(http://www.expressen.se/tvspelare[^"]+)"', data) + match = re.search('="(https://www.expressen.se/tvspelare[^"]+)"', data) if not match: log.error("Can't find video id") return
Update android_bankbot.txt > ```android_roamingmantis```
@@ -1978,12 +1978,6 @@ bbvaupdateappdownload.com lockappdown.com update-bbva-v2.com -# Reference: https://www.virustotal.com/gui/file/1cafde8a16790eb1b8b6839daced4f015b0d7ce8619ecdb69580cd00d3bbd3ee/detection - -http://192.186.11.125 -192.186.11.125:6666 -220103.top - # Reference: https://twitter.com/malwrhunterteam/status/1492106775826513922 # Reference: https://www.virustotal.com/gui/file/17d7526af61a94cd3707a75b00005d01cd9211eed503baf9325904b186dbc32c/detection
status: optimize the _fill_statuses method Close
@@ -383,8 +383,12 @@ class RemoteLOCAL(RemoteBase): return ret def _fill_statuses(self, checksum_info_dir, local_exists, remote_exists): + # Using sets because they are way faster for lookups + local = set(local_exists) + remote = set(remote_exists) + for md5, info in checksum_info_dir.items(): - status = STATUS_MAP[(md5 in local_exists, md5 in remote_exists)] + status = STATUS_MAP[(md5 in local, md5 in remote)] info["status"] = status def _get_chunks(self, download, remote, status_info, status, jobs):
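A rough illustration of the list-versus-set membership cost, using only the standard library; the collection size is arbitrary:

import timeit

exists = [f"md5_{i}" for i in range(100_000)]
as_list, as_set = exists, set(exists)

# membership tests: O(n) on the list, O(1) on average for the set
print(timeit.timeit(lambda: "md5_99999" in as_list, number=1000))
print(timeit.timeit(lambda: "md5_99999" in as_set, number=1000))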
Windows: Fixup dependency walker usage in unicode directories * This prevented the tool from discovering dependencies and made it produce non-working dist folders, in or outside of unicode paths. * Produced outside, the dist folders were good though.
@@ -52,6 +52,7 @@ from nuitka.utils.Execution import withEnvironmentPathAdded from nuitka.utils.FileOperations import ( areSamePaths, deleteFile, + getExternalUsePath, getFileContentByLine, getFileContents, getSubDirectories, @@ -1029,7 +1030,8 @@ def detectBinaryPathDLLsWindowsDependencyWalker( "-pa1", "-ps1", binary_filename, - ) + ), + cwd=getExternalUsePath(os.getcwd()), ) # TODO: Exit code should be checked.
Update allen-cell-imaging-collections.yaml Removing documentation link from description field.
Name: Allen Cell Imaging Collections Description: | - https://open.quiltdata.com/b/allencell - This bucket contains multiple datasets (as Quilt packages) created by the Allen Institute for Cell Science (AICS). The imaging data in this bucket contains either of the following:
pagegenerators: try..except UnicodeEncodeError on getattr() Python 2 getattr() will do implicit conversion from unicode to str, and when the unicode is non-ascii it'll choke. Since we don't really have a reason to use non-ascii methods, I'm just doing try..except instead of detecting whether it's Python 2 and conditionally stringifying.
@@ -1176,7 +1176,11 @@ class GeneratorFactory(object): if value == '': value = None + try: handler = getattr(self, '_handle_' + arg[1:], None) + except UnicodeEncodeError: + # getattr() on py2 does implicit unicode -> str + return False if handler: handler_result = handler(value) if isinstance(handler_result, bool):
Implemented review suggestions. Added namespaces for json loading and random choices. Changed Path() to take a direct path instead of an argument separated path. Added aliases "anthem" and "pridesong". Added periods to the end of all doc strings.
+import json import logging -from json import load +import random from pathlib import Path -from random import choice from discord.ext import commands @@ -23,24 +23,24 @@ class PrideAnthem(commands.Cog): If none can be found, it will log this as well as provide that information to the user. """ if not genre: - return choice(self.anthems) + return random.choice(self.anthems) else: songs = [song for song in self.anthems if genre.casefold() in song['genre']] try: - return choice(songs) + return random.choice(songs) except IndexError: log.info('No videos for that genre.') @staticmethod def load_vids() -> list: - """Loads a list of videos from the resources folder as dictionaries""" - with open(Path('bot', 'resources', 'pride', 'anthems.json').absolute(), 'r') as f: - anthems = load(f) + """Loads a list of videos from the resources folder as dictionaries.""" + with open(Path('bot/resources/pride/anthems.json').absolute(), 'r') as f: + anthems = json.load(f) return anthems - @commands.command(name='prideanthem') + @commands.group(aliases=["prideanthem", "anthem", "pridesong"], invoke_without_command=True) async def send_anthem(self, ctx, genre: str = None): - """Generates and sends message with youtube link""" + """Generates and sends message with youtube link.""" anthem = self.get_video(genre) if anthem: await ctx.send(anthem['url']) @@ -49,6 +49,6 @@ class PrideAnthem(commands.Cog): def setup(bot): - """Cog loader for pride anthem""" + """Cog loader for pride anthem.""" bot.add_cog(PrideAnthem(bot)) log.info('Pride anthems cog loaded!')
Big bucks: v2 analysis not saving fit result values The internal representation of lmfit.Parameters makes the value a '_val'. Also, we filtered out all variables of the internal representation starting with a '_', deeming them too private. Bonus bug solved: the key on which the result was saved was overwritten.
@@ -525,11 +525,9 @@ class BaseDataAnalysis(object): fit_fn = fit_dict.get('fit_fn', None) model = fit_dict.get('model', lmfit.Model(fit_fn)) fit_guess_fn = fit_dict.get('fit_guess_fn', None) - if fit_guess_fn is None: if fitting_type == 'model' and fit_dict.get('fit_guess', True): fit_guess_fn = model.guess - if guess_pars is None: # if you pass on guess_pars, immediately go to the fitting if fit_guess_fn is not None: # Run the guess funtions here @@ -553,7 +551,6 @@ class BaseDataAnalysis(object): # A guess can also be specified as a dictionary. # additionally this can be used to overwrite values # from the guess functions. - if guess_dict is not None: for gd_key, val in guess_dict.items(): for attr, attr_val in val.items(): @@ -562,13 +559,14 @@ class BaseDataAnalysis(object): elif guess_dict is not None: if fitting_type is 'minimize': params = lmfit.Parameters() - for key, val in list(guess_dict.items()): - params.add(key) + for gd_key, val in list(guess_dict.items()): + params.add(gd_key) for attr, attr_val in val.items(): - setattr(params[key], attr, attr_val) + setattr(params[gd_key], attr, attr_val) + elif fitting_type is 'model': - for key, val in list(guess_dict.items()): - model.set_param_hint(key, **val) + for gd_key, val in list(guess_dict.items()): + model.set_param_hint(gd_key, **val) guess_pars = model.make_params() else: if fitting_type is 'minimize': @@ -578,7 +576,6 @@ class BaseDataAnalysis(object): fit_dict['fit_res'] = model.fit(**fit_xvals, **fit_yvals, params=guess_pars) self.fit_res[key] = fit_dict['fit_res'] - elif fitting_type is 'minimize': # Perform the fitting fit_dict['fit_res'] = lmfit.minimize(fcn=_complex_residual_function, params=params, @@ -660,6 +657,8 @@ class BaseDataAnalysis(object): for k in param.__dict__: if not k.startswith('_') and k not in ['from_internal', ]: dic['params'][param_name][k] = getattr(param, k) + if k in '_val': + dic['params'][param_name]['value'] = getattr(param,k) return dic def plot(self, key_list=None, axs_dict=None,
Make reformat script working directory free After this PR, the `dev/reformat` script is not affected by the current working directory.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +# The current directory of the script. +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +FWDIR="$( cd "$DIR"/.. && pwd )" +cd "$FWDIR" + BLACK_BUILD="python -m black" BLACK_VERSION="19.10b0" $BLACK_BUILD 2> /dev/null