Dataset columns: message (string, lengths 13-484) and diff (string, lengths 38-4.63k).
utils/trace_cmd: move params to __init__() Move the check_for_markers and events parameters from parse() to __init__(). These parameters control the behavior of the parser, and do not relate to a particular trace file, so it makes more sense to have them there.
@@ -235,7 +235,7 @@ class TraceCmdParser(object): """ - def __init__(self, filter_markers=True): + def __init__(self, filter_markers=True, check_for_markers=True, events=None): """ Initialize a new trace parser. @@ -245,27 +245,29 @@ class TraceCmdParser(object): markers will be reported). This maybe overriden based on `check_for_markers` parameter of `parse()` + :param check_for_markers: Check if the start/stop markers are present + in the trace and ensure that `filter_markers` + is `False` if they aren't + :param events: A list of event names to be reported; if not specified, + all events will be reported. + """ self.filter_markers = filter_markers + self.check_for_markers = check_for_markers + self.events = events - def parse(self, filepath, events=None, check_for_markers=True): # pylint: disable=too-many-branches,too-many-locals + def parse(self, filepath): # pylint: disable=too-many-branches,too-many-locals """ This is a generator for the trace event stream. :param filepath: The path to the file containg text trace as reported by trace-cmd - :param events: A list of event names to be reported; if not specified, - all events will be reported. - :param check_for_markers: Check if the start/stop markers are present - in the trace and ensure that `filter_markers` - is `False` if they aren't - """ inside_maked_region = False - filters = [re.compile('^{}$'.format(e)) for e in (events or [])] + filters = [re.compile('^{}$'.format(e)) for e in (self.events or [])] filter_markers = self.filter_markers - if filter_markers and check_for_markers: + if filter_markers and self.check_for_markers: with open(filepath) as fh: for line in fh: if TRACE_MARKER_START in line:
Update nyc-tlc-trip-records-pds.yaml Update entry to reflect availability of new parquet objects / requirement for signed requests.
Name: New York City Taxi and Limousine Commission (TLC) Trip Record Data -Description: "*Note this data is currently inaccessible. We are working with the data provider to reenable access.* Data of trips taken by taxis and for-hire vehicles in New York City." -Documentation: http://www.nyc.gov/html/tlc/html/about/trip_record_data.shtml +Description: "Data of trips taken by taxis and for-hire vehicles in New York City. Note: access to this dataset is free, however direct S3 access does require an AWS account. Anonymous downloads are accessible from the dataset's documentation webpage listed below." +Documentation: https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page Contact: [email protected] ManagedBy: City of New York Taxi and Limousine Commission UpdateFrequency: As soon as new data is available to be shared publicly. @@ -15,6 +15,7 @@ Resources: ARN: arn:aws:s3:::nyc-tlc Region: us-east-1 Type: S3 Bucket + AccountRequired: True DataAtWork: Tutorials: - Title: Machine learning on distributed Dask using Amazon SageMaker and AWS Fargate
Corrected years for Prii w/ good steering sensor This was already noted in PR#1198 comments by ...
@@ -3,7 +3,7 @@ Version 0.7.4 (2020-03-20) * New driving model: improved lane changes and lead car detection * Improved driver monitoring model: improve eye detection * Improved calibration stability - * Improved lateral control on some 2018 and 2019 Toyota Prius + * Improved lateral control on some 2019 and 2020 Toyota Prius * Improved lateral control on VW Golf: 20% more steering torque * Fixed bug where some 2017 and 2018 Toyota C-HR would use the wrong steering angle sensor * Support for Honda Insight thanks to theantihero!
FLASK_ENV is deprecated FLASK_DEBUG=1 is the proper way to have the debug stack trace screen appear
@@ -32,7 +32,7 @@ export FIRETEXT_API_KEY='FIRETEXT_ACTUAL_KEY' export NOTIFICATION_QUEUE_PREFIX='YOUR_OWN_PREFIX' export FLASK_APP=application.py -export FLASK_ENV=development +export FLASK_DEBUG=1 export WERKZEUG_DEBUG_PIN=off "> environment.sh ```
Fix geom_map geopandas 0.6.0 refactored to use pandas ExtensionArray. That led to an issue where we cannot concat a geopandas array without copying it. This will get resolved in pandas 0.26.0. Then we can maybe revert this commit.
@@ -88,7 +88,7 @@ class geom_map(geom): }, inplace=True) - data = pd.concat([data, bounds], axis=1, copy=False) + data = pd.concat([data, bounds], axis=1) return data def draw_panel(self, data, panel_params, coord, ax, **params):
readme: remove proxy from job configuration It has been removed in
@@ -104,7 +104,6 @@ however everything else is optional. For details, see `<job-conf.rst>`_. id: myjob time_limit: 60 # seconds - proxy: 127.0.0.1:8000 # point at warcprox for archiving ignore_robots: false warcprox_meta: null metadata: {}
fix: load best epoch and value from prior validations fixes
@@ -330,6 +330,14 @@ class Application: # epoch -> value dictionary values = SortedDict() + # load best epoch and value from past executions + if params_yml.exists(): + with open(params_yml, 'r') as fp: + params = yaml.load(fp, Loader=yaml.SafeLoader) + best_epoch = params['epoch'] + best_value = params[metric] + values[best_epoch] = best_value + # metric value for current epoch values[epoch] = details['value']
Use prune option in Pygit2 provider when fetching Pygit2 version 0.26.2 added support for pruning when fetching. This way, the Pygit2 provider no longer needs to leverage the git command line utility for pruning the remote.
@@ -1611,11 +1611,19 @@ class Pygit2(GitProvider): ''' Clean stale local refs so they don't appear as fileserver environments ''' + try: + if pygit2.GIT_FETCH_PRUNE: + # Don't need to clean anything, pygit2 can do it by itself + return [] + except AttributeError: + # However, only in 0.26.2 and newer + pass if self.credentials is not None: log.debug( - 'pygit2 does not support detecting stale refs for ' - 'authenticated remotes, saltenvs will not reflect ' - 'branches/tags removed from remote \'%s\'', self.id + 'The installed version of pygit2 (%s) does not support ' + 'detecting stale refs for authenticated remotes, saltenvs ' + 'will not reflect branches/tags removed from remote \'%s\'', + PYGIT2_VERSION, self.id ) return [] return super(Pygit2, self).clean_stale_refs() @@ -1721,6 +1729,11 @@ class Pygit2(GitProvider): else: if self.credentials is not None: origin.credentials = self.credentials + try: + fetch_kwargs['prune'] = pygit2.GIT_FETCH_PRUNE + except AttributeError: + # pruning only available in pygit2 >= 0.26.2 + pass try: fetch_results = origin.fetch(**fetch_kwargs) except GitError as exc: @@ -2573,7 +2586,8 @@ class GitBase(object): LIBGIT2_VERSION ) ) - if not salt.utils.path.which('git'): + if not getattr(pygit2, 'GIT_FETCH_PRUNE', False) \ + and not salt.utils.path.which('git'): errors.append( 'The git command line utility is required when using the ' '\'pygit2\' {0}_provider.'.format(self.role)
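A minimal sketch of the capability probe used in the diff above, assuming pygit2 is importable; GIT_FETCH_PRUNE is the constant named in the diff, everything else is illustrative.

```python
import pygit2

# Detect optional pruning support (added in pygit2 0.26.2) by probing for
# the constant instead of comparing version strings.
fetch_kwargs = {}
prune_flag = getattr(pygit2, "GIT_FETCH_PRUNE", None)
if prune_flag is not None:
    fetch_kwargs["prune"] = prune_flag
# Older pygit2 leaves fetch_kwargs empty and would fall back to the
# git command line utility for pruning stale refs.
print("pruning supported:", prune_flag is not None)
```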
Making error message more descriptive While starting the server, an error "[Errno 111] Connection refused" is thrown without specifying any reason. This commit makes the error message more descriptive.
@@ -16,10 +16,12 @@ from hwilib.errors import ( DeviceFailureError, UnavailableActionError, ) +import logging import hashlib from binascii import a2b_base64, b2a_base64 py_enumerate = enumerate +logger = logging.getLogger(__name__) class SpecterClient(HardwareWalletClient): @@ -240,9 +242,10 @@ def enumerate(password=""): s.connect(("127.0.0.1", 8789)) s.close() ports.append("127.0.0.1:8789") - except Exception as e: - print(e) - pass + except ConnectionRefusedError as e: + logger.warning( + f"Warning: Specter DIY failed to establish socket connection. Error: {e}" + ) for port in ports: # for every port try to get a fingerprint
Update 6-ldap.md - AUTH_LDAP_USER_DN_TEMPLATE to None for Windows 2012+ Changed "When using Windows Server 2012, `AUTH_LDAP_USER_DN_TEMPLATE` should be set to None." to refer to Windows Server 2012+.
@@ -74,7 +74,7 @@ STARTTLS can be configured by setting `AUTH_LDAP_START_TLS = True` and using the ### User Authentication !!! info - When using Windows Server 2012, `AUTH_LDAP_USER_DN_TEMPLATE` should be set to None. + When using Windows Server 2012+, `AUTH_LDAP_USER_DN_TEMPLATE` should be set to None. ```python from django_auth_ldap.config import LDAPSearch
Fixing temporary bug with inputs and bucketing When series are bucketed (i.e. batches() do not return data in the same order as get_series()), inputs were returned in the wrong order.
@@ -317,6 +317,8 @@ def run_on_dataset(tf_manager: TensorFlowManager, feedables = set.union(*[runner.feedables for runner in runners]) feedables |= dataset_runner.feedables + fetched_input = {s: [] for s in dataset.series} # type: Dict[str, List] + processed_examples = 0 for batch in dataset.batches(): if 0 < log_progress < time.process_time() - last_log_time: @@ -335,6 +337,9 @@ def run_on_dataset(tf_manager: TensorFlowManager, for script_list, ex_result in zip(batch_results, execution_results): script_list.append(ex_result) + for s_id in batch.series: + fetched_input[s_id].extend(batch.get_series(s_id)) + # Transpose runner interim results. all_results = [join_execution_results(res) for res in batch_results[:-1]] @@ -343,7 +348,6 @@ def run_on_dataset(tf_manager: TensorFlowManager, # fetched_input = { # k: [dic[k] for dic in input_transposed] for k in input_transposed[0]} - fetched_input = {s: list(dataset.get_series(s)) for s in dataset.series} fetched_input_lengths = {s: len(fetched_input[s]) for s in dataset.series} if len(set(fetched_input_lengths.values())) != 1:
DOC: Add missing description to `brute_force` parameter. Add missing description to the `brute_force` parameter to the docstrings of the `ndimage.morphology.binary_dilation` and `ndimage.morphology.binary_erosion` methods. Also made the `origin` parameter description come after the one for `border_value` to match the method signature. Resolves
@@ -309,10 +309,16 @@ def binary_erosion(input, structure=None, iterations=1, mask=None, output=None, output : ndarray, optional Array of the same shape as input, into which the output is placed. By default, a new array is created. - origin : int or tuple of ints, optional - Placement of the filter, by default 0. border_value : int (cast to 0 or 1), optional Value at the border in the output array. + origin : int or tuple of ints, optional + Placement of the filter, by default 0. + brute_force : boolean, optional + Memory condition: if False, only the pixels whose value was changed in + the last iteration are tracked as candidates to be updated (eroded) in + the current iteration; if True all pixels are considered as candidates + for erosion, regardless of what happened in the previous iteration. + False by default. Returns ------- @@ -398,10 +404,16 @@ def binary_dilation(input, structure=None, iterations=1, mask=None, output : ndarray, optional Array of the same shape as input, into which the output is placed. By default, a new array is created. - origin : int or tuple of ints, optional - Placement of the filter, by default 0. border_value : int (cast to 0 or 1), optional Value at the border in the output array. + origin : int or tuple of ints, optional + Placement of the filter, by default 0. + brute_force : boolean, optional + Memory condition: if False, only the pixels whose value was changed in + the last iteration are tracked as candidates to be updated (dilated) + in the current iteration; if True all pixels are considered as + candidates for dilation, regardless of what happened in the previous + iteration. False by default. Returns -------
make import simpler The minimum Python version is 3.9, so the importlib_metadata fallback is no longer needed.
@@ -12,7 +12,7 @@ import os import subprocess from typing import OrderedDict -from importlib import reload +from importlib import reload, metadata from django.apps import apps from django.conf import settings @@ -22,12 +22,6 @@ from django.urls import clear_url_caches from django.contrib import admin from django.utils.text import slugify -try: - from importlib import metadata -except: # pragma: no cover - import importlib_metadata as metadata - # TODO remove when python minimum is 3.8 - from maintenance_mode.core import maintenance_mode_on from maintenance_mode.core import get_maintenance_mode, set_maintenance_mode
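What the simplified import relies on: importlib.metadata is in the standard library from Python 3.8 onwards, so on a 3.9 floor the backport fallback is dead code. The package name below is only an example.

```python
from importlib import metadata

# Standard-library metadata lookup, available on Python 3.8+;
# no importlib_metadata backport is required.
print(metadata.version("pip"))   # e.g. "23.1.2"
```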
Replace RuntimeError in _lookup_task with deferred error. * Replace RuntimeError in _lookup_task with deferred error. This allows unknown tasks to be created (e.g., when parsing autotvm log files) but not invoked. * Format. * Update python/tvm/autotvm/task/task.py
@@ -40,11 +40,11 @@ from .space import ConfigSpace def _lookup_task(name): task = TASK_TABLE.get(name) if task is None: - raise RuntimeError( - f"Could not find a registered function for the task {name}. It is " - "possible that the function is registered in a python file which was " - "not imported in this run." - ) + # Unable to find the given task. This might be because we are + # creating a task based on a name that has not been imported. + # Rather than raising an exception here, we return a dummy + # task which cannot be invoked. + task = MissingTask(name) return task @@ -264,6 +264,25 @@ class TaskTemplate(object): return inputs +class MissingTask(TaskTemplate): + """ + Dummy task template for a task lookup which cannot be resolved. + This can occur if the task being requested from _lookup_task() + has not been imported in this run. + """ + + def __init__(self, taskname: str): + super().__init__() + self._taskname = taskname + + def __call__(self, *args, **kwargs): + raise RuntimeError( + f"Attempting to invoke a missing task {self._taskname}." + "It is possible that the function is registered in a " + "Python module that is not imported in this run, or the log is out-of-date." + ) + + def _register_task_compute(name, func=None): """Register compute function to autotvm task
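The deferred-error pattern from the diff above, stripped to its essentials: the lookup always succeeds and returns a placeholder, and the error is raised only if that placeholder is actually invoked. The registry contents here are made up.

```python
TASK_TABLE = {}                     # illustrative registry, normally filled by imports

class MissingTask:
    """Placeholder for a task name that was never registered."""
    def __init__(self, name):
        self.name = name
    def __call__(self, *args, **kwargs):
        raise RuntimeError(f"Attempting to invoke a missing task {self.name}.")

def _lookup_task(name):
    task = TASK_TABLE.get(name)
    if task is None:
        task = MissingTask(name)    # creation (e.g. while parsing logs) still works
    return task

task = _lookup_task("conv2d_not_imported")
print(task.name)                    # fine: the record exists
try:
    task()                          # error is deferred until invocation
except RuntimeError as exc:
    print(exc)
```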
Update settings.py 1) (see port-list). 2) Sample from 1) is in ```elf_doki```: .
@@ -75,7 +75,7 @@ HIGH_PRIORITY_REFERENCES = ("bambenekconsulting.com", "github.com/stamparm/black CONSONANTS = "bcdfghjklmnpqrstvwxyz" BAD_TRAIL_PREFIXES = ("127.", "192.168.", "localhost") LOCALHOST_IP = { 4: "127.0.0.1", 6: "::1" } -POTENTIAL_INFECTION_PORTS = (445, 1433) +POTENTIAL_INFECTION_PORTS = (445, 1433, 3389) IGNORE_DNS_QUERY_SUFFIXES = set(("arpa", "local", "guest", "intranet", "int", "corp", "home", "lan", "intra", "intran", "workgroup", "localdomain")) VALID_DNS_NAME_REGEX = r"\A[a-zA-Z0-9.-]*\.[a-zA-Z0-9-]+\Z" # Reference: http://stackoverflow.com/a/3523068 SUSPICIOUS_CONTENT_TYPES = ("application/vnd.ms-htmlhelp", "application/x-bsh", "application/x-chm", "application/x-sh", "application/x-shellscript", "application/hta", "text/x-scriptlet", "text/x-sh", "text/x-shellscript")
Fixup to gaphor/diagram/general/tests/test_simpleitem.py Changed Line into Box for respective test
"""Unit tests for simple items.""" -from gaphor.diagram.general.simpleitem import Ellipse, Line +from gaphor.diagram.general.simpleitem import Box, Ellipse, Line def test_line(case): @@ -10,7 +10,7 @@ def test_line(case): def test_box(case): """""" - case.diagram.create(Line) + case.diagram.create(Box) def test_ellipse(case):
Check base_doc, not is_deleted is_deleted() is a property function, not an attribute, so it isn't present in the JSON representation of a user. We have to check the value of base_doc to determine whether the user is deleted.
{ "expression": { "type": "property_name", - "property_name": "is_deleted", + "property_name": "base_doc", "datatype": null }, - "operator": "eq", - "property_value": false, + "operator": "not_eq", + "property_value": "CouchUser-Deleted", "type": "boolean_expression", "comment": null }
Fix: the --recompile-c-only option wasn't working without a C11 compiler. * This affected MSVC mainly and prevented this approach of debugging and trying things out during development.
@@ -1374,7 +1374,7 @@ def discoverSourceFiles(): # Scan for Nuitka created source files, and add them too. for filename in os.listdir(source_dir): # Only C files are of interest here. - if not filename.endswith(".c") or \ + if not filename.endswith((".c", "cpp")) or \ not filename.startswith(("module.", "__")): continue @@ -1386,9 +1386,11 @@ def discoverSourceFiles(): if c11_mode: result.append(filename) else: + if filename.endswith(".c"): target_file += "pp" # .cpp" suffix then os.rename(filename, target_file) + result.append(target_file) # Main program, unless of course it's a Python module/package we build.
TH: Updated Capacity from August 2022 For detailed breakdown see
], "capacity": { "battery storage": 0, - "biomass": 1006.95, + "biomass": 1040.79, "coal": 6067.5, - "gas": 28695, + "gas": 29358, "geothermal": 0.3, - "hydro": 7015.73, + "hydro": 7530.03, "hydro storage": 1000, "nuclear": 0, "oil": 1497.4,
Update README.md To reflect that the bot supports the multi-bot function
@@ -42,7 +42,9 @@ If this is your first time making a PR or aren't sure of the standard practice o ## Features - [x] Based on Python for botting on any operating system - Windows, macOS and Linux -- [x] Allow custom hash service provider [NEW] +- [x] Multi-bot supported +- [x] Able to edit bot if certain level has reached +- [x] Allow custom hash service provider, if any - [x] GPS Location configuration - [x] Search & spin Pokestops / Gyms - [x] Diverse options for humanlike behavior from movement to overall game play
llvm, debug: Store base parameters in alloca-ted location. This avoids relying on memory analysis to propagate constants.
@@ -220,6 +220,7 @@ class LLVMBuilderContext: if "const_params" in debug_env: const_params = params.type.pointee(composition._get_param_initializer(None)) + params = builder.alloca(const_params.type) builder.store(const_params, params) # Call input CIM
Add **kwargs back into mine function in cache runner This was removed during the deprecation removal process and shouldn't have been.
@@ -76,7 +76,7 @@ def pillar(tgt=None, tgt_type='glob'): return cached_pillar -def mine(tgt=None, tgt_type='glob'): +def mine(tgt=None, tgt_type='glob', **kwargs): ''' .. versionchanged:: 2017.7.0 The ``expr_form`` argument has been renamed to ``tgt_type``, earlier
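A small illustration (hypothetical callers and arguments) of why dropping `**kwargs` breaks callers that pass extra keyword arguments:

```python
def mine_strict(tgt=None, tgt_type='glob'):
    return (tgt, tgt_type)

def mine(tgt=None, tgt_type='glob', **kwargs):
    # Extra keywords are accepted and simply ignored.
    return (tgt, tgt_type)

print(mine('web*', saltenv='base'))        # works: ('web*', 'glob')
try:
    mine_strict('web*', saltenv='base')    # TypeError: unexpected keyword argument
except TypeError as exc:
    print(exc)
```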
Update conf.py Testing redirect for licensing and subscriptions.
@@ -91,7 +91,7 @@ redirects = { "https://docs.mattermost.com/about/security.html", "overview/integrations": "https://docs.mattermost.com/about/integrations.html", - "about/subscription": "https://docs.mattermost.com/about/licensing-and-subscription.html", + "about/subscription": "https://docs.mattermost.com/overview/license-and-subscription.html", "overview/license-and-subscription": "https://docs.mattermost.com/about/licensing-and-subscription.html", "overview/auth":
Typo fix Should read top-level, not to-level.
@@ -9,7 +9,7 @@ If you're confused by the many names, please check out `names` for clarification What follows is the API explanation, if you'd like a more hands-on introduction, have a look at `examples`. -As of version 21.3.0, ``attrs`` consists of **two** to-level package names: +As of version 21.3.0, ``attrs`` consists of **two** top-level package names: - The classic ``attr`` that powered the venerable `attr.s` and `attr.ib` - The modern ``attrs`` that only contains most modern APIs and relies on `attrs.define` and `attrs.field` to define your classes.
navbar: Reset searchbox text on calling ".exit_search()". This commit makes sure that we replace the text in the search box every time a user calls `exit_search()` eg via the escape hotkey or by clicking the `x` icon, so that the search box discards any input and always starts at the current narrow.
@@ -84,6 +84,10 @@ exports.exit_search = function () { if (!filter || filter.is_common_narrow()) { // for common narrows, we change the UI (and don't redirect) exports.close_search_bar_and_open_narrow_description(); + + // reset searchbox text + const search_string = narrow_state.search_string(); + $("#search_query").val(search_string); } else { // for "searching narrows", we redirect window.location.replace(filter.generate_redirect_url());
Fix type of indent in JSONEncoder A None value for indent means the most compact representation (no newlines), it is also the default value.
@@ -73,7 +73,7 @@ class JSONEncoder(object): check_circular = ... # type: bool allow_nan = ... # type: bool sort_keys = ... # type: bool - indent = ... # type: int + indent = ... # type: Optional[int] def __init__(self, skipkeys: bool = ..., @@ -81,7 +81,7 @@ class JSONEncoder(object): check_circular: bool = ..., allow_nan: bool = ..., sort_keys: bool = ..., - indent: int = ..., + indent: Optional[int] = ..., separators: Tuple[Union[Text, bytes], Union[Text, bytes]] = ..., encoding: Union[Text, bytes] = ..., default: Callable[..., Any] = ...) -> None: ...
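The behaviour behind the annotation change, shown with the standard json module: indent=None (the default) produces the compact single-line form, while an int pretty-prints.

```python
import json

data = {"a": 1, "b": [1, 2]}
print(json.dumps(data, indent=None))   # {"a": 1, "b": [1, 2]}  (compact, no newlines)
print(json.dumps(data, indent=2))      # multi-line output, 2-space indented
```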
Stop requiring accessKey/secret when updating an S3 assetstore This was missed in
@@ -217,9 +217,7 @@ class Assetstore(Resource): assetstore['shard'] = shard elif assetstore['type'] == AssetstoreType.S3: self.requireParams({ - 'bucket': bucket, - 'accessKeyId': accessKeyId, - 'secret': secret + 'bucket': bucket }) assetstore['bucket'] = bucket assetstore['prefix'] = prefix
fix: check KeyError Fix bug: avoid KeyError when a config field is missing
@@ -41,7 +41,7 @@ class SFTPArtifactRepository(ArtifactRepository): if 'hostname' in user_config: self.config['host'] = user_config['hostname'] - if self.config['username'] is None and 'username' in user_config: + if self.config.get('username', None) is None and 'username' in user_config: self.config['username'] = user_config['username'] if 'identityfile' in user_config:
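The difference between the two lookups in isolation; the config contents below are made up:

```python
config = {"host": "example.com"}        # no "username" key yet

# config["username"] would raise KeyError here; .get() returns the fallback instead.
print(config.get("username", None))     # None
if config.get("username", None) is None:
    config["username"] = "fallback-user"
print(config["username"])               # fallback-user
```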
Adds PoS websocket endpoint Receives all payments to a pos
@@ -3,7 +3,7 @@ import asyncio from loguru import logger from lnbits.core.models import Payment -from lnbits.core.services import create_invoice, pay_invoice +from lnbits.core.services import create_invoice, pay_invoice, websocketUpdater from lnbits.helpers import get_current_extension_name from lnbits.tasks import register_invoice_listener @@ -26,6 +26,16 @@ async def on_invoice_paid(payment: Payment) -> None: tpos = await get_tpos(payment.extra.get("tposId")) tipAmount = payment.extra.get("tipAmount") + strippedPayment = { + "amount":payment.amount, + "fee":payment.fee, + "checking_id":payment.checking_id, + "payment_hash":payment.payment_hash, + "bolt11":payment.bolt11, + } + + await websocketUpdater(payment.extra.get("tposId"), str(strippedPayment)) + if tipAmount is None: # no tip amount return
refactor: Add is_new_member property. Only the getter of the is_new_member property is added to the UserProfile class. This deduplicates the check for whether a user is a new member.
@@ -1091,6 +1091,13 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): def __str__(self) -> str: return "<UserProfile: %s %s>" % (self.email, self.realm) + @property + def is_new_member(self) -> bool: + diff = (timezone_now() - self.date_joined).days + if diff < self.realm.waiting_period_threshold: + return True + return False + @property def is_realm_admin(self) -> bool: return self.role == UserProfile.ROLE_REALM_ADMINISTRATOR @@ -1161,11 +1168,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): if self.realm.create_stream_policy == Realm.CREATE_STREAM_POLICY_MEMBERS: return True - - diff = (timezone_now() - self.date_joined).days - if diff >= self.realm.waiting_period_threshold: - return True - return False + return not self.is_new_member def can_subscribe_other_users(self) -> bool: if self.is_realm_admin: @@ -1179,10 +1182,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): return True assert self.realm.invite_to_stream_policy == Realm.INVITE_TO_STREAM_POLICY_WAITING_PERIOD - diff = (timezone_now() - self.date_joined).days - if diff >= self.realm.waiting_period_threshold: - return True - return False + return not self.is_new_member def can_access_public_streams(self) -> bool: return not (self.is_guest or self.realm.is_zephyr_mirror_realm)
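A stripped-down sketch of the deduplication (hypothetical class and threshold value), showing how the repeated date arithmetic collapses into one property that call sites read through:

```python
from datetime import datetime, timedelta, timezone

class Member:
    waiting_period_threshold = 7          # days; illustrative value

    def __init__(self, date_joined):
        self.date_joined = date_joined

    @property
    def is_new_member(self):
        return (datetime.now(timezone.utc) - self.date_joined).days < self.waiting_period_threshold

    def can_create_streams(self):
        # all call sites share the single implementation above
        return not self.is_new_member

m = Member(datetime.now(timezone.utc) - timedelta(days=3))
print(m.is_new_member, m.can_create_streams())   # True False
```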
Fix libClusterFuzz env vars. bot/env.yaml does not exist in libClusterFuzz. Just set the ones we need.
@@ -18,10 +18,12 @@ import os if not os.getenv('ROOT_DIR') and not os.getenv('GAE_ENV'): # If ROOT_DIR isn't set by the time we import this and we're not on GAE, # assume we're libClusterFuzz. + # Actual value does not matter, it just needs to be set. + os.environ['ROOT_DIR'] = '/tmp' + os.environ['LIB_CF'] = 'True' this_dir = os.path.dirname(os.path.abspath(__file__)) os.environ['CONFIG_DIR_OVERRIDE'] = os.path.join(this_dir, 'lib-config') - os.environ['ROOT_DIR'] = os.path.dirname(os.path.dirname(this_dir)) - os.environ['LIB_CF'] = 'True' - from ._internal.system import environment - environment.set_default_vars() + # Other necessary env vars. + os.environ['FAIL_RETRIES'] = '1' + os.environ['FAIL_WAIT'] = '10'
workloads/rt-app: fix Change "pull_file" to "pull" when extracting results. "pull_file" was a WA2 API which somehow got missed.
@@ -274,7 +274,7 @@ class RtApp(Workload): self.target.execute(tar_command, timeout=300) target_path = self.target.path.join(self.target_working_directory, TARBALL_FILENAME) host_path = os.path.join(context.output_directory, TARBALL_FILENAME) - self.target.pull_file(target_path, host_path, timeout=120) + self.target.pull(target_path, host_path, timeout=120) with tarfile.open(host_path, 'r:gz') as tf: tf.extractall(context.output_directory) os.remove(host_path)
Adapt release notes after 3.6.1 release fix syntax error in qualifiers
@@ -12,10 +12,13 @@ the proposed changes so you can be ready. ## Version 3.7 (as yet unreleased) +## [Version 3.6.1](https://pypi.python.org/pypi/pyfakefs/3.6.1) + ### Fixes * avoid rare side effect during module iteration in test setup (see [#338](../../issues/338)) - + * make sure real OS tests are not executed by default + (see [#495](../../issues/495)) ## [Version 3.6](https://pypi.python.org/pypi/pyfakefs/3.6)
LegendasTV: Don't discard provider when BadRarFile or BadZipFile Show an 'Invalid subtitle' warning instead
@@ -11,9 +11,9 @@ from dogpile.cache.api import NO_VALUE from guessit import guessit import pytz import rarfile -from rarfile import RarFile, is_rarfile +from rarfile import BadRarFile, RarFile, is_rarfile from requests import Session -from zipfile import ZipFile, is_zipfile +from zipfile import BadZipfile, ZipFile, is_zipfile from . import ParserBeautifulSoup, Provider from .. import __short_version__ @@ -449,4 +449,7 @@ class LegendasTVProvider(Provider): self.download_archive(subtitle.archive) # extract subtitle's content + try: subtitle.content = fix_line_ending(subtitle.archive.content.read(subtitle.name)) + except (BadRarFile, BadZipfile): + logger.error('Bad archive for %s', subtitle.name)
use kubeflow/pipelines branch for deployment in test /assign
@@ -27,21 +27,13 @@ tar -xzf ks_${KS_VERSION}_linux_amd64.tar.gz chmod +x ./ks_${KS_VERSION}_linux_amd64/ks mv ./ks_${KS_VERSION}_linux_amd64/ks /usr/local/bin/ -# Download kubeflow master -KUBEFLOW_MASTER=${DIR}/kubeflow_master -git clone https://github.com/kubeflow/kubeflow.git ${KUBEFLOW_MASTER} - ## Download latest kubeflow release source code KUBEFLOW_SRC=${DIR}/kubeflow_latest_release mkdir ${KUBEFLOW_SRC} cd ${KUBEFLOW_SRC} -export KUBEFLOW_TAG=v0.5.0-rc.1 +export KUBEFLOW_TAG=pipelines curl https://raw.githubusercontent.com/kubeflow/kubeflow/${KUBEFLOW_TAG}/scripts/download.sh | bash -## Override the pipeline config with code from master -cp -r ${KUBEFLOW_MASTER}/kubeflow/pipeline ${KUBEFLOW_SRC}/kubeflow/pipeline -cp -r ${KUBEFLOW_MASTER}/kubeflow/argo ${KUBEFLOW_SRC}/kubeflow/argo - export CLIENT_ID=${RANDOM} export CLIENT_SECRET=${RANDOM} KFAPP=${TEST_CLUSTER}
langkit_support/diagnostics_output: fix compilation warning TN:
@@ -24,7 +24,7 @@ procedure Main is return To_Text (Self.Lines (Line_Number)); end; - B : Simple_Buffer := + B : constant Simple_Buffer := (Size => 1, Lines => (1 => To_Unbounded_Text ("A simple line")));
don't overspecify required python version Summary: Pull Request resolved: We don't care which python version, and github actions has changed the versions available, breaking our CI. So just pin it to 3-something to make it more future proof Test Plan: Imported from OSS
@@ -13,7 +13,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.7.4 + python-version: 3.x architecture: x64 - name: Checkout PyTorch uses: actions/checkout@master @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.7.4 + python-version: 3.x architecture: x64 - name: Checkout PyTorch uses: actions/checkout@master @@ -69,7 +69,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.7.4 + python-version: 3.x architecture: x64 - name: Checkout PyTorch uses: actions/checkout@master @@ -84,7 +84,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.7.4 + python-version: 3.x architecture: x64 - name: Checkout PyTorch uses: actions/checkout@master @@ -99,7 +99,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.7.4 + python-version: 3.x architecture: x64 - name: Fetch PyTorch uses: actions/checkout@master @@ -174,7 +174,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v1 with: - python-version: 3.7.4 + python-version: 3.x architecture: x64 - name: Checkout PyTorch uses: actions/checkout@master
Update requirements.txt Remove unnecessary requirements for docs
Sphinx >= 1.7.5 sphinx_rtd_theme >= 0.4.0 +tensorflow==1.4.0 keras >= 2.0.5 jieba >= 0.39 many_stop_words >= 0.2.2 @@ -7,17 +8,7 @@ nltk >= 3.2.3 numpy >= 1.12.1 six >= 1.10.0 h5py >= 2.7.0 -tqdm >= 4.19.4 -coverage >= 4.3.4 -codecov >= 2.0.15 -pytest >= 3.6.0 -pytest-cov >= 2.4.0 -mock >= 2.0.0 -flake8 >= 3.2.1 -flake8_docstrings >= 1.0.2 dill >= 0.2.7.1 hyperopt == 0.1 -networkx == 1.11 pandas >= 0.23.1 -tensorflow==1.4.0 -sphinx_autodoc_typehints>=1.3.0 +sphinx_autodoc_typehints==1.3.0
dash: don't use the same init file for every video. fixes:
@@ -92,7 +92,7 @@ def parsesegments(content, url): bitrate = int(i.attrib["bandwidth"]) if vinit is None: init = i.find("{urn:mpeg:dash:schema:mpd:2011}SegmentTemplate").attrib["initialization"] - vinit = init.replace("$RepresentationID$", id) + vidinit = init.replace("$RepresentationID$", id) if media is None: scheme = i.find("{urn:mpeg:dash:schema:mpd:2011}SegmentTemplate").attrib["media"] if "startNumber" in content[0].findall(".//{urn:mpeg:dash:schema:mpd:2011}SegmentTemplate")[0].attrib: @@ -100,7 +100,7 @@ def parsesegments(content, url): else: start = 1 dirname = os.path.dirname(url) + "/" - segments.append(urljoin(dirname, vinit)) + segments.append(urljoin(dirname, vidinit)) name = scheme.replace("$RepresentationID$", id) if "$Number" in name: match = re.search("\$Number(\%\d+)d\$", name)
Regex match IB NICs Recent changes to the ip show regex made IB devices no longer show up in node.nics.
@@ -70,11 +70,17 @@ class Nics(InitializableMixin): 4: enP13530s1: <BROADCAST,MULTICAST,SLAVE,UP,LOWER_UP> mtu 1500 ... qdisc mq master eth0 state UP group default qlen 1000 link/ether 00:22:48:79:6c:c2 brd ff:ff:ff:ff:ff:ff + 6: ib0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 2044 qdisc mq state UP ... + link/infiniband 00:00:09:27:fe:80:00:00:00:00:00:00:00:15:5d:... + inet 172.16.1.118/16 brd 172.16.255.255 scope global ib0 + valid_lft forever preferred_lft forever + inet6 fe80::215:5dff:fd33:ff7f/64 scope link + valid_lft forever preferred_lft forever """ __ip_addr_show_regex = re.compile( ( r"\d+: (?P<name>\w+): \<.+\> .+\n\s+" - r"link\/ether (?P<mac>[0-9a-z:]+) .+\n?" + r"link\/(?:ether|infiniband) (?P<mac>[0-9a-z:]+) .+\n?" r"(?:\s+inet (?P<ip_addr>[\d.]+)\/.*\n)?" ) )
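A reduced version of the pattern change, run against two sample lines adapted from the docstring above; matching 'link/ether' alone would miss the InfiniBand device.

```python
import re

sample = """\
2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500
    link/ether 00:22:48:79:6c:c2 brd ff:ff:ff:ff:ff:ff
6: ib0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 2044
    link/infiniband 00:00:09:27:fe:80:00:00:00:00:00:00:00:15:5d brd ...
"""

pattern = re.compile(r"link/(?:ether|infiniband) (?P<mac>[0-9a-f:]+)")
print([m.group("mac") for m in pattern.finditer(sample)])
# ['00:22:48:79:6c:c2', '00:00:09:27:fe:80:00:00:00:00:00:00:00:15:5d']
```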
Add "Trailer Addict" api added Trailer Addict
@@ -1661,6 +1661,7 @@ API | Description | Auth | HTTPS | CORS | | [The Vampire Diaries](https://vampire-diaries-api.netlify.app/) | TV Show Data | `apiKey` | Yes | Yes | | [ThronesApi](https://thronesapi.com/) | Game Of Thrones Characters Data with imagery | No | Yes | Unknown | | [TMDb](https://www.themoviedb.org/documentation/api) | Community-based movie data | `apiKey` | Yes | Unknown | +| [TrailerAddict](https://www.traileraddict.com/trailerapi) | Easily embed trailers from TrailerAddict | `apiKey` | No | Unknown | | [Trakt](https://trakt.tv/b/api-docs) | Movie and TV Data | `apiKey` | Yes | Yes | | [TVDB](https://api.thetvdb.com/swagger) | Television data | `apiKey` | Yes | Unknown | | [TVMaze](http://www.tvmaze.com/api) | TV Show Data | No | No | Unknown |
test: Corrected selectors and shorten them (Using the added testing-library)
@@ -13,8 +13,8 @@ context('Dashboard links', () => { //Adding a new contact cy.get('.btn[data-doctype="Contact"]').click(); - cy.get('.has-error > .form-group > .control-input-wrapper > .control-input > .input-with-feedback').type('Admin'); - cy.get('#page-Contact > .page-head > .container > .row > .col > .standard-actions > .primary-action').click(); + cy.get('[data-doctype="Contact"][data-fieldname="first_name"]').type('Admin'); + cy.findByRole('button', {name: 'Save'}).click(); cy.visit('/app/user'); cy.get('.list-row-col > .level-item > .ellipsis').eq(0).click(); @@ -25,14 +25,14 @@ context('Dashboard links', () => { //Deleting the newly created contact cy.visit('/app/contact'); cy.get('.list-subject > .select-like > .list-row-checkbox').eq(0).click(); - cy.get('.actions-btn-group > .btn').contains('Actions').click(); - cy.get('.actions-btn-group > .dropdown-menu > li > .grey-link').eq(5).click(); - cy.get('.modal-footer > .standard-actions > .btn-primary').contains('Yes').click({delay: 700}); + cy.findByRole('button', {name: 'Actions'}).click(); + cy.get('.actions-btn-group [data-label="Delete"]').click(); + cy.findByRole('button', {name: 'Yes'}).click({delay: 700}); //To check if the counter from the "Contact" doc link is removed - cy.visit('/app/user'); cy.wait(700); + cy.visit('/app/user'); cy.get('.list-row-col > .level-item > .ellipsis').eq(0).click(); cy.get('[data-doctype="Contact"]').should('contain', 'Contact'); });
Update README.md Removed the extra |
@@ -476,7 +476,7 @@ API | Description | Auth | HTTPS | Link | | Gfycat | Jiffier GIFs | `OAuth` | Yes | [Go!](https://developers.gfycat.com/api/) | | Giphy | Get all your gifs | No | Yes | [Go!](https://github.com/Giphy/GiphyAPI) | | Imgur | Images | `OAuth` | Yes | [Go!](https://apidocs.imgur.com/) | -| PiXhost | Upload images, photos, galleries | | No | Yes | [Go!](https://pixhost.org/api/index.html) +| PiXhost | Upload images, photos, galleries | No | Yes | [Go!](https://pixhost.org/api/index.html) | PlaceKitten | Resizable kitten placeholder images | No | Yes | [Go!](https://placekitten.com/) | | ScreenShotLayer | URL 2 Image | No | Yes | [Go!](https://screenshotlayer.com) | | Unsplash | Photography | `OAuth` | Yes | [Go!](https://unsplash.com/developers) |
Fix divide by zero exception again. encode_times_a is an array, which can't be compared to 0.
@@ -303,20 +303,20 @@ def bdrate(file1, file2, anchorfile, fullrange): # handle encode time and decode time separately encode_times_a = a[:,3+met_index['Encoding Time']]; encode_times_b = b[:,3+met_index['Encoding Time']]; - if encode_times_a != 0.0: + try: # compute a percent change for each qp encode_times = (encode_times_b - encode_times_a) / encode_times_a # average the percent changes together ret[met_index['Encoding Time']] = encode_times.mean() * 100.0 - else: + except ZeroDivisionError: ret[met_index['Encoding Time']] = NaN decode_times_a = a[:,3+met_index['Decoding Time']]; decode_times_b = b[:,3+met_index['Decoding Time']]; - if decode_times_a != 0.0: + try: decode_times = (decode_times_b - decode_times_a) / decode_times_a ret[met_index['Decoding Time']] = decode_times.mean() * 100.0 - else: - ret[met_index['Decoding Time']] = NaN + except ZeroDivisionError: + ret[met_index['Decoding Time']] return ret metric_data = {}
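The failure mode the commit message describes, in isolation: comparing a multi-element NumPy array to a scalar yields an element-wise boolean array, so using that result in an `if` raises. Values below are illustrative.

```python
import numpy as np

encode_times_a = np.array([0.0, 1.5, 2.0])

try:
    if encode_times_a != 0.0:       # element-wise comparison -> ambiguous truth value
        pass
except ValueError as exc:
    print(exc)                      # "The truth value of an array ... is ambiguous"
```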
Switch from -dev to released 3.11 version As in title, switching Python 3.11 in CI from beta to full release
@@ -28,7 +28,7 @@ jobs: strategy: matrix: os: [Ubuntu, macOS, Windows] - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11-dev"] + python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"] include: - os: Ubuntu image: ubuntu-22.04 @@ -83,11 +83,6 @@ jobs: # Using `timeout` is a safeguard against the Poetry command hanging for some reason. timeout 10s poetry run pip --version || rm -rf .venv - # XXX: https://github.com/pypa/pip/issues/11352 causes random failures -- remove once fixed in a release. - - name: Upgrade pip on Python 3.11 - if: ${{ matrix.python-version == '3.11-dev' }} - run: poetry run pip install git+https://github.com/pypa/pip.git@f8a25921e5c443b07483017b0ffdeb08b9ba2fdf - - name: Install dependencies run: poetry install --with github-actions
Update dataset.py Added square and curly brace tags to dimensionize naming rules.
@@ -4762,7 +4762,10 @@ class DataSet(object): """ def fix(string): - tags = ["'", '"', ' ', '&', '(', ')', '.', '/', '-'] + tags = [ + "'", '"', ' ', '&', '.', '/', '-', + '(', ')', '[', ']', '{', '}' + ] for tag in tags: string = string.replace(tag, '_') return string
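How the extended tag list behaves on a sample label; the helper body is copied from the diff and the label text is made up.

```python
def fix(string):
    tags = [
        "'", '"', ' ', '&', '.', '/', '-',
        '(', ')', '[', ']', '{', '}'
    ]
    for tag in tags:
        string = string.replace(tag, '_')
    return string

print(fix("Q1 (net) [top-2]"))   # Q1__net___top_2_
```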
rbd-mirror: bring back compatibility with jewel deployment rbd-mirror can't start when deploying jewel because it needs the admin keyring. Bringing back this task restores backward compatibility for jewel deployments.
tags: - package-install +- name: copy ceph admin key + copy: + src: "{{ fetch_directory }}/{{ fsid }}/etc/ceph/{{ cluster }}.client.admin.keyring" + dest: "/etc/ceph/{{ cluster }}.client.admin.keyring" + owner: "{{ ceph_uid }}" + group: "{{ ceph_uid }}" + mode: "0600" + when: + - cephx + - ceph_release_num[ceph_release] < ceph_release_num.luminous + - name: create rbd-mirror keyring command: ceph --cluster {{ cluster }} --name client.bootstrap-rbd --keyring /var/lib/ceph/bootstrap-rbd/{{ cluster }}.keyring auth get-or-create client.rbd-mirror.{{ ansible_hostname }} mon 'profile rbd' osd 'profile rbd' -o /etc/ceph/{{ cluster }}.client.rbd-mirror.{{ ansible_hostname }}.keyring args:
Typo in the conda install command conda install flag should be -c instead of -f to choose a channel
@@ -45,7 +45,7 @@ Installation The easiest way to install *cfgrib* and all its binary dependencies is via `Conda <https://conda.io/>`_:: - $ conda install -f conda-forge cfgrib + $ conda install -c conda-forge cfgrib alternatively, if you install the binary dependencies yourself, you can install the Python package from *PyPI* with::
Add test case using on_conflict with BinaryJSONField. Refs
@@ -843,6 +843,18 @@ class TestBinaryJsonField(BaseJsonFieldTestCase, ModelTestCase): (5, 7), ('k4', None)]) + def test_conflict_update(self): + b1 = BJson.create(data={'k1': 'v1'}) + iq = (BJson + .insert(id=b1.id, data={'k1': 'v1-x'}) + .on_conflict('update', conflict_target=[BJson.id], + update={BJson.data: {'k1': 'v1-z'}})) + b1_id_db = iq.execute() + self.assertEqual(b1.id, b1_id_db) + + b1_db = BJson.get(BJson.id == b1.id) + self.assertEqual(BJson.data, {'k1': 'v1-z'}) + class TestIntervalField(ModelTestCase): database = db
Allow kwargs in nova_volume_attach As a part of the scenario/manager.py stabilization tracked by the below BP the patch adds kwargs argument for nova_volume_attach method so that the consumers are able to pass additional parameters if needed. Implements: blueprint tempest-scenario-manager-stable
@@ -697,17 +697,20 @@ class ScenarioTest(tempest.test.BaseTestCase): image_name, server['name']) return snapshot_image - def nova_volume_attach(self, server, volume_to_attach): + def nova_volume_attach(self, server, volume_to_attach, **kwargs): """Compute volume attach This utility attaches volume from compute and waits for the volume status to be 'in-use' state. """ volume = self.servers_client.attach_volume( - server['id'], volumeId=volume_to_attach['id'])['volumeAttachment'] + server['id'], volumeId=volume_to_attach['id'], + **kwargs)['volumeAttachment'] self.assertEqual(volume_to_attach['id'], volume['id']) waiters.wait_for_volume_resource_status(self.volumes_client, volume['id'], 'in-use') + self.addCleanup(test_utils.call_and_ignore_notfound_exc, + self.nova_volume_detach, server, volume) # Return the updated volume after the attachment return self.volumes_client.show_volume(volume['id'])['volume']
langkit.diagnostics.source_listing: avoid confusing var name reuse For GitLab issue
@@ -591,12 +591,12 @@ def source_listing(highlight_sloc: Location, lines_after: int = 0) -> str: append_line("", col(caret_line, Colors.RED + Colors.BOLD)) # Append following lines up to ``lines_after`` lines - for line_nb, line in enumerate( + for cur_line_nb, cur_line in enumerate( source_buffer[line_nb + 1: min(line_nb + lines_after + 1, len(source_buffer))], line_nb + 1 ): - append_line(line_nb, line) + append_line(cur_line_nb, cur_line) return "".join(ret)
Importing: Add support importing from egg files directly * Adding a ".egg" file on the PYTHONPATH is allowed, but wasn't working with Nuitka yet. * Transparently unpacks the .egg file for use there.
@@ -37,9 +37,11 @@ the ``os`` module like it's done in ``isStandardLibraryPath`` of this module. from __future__ import print_function +import hashlib import imp import os import sys +import zipfile from logging import warning from nuitka import Options @@ -47,6 +49,7 @@ from nuitka.containers.oset import OrderedSet from nuitka.importing import StandardLibrary from nuitka.plugins.Plugins import Plugins from nuitka.PythonVersions import python_version +from nuitka.utils.AppDirs import getCacheDir from nuitka.utils.FileOperations import listDir from .PreloadedPackages import getPreloadedPackagePath, isPreloadedPackagePath @@ -395,14 +398,39 @@ def _findModuleInPath2(module_name, search_path): # Nothing found. raise ImportError +_egg_files = {} + +def _unpackPathElement(path_entry): + if not path_entry: + return "." # empty means current directory + + if os.path.isfile(path_entry) and path_entry.lower().endswith(".egg"): + if path_entry not in _egg_files: + checksum = hashlib.md5(open(path_entry, "rb").read()).hexdigest() + + target_dir = os.path.join( + getCacheDir(), + "egg-content", + checksum + ) + + zip_ref = zipfile.ZipFile(path_entry, 'r') + zip_ref.extractall(target_dir) + zip_ref.close() + + _egg_files[path_entry] = target_dir + + return _egg_files[path_entry] + + return path_entry def getPackageSearchPath(package_name): assert main_path is not None if package_name is None: return [os.getcwd(), main_path] + [ - element or '.' # empty means current directory - for element in + _unpackPathElement(path_element) + for path_element in sys.path ] elif '.' in package_name:
Change quotes to make link work I am not sure why this needs to happen, but using double quotes within single quotes screws up the link-parsing in the browser.
@@ -12,7 +12,7 @@ def total_spending(request, format=None): spending_type = utils.get_spending_type(codes) if spending_type is False: - err = 'Error: BNF Codes must all be the same length if written in the same search box. For example, you cannot search for Cerazette_Tab 75mcg (0703021Q0BBAAAA) and Cerelle (0703021Q0BD), but you could search for Cerazette (0703021Q0BB) and Cerelle (0703021Q0BD). If you need this data, please <a href="mailto:{{ SUPPORT_EMAIL }}" class="doorbell-show">get in touch</a> and we may be able to extract it for you' + err = "Error: BNF Codes must all be the same length if written in the same search box. For example, you cannot search for Cerazette_Tab 75mcg (0703021Q0BBAAAA) and Cerelle (0703021Q0BD), but you could search for Cerazette (0703021Q0BB) and Cerelle (0703021Q0BD). If you need this data, please <a href='mailto:[email protected]' class='doorbell-show'>get in touch</a> and we may be able to extract it for you" return Response(err, status=400) query = _get_query_for_total_spending(codes) @@ -33,7 +33,7 @@ def spending_by_ccg(request, format=None): spending_type = utils.get_spending_type(codes) if spending_type is False: - err = 'Error: BNF Codes must all be the same length if written in the same search box. For example, you cannot search for Cerazette_Tab 75mcg (0703021Q0BBAAAA) and Cerelle (0703021Q0BD), but you could search for Cerazette (0703021Q0BB) and Cerelle (0703021Q0BD). If you need this data, please <a href="mailto:{{ SUPPORT_EMAIL }}" class="doorbell-show">get in touch</a> and we may be able to extract it for you' + err = "Error: BNF Codes must all be the same length if written in the same search box. For example, you cannot search for Cerazette_Tab 75mcg (0703021Q0BBAAAA) and Cerelle (0703021Q0BD), but you could search for Cerazette (0703021Q0BB) and Cerelle (0703021Q0BD). If you need this data, please <a href='mailto:[email protected]' class='doorbell-show'>get in touch</a> and we may be able to extract it for you" return Response(err, status=400) if not spending_type or spending_type == 'bnf-section' \
Removes basis debug printing and deprecation warnings. Removes basis-name printing (a debug statement) and removes the deprecation warnings that were applied to the string-basis versions of build_gateset, build_gate, build_vector, etc.
@@ -14,7 +14,6 @@ import scipy.linalg as _spl from ..tools import gatetools as _gt from ..tools import basis as _basis -from ..tools import deprecated_fn as _deprecated_fn from ..objects import gate as _gate from ..objects import gateset as _gateset from ..objects import gaugegroup as _gg @@ -69,7 +68,6 @@ def basis_build_vector(vecExpr, basis): return _basis.change_basis(vecInReducedStdBasis, 'std', basis) -@_deprecated_fn(replacement='basis_build_vector w/ Basis object') def build_vector(stateSpaceDims, stateSpaceLabels, vecExpr, basis="gm"): return basis_build_vector(vecExpr, _basis.Basis(basis, stateSpaceDims)) @@ -102,7 +100,6 @@ def basis_build_identity_vec(basis): start += blockDim return _basis.change_basis(vecInReducedStdBasis, "std", basis) -@_deprecated_fn(replacement='basis_identity_vec w/ Basis object') def build_identity_vec(stateSpaceDims, basis="gm"): return basis_build_identity_vec(_basis.Basis(basis, stateSpaceDims)) @@ -414,8 +411,6 @@ def basis_build_gate(stateSpaceLabels, gateExpr, basis="gm", parameterization="f # clevel qubit ops: Leak # two clevel opts: Flip # each of which is given additional parameters specifying which indices it acts upon - print(basis) - dmDim, gateDim, blockDims = basis.dim #fullOpDim = dmDim**2 #Store each tensor product blocks start index (within the density matrix), which tensor product block @@ -815,7 +810,6 @@ def basis_build_gate(stateSpaceLabels, gateExpr, basis="gm", parameterization="f return gateInFinalBasis # a Gate object -@_deprecated_fn(replacement='basis_build_gate w/ Basis object') def build_gate(stateSpaceDims, stateSpaceLabels, gateExpr, basis="gm", parameterization="full", unitaryEmbedding=False): return basis_build_gate(stateSpaceLabels, gateExpr, _basis.Basis(basis, stateSpaceDims), parameterization, unitaryEmbedding) @@ -938,7 +932,6 @@ def basis_build_gateset(stateSpaceLabels, return ret -@_deprecated_fn(replacement='basis_build_gateset w/ Basis object') def build_gateset(stateSpaceDims, stateSpaceLabels, gateLabels, gateExpressions, prepLabels, prepExpressions,
Update README.md tiny clean up
- [Loader User Guide](./tools/loader.md) - [Performance Tuning](op-guide/tune-TiKV.md) - [Reading Data from History Versions](op-guide/history-read.md) - - [Troubleshooting](./trouble-shooting.md) ++ [Troubleshooting](./trouble-shooting.md) + More Resources - [Frequently Used Tools](https://github.com/pingcap/tidb-tools) - [PingCAP Blog](https://pingcap.github.io/blog/)
Doc: Link to Windows PyGTK packages built for Inkscape 0.92.4 The binaries shipped with this PyGTK packages now exactly match the version used for building Inkscape 0.92.4 on Windows.
@@ -55,6 +55,8 @@ Install the Python bindings for the graphical user interface of Install PyGTK2 (recommended) ---------------------------- +.. _inkscape-0.92.4-64-bit: https://github.com/textext/pygtk-for-inkscape-windows/releases/download/0.92.4/Install-PyGTK-2.24-Inkscape-0.92.4-64bit.exe +.. _inkscape-0.92.4-32-bit: https://github.com/textext/pygtk-for-inkscape-windows/releases/download/0.92.4/Install-PyGTK-2.24-Inkscape-0.92.4-32bit.exe .. _inkscape-0.92.3-64-bit: https://github.com/textext/pygtk-for-inkscape-windows/releases/download/0.92.3/Install-PyGTK-2.24-Inkscape-0.92.3-64bit.exe .. _inkscape-0.92.3-32-bit: https://github.com/textext/pygtk-for-inkscape-windows/releases/download/0.92.3/Install-PyGTK-2.24-Inkscape-0.92.3-32bit.exe .. _inkscape-0.92.2-64-bit: https://github.com/textext/pygtk-for-inkscape-windows/releases/download/0.92.2/Install-PyGTK-2.24-Inkscape-0.92.2-64bit.exe @@ -63,7 +65,8 @@ Install PyGTK2 (recommended) Install the package that matches your Inkscape version: - - Inkscape 0.92.3 + 0.92.4 (`32-bit <inkscape-0.92.3-32-bit_>`_ , `64-bit <inkscape-0.92.3-64-bit_>`_) + - Inkscape 0.92.4 (`32-bit <inkscape-0.92.4-32-bit_>`_ , `64-bit <inkscape-0.92.4-64-bit_>`_) + - Inkscape 0.92.3 (`32-bit <inkscape-0.92.3-32-bit_>`_ , `64-bit <inkscape-0.92.3-64-bit_>`_) - Inkscape 0.92.2 (`32-bit <inkscape-0.92.2-32-bit_>`_ , `64-bit <inkscape-0.92.2-64-bit_>`_) - Inkscape 0.92.0 - 0.92.1 (`32-bit and 64-bit <inkscape-0.92.0-0.92.1-multi_>`_)
Random seed: use float instead of datetime object. Originally, passing a datetime object implicitly used its hash value, but that is deprecated from Python 3.9 and raises a pytype error.
"""Common utilities for testing various runners.""" import contextlib -import datetime import os import random import string @@ -53,11 +52,10 @@ def random_id() -> str: Returns: A random string valid for Kubernetes DNS name. """ - random.seed(datetime.datetime.now()) - - choices = string.ascii_lowercase + string.digits - return '{}-{}'.format(datetime.datetime.now().strftime('%s'), - ''.join([random.choice(choices) for _ in range(10)])) + now = time.time() + random.seed(now) + alpha = ''.join(random.choices(string.ascii_lowercase + string.digits, k=10)) + return f'{int(now)}-{alpha}' # Set longer timeout when pushing an image. Default timeout is 60 seconds.
fix docs build Fix a typo in SRP's docs that makes the documentation build fail.
@@ -48,7 +48,7 @@ class SRPClassifier(base.WrapperMixin, base.EnsembleMixin, base.Classifier): Drift detector. warning_detector Warning detector. - disable_detector: + disable_detector Option to disable drift detectors:<br/> * If `'off'`, detectors are enabled.<br/> * If `'drift'`, disables concept drift detection and the background learner.<br/>
Encode path only for old versions of hfh
@@ -2,7 +2,11 @@ from typing import Optional from urllib.parse import quote import huggingface_hub as hfh +from packaging import version def hf_hub_url(repo_id: str, path: str, revision: Optional[str] = None) -> str: - return hfh.hf_hub_url(repo_id, quote(path), repo_type="dataset", revision=revision) + if version.parse(hfh.__version__) < version.parse("0.11.0"): + # old versions of hfh don't url-encode the file path + path = quote(path) + return hfh.hf_hub_url(repo_id, path, repo_type="dataset", revision=revision)
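The version gate on its own, assuming huggingface_hub and packaging are installed; quoting only kicks in for releases older than 0.11.0. The path is made up.

```python
from urllib.parse import quote
from packaging import version

import huggingface_hub as hfh

path = "data/train 00000.parquet"      # hypothetical path containing a space
if version.parse(hfh.__version__) < version.parse("0.11.0"):
    path = quote(path)                 # "data/train%2000000.parquet" on old hfh
print(hfh.__version__, path)
```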
Skips `unit.modules.test_groupadd` on Windows There is a test_win_groupadd module for testing the win_groupadd module on Windows.
# Import Python libs from __future__ import absolute_import +try: import grp +except ImportError: + pass # Import Salt Testing Libs from tests.support.mixins import LoaderModuleMockMixin @@ -13,10 +16,12 @@ from tests.support.unit import TestCase, skipIf from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON # Import Salt Libs +import salt.utils import salt.modules.groupadd as groupadd @skipIf(NO_MOCK, NO_MOCK_REASON) +@skipIf(salt.utils.is_windows(), "Module not available on Windows") class GroupAddTestCase(TestCase, LoaderModuleMockMixin): ''' TestCase for salt.modules.groupadd
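The same guard pattern in a generic, standard-library form (the real test uses Salt's own helpers): the POSIX-only grp import is wrapped, and the whole case is skipped on Windows.

```python
import sys
import unittest

try:
    import grp                      # POSIX-only; absent on Windows
except ImportError:
    grp = None

@unittest.skipIf(sys.platform.startswith("win"), "Module not available on Windows")
class GroupAddTestCase(unittest.TestCase):
    def test_grp_available(self):
        self.assertIsNotNone(grp)

if __name__ == "__main__":
    unittest.main()
```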
fix: Better metatags fallback Set metatag title from context.title
@@ -225,33 +225,44 @@ def add_sidebar_data(context): def add_metatags(context): tags = frappe._dict(context.get("metatags") or {}) - if tags: if "og:type" not in tags: tags["og:type"] = "article" - name = tags.get('name') or tags.get('title') - if name: - tags["og:title"] = tags["twitter:title"] = name + if "title" not in tags and context.title: + tags["title"] = context.title - description = tags.get("description") or context.description + title = tags.get("name") or tags.get("title") + if title: + tags["og:title"] = tags["twitter:title"] = title + + if "description" not in tags and context.description: + tags["description"] = context.description + + description = tags.get("description") if description: - tags['description'] = tags["og:description"] = tags["twitter:description"] = description + tags["og:description"] = tags["twitter:description"] = description + + if "image" not in tags and context.image: + tags["image"] = context.image - image = tags.get('image', context.image or None) + image = tags.get("image") if image: tags["og:image"] = tags["twitter:image"] = tags["image"] = frappe.utils.get_url(image) tags['twitter:card'] = "summary_large_image" - if context.author or tags.get('author'): - tags['author'] = context.author or tags.get('author') + if "author" not in tags and context.author: + tags["author"] = context.author - tags['og:url'] = tags['url'] = frappe.utils.get_url(context.path) + tags["og:url"] = tags["url"] = frappe.utils.get_url(context.path) - if context.published_on: - tags['datePublished'] = context.published_on + if "published_on" not in tags and context.published_on: + tags["published_on"] = context.published_on + if "published_on" in tags: + tags["datePublished"] = tags["published_on"] + del tags["published_on"] - tags['language'] = frappe.local.lang or 'en' + tags["language"] = frappe.local.lang or "en" # Get meta tags from Website Route meta # they can override the defaults set above
Update pgc-rema.yaml add asdi tag
@@ -4,6 +4,10 @@ Documentation: https://www.pgc.umn.edu/data/rema/ Contact: [email protected] ManagedBy: "[Polar Geospatial Center](http://www.pgc.umn.edu/)" UpdateFrequency: New DEM strips are added twice yearly. Mosaic products are added as soon as they are available. +Collabs: + ASDI: + Tags: + - satelitte imagery Tags: - aws-pds - elevation
Add NonConvex option. Since the flow in the diesel genset represents a combination of NonConvex and Investment options, both the NonConvex and Investment options have to be defined in the flow.
@@ -145,6 +145,7 @@ diesel_genset = solph.components.Transformer( ep_costs=epc_diesel_genset * n_days / n_days_in_year, maximum=2 * peak_demand, ), + nonconvex=solph.NonConvex(), ) }, conversion_factors={b_el_ac: 0.33},
config.central: CompatConfigManager: fix recursion error When running under a thread/process pool.
@@ -267,6 +267,8 @@ class CompatConfigManager: self._manager = manager def __getattr__(self, attr): + if attr == '_manager': + return object.__getattribute__(self, '_manager') obj = getattr(self._manager, attr, _singleton) if obj is _singleton: obj = getattr(self._manager.objects, attr)
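Why the guard is needed: `__getattr__` is only called for missing attributes, so if `_manager` itself was never set (for example on an instance rebuilt in a worker process), looking it up re-enters `__getattr__` forever. A minimal reproduction, unrelated to the real config manager:

```python
class Broken:
    def __getattr__(self, attr):
        return getattr(self._manager, attr)   # _manager missing -> recurses forever

class Fixed:
    def __getattr__(self, attr):
        if attr == '_manager':
            # bypass __getattr__ so a missing _manager raises AttributeError once
            return object.__getattribute__(self, '_manager')
        return getattr(self._manager, attr)

try:
    Broken().anything
except RecursionError:
    print("Broken: RecursionError")

try:
    Fixed().anything
except AttributeError as exc:
    print("Fixed:", exc)                      # a single, ordinary AttributeError
```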
Removed an unidentified location, ref in desc. Location identified in commit
@@ -7,14 +7,6 @@ Police officers are seen pushing around and even driving into crowds of people. * https://twitter.com/perfectlyg0lden/status/1267014293628870656 -### Police shove a person to the ground and put a weapon into their hand | (Believed) May 31st - -Police shove a person to the ground and put a weapon into their hand. The person is already on the ground, and three officers continue to restrain them. - -**Links** - -* [https://twitter.com/BrutumF/status/1267575655509577728](https://twitter.com/BrutumF/status/1267575655509577728) - ### Officer grabs bystander by the neck | June 1st Person on the sidewalk appears to say something to an officer on a bike. The officer grabs him by the neck and forces him to the ground.
speedometer: fix methods which declare attributes We need to make those attributes class-attributes, to make sure they are still defined in subsequent jobs. We still access them through 'self', however.
@@ -128,7 +128,7 @@ class Speedometer(Workload): @once def initialize(self, context): super(Speedometer, self).initialize(context) - self.archive_server = ArchiveServer() + Speedometer.archive_server = ArchiveServer() if not self.target.is_rooted: raise WorkloadError( "Device must be rooted for the speedometer workload currently" @@ -148,18 +148,17 @@ class Speedometer(Workload): # Temporary directory used for storing the Speedometer files, uiautomator # dumps, and modified XML chrome config files. - self.temp_dir = tempfile.TemporaryDirectory() - self.document_root = os.path.join(self.temp_dir.name, "document_root") + Speedometer.temp_dir = tempfile.TemporaryDirectory() + Speedometer.document_root = os.path.join(self.temp_dir.name, "document_root") # Host a copy of Speedometer locally tarball = context.get_resource(File(self, "speedometer_archive.tgz")) with tarfile.open(name=tarball) as handle: handle.extractall(self.temp_dir.name) self.archive_server.start(self.document_root, self.target) - self.webserver_port = self.archive_server.get_port() - self.speedometer_url = "http://localhost:{}/Speedometer2.0/index.html".format( - self.webserver_port + Speedometer.speedometer_url = "http://localhost:{}/Speedometer2.0/index.html".format( + self.archive_server.get_port() ) def setup(self, context):
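The distinction the fix relies on, reduced to a toy example: assigning to the class (not the instance) makes the value visible from every instance, including ones created for later jobs, while reads through `self` still work.

```python
class Workload:
    server = None                            # class-level slot

    def initialize(self):
        Workload.server = "archive-server"   # set once, on the class itself

    def run(self):
        return self.server                   # instance read falls back to the class attribute

first = Workload()
first.initialize()
print(Workload().run())                      # "archive-server": a *new* instance still sees it
```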
Enhance collectd Ceph python plugins. This patch adds support for the Ceph collectd Python plugins on OSP17.
shell: ls /var/run/ceph/ceph-osd.*.asok | head -n 1 | egrep -o '[0-9]+' register: cephstorage_osd_socket become: true - when: "('CephStorage' in group_names and ceph_storage_collectd_plugin)" + when: "('CephStorage' in group_names and ceph_storage_collectd_plugin) and (rhosp_version is version('17.0', '<'))" # End CephStorage OSD monitoring +- name: Get 1st OSD socket + shell: cephadm shell -- ls /var/run/ceph/ | head -n 1 | egrep -o '[0-9]+' + register: cephstorage_osd_socket + become: true + when: "('CephStorage' in group_names and ceph_storage_collectd_plugin) and (rhosp_version is version('17.0', '>='))" + - name: Create configuration directory file: path: "{{ browbeat_containers_path }}/collectd-openstack/config" -v /home/{{ host_remote_user }}/collectd_pipe:/collectd_pipe \ {% endif %} {% endif %} - {% if config_type == 'controller' or config_type == 'cephstorage' %} - {% if ceph_controller_collectd_plugin or ceph_storage_collectd_plugin %} - -v /etc/ceph/:/etc/ceph/ -v /var/run/ceph/:/var/run/ceph/ \ + {% if config_type == 'controller' and ceph_controller_collectd_plugin and inventory_hostname == groups['Controller'][0] %} + -v /etc/ceph/:/etc/ceph/ -v /var/run/ceph/:/var/run/ceph \ {% endif %} + {% if config_type == 'cephstorage' and ceph_storage_collectd_plugin and inventory_hostname == groups['CephStorage'][0] %} + -v /var/run/ceph/:/var/run/ceph \ {% endif %} collectd-openstack become: yes
[Hexagon] Fix use of subprocess.run in _check_call_verbose. It uses parameters that are not present in Python 3.6, and it catches a generic exception, which may not have `stdout` or `stderr` members.
@@ -47,8 +47,15 @@ def _check_call_verbose(cmd, **kwargs) -> None: the stdout/stderr provided by the subprocess. """ try: - subprocess.run(cmd, capture_output=True, check=True, text=True, **kwargs) - except Exception as err: + subprocess.run( + cmd, + check=True, + encoding="UTF-8", + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + **kwargs, + ) + except subprocess.CalledProcessError as err: error_msg = f"{err}\nstdout:\n{err.stdout}\nstderr:\n{err.stderr}" raise Exception(error_msg)
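As a sketch of the two points the fix relies on, here is a hedged, standalone version of the wrapper (the command is only an example): `capture_output` and `text` appeared in Python 3.7, so 3.6 needs explicit `stdout`/`stderr` pipes plus an `encoding`, and only `CalledProcessError` is guaranteed to carry `stdout` and `stderr`.

```python
import subprocess

def check_call_verbose(cmd):
    try:
        return subprocess.run(
            cmd,
            check=True,
            encoding="UTF-8",         # text-mode output without the 3.7-only text=True
            stdout=subprocess.PIPE,   # 3.6 spelling of capture_output=True
            stderr=subprocess.PIPE,
        )
    except subprocess.CalledProcessError as err:
        # Only this exception type has .stdout/.stderr; a generic Exception
        # (e.g. FileNotFoundError for a missing binary) does not.
        raise Exception(f"{err}\nstdout:\n{err.stdout}\nstderr:\n{err.stderr}")

check_call_verbose(["python3", "--version"])
```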
Update README.md added youtube.py
@@ -60,3 +60,5 @@ In the scripts the comments etc are lined up correctly when they are viewed in [ - `Google_News.py` - Uses BeautifulSoup to provide Latest News Headline along with news link. - `cricket_live_score` - Uses BeautifulSoup to provide live cricket score. + +- `youtube.py` - Takes input a song name and fetches the youtube url of best matching song and plays it.
Correct types, add missing functions to c_distributions.pxd. Correct the floating-point types of several npyrandom functions exposed for Cython in c_distributions.pxd, and add the missing float functions.
ctypedef s_binomial_t binomial_t + float random_standard_uniform_f(bitgen_t *bitgen_state) nogil double random_standard_uniform(bitgen_t *bitgen_state) nogil void random_standard_uniform_fill(bitgen_t* bitgen_state, npy_intp cnt, double *out) nogil + void random_standard_uniform_fill_f(bitgen_t *bitgen_state, npy_intp cnt, float *out) nogil + double random_standard_exponential(bitgen_t *bitgen_state) nogil - double random_standard_exponential_f(bitgen_t *bitgen_state) nogil + float random_standard_exponential_f(bitgen_t *bitgen_state) nogil void random_standard_exponential_fill(bitgen_t *bitgen_state, npy_intp cnt, double *out) nogil - void random_standard_exponential_fill_f(bitgen_t *bitgen_state, npy_intp cnt, double *out) nogil + void random_standard_exponential_fill_f(bitgen_t *bitgen_state, npy_intp cnt, float *out) nogil void random_standard_exponential_inv_fill(bitgen_t *bitgen_state, npy_intp cnt, double *out) nogil - void random_standard_exponential_inv_fill_f(bitgen_t *bitgen_state, npy_intp cnt, double *out) nogil + void random_standard_exponential_inv_fill_f(bitgen_t *bitgen_state, npy_intp cnt, float *out) nogil + double random_standard_normal(bitgen_t* bitgen_state) nogil + float random_standard_normal_f(bitgen_t *bitgen_state) nogil void random_standard_normal_fill(bitgen_t *bitgen_state, npy_intp count, double *out) nogil void random_standard_normal_fill_f(bitgen_t *bitgen_state, npy_intp count, float *out) nogil double random_standard_gamma(bitgen_t *bitgen_state, double shape) nogil + float random_standard_gamma_f(bitgen_t *bitgen_state, float shape) nogil float random_standard_uniform_f(bitgen_t *bitgen_state) nogil void random_standard_uniform_fill_f(bitgen_t* bitgen_state, npy_intp cnt, float *out) nogil double random_uniform(bitgen_t *bitgen_state, double lower, double range) nogil double random_beta(bitgen_t *bitgen_state, double a, double b) nogil double random_chisquare(bitgen_t *bitgen_state, double df) nogil - double random_f(bitgen_t *bitgen_state, double dfnum, double dfden) nogil + float random_f(bitgen_t *bitgen_state, double dfnum, double dfden) nogil double random_standard_cauchy(bitgen_t *bitgen_state) nogil double random_pareto(bitgen_t *bitgen_state, double a) nogil double random_weibull(bitgen_t *bitgen_state, double a) nogil
Pin pylint for build stability Changes on stable branches should be kept minimal; we don't want to change their code purely for new pylint minutiae.
@@ -23,7 +23,8 @@ dependencies: - xarray - redis-py # redis client lib, used by celery - redis # redis server -- pylint # testing +- pylint = 1.7 # testing +- astroid = 1.5 # needed to match pylint - pep8 # testing - fiona # movie generator app - mock # testing
Update changelog for 0.6.2 release
Summary: title
Test Plan: none
Reviewers: #ft, prha
# Changelog -## 0.6.2 (Upcoming) +## 0.6.2 - Changed composition functions `@pipeline` and `@composite_solid` to automatically give solids aliases with an incrementing integer suffix when there are conflicts. This removes to the need to manually alias solid definitions that are used multiple times. +- Add `dagster schedule wipe` command to delete all schedules and remove all schedule cron jobs - `execute_solid` test util now works on composite solids. +- Docs and example improvements: https://dagster.readthedocs.io/ - Added `--remote` flag to `dagster-graphql` for querying remote dagit servers. +- Fixed issue with duplicate run tag autocomplete suggestions in dagit (#1839) +- Fixed Windows 10 / py3.6+ bug causing pipeline execution failures ## 0.6.1
lnAddress: fix link to docs. Fixes:
Charge people for using your domain name...<br /> <a - href="https://github.com/lnbits/lnbits/tree/master/lnbits/extensions/lnaddress" + href="https://github.com/lnbits/lnbits-legend/tree/main/lnbits/extensions/lnaddress" >More details</a > <br />
HelpChannels: fix role not resetting after dormant command Resetting permissions relied on getting the member from the cache, but the member was already removed from the cache prior to resetting the role. Now the member is passed directly rather than relying on the cache.
@@ -230,7 +230,7 @@ class HelpChannels(Scheduler, commands.Cog): del self.help_channel_claimants[ctx.channel] with suppress(discord.errors.HTTPException, discord.errors.NotFound): - await self.reset_claimant_send_permission(ctx.channel) + await self.reset_claimant_send_permission(ctx.author) await self.move_to_dormant(ctx.channel, "command") self.cancel_task(ctx.channel.id) @@ -640,18 +640,8 @@ class HelpChannels(Scheduler, commands.Cog): log.trace(f"Resetting send permissions for {member} ({member.id}).") await member.remove_roles(COOLDOWN_ROLE) - async def reset_claimant_send_permission(self, channel: discord.TextChannel) -> None: - """Reset send permissions in the Available category for the help `channel` claimant.""" - log.trace(f"Attempting to find claimant for #{channel.name} ({channel.id}).") - try: - member = self.help_channel_claimants[channel] - except KeyError: - log.trace( - f"Channel #{channel.name} ({channel.id}) not in claimant cache, " - f"permissions unchanged." - ) - return - + async def reset_claimant_send_permission(self, member: discord.Member) -> None: + """Reset send permissions in the Available category for `member`.""" log.trace(f"Resetting send permissions for {member} ({member.id}).") await member.remove_roles(COOLDOWN_ROLE)
Update why-xarray.rst with clearer expression in one sentence.
@@ -62,9 +62,8 @@ The power of the dataset over a plain dictionary is that, in addition to pulling out arrays by name, it is possible to select or combine data along a dimension across all arrays simultaneously. Like a :py:class:`~pandas.DataFrame`, datasets facilitate array operations with -heterogeneous data -- the difference is that the arrays in a dataset can not -only have different data types, but can also have different numbers of -dimensions. +heterogeneous data -- the difference is that the arrays in a dataset can have +not only different data types, but also different numbers of dimensions. This data model is borrowed from the netCDF_ file format, which also provides xarray with a natural and portable serialization format. NetCDF is very popular
Send 400 in case of missing required parameters. While creating an OS-kubespray cluster, a 500 was sent if some parameters were missing. Also use the get method for checking the availability zone, since it is an optional parameter.
@@ -616,15 +616,22 @@ class OpenStack: self.meta["dns"] = [validate_ip(ip) for ip in self.cluster.metadata.get("dns_nameservers", []).split(",")] - self.meta["ext_net"] = self.c.get_network(self.cluster.metadata["floating_network"]) - if self.meta["ext_net"] is None: - raise ValueError("External network '%s' is not found" % self.cluster.metadata["floating_network"]) + try: self.meta["image"] = self.c.get_image(self.cluster.metadata["image_name"]) if self.meta["image"] is None: raise ValueError("Image '%s' is not found" % self.cluster.metadata["image_name"]) self.meta["flavor"] = self.c.get_flavor(self.cluster.metadata["flavor"]) if self.meta["flavor"] is None: raise ValueError("Flavor '%s' is not found" % self.cluster.metadata["flavor"]) + # Check that required fields are provided (KeyError is raised if not) + self.cluster.metadata["ssh_key_name"] + self.cluster.metadata["ssh_username"] + except KeyError as e: + raise ValueError("Required parameter is not set: {}".format(e)) + + self.meta["ext_net"] = self.c.get_network(self.cluster.metadata["floating_network"]) + if self.meta["ext_net"] is None: + raise ValueError("External network '%s' is not found" % self.cluster.metadata["floating_network"]) azone = self.cluster.metadata.get("availability_zone") if azone and azone not in self.c.list_availability_zone_names(): @@ -762,7 +769,7 @@ class OpenStack: flavor=flavor, userdata=self._get_userdata(), network=network, - availability_zone=self.os_kwargs["availability_zone"], + availability_zone=self.os_kwargs.get("availability_zone", "nova"), key_name=self.cluster.metadata["ssh_key_name"], ) server_ids.append(server.id)
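The validation pattern in isolation, as a hedged toy function (field names follow the diff; the OpenStack client calls and the HTTP layer are omitted): required keys are simply indexed so a missing one raises KeyError, which is converted into a ValueError the API can map to a 400, while the optional availability zone falls back via .get().

```python
def validate_metadata(metadata):
    try:
        # indexing required keys raises KeyError if any of them is missing
        metadata["ssh_key_name"]
        metadata["ssh_username"]
    except KeyError as e:
        # surfaces as a client error (400) instead of an unhandled 500
        raise ValueError("Required parameter is not set: {}".format(e))
    # optional parameter: use a default instead of failing
    return metadata.get("availability_zone", "nova")

print(validate_metadata({"ssh_key_name": "key", "ssh_username": "ubuntu"}))
```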
update Repeater.get in zapier_subscription_post_delete. The changes are covered by corehq.apps.zapier.tests.test_zapier_hooks:TestZapierIntegration
@@ -6,7 +6,7 @@ from tastypie.http import HttpBadRequest from corehq.apps.zapier.consts import CASE_TYPE_REPEATER_CLASS_MAP, EventTypes from corehq.apps.zapier.models import ZapierSubscription -from corehq.motech.repeaters.models import FormRepeater +from corehq.motech.repeaters.models import FormRepeater, SQLFormRepeater @receiver(pre_save, sender=ZapierSubscription) @@ -48,7 +48,7 @@ def zapier_subscription_post_delete(sender, instance, *args, **kwargs): Deletes the repeater object when the corresponding zap is turned off """ if instance.event_name == EventTypes.NEW_FORM: - repeater = FormRepeater.get(instance.repeater_id) + repeater = SQLFormRepeater.objects.get(repeater_id=instance.repeater_id) elif instance.event_name in CASE_TYPE_REPEATER_CLASS_MAP: repeater = CASE_TYPE_REPEATER_CLASS_MAP[instance.event_name].get(instance.repeater_id) else:
Fix emojis
- Fix emojis breaking with shortcodes containing dashes
- Fix emojis breaking with variants
- Add tests for both cases
/* eslint-disable */ -// We only need this one function of Twemoji to locate the CDN emoji image, -// so we copy it instead of importing the whole library. + +// We only need a few functions of Twemoji to locate the CDN emoji image, +// so we copy them instead of importing the whole library. + // https://github.com/twitter/twemoji/blob/42f8843cb3aa1f9403d5479d7e3f7e01176ad08e/scripts/build.js#L571 export function toCodePoint(unicodeSurrogates: string, sep?: string): string { const r = [] @@ -20,3 +22,14 @@ export function toCodePoint(unicodeSurrogates: string, sep?: string): string { } return r.join(sep || "-") } + +// https://github.com/twitter/twemoji/blob/42f8843cb3aa1f9403d5479d7e3f7e01176ad08e/scripts/build.js#L255 +const UFE0Fg = /\uFE0F/g +const U200D = String.fromCharCode(0x200d) + +// https://github.com/twitter/twemoji/blob/42f8843cb3aa1f9403d5479d7e3f7e01176ad08e/scripts/build.js#L344 +export function grabTheRightIcon(rawText: string) { + return toCodePoint( + rawText.indexOf(U200D) < 0 ? rawText.replace(UFE0Fg, "") : rawText + ) +}
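Restated as a tiny standalone Python sketch (the project code above is TypeScript; this is only an illustration of the rule): the variation selector U+FE0F is stripped only when the emoji contains no zero-width joiner, and the remaining code points, joined with dashes, form the CDN image name.

```python
ZWJ = "\u200d"   # zero-width joiner
VS16 = "\ufe0f"  # emoji variation selector (VS-16)

def grab_the_right_icon(raw: str) -> str:
    if ZWJ not in raw:
        raw = raw.replace(VS16, "")   # drop the variant only for non-ZWJ emojis
    return "-".join(format(ord(ch), "x") for ch in raw)

print(grab_the_right_icon("\u2764\ufe0f"))                                # "2764"
print(grab_the_right_icon("\U0001f468\u200d\U0001f469\u200d\U0001f467"))  # ZWJ sequence kept
```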
[bugfix] Fix parameter order of userPut method. Reorder the userPut parameters to enable tagging pages for speedy deletion.
@@ -72,8 +72,8 @@ and arguments can be: # # (C) Daniel Herding, 2004 # (C) Purodha Blissenbach, 2009 -# (C) xqt, 2009-2018 -# (C) Pywikibot team, 2004-2018 +# (C) xqt, 2009-2019 +# (C) Pywikibot team, 2004-2019 # # Distributed under the terms of the MIT license. # @@ -481,7 +481,7 @@ class RedirectRobot(SingleSiteBot, ExistingPageBot, RedirectPageBot): page.title(with_section=False)) content = content_page.get(get_redirect=True) content = self.sdtemplate + '\n' + content - self.userPut(page, content, page.text, summary=reason, + self.userPut(page, page.text, content, summary=reason, ignore_save_related_errors=True, ignore_server_errors=True)
Update elf_coinminer.txt ```220.194.237.43:43768``` returns a ```hello``` message.
@@ -126,3 +126,11 @@ w2wz.com # Reference: https://twitter.com/bad_packets/status/1123473023313616896 45.67.14.152:1337 + +# Reference: https://twitter.com/liuya0904/status/1135901420958281729 +# Reference: https://pastebin.com/5Ee4Xevs + +220.194.237.43:43768 +w.21-3n.xyz +w.3ei.xyz +w.lazer-n.com
[IMPR] derive ReplaceRobot from ExistingPageBot
- use ExistingPageBot to skip the NoPage exception
- move the isTitleExcepted and has_permission() checks to the skip_page method
- directly leave the treat method if isTextExcepted or new_text == original_text after replacements
@@ -156,7 +156,7 @@ from pywikibot.exceptions import ArgumentDeprecationWarning # Imports predefined replacements tasks from fixes.py from pywikibot import fixes from pywikibot import i18n, textlib, pagegenerators -from pywikibot.bot import SingleSiteBot +from pywikibot.bot import ExistingPageBot, SingleSiteBot from pywikibot.tools import ( chars, deprecated, @@ -498,7 +498,7 @@ class XmlDumpReplacePageGenerator(object): return False -class ReplaceRobot(SingleSiteBot): +class ReplaceRobot(SingleSiteBot, ExistingPageBot): """A bot that can do text replacements. @@ -713,46 +713,46 @@ class ReplaceRobot(SingleSiteBot): semicolon = self.site.mediawiki_message('semicolon-separator') return semicolon.join(summary_messages) - def treat(self, page): - """Work on each page retrieved from generator.""" + def skip_page(self, page): + """Check whether treat should be skipped for the page.""" if self.isTitleExcepted(page.title()): - pywikibot.output( - 'Skipping {0} because the title is on the exceptions list.' - .format(page.title(as_link=True))) - return + pywikibot.warning( + 'Skipping {} because the title is on the exceptions list.' + .format(page)) + return True - try: - # Load the page's text from the wiki - original_text = page.get(get_redirect=True) if not page.has_permission(): - pywikibot.output("You can't edit page " - + page.title(as_link=True)) - return - except pywikibot.NoPage: - pywikibot.output('Page {0} not found' - .format(page.title(as_link=True))) - return + pywikibot.warning("You can't edit page {}".format(page)) + return True + return super(ReplaceRobot, self).skip_page(page) + + def treat(self, page): + """Work on each page retrieved from generator.""" + original_text = page.text applied = set() new_text = original_text last_text = None context = 0 while True: if self.isTextExcepted(new_text): - pywikibot.output('Skipping {0} because it contains text ' + pywikibot.output('Skipping {} because it contains text ' 'that is on the exceptions list.' - .format(page.title(as_link=True))) - break + .format(page)) + return + while new_text != last_text: last_text = new_text new_text = self.apply_replacements(last_text, applied, page) if not self.getOption('recursive'): break + if new_text == original_text: pywikibot.output('No changes were necessary in ' + page.title(as_link=True)) - break + return + if self.addcat: # Fetch only categories in wikitext, otherwise the others # will be explicitly added.
Don't set the_geom_webmercator explicitly Cartodbfied tables should handle it automatically via triggers
@@ -164,10 +164,6 @@ def _geocode_query(table, street, city, state, country, metadata): UPDATE {table} SET the_geom = _g.the_geom, - the_geom_webmercator = CASE - WHEN _g.the_geom IS NULL THEN NULL - ELSE ST_Transform(_g.the_geom, 3857) - END, {metadata_assignment} {hash_column} = {hash_expression} FROM (SELECT * FROM {geocode_expression}) _g
Update kubernetesmod.py Added CLI Example to top of documentation
@@ -19,6 +19,8 @@ The data format for `kubernetes.kubeconfig-data` value is the content of Only `kubeconfig` or `kubeconfig-data` should be provided. In case both are provided `kubeconfig` entry is preferred. +CLI Example: + .. code-block:: bash salt '*' kubernetes.nodes kubeconfig=/etc/salt/k8s/kubeconfig context=minikube
DOC: Add testing dependencies to build workflow instructions Adds note on how to install the test dependencies when building numpy from source.
@@ -8,7 +8,7 @@ source. Your choice depends on your operating system and familiarity with the command line. Gitpod ------------- +------ Gitpod is an open-source platform that automatically creates the correct development environment right in your browser, reducing the need to @@ -21,7 +21,7 @@ in-depth instructions for building NumPy with `building NumPy with Gitpod`_. .. _building NumPy with Gitpod: https://numpy.org/devdocs/dev/development_gitpod.html Building locally ------------------- +---------------- Building locally on your machine gives you more granular control. If you are a MacOS or Linux user familiar with using the @@ -94,7 +94,14 @@ Testing ------- Make sure to test your builds. To ensure everything stays in shape, see if -all tests pass:: +all tests pass. + +The test suite requires additional dependencies, which can easily be +installed with:: + + $ python -m pip install -r test_requirements.txt + +Run tests:: $ python runtests.py -v -m full
Added site: Codeforces. Added using the response_url method.
"username_claimed": "blue", "username_unclaimed": "noonewouldeverusethis7" }, + "Codeforces": { + "errorType": "response_url", + "errorUrl": "https://codeforces.com/", + "url": "https://codeforces.com/profile/{}", + "urlMain": "https://www.codeforces.com/", + "username_claimed": "tourist", + "username_unclaimed": "noonewouldeverusethis789" + }, "Codepen": { "errorType": "status_code", "url": "https://codepen.io/{}",
Implement psutil within blackbox tests. Psutil is a well-established Python package that is currently used elsewhere in the code for a purpose similar to that of process_exists, and its use should therefore be extended to the blackbox tests as well.
@@ -20,6 +20,9 @@ import random import string from subprocess import PIPE, Popen +# isort: THIRDPARTY +import psutil + # Name prefix, so that we hopefully don't destroy any end user data by mistake! TEST_PREF = os.getenv("STRATIS_UT_PREFIX", "STRATI$_DE$TROY_ME!_") @@ -53,16 +56,20 @@ def random_string(length=4): def process_exists(name): """ - Walk the process table looking for executable 'name', returns pid if one - found, else return None + Look through processes, using their pids, to find one matching 'name'. + Return None if no such process found, else return the pid. + :param name: name of process to check + :type name: str + :return: pid or None + :rtype: int or NoneType """ - for pid in [pid for pid in os.listdir("/proc") if pid.isdigit()]: + for proc in psutil.process_iter(["name"]): try: - exe_name = os.readlink(os.path.join("/proc/", pid, "exe")) - except OSError: - continue - if exe_name and exe_name.endswith(os.path.join("/", name)): - return pid + if proc.name() == name: + return proc.pid + except psutil.NoSuchProcess: + pass + return None
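For a quick sanity check, the same lookup can be exercised directly against psutil (the process name here is just an example, not part of the change):

```python
import psutil

# Equivalent one-liner to the helper above: first matching pid, else None.
pid = next(
    (p.pid for p in psutil.process_iter(["name"]) if p.info["name"] == "systemd"),
    None,
)
print(pid)
```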
Update cea/interfaces/dashboard/inputs/routes.py does this work? then let's commit it!
@@ -169,5 +169,6 @@ def route_table_post(db): def df_to_json(file_location): table_df = geopandas.GeoDataFrame.from_file(file_location) - table_df = table_df.to_crs(epsg=4326) # make sure that the geojson is coded in latitude / longitude + from cea.utilities.standardize_coordinates import get_geographic_coordinate_system + table_df = table_df.to_crs(get_geographic_coordinate_system()) # make sure that the geojson is coded in latitude / longitude return json.loads(table_df.to_json())
issue: the `print_call_args` function was introduced when we started adding support for displaying arguments on function calls, but it is not used anymore, so we can safely remove it
@@ -1606,22 +1606,6 @@ class X86(Architecture): taken, reason = val&(1<<flags["sign"]), "S" return taken, reason - def print_call_args(self): - offsets = [0, 4, 8, 12, 16, 20] - sp = get_register("$esp") - for i, offset in enumerate(offsets): - addr = sp + offset - line = "arg[{:d}] (sp+{:#x}) ".format(i, offset) - line += Color.boldify(format_address(addr)) - addrs = DereferenceCommand.dereference_from(addr) - - if len(addrs) > 1: - sep = " {:s} ".format(right_arrow) - line += sep + sep.join(addrs[1:]) - - print(line) - return - def mprotect_asm(self, addr, size, perm): _NR_mprotect = 125 insns = [ @@ -1647,23 +1631,6 @@ class X86_64(X86): return_register = "$rax" function_parameters = ["$rdi", "$rsi", "$rdx", "$rcx", "$r8", "$r9"] - def print_call_args(self): - regs = ["$rdi", "$rsi", "$rdx", "$rcx", "$r8", "$r9"] - for i, reg in enumerate(regs): - addr = long(gdb.parse_and_eval(reg)) - line = "Arg {:d} ({:s}) ".format(i, reg) - - line += Color.boldify(format_address(addr)) - addrs = DereferenceCommand.dereference_from(addr) - - if len(addrs) > 1: - sep = " {:s} ".format(right_arrow) - line += sep + sep.join(addrs[1:]) - - print(line) - - return - def mprotect_asm(self, addr, size, perm): _NR_mprotect = 10 insns = ["push rax", "push rdi", "push rsi", "push rdx",
MAINT: modernised test.G_fit to rely on numpy arrays [CHANGED] now rely on array operations. Simplifies code.
@@ -297,26 +297,20 @@ def G_fit(obs, exp, williams=1): See Sokal and Rohlf chapter 17. """ + obs = array(obs) + exp = array(exp) + if obs.shape != exp.shape: + raise ValueError("requires data with equal dimensions.") + elif (obs < 0).any(): + raise ValueError("requires all observed values to be positive.") + elif (exp == 0).any() or (exp < 0).any(): + raise ZeroExpectedError("requires all expected values to be positive") + non_zero = obs != 0 + G = 2 * (obs[non_zero] * (log(obs[non_zero]) - + log(exp[non_zero]))).sum() k = len(obs) - if k != len(exp): - raise ValueError("G_fit requires two lists of equal length.") - G = 0 - n = 0 - - for o, e in zip(obs, exp): - if o < 0: - raise ValueError( - "G_fit requires all observed values to be positive.") - if e <= 0: - raise ZeroExpectedError( - "G_fit requires all expected values to be positive.") - if o: # if o is zero, o * log(o/e) must be zero as well. - G += o * log(o / e) - n += o - - G *= 2 if williams: - q = 1 + (k + 1) / (6 * n) + q = 1 + (k + 1) / (6 * obs.sum()) G /= q return G, chi_high(G, k - 1)
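A small standalone check of the vectorised core with made-up counts (Williams correction left out): the mask drops zero observations, and the statistic is one array expression instead of a Python loop.

```python
import numpy as np

obs = np.array([10, 0, 20, 70])
exp = np.array([20, 5, 25, 50])

non_zero = obs != 0  # zero counts contribute nothing to the sum
G = 2 * (obs[non_zero] * (np.log(obs[non_zero]) - np.log(exp[non_zero]))).sum()
print(G)  # ~24.3 for these numbers
```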
Fixing filename creation for dplay Encoding of show name failed. Fixed that! fixes:
@@ -85,8 +85,10 @@ class Dplay(Service): name = jsondata["data"]["attributes"]["name"] if is_py2: show = filenamify(show).encode("latin1") + name = filenamify(name).encode("latin1") else: show = filenamify(show) + return filenamify("{0}.s{1:02d}e{2:02d}.{3}".format(show, int(season), int(episode), name)) def find_all_episodes(self, options):
Select different minio docker release to fix the zombie issue Issue
@@ -45,7 +45,7 @@ services: minio-protected: # This is for protected data, should only be exposed via an internal link # in nginx - image: minio/minio:RELEASE.2019-02-20T22-44-29Z + image: minio/minio:RELEASE.2019-04-04T18-31-46Z environment: MINIO_ACCESS_KEY: minioprotected MINIO_SECRET_KEY: minioprotected12345
Point API for staging at email and sms stubs for the soak tests. This is done to avoid sending real email and sms and incurring unnecessary charges while we run the soak tests.
@@ -468,6 +468,9 @@ class Staging(Config): API_RATE_LIMIT_ENABLED = True CHECK_PROXY_HEADER = True REDIS_ENABLED = True + SES_STUB_URL = 'https://notify-email-provider-stub-staging.cloudapps.digital/ses' + MMG_URL = 'https://notify-sms-provider-stub-staging.cloudapps.digital/mmg' + FIRETEXT_URL = 'https://notify-sms-provider-stub-staging.cloudapps.digital/firetext' class Live(Config):
Time display changes Changes the return time format to Min:Sec
@@ -369,7 +369,7 @@ class Sniper(BaseTask): exists = False self._log('Sniping distance is more than supported distance, abort sniping') else: - self._log('Base on distance, pausing for {0:.2f} Mins'.format(sleep_time/60)) + self._log('Base on distance, pausing for '+time.strftime("%M:%S", time.gmtime(sleep_time))) # Teleport, so that we can see nearby stuff self.bot.hb_locked = True
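For reference, a quick standalone check of the formatting call used above (the value is arbitrary); note that `%M:%S` only reads correctly for pauses shorter than an hour.

```python
import time

sleep_time = 754  # seconds, arbitrary example
print(time.strftime("%M:%S", time.gmtime(sleep_time)))  # "12:34"
```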
Update release-process.md Made a note about marketing if release falls on a Friday.
@@ -86,6 +86,7 @@ Day when Leads and PMs decide which major features are included in the release, - Create meta issue for release in GitHub (see [example](https://github.com/mattermost/mattermost-server/issues/3702)) 3. Logistics: - Confirm date of marketing announcement for the release date with Marketing, and update release channel header if needed + - If release day falls on a Friday, the blog post goes out on the Friday and the emailed newsletter goes out the following Tuesday. - Post a reminder to devs in the Release Discussion channel of the the code complete date with the ZBB count [see example](https://pre-release.mattermost.com/core/pl/coggyys9atg7fqyam81q3gkmoo) 4. Leads: - Finalize roadmap for next release, and identify planned marketing bullet points
[Logs] Avoid logging an error when we successfully retry submission, as otherwise we see errors in the logs, and this is confusing. If we successfully retry, then it is not an error.
@@ -91,9 +91,8 @@ def safe_submit_log(s, log): try: send_entry(s, log) except Exception as e: - err_message = 'Error sending the log line. Exception: {}'.format(str(e)) + # retry once s = connect_to_datadog(host, ssl_port) - send_entry(s, err_message) send_entry(s, log) return s
Type fixes for tempfile.TemporaryDirectory If no arguments are passed to the TemporaryDirectory constructor, then the class defaults to using str. Overload the __init__ function to cover this case.
@@ -310,7 +310,10 @@ class SpooledTemporaryFile(IO[AnyStr]): def __next__(self) -> AnyStr: ... class TemporaryDirectory(Generic[AnyStr]): - name: str + name: AnyStr + @overload + def __init__(self: TemporaryDirectory[str], suffix: None = ..., prefix: None = ..., dir: None = ...) -> None: ... + @overload def __init__( self, suffix: Optional[AnyStr] = ..., prefix: Optional[AnyStr] = ..., dir: Optional[_DirT[AnyStr]] = ... ) -> None: ...
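A quick interpreter-level check of the behaviour the new overload encodes (standard library only; the bytes case assumes a POSIX host): with no arguments the directory name is a `str`, while passing bytes for `suffix`/`prefix`/`dir` yields a bytes name.

```python
import tempfile

with tempfile.TemporaryDirectory() as name:                 # no arguments
    print(type(name))                                        # <class 'str'>

with tempfile.TemporaryDirectory(prefix=b"demo-") as name:  # bytes argument
    print(type(name))                                        # <class 'bytes'>
```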
Minor fix for quantizing the Ads complex model
Summary:
- Remove Int8Relu in quantized model
- Suppress log warnings if verbose is false
Test Plan: TBD
@@ -95,8 +95,8 @@ void DynamicHistogram::Add(float f) { max_ = std::max(max_, f); if (histogram_ == nullptr) { - histogram_ = std::make_unique<Histogram>( - nbins_ * OVER_BINNING_FACTOR, min_, max_); + histogram_ = + std::make_unique<Histogram>(nbins_ * OVER_BINNING_FACTOR, min_, max_); histogram_->Add(f); return; } @@ -119,6 +119,8 @@ void DynamicHistogram::Add(float f) { ceil((f - curr_hist.Max()) / old_spread) * old_spread; } } + new_min = std::max(numeric_limits<float>::lowest(), new_min); + new_max = std::min(numeric_limits<float>::max(), new_max); histogram_.reset( new Histogram(curr_hist.GetHistogram()->size(), new_min, new_max)); RemapHistograms(curr_hist, *histogram_); @@ -132,12 +134,12 @@ void DynamicHistogram::Add(const float* f, int len) { minimum = std::min(f[i], minimum); maximum = std::max(f[i], maximum); } - min_ = minimum; - max_ = maximum; + min_ = std::max(numeric_limits<float>::lowest(), minimum); + max_ = std::min(numeric_limits<float>::max(), maximum); if (histogram_ == nullptr) { - histogram_ = std::make_unique<Histogram>( - nbins_ * OVER_BINNING_FACTOR, min_, max_); + histogram_ = + std::make_unique<Histogram>(nbins_ * OVER_BINNING_FACTOR, min_, max_); histogram_->Add(f, len); return; } @@ -162,6 +164,8 @@ void DynamicHistogram::Add(const float* f, int len) { ceil((max_ - curr_hist.Max()) / old_spread) * old_spread; } } + new_min = std::max(numeric_limits<float>::lowest(), new_min); + new_max = std::min(numeric_limits<float>::max(), new_max); histogram_.reset( new Histogram(curr_hist.GetHistogram()->size(), new_min, new_max)); RemapHistograms(curr_hist, *histogram_);
[hailctl][devdeploy] improve error messages
Previously we got a stack trace without the HTTP response body. I tested this locally on a branch that does not exist:
# hailctl dev deploy --branch danking/hail:shuffler-deploymefdsafdsa --steps test_shuffler
HTTP Response code was 400
error finding {"repo": {"owner": "danking", "name": "hail"}, "name": "shuffler-deploymefdsafdsa"} at GitHub
import asyncio import webbrowser import aiohttp +import sys from hailtop.config import get_deploy_config from hailtop.auth import service_auth_headers @@ -27,7 +28,7 @@ class CIClient: async def __aenter__(self): headers = service_auth_headers(self._deploy_config, 'ci') self._session = ssl_client_session( - raise_for_status=True, timeout=aiohttp.ClientTimeout(total=60), headers=headers) + timeout=aiohttp.ClientTimeout(total=60), headers=headers) return self async def __aexit__(self, exc_type, exc, tb): @@ -45,6 +46,10 @@ class CIClient: } async with self._session.post( self._deploy_config.url('ci', '/api/v1alpha/dev_deploy_branch'), json=data) as resp: + if resp.status >= 400: + print(f'HTTP Response code was {resp.status}') + print(await resp.text()) + sys.exit(1) resp_data = await resp.json() return resp_data['batch_id']
fix Unicode multipart/form-data values in python3. Multipart form uploads are not affected by the WSGI/PEP-3333 'latin1' default encoding quirk and are already properly decoded as UTF-8, so we have to disable FormsDict.recode_unicode for these.
@@ -1248,6 +1248,7 @@ class BaseRequest(object): :class:`FormsDict`. All keys and values are strings. File uploads are stored separately in :attr:`files`. """ forms = FormsDict() + forms.recode_unicode = self.POST.recode_unicode for name, item in self.POST.allitems(): if not isinstance(item, FileUpload): forms[name] = item @@ -1271,6 +1272,7 @@ class BaseRequest(object): """ files = FormsDict() + files.recode_unicode = self.POST.recode_unicode for name, item in self.POST.allitems(): if isinstance(item, FileUpload): files[name] = item @@ -1405,6 +1407,7 @@ class BaseRequest(object): if py3k: args['encoding'] = 'utf8' + post.recode_unicode = False data = cgi.FieldStorage(**args) self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394 data = data.list or []
override ceph_release with ceph_stable_release. When `ceph_origin` is set to `'repository'` and `ceph_repository` to `'community'`, we need to ensure `ceph_release` reflects `ceph_stable_release`. The override had simply been removed, while it should only have to run when the condition mentioned above is satisfied.
tags: - always +- name: set_fact ceph_release - override ceph_release with ceph_stable_release + set_fact: + ceph_release: "{{ ceph_stable_release }}" + when: + - ceph_origin == 'repository' + tags: + - always + - name: include facts_mon_fsid.yml include_tasks: facts_mon_fsid.yml run_once: true