Columns: message (string, lengths 6 to 474), diff (string, lengths 8 to 5.22k)
gmskframegen: removing unused preprocessor directive
#include "liquid.internal.h" -#define DEBUG_GMSKFRAMEGEN 0 - // gmskframegen int gmskframegen_encode_header( gmskframegen _q, const unsigned char * _header); int gmskframegen_write_zeros (gmskframegen _q, float complex * _y);
Update `ScriptEntry.swift`
@@ -42,6 +42,12 @@ struct ScriptEntry: TimelineEntry, Codable { /// A boolean indicating whether the console should be rendered as a placeholder. var isPlaceholder = false + /// For widgets handled in app, the update interval. + var updateInterval: TimeInterval? + + /// A boolean indicating whether the script content is set in app. + var inApp = false + /// Returns the URL to open the script. /// /// - Parameters: @@ -85,6 +91,10 @@ struct ScriptEntry: TimelineEntry, Codable { case snapshots case view case bookmarkData + case updateInterval + case output + case date + case code } init(date: Date, output: String, snapshots: [WidgetFamily:(UIImage, UIColor)] = [:], view: [WidgetFamily:WidgetView]? = nil, code: String = "", bookmarkData: Data? = nil) { @@ -130,9 +140,18 @@ struct ScriptEntry: TimelineEntry, Codable { bookmarkData = nil } - output = "" - date = Date() - code = "" + do { + updateInterval = try container.decode(Double.self, forKey: .updateInterval) + } catch { + updateInterval = nil + } + + output = try container.decode(String.self, forKey: .output) + + date = try container.decode(Date.self, forKey: .date) + + code = try container.decode(String.self, forKey: .code) + self.snapshots = snapshots } @@ -152,5 +171,9 @@ struct ScriptEntry: TimelineEntry, Codable { try container.encode(views, forKey: .view) try container.encode(snapshots, forKey: .snapshots) try container.encode(bookmarkData, forKey: .bookmarkData) + try container.encode(updateInterval, forKey: .updateInterval) + try container.encode(output, forKey: .output) + try container.encode(date, forKey: .date) + try container.encode(code, forKey: .code) } }
Update readme overview size and logo padding.
<div align="center"> - <a href="https://metacall.io" target="_blank"><img src="https://raw.githubusercontent.com/metacall/core/master/deploy/images/logo.png" alt="M E T A C A L L" style="max-width:100%;padding-right:2.5rem;" width="128" height="128"> + <a href="https://metacall.io" target="_blank"><img src="https://raw.githubusercontent.com/metacall/core/master/deploy/images/logo.png" alt="M E T A C A L L" style="max-width:100%;padding-right:3rem;" width="128" height="128"> <p><b>M E T A C A L L</b></p></a> <p>A library for providing inter-language foreign function interface calls</p> - <a href="https://medium.com/@metacall/call-functions-methods-or-procedures-between-programming-languages-with-metacall-58cfece35d7" target="_blank"><img src="https://raw.githubusercontent.com/metacall/core/master/deploy/images/overview.png" alt="M E T A C A L L" style="max-width:100%;" width="256" height="182"> + <a href="https://medium.com/@metacall/call-functions-methods-or-procedures-between-programming-languages-with-metacall-58cfece35d7" target="_blank"><img src="https://raw.githubusercontent.com/metacall/core/master/deploy/images/overview.png" alt="M E T A C A L L" style="max-width:100%;" width="512" height="364"> </div> # Abstract
Add tests for PL and GL using ploidy of GT
@@ -232,6 +232,49 @@ namespace ebi source}), vcf::SamplesFieldBodyError*); + CHECK_THROWS_AS( (vcf::Record{ + 1, + "chr1", + 123456, + { "id123" }, + "A", + { "AT" }, + 1.0, + { vcf::PASS }, + { {vcf::AA, "243"} }, + { vcf::GT, vcf::GL }, + { "1/0:1.3,2.4" }, + source}), + vcf::SamplesFieldBodyError*); + + CHECK_NOTHROW( (vcf::Record{ + 1, + "chr1", + 123456, + { "id123" }, + "A", + { "AT" }, + 1.0, + { vcf::PASS }, + { {vcf::AA, "243"} }, + { vcf::GT, vcf::GL }, + { "1:1.3,2.4" }, + source})); + + CHECK_NOTHROW( (vcf::Record{ + 1, + "chr1", + 123456, + { "id123" }, + "A", + { "AT" }, + 1.0, + { vcf::PASS }, + { {vcf::AA, "243"} }, + { vcf::GL }, + { "1.3,2.4,2.3" }, + source})); + CHECK_THROWS_AS( (vcf::Record{ 1, "chr1", @@ -412,6 +455,49 @@ namespace ebi source}), vcf::SamplesFieldBodyError*); + CHECK_THROWS_AS( (vcf::Record{ + 1, + "chr1", + 123456, + { "id123" }, + "A", + { "AT" }, + 1.0, + { vcf::PASS }, + { {vcf::AA, "243"} }, + { vcf::GT, vcf::PL }, + { "1/0:1,2" }, + source}), + vcf::SamplesFieldBodyError*); + + CHECK_NOTHROW( (vcf::Record{ + 1, + "chr1", + 123456, + { "id123" }, + "A", + { "AT" }, + 1.0, + { vcf::PASS }, + { {vcf::AA, "243"} }, + { vcf::GT, vcf::PL }, + { "1:1,2" }, + source})); + + CHECK_NOTHROW( (vcf::Record{ + 1, + "chr1", + 123456, + { "id123" }, + "A", + { "AT" }, + 1.0, + { vcf::PASS }, + { {vcf::AA, "243"} }, + { vcf::PL }, + { "1,2,5" }, + source})); + CHECK_THROWS_AS( (vcf::Record{ 1, "chr1",
the new way to create the ar file name
@@ -509,6 +509,7 @@ static BOOLEAN check_skip_entry_status_for_event_actionreq_set(BOOLEAN not_match BOOLEAN skip_entry = not_matching; UINT32 event_type = 0; CHAR8 event_cat = 0; + char dimm_uid[SYSTEM_LOG_FILE_NAME_MAX_LEN] = { 0 }; char log_file_name[SYSTEM_LOG_FILE_NAME_MAX_LEN] = { 0 }; FILE *h_file = NULL; NVM_EVENT_MSG event_type_str = { 0 }; @@ -522,9 +523,9 @@ static BOOLEAN check_skip_entry_status_for_event_actionreq_set(BOOLEAN not_match // Get the event type value event_type = get_event_type_form_event_entry(event_message, NULL, NULL); // Get the UID file name - get_unified_id_form_event_entry(event_message, sizeof(log_file_name), log_file_name); + get_unified_id_form_event_entry(event_message, sizeof(dimm_uid), dimm_uid); // The action required file configured - snprintf(log_file_name, SYSTEM_LOG_FILE_NAME_MAX_LEN, ACTION_REQUIRED_FILE_PARSING_STRING, log_file_name); + snprintf(log_file_name, sizeof(log_file_name), ACTION_REQUIRED_FILE_PARSING_STRING, dimm_uid); // Event type cannot equal 0, it is stored in the log file means at least one bit needs to be set h_file = fopen(log_file_name, "r+"); if (NULL != h_file)
Change error key in example
@@ -67,7 +67,7 @@ defmodule Main do use Elektra def main do - error_key = Elektra.System.key_new("user:/error/key") + error_key = Elektra.System.key_new("/") config = Elektra.System.ks_new(0) root = Elektra.System.key_new("user:/test")
VERSION bump to version 2.0.58
@@ -58,7 +58,7 @@ set (CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}) # set version of the project set(LIBYANG_MAJOR_VERSION 2) set(LIBYANG_MINOR_VERSION 0) -set(LIBYANG_MICRO_VERSION 57) +set(LIBYANG_MICRO_VERSION 58) set(LIBYANG_VERSION ${LIBYANG_MAJOR_VERSION}.${LIBYANG_MINOR_VERSION}.${LIBYANG_MICRO_VERSION}) # set version of the library set(LIBYANG_MAJOR_SOVERSION 2)
net/ip: increase the default pbuf pool and listen socket count to allow inet_def_service to operate.
@@ -80,7 +80,7 @@ extern "C" { #define MEMP_NUM_TCP_PCB 3 /* XXX */ /* MEMP_NUM_TCP_PCB_LISTEN: the number of listening TCP connections. */ -#define MEMP_NUM_TCP_PCB_LISTEN 1 +#define MEMP_NUM_TCP_PCB_LISTEN 3 /* MEMP_NUM_TCP_SEG: the number of simultaneously queued TCP segments. */ #define MEMP_NUM_TCP_SEG (TCP_SND_QUEUELEN + 1) @@ -101,12 +101,18 @@ extern "C" { /* ---------- Pbuf options ---------- */ /* PBUF_POOL_SIZE: the number of buffers in the pbuf pool. */ #ifndef PBUF_POOL_SIZE -#define PBUF_POOL_SIZE 3 +#define PBUF_POOL_SIZE 6 #endif /* PBUF_POOL_BUFSIZE: the size of each pbuf in the pbuf pool. */ #define PBUF_POOL_BUFSIZE 1580 +/* + * Disable this; causes excessive stack use in device drivers calling + * pbuf_alloc() + */ +#define PBUF_POOL_FREE_OOSEQ 0 + /* PBUF_LINK_HLEN: the number of bytes that should be allocated for a link level header. */ #define PBUF_LINK_HLEN 16
doc: update documents for "--rtvm" parameter This patch updates the documents for the "--rtvm" parameter.
@@ -318,6 +318,15 @@ Here are descriptions for each of these ``acrn-dm`` command line parameters: * - :kbd:`--lapic_pt` - This option is to create a VM with lapic pass-through. With this option, a VM is created with LAPIC_PASSTHROUGH and - IOREQ_COMPLETION_POLLING mode. This kind of VM is generally for realtime scenarios. + IOREQ_COMPLETION_POLLING mode. This kind of VM is generally for hard realtime scenarios. + + By default, DM will create VM without this option. + + * - :kbd:`--rtvm` + - This option is to create a VM with realtime attribute. + With this option, a VM is created with GUEST_FLAG_RT and GUEST_FLAG_IOREQ_COMPLETION_POLLING + mode. This kind of VM is generally for soft realtime scenarios (without lapic_pt) or hard + realtime scenarios (with lapic_pt). This kind of VM can't be interfered by SOS during runtime. + It can only be poweroff from inside of itself. By default, DM will create VM without this option.
test: Add AES-GCM decrypt benchmark TEST=test/run_device_tests.py -b bloonchipper -t aes TEST=make run-aes BRANCH=none Code-Coverage: Zoss
@@ -431,7 +431,7 @@ static void test_aes_gcm_speed(void) 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, }; const int key_size = sizeof(key); - static const uint8_t plaintext[512] = { 0 }; + static uint8_t plaintext[512] = { 0 }; const auto plaintext_size = sizeof(plaintext); static const uint8_t nonce[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, @@ -441,20 +441,33 @@ static void test_aes_gcm_speed(void) uint8_t tag[16] = { 0 }; const int tag_size = sizeof(tag); - uint8_t *out = tmp; + uint8_t *encrypted_data = tmp; static AES_KEY aes_key; static GCM128_CONTEXT ctx; assert(plaintext_size <= sizeof(tmp)); - benchmark.run("AES-GCM", [&]() { + benchmark.run("AES-GCM encrypt", [&]() { AES_set_encrypt_key(key, 8 * key_size, &aes_key); CRYPTO_gcm128_init(&ctx, &aes_key, (block128_f)AES_encrypt, 0); CRYPTO_gcm128_setiv(&ctx, &aes_key, nonce, nonce_size); - CRYPTO_gcm128_encrypt(&ctx, &aes_key, plaintext, out, + CRYPTO_gcm128_encrypt(&ctx, &aes_key, plaintext, encrypted_data, plaintext_size); CRYPTO_gcm128_tag(&ctx, tag, tag_size); }); + + benchmark.run("AES-GCM decrypt", [&]() { + AES_set_encrypt_key(key, 8 * key_size, &aes_key); + CRYPTO_gcm128_init(&ctx, &aes_key, (block128_f)AES_encrypt, 0); + CRYPTO_gcm128_setiv(&ctx, &aes_key, nonce, nonce_size); + auto decrypt_res = + CRYPTO_gcm128_decrypt(&ctx, &aes_key, encrypted_data, + plaintext, plaintext_size); + + auto finish_res = CRYPTO_gcm128_finish(&ctx, tag, tag_size); + assert(decrypt_res); + assert(finish_res); + }); benchmark.print_results(); }
not always define seed cookie path
@@ -370,13 +370,13 @@ tokenDefinition_t *getKnownToken(uint8_t *contractAddress) { return NULL; } +#ifndef HAVE_WALLET_ID_SDK + unsigned int const U_os_perso_seed_cookie[] = { 0xda7aba5e, 0xc1a551c5, }; -#ifndef HAVE_WALLET_ID_SDK - void handleGetWalletId(volatile unsigned int *tx) { unsigned char t[64]; cx_ecfp_256_private_key_t priv; @@ -394,7 +394,7 @@ void handleGetWalletId(volatile unsigned int *tx) { THROW(0x9000); } -#endif +#endif // HAVE_WALLET_ID_SDK void handleApdu(unsigned int *flags, unsigned int *tx) { unsigned short sw = 0; @@ -409,7 +409,7 @@ void handleApdu(unsigned int *flags, unsigned int *tx) { return; } -#endif +#endif // HAVE_WALLET_ID_SDK #ifdef HAVE_STARKWARE
Restore missing logging.
@@ -3650,6 +3650,39 @@ static PyObject *wsgi_load_source(apr_pool_t *pool, request_rec *r, PyObject *result = NULL; char *source_buf = NULL; + if (exists) { + Py_BEGIN_ALLOW_THREADS + if (r) { + ap_log_rerror(APLOG_MARK, APLOG_INFO, 0, r, + "mod_wsgi (pid=%d, process='%s', application='%s'): " + "Reloading WSGI script '%s'.", getpid(), + process_group, application_group, filename); + } + else { + ap_log_error(APLOG_MARK, APLOG_INFO, 0, wsgi_server, + "mod_wsgi (pid=%d, process='%s', application='%s'): " + "Reloading WSGI script '%s'.", getpid(), + process_group, application_group, filename); + } + Py_END_ALLOW_THREADS + } + else { + Py_BEGIN_ALLOW_THREADS + if (r) { + ap_log_rerror(APLOG_MARK, APLOG_INFO, 0, r, + "mod_wsgi (pid=%d, process='%s', application='%s'): " + "Loading Python script file '%s'.", getpid(), + process_group, application_group, filename); + } + else { + ap_log_error(APLOG_MARK, APLOG_INFO, 0, wsgi_server, + "mod_wsgi (pid=%d, process='%s', application='%s'): " + "Loading Python script file '%s'.", getpid(), + process_group, application_group, filename); + } + Py_END_ALLOW_THREADS + } + io_module = PyImport_ImportModule("io"); if (!io_module) { goto load_source_finally;
virtualscroller: fix iOS bounce
@@ -304,7 +304,9 @@ export default class VirtualScroller<T> extends Component<VirtualScrollerProps<T //ref.scrollIntoView(); const newScrollTop = this.window.scrollHeight - ref.offsetTop - this.savedDistance; + this.window.style['-webkit-overflow-scrolling'] = 'auto'; this.window.scrollTop = newScrollTop; + this.window.style['-webkit-overflow-scrolling'] = 'touch'; requestAnimationFrame(() => { this.savedIndex = null; this.savedDistance = 0;
Python fix
@@ -134,7 +134,7 @@ Requires: openmpi3-%{compiler_family}%{PROJ_DELIM} %global python_module_prefix py3- %global python_site_dir %{python3_sitearch} # Failback in the event new python macros are undefined -%if 0{!?python3_version_nodots:1} +%if 0%{!?python3_version:1} %if 0%{sle_version} < 150000 %global python3_version=3.4 %global python3_version_nodots=34 @@ -143,14 +143,14 @@ Requires: openmpi3-%{compiler_family}%{PROJ_DELIM} %global python3_version_nodots=36 %endif # sle_version %endif # python3_version -%global python_prefix python%{python3_version_nodots} +%global python_prefix python3 %global python_lib_dir python%{python3_version} %endif # Python3 %if "%{python_family}" == "python2" %global python_module_prefix py2- %global python_prefix python -%if 0{!?python_version:1} +%if 0%{!?python_version:1} %global python_version=2.7 %global python_version_nodots=27 %endif # python_version
docs/usocket: Document that settimeout() isn't supported by all ports. And describe an alternative of using uselect.poll().
@@ -237,12 +237,30 @@ Methods .. method:: socket.settimeout(value) + **Note**: Not every port supports this method, see below. + Set a timeout on blocking socket operations. The value argument can be a nonnegative floating point number expressing seconds, or None. If a non-zero value is given, subsequent socket operations will raise an `OSError` exception if the timeout period value has elapsed before the operation has completed. If zero is given, the socket is put in non-blocking mode. If None is given, the socket is put in blocking mode. + Not every `MicroPython port` supports this method. A more portable and + generic solution is to use `uselect.poll` object. This allows to wait on + multiple objects at the same time (and not just on sockets, but on generic + stream objects which support polling). Example:: + + # Instead of: + s.settimeout(1.0) # time in seconds + s.read(10) # may timeout + + # Use: + poller = uselect.poll() + poller.register(s, uselect.POLLIN) + res = poller.poll(1000) # time in milliseconds + if not res: + # s is still not ready for input, i.e. operation timed out + .. admonition:: Difference to CPython :class: attention
Prevent XML_UNICODE from being redefined
@@ -121,7 +121,9 @@ extern "C" { #endif #ifdef XML_UNICODE_WCHAR_T +# ifndef XML_UNICODE # define XML_UNICODE +# endif # if defined(__SIZEOF_WCHAR_T__) && (__SIZEOF_WCHAR_T__ != 2) # error "sizeof(wchar_t) != 2; Need -fshort-wchar for both Expat and libc" # endif
usb_cdc: fix WFI
@@ -182,14 +182,14 @@ static uint16_t VCP_DataTx(uint8_t *Buf, uint32_t Len) { */ while (CDC_Send_FreeBytes() == 0 || USB_Tx_State != 0) - __WFI; + __WFI(); for (uint32_t i = 0; i < Len; i++) { APP_Rx_Buffer[APP_Rx_ptr_in] = Buf[i]; APP_Rx_ptr_in = (APP_Rx_ptr_in + 1) % APP_RX_DATA_SIZE; while (CDC_Send_FreeBytes() == 0 || USB_Tx_State != 0) - __WFI; + __WFI(); } return USBD_OK;
Changed fps_limit's delimiter to +
@@ -124,7 +124,7 @@ parse_fps_limit(const char *str) std::stringstream fps_limit_strings(str); std::string value; - while (std::getline(fps_limit_strings, value, ',')) { + while (std::getline(fps_limit_strings, value, '+')) { trim(value); uint32_t as_int;
vppinfra: fix u32x4_gather definition Type: fix
@@ -699,7 +699,7 @@ u64x2_gather (void *p0, void *p1) } static_always_inline u32x4 -u32x4_gather (void *p0, void *p1, void *p2, void *p3, void *p4) +u32x4_gather (void *p0, void *p1, void *p2, void *p3) { u32x4 r = { *(u32 *) p0, *(u32 *) p1, *(u32 *) p2, *(u32 *) p3 }; return r;
TCPMv2: PD Timers - Add PE PSTransition to framework BRANCH=none TEST=make runtests Tested-by: Denis Brockus
#include "usb_pd_dpm.h" #include "usb_pd.h" #include "usb_pd_tcpm.h" +#include "usb_pd_timer.h" #include "usb_pe_sm.h" #include "usb_tbt_alt_mode.h" #include "usb_prl_sm.h" @@ -604,13 +605,6 @@ static struct policy_engine { */ uint64_t source_cap_timer; - /* - * This timer is started when a request for a new Capability has been - * accepted and will timeout after PD_T_PS_TRANSITION if a PS_RDY - * Message has not been received. - */ - uint64_t ps_transition_timer; - /* * This timer is used to ensure that a Message requesting a response * (e.g. Get_Source_Cap Message) is responded to within a bounded time @@ -3282,7 +3276,7 @@ static void pe_snk_transition_sink_entry(int port) print_current_state(port); /* Initialize and run PSTransitionTimer */ - pe[port].ps_transition_timer = get_time().val + PD_T_PS_TRANSITION; + pd_timer_enable(port, PE_TIMER_PS_TRANSITION, PD_T_PS_TRANSITION); } static void pe_snk_transition_sink_run(int port) @@ -3329,19 +3323,19 @@ static void pe_snk_transition_sink_run(int port) *pd_get_snk_caps(port)); set_state_pe(port, PE_SNK_READY); - return; - } - + } else { /* * Protocol Error */ set_state_pe(port, PE_SNK_HARD_RESET); } + return; + } /* * Timeout will lead to a Hard Reset */ - if (get_time().val > pe[port].ps_transition_timer && + if (pd_timer_is_expired(port, PE_TIMER_PS_TRANSITION) && pe[port].hard_reset_counter <= N_HARD_RESET_COUNT) { PE_SET_FLAG(port, PE_FLAGS_PS_TRANSITION_TIMEOUT); @@ -3359,6 +3353,8 @@ static void pe_snk_transition_sink_exit(int port) /* Set ceiling based on what's negotiated */ charge_manager_set_ceil(port, CEIL_REQUESTOR_PD, pe[port].curr_limit); + + pd_timer_disable(port, PE_TIMER_PS_TRANSITION); }
More debug pokes, for checking incoming subs and resubscribing to federating children.
lent=(lent grams.s) known=k mismatch=m + ?: =(a 'check subs') + ~& 'here are all incoming non-circle subs' + ~& ^- (list (pair ship path)) + %+ murn ~(tap by sup.bol) + |= {b/bone s/ship p/path} + ^- (unit (pair ship path)) + ?: ?=({$circle *} p) ~ + `[s p] + [~ +>] ?: =(a 'rebuild') ~& 'rebuilding message references...' =- [~ +>.$(stories -)] |= {n/name s/story} [n src.shape.s] [~ +>] + ?: =(`0 (find "re-listen " (trip a))) + ~& 're-listening' + :_ +> + :_ ~ + (wire-to-peer /report/(crip (slag 10 (trip a)))) [~ +>] --
ssse3: add NEON implementation of simde_mm_alignr_pi8
@@ -269,6 +269,15 @@ simde_mm_alignr_pi8 (simde__m64 a, simde__m64 b, const int count) { } #if defined(SIMDE_X86_SSSE3_NATIVE) && defined(SIMDE_X86_MMX_NATIVE) # define simde_mm_alignr_pi8(a, b, count) _mm_alignr_pi8(a, b, count) +#elif defined(SIMDE_ARM_NEON_A32V7_NATIVE) + #define simde_mm_alignr_pi8(a, b, count) \ + ( \ + ((count) > 15) \ + ? simde__m64_from_neon_i8(vdup_n_s8(0)) \ + : ( \ + ((count) > 7) \ + ? (simde__m64_from_neon_i8(vext_s8(simde__m64_to_neon_i8(a), vdup_n_s8(0), (count) & 7))) \ + : (simde__m64_from_neon_i8(vext_s8(simde__m64_to_neon_i8(b), simde__m64_to_neon_i8(a), ((count) & 7)))))) #endif #if defined(SIMDE_X86_SSSE3_ENABLE_NATIVE_ALIASES) # define _mm_alignr_pi8(a, b, count) simde_mm_alignr_pi8(a, b, count)
Define PSA_WANT definitions for all ECC curves Mirror the default non-PSA configuration by enabling all supported ECC curves.
#define PSA_WANT_ALG_TLS12_PSK_TO_MS 1 #define PSA_WANT_ALG_XTS 1 +#define PSA_WANT_ECC_BP256R1 1 +#define PSA_WANT_ECC_BP384R1 1 +#define PSA_WANT_ECC_BP512R1 1 +#define PSA_WANT_ECC_CURVE25519 1 +#define PSA_WANT_ECC_CURVE448 1 +#define PSA_WANT_ECC_SECP192K1 1 +#define PSA_WANT_ECC_SECP192R1 1 +#define PSA_WANT_ECC_SECP224K1 1 +#define PSA_WANT_ECC_SECP224R1 1 +#define PSA_WANT_ECC_SECP256K1 1 +#define PSA_WANT_ECC_SECP256R1 1 +#define PSA_WANT_ECC_SECP384R1 1 +#define PSA_WANT_ECC_SECP521R1 1 + #define PSA_WANT_KEY_TYPE_DERIVE 1 #define PSA_WANT_KEY_TYPE_HMAC 1 #define PSA_WANT_KEY_TYPE_AES 1
sm2_dupctx: Avoid potential use after free of the md
@@ -138,6 +138,8 @@ static void *sm2_dupctx(void *vpsm2ctx) return NULL; *dstctx = *srcctx; + memset(&dstctx->md, 0, sizeof(dstctx->md)); + if (dstctx->key != NULL && !EC_KEY_up_ref(dstctx->key)) { OPENSSL_free(dstctx); return NULL;
Enable serial only when required for Mynewt For Mynewt, when neither logging (the default option) nor serial boot is selected, avoid initializing the serial support system. This saves flash space when only basic bootloader functionality is used.
#include <console/console.h> #include "bootutil/image.h" #include "bootutil/bootutil.h" +#include "bootutil/bootutil_log.h" #define BOOT_AREA_DESC_MAX (256) #define AREA_DESC_MAX (BOOT_AREA_DESC_MAX) @@ -67,10 +68,14 @@ main(void) hal_bsp_init(); +#if defined(MCUBOOT_SERIAL) || defined(MCUBOOT_HAVE_LOGGING) /* initialize uart without os */ os_dev_initialize_all(OS_DEV_INIT_PRIMARY); sysinit(); console_blocking_mode(); +#else + flash_map_init(); +#endif #ifdef MCUBOOT_SERIAL /*
horadric update (282605911)
}, "horadric":{ "formula": { - "sandbox_id": 281520118, + "sandbox_id": 282605911, "match": "horadric" }, "executable": {
Remove IKEA devices from the duplicated sequence number check For IKEA the issue was already addressed in
@@ -4155,7 +4155,7 @@ void DeRestPluginPrivate::checkSensorButtonEvent(Sensor *sensor, const deCONZ::A if (zclFrame.sequenceNumber() == sensor->previousSequenceNumber) { // useful in general but limit scope to known problematic devices - if (sensor->manufacturer().startsWith(QLatin1String("IKEA")) || isTuyaManufacturerName(sensor->manufacturer())) + if (isTuyaManufacturerName(sensor->manufacturer())) { // deCONZ doesn't always send ZCL Default Response to unicast commands, or they can get lost. // in this case some devices re-send the command multiple times
bn/bn_lib.c: address Coverity nit in bn2binpad. It was a false positive, but one can as well view it as a readability issue. Switch even to unsigned indices, because % BN_BYTES takes 4-6 instructions with a signed dividend vs. 1 (one) with an unsigned one.
@@ -417,28 +417,27 @@ BIGNUM *BN_bin2bn(const unsigned char *s, int len, BIGNUM *ret) /* ignore negative */ static int bn2binpad(const BIGNUM *a, unsigned char *to, int tolen) { - int i, j, top; + int n; + size_t i, inc, lasti, j; BN_ULONG l; - i = BN_num_bytes(a); + n = BN_num_bytes(a); if (tolen == -1) - tolen = i; - else if (tolen < i) + tolen = n; + else if (tolen < n) return -1; - if (i == 0) { + if (n == 0) { OPENSSL_cleanse(to, tolen); return tolen; } - top = a->top * BN_BYTES; - for (i = 0, j = tolen; j > 0; i++) { - unsigned int mask; - - mask = constant_time_lt(i, top); - i -= 1 & ~mask; /* stay on top limb */ + lasti = n - 1; + for (i = 0, inc = 1, j = tolen; j > 0;) { l = a->d[i / BN_BYTES]; - to[--j] = (unsigned char)(l >> (8 * (i % BN_BYTES)) & mask); + to[--j] = (unsigned char)(l >> (8 * (i % BN_BYTES)) & (0 - inc)); + inc = (i - lasti) >> (8 * sizeof(i) - 1); + i += inc; /* stay on top limb */ } return tolen;
Mapping the whole buffer doesn't mean we can invalidate it;
@@ -269,7 +269,6 @@ VertexPointer lovrMeshMap(Mesh* mesh, int start, size_t count, bool read, bool w GLbitfield access = 0; access |= read ? GL_MAP_READ_BIT : 0; access |= write ? GL_MAP_WRITE_BIT : 0; - access |= (write && start == 0 && count == mesh->vertexData->count) ? GL_MAP_INVALIDATE_BUFFER_BIT : 0; lovrGraphicsBindVertexBuffer(mesh->vbo); VertexPointer pointer; pointer.raw = glMapBufferRange(GL_ARRAY_BUFFER, start * stride, count * stride, access);
Add file from previous commit
@@ -202,10 +202,6 @@ VOID PhMwpDispatchMenuCommand( _In_ ULONG_PTR ItemData ); -HBITMAP PhMwpGetShieldBitmap( - VOID - ); - VOID PhMwpInitializeSubMenu( _In_ PPH_EMENU Menu, _In_ ULONG Index
ks: Handle null array in ks{Cut,Append} Fix allocation size in ksAppendKey
@@ -930,7 +930,7 @@ ssize_t ksAppendKey (KeySet * ks, Key * toAppend) // If array was not allocated before if (newSize == 0) - newSize = KEYSET_SIZE; + newSize = KEYSET_SIZE - 1; else newSize = newSize - 1; @@ -1001,9 +1001,15 @@ ssize_t ksAppend (KeySet * ks, const KeySet * toAppend) if (!toAppend) return -1; if (toAppend->size == 0) return ks->size; + if (toAppend->array == NULL) return ks->size; + + if (ks->array == NULL) + toAlloc = KEYSET_SIZE; + else + toAlloc = ks->alloc; /* Do only one resize in advance */ - for (toAlloc = ks->alloc; ks->size + toAppend->size >= toAlloc; toAlloc *= 2) + for (; ks->size + toAppend->size >= toAlloc; toAlloc *= 2) ; ksResize (ks, toAlloc - 1); @@ -1189,6 +1195,7 @@ KeySet * ksCut (KeySet * ks, const Key * cutpoint) int set_cursor = 0; if (!ks) return 0; + if (!ks->array) return 0; if (!cutpoint) return 0; char * name = cutpoint->key; @@ -1282,7 +1289,7 @@ KeySet * ksCut (KeySet * ks, const Key * cutpoint) returned = ksNew (newsize, KS_END); elektraMemcpy (returned->array, ks->array + found, newsize); returned->size = newsize; - returned->array[returned->size] = 0; + if (returned->size > 0) returned->array[returned->size] = 0; ksCopyInternal (ks, found, it);
Remove some stupid shell gymnastics in Makefile.
@@ -35,6 +35,7 @@ JANET_STATIC_LIBRARY=build/libjanet.a JANET_PATH?=$(LIBDIR)/janet JANET_MANPATH?=$(PREFIX)/share/man/man1/ JANET_PKG_CONFIG_PATH?=$(LIBDIR)/pkgconfig +JANET_DIST_DIR?=janet-dist DEBUGGER=gdb SONAME_SETTER=-Wl,-soname, @@ -223,8 +224,7 @@ dist: build/janet-dist.tar.gz build/janet-%.tar.gz: $(JANET_TARGET) \ build/janet.h \ jpm.1 janet.1 LICENSE CONTRIBUTING.md $(JANET_LIBRARY) $(JANET_STATIC_LIBRARY) \ - build/doc.html README.md build/c/janet.c build/c/shell.c jpm - $(eval JANET_DIST_DIR = "janet-$(shell basename $*)") + README.md build/c/janet.c build/c/shell.c jpm mkdir -p build/$(JANET_DIST_DIR)/bin cp $(JANET_TARGET) build/$(JANET_DIST_DIR)/bin/ cp jpm build/$(JANET_DIST_DIR)/bin/ @@ -236,17 +236,8 @@ build/janet-%.tar.gz: $(JANET_TARGET) \ cp janet.1 jpm.1 build/$(JANET_DIST_DIR)/man/man1/ mkdir -p build/$(JANET_DIST_DIR)/src/ cp build/c/janet.c build/c/shell.c build/$(JANET_DIST_DIR)/src/ - cp CONTRIBUTING.md build/doc.html LICENSE README.md build/$(JANET_DIST_DIR)/ - cd build && tar -czvf ../$@ $(JANET_DIST_DIR) - -######################### -##### Documentation ##### -######################### - -docs: build/doc.html - -build/doc.html: $(JANET_TARGET) tools/gendoc.janet - $(JANET_TARGET) tools/gendoc.janet > build/doc.html + cp CONTRIBUTING.md LICENSE README.md build/$(JANET_DIST_DIR)/ + cd build && tar -czvf ../$@ ./$(JANET_DIST_DIR) ######################## ##### Installation #####
Fixed issue that caused incorrect debug display of Range along vertical axis. Default minimum range on Y axis is now 0 (previously -1000).
@@ -10335,9 +10335,9 @@ s_model *load_cached_model(char *name, char *owner, char unload) newanim->range.x.max = (int)newchar->jumpheight * 20; // 30-12-2004 default range affected by jump height newanim->range.z.min = (int) - newchar->grabdistance / 3; //zmin newanim->range.z.max = (int)newchar->grabdistance / 3; //zmax - newanim->range.y.min = T_MIN_BASEMAP; //amin + newanim->range.y.min = 0; //amin newanim->range.y.max = (int)newchar->jumpheight * 20; // Same logic as X. Good for attacks, but not terrian. Author better remember to add jump ranges. - newanim->range.base.min = T_MIN_BASEMAP; // Base min. + newanim->range.base.min = 0; // Base min. newanim->range.base.max = (int)newchar->jumpheight * 20; // Just use same logic as range Y. newanim->energycost = NULL; newanim->chargetime = 2; // Default for backwards compatibility @@ -16567,6 +16567,9 @@ void draw_visual_debug() s_drawmethod drawmethod = plainmethod; entity *entity; + int range_y_min = 0; + int range_y_max = 0; + drawmethod.alpha = BLEND_MODE_ALPHA; for(i=0; i<ent_max; i++) @@ -16594,7 +16597,17 @@ void draw_visual_debug() // Range debug requested? if(savedata.debuginfo & DEBUG_DISPLAY_RANGE) { - draw_box_on_entity(entity, entity->animation->range.x.min, entity->animation->range.y.min, entity->position.z+1, entity->animation->range.x.max, entity->animation->range.y.max, -1, LOCAL_COLOR_GREEN, &drawmethod); + // Range is calculated a bit differently than body/attack + // boxes, which is what the draw_box_on_entity() funciton + // is meant for. For Y axis, We need to invert the value, + // and place them in opposiing parameters (Max Y into + // function's min Y parameter, and and min Y into function's + // max Y parameter). + + range_y_min = -entity->animation->range.y.min; + range_y_max = -entity->animation->range.y.max; + + draw_box_on_entity(entity, entity->animation->range.x.min, range_y_max, entity->position.z+1, entity->animation->range.x.max, range_y_min, -1, LOCAL_COLOR_GREEN, &drawmethod); } // Collision body debug requested?
build: bump to 0.12.2
@@ -22,7 +22,7 @@ set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/library") # Fluent Bit Version set(FLB_VERSION_MAJOR 0) set(FLB_VERSION_MINOR 12) -set(FLB_VERSION_PATCH 1) +set(FLB_VERSION_PATCH 2) set(FLB_VERSION_STR "${FLB_VERSION_MAJOR}.${FLB_VERSION_MINOR}.${FLB_VERSION_PATCH}") # Build Options
HW: Fixing ack generation in alignment error cases
@@ -421,7 +421,7 @@ BEGIN -- -- MMIO CFG READ -- - IF mmio_read_cfg_access_q = '1' THEN + IF (mmio_read_cfg_access_q AND NOT mmio_read_alignment_error_q) = '1' THEN -- acknowledge read request ah_mm_read_ack_q <= '1'; @@ -512,7 +512,7 @@ BEGIN -- MMIO READ -- valid read request that is not targeting the action -- - IF (mmio_read_access_q AND NOT mmio_action_access_q) = '1' THEN + IF (mmio_read_access_q AND NOT (mmio_action_access_q OR mmio_read_alignment_error_q)) = '1' THEN -- acknowledge read request mmio_read_ack_q0 <= '1'; mmio_master_read_q0 <= mmio_read_master_access_q; @@ -648,7 +648,7 @@ BEGIN END IF; -- (mmio_read_access_q AND NOT mmio_action_access_q) = '1' - IF (mmio_read_action_access_q = '1') THEN + IF (mmio_read_action_access_q AND NOT mmio_read_alignment_error_q) = '1' THEN mmio_read_action_outstanding_q <= '1'; END IF; @@ -827,7 +827,7 @@ BEGIN -- MMIO WRITE -- valid write request that is not targeting the action -- - IF (mmio_write_access_q AND NOT mmio_action_access_q) = '1' THEN + IF (mmio_write_access_q AND NOT (mmio_action_access_q OR mmio_write_alignment_error_q)) = '1' THEN CASE to_integer(unsigned(ha_mm_w_q.ad(13 DOWNTO 5))) IS -- TODO: master access with bits 22:14 not zero? -- --
Fixes lux value if lightlevel is 0 Fixes If `lightlevel=0`, the `lux` value was not set. Also, `lux=0` was no longer possible.
@@ -4,9 +4,9 @@ const measuredValue = Attr.val; R.item('state/dark').val = measuredValue <= tholddark; R.item('state/daylight').val = measuredValue >= tholddark + tholdoffset; -if (measuredValue > 0 && measuredValue < 0xffff) { +if (measuredValue >= 0 && measuredValue < 0xffff) { const exp = measuredValue - 1; const l = Math.pow(10, exp / 10000.0) + 0.5; - R.item('state/lux').val = Math.round(l); + R.item('state/lux').val = Math.floor(l); } Item.val = measuredValue;
nrf/Makefile: Improve user C modules support. Add CFLAGS_EXTRA to CFLAGS. Include LDFLAGS_MOD in the compilation. And add SRC_MOD to SRC_QSTR.
@@ -113,7 +113,7 @@ endif CFLAGS += $(CFLAGS_MCU_$(MCU_SERIES)) -CFLAGS += $(INC) -Wall -Werror -g -ansi -std=c11 -nostdlib $(COPT) $(NRF_DEFINES) $(CFLAGS_MOD) +CFLAGS += $(INC) -Wall -Werror -g -ansi -std=c11 -nostdlib $(COPT) $(NRF_DEFINES) $(CFLAGS_MOD) $(CFLAGS_EXTRA) CFLAGS += -fno-strict-aliasing CFLAGS += -Iboards/$(BOARD) CFLAGS += -DNRF5_HAL_H='<$(MCU_VARIANT)_hal.h>' @@ -445,11 +445,11 @@ flash: deploy $(BUILD)/$(OUTPUT_FILENAME).elf: $(OBJ) $(ECHO) "LINK $@" - $(Q)$(CC) $(LDFLAGS) -o $@ $(OBJ) $(LIBS) + $(Q)$(CC) $(LDFLAGS) -o $@ $(OBJ) $(LDFLAGS_MOD) $(LIBS) $(Q)$(SIZE) $@ # List of sources for qstr extraction -SRC_QSTR += $(SRC_C) $(SRC_LIB) $(DRIVERS_SRC_C) $(SRC_BOARD_MODULES) +SRC_QSTR += $(SRC_C) $(SRC_LIB) $(DRIVERS_SRC_C) $(SRC_BOARD_MODULES) $(SRC_MOD) # Append any auto-generated sources that are needed by sources listed in # SRC_QSTR
Fix a memory leak in tls_parse_stoc_key_share
@@ -1830,6 +1830,7 @@ int tls_parse_stoc_key_share(SSL *s, PACKET *pkt, unsigned int context, X509 *x, skey = EVP_PKEY_new(); if (skey == NULL || EVP_PKEY_copy_parameters(skey, ckey) <= 0) { SSLfatal(s, SSL_AD_INTERNAL_ERROR, SSL_R_COPY_PARAMETERS_FAILED); + EVP_PKEY_free(skey); return 0; }
perf-tools/paraver: tweak boost requirements, just call out serialization library on sles to avoid pulling system openmpi
@@ -23,12 +23,14 @@ Group: %{PROJ_NAME}/perf-tools URL: https://tools.bsc.es Source0: https://ftp.tools.bsc.es/wxparaver/wxparaver-%{version}-src.tar.bz2 -BuildRequires: boost-devel + BuildRequires: bison %if 0%{?suse_version} +BuildRequires: libboost_serialization1_54_0 BuildRequires: flex BuildRequires: wxGTK3-3_2-devel %else +BuildRequires: boost-devel BuildRequires: flex-devel BuildRequires: wxGTK3-devel %endif
include/driver/als_tcs3400_public.h: Format with clang-format BRANCH=none TEST=none
@@ -60,7 +60,8 @@ struct tcs_saturation_t { /* tcs3400 rgb als driver data */ struct tcs3400_rgb_drv_data_t { - uint8_t calibration_mode;/* 0 = normal run mode, 1 = calibration mode */ + uint8_t calibration_mode; /* 0 = normal run mode, 1 = calibration mode + */ struct rgb_calibration_t calibration; struct tcs_saturation_t saturation; /* saturation adjustment */
VCL: drain the vpp app event queue.
@@ -2850,6 +2850,19 @@ vppcom_session_read_ready (session_t * session, u32 session_index) } } rv = ready; + + if (vcm->app_event_queue->cursize && + !pthread_mutex_trylock (&vcm->app_event_queue->mutex)) + { + u32 i, n_to_dequeue = vcm->app_event_queue->cursize; + session_fifo_event_t e; + + for (i = 0; i < n_to_dequeue; i++) + unix_shared_memory_queue_sub_raw (vcm->app_event_queue, (u8 *) & e); + + pthread_mutex_unlock (&vcm->app_event_queue->mutex); + } + done: return rv; }
Shortcut when we find either side is nullable.
@@ -1530,18 +1530,20 @@ convert_IN_to_antijoin(PlannerInfo *root, SubLink *sublink, int subq_indx = add_notin_subquery_rte(parse, subselect); List *inner_exprs = NIL; List *outer_exprs = NIL; - bool inner_nullable = true; - bool outer_nullable = true; + bool nullable = true; + JoinExpr *join_expr = make_join_expr(NULL, subq_indx, JOIN_LASJ_NOTIN); + + join_expr->quals = make_lasj_quals(root, sublink, subq_indx); inner_exprs = fetch_targetlist_exprs(subselect->targetList); + nullable = is_exprs_nullable((Node *) inner_exprs, subselect); + if (!nullable) + { outer_exprs = fetch_outer_exprs(sublink->testexpr); - inner_nullable = is_exprs_nullable((Node *) inner_exprs, subselect); - outer_nullable = is_exprs_nullable((Node *) outer_exprs, parse); - - JoinExpr *join_expr = make_join_expr(NULL, subq_indx, JOIN_LASJ_NOTIN); + nullable = is_exprs_nullable((Node *) outer_exprs, parse); + } - join_expr->quals = make_lasj_quals(root, sublink, subq_indx); - if (inner_nullable || outer_nullable) + if (nullable) join_expr->quals = add_null_match_clause(join_expr->quals); return join_expr;
firfilt: fixing typo in error statement with copy()
@@ -295,7 +295,7 @@ FIRFILT() FIRFILT(_copy)(FIRFILT() q_orig) { // validate input if (q_orig == NULL) - return liquid_error_config("firfilt_%s_create(), filter object cannot be NULL", EXTENSION_FULL); + return liquid_error_config("firfilt_%s_copy(), object cannot be NULL", EXTENSION_FULL); // create filter object and copy base parameters FIRFILT() q_copy = (FIRFILT()) malloc(sizeof(struct FIRFILT(_s)));
mangle: no need for multiple mangle_Shrink
@@ -853,9 +853,6 @@ void mangle_mangleContent(run_t* run, int speed_factor) { static void (*const mangleFuncs[])(run_t * run, bool printable) = { /* Every *Insert or Expand expands file, so add more Shrink's */ mangle_Shrink, - mangle_Shrink, - mangle_Shrink, - mangle_Shrink, mangle_Expand, mangle_Bit, mangle_IncByte,
Add repo path env var to make release commands easier to run.
# Release Build Instructions +## Set location of the `pgbackrest` repo + +This makes the rest of the commands in the document easier to run (change to your repo path): +``` +export PGBR_REPO=/backrest +``` + ## Create a branch to test the release ``` @@ -28,12 +35,12 @@ to: ## Build release documentation. Be sure to install latex using the instructions from the Vagrantfile before running this step. ``` -doc/release.pl --build +${PGBR_REPO?}/doc/release.pl --build ``` ## Update code counts ``` -test/test.pl --code-count +${PGBR_REPO?}/test/test.pl --code-count ``` ## Commit release branch and push to CI for testing @@ -44,13 +51,13 @@ git push origin release-ci ## Clone web documentation into `doc/site` ``` -cd doc +cd ${PGBR_REPO?}/doc git clone [email protected]:pgbackrest/website.git site ``` ## Deploy web documentation to `doc/site` ``` -doc/release.pl --deploy +${PGBR_REPO?}/doc/release.pl --deploy ``` ## Final commit of release to integration @@ -103,7 +110,7 @@ The first line will be the release title and the rest will be the body. The tag ## Push web documentation to master and deploy ``` -cd doc/site +cd ${PGBR_REPO?}/doc/site git commit -m "v2.14 documentation." git push origin master ``` @@ -132,12 +139,12 @@ to: Run deploy to generate git history (ctrl-c as soon as the file is generated): ``` -doc/release.pl --build +${PGBR_REPO?}/doc/release.pl --build ``` Build to generate files and test documentation: ``` -test/test.pl --vm=u18 --build-only +${PGBR_REPO?}/test/test.pl --vm=u18 --build-only ``` Commit and push to integration:
additions to example-vlsi for asap7 demo
@@ -8,12 +8,41 @@ from typing import Dict, Callable, Optional, List def example_place_tap_cells(x: hammer_vlsi.HammerTool) -> bool: x.append(''' # TODO +# Place custom TCL here +''') + return True + +def example_add_fillers(x: hammer_vlsi.HammerTool) -> bool: + x.append(''' +# TODO +# Place custom TCL here +''') + return True + +def example_tool_settings(x: hammer_vlsi.HammerTool) -> bool: + x.append(''' +# TODO +# Place custom TCL here ''') return True class ExampleDriver(CLIDriver): def get_extra_par_hooks(self) -> List[HammerToolHookAction]: - return [hammer_vlsi.HammerTool.make_replacement_hook("place_tap_cells", example_place_tap_cells)] + extra_hooks = [ + + # Default set of steps can be found in the CAD tool plugin's __init__.py + + # make_pre_insertion_hook will execute the custom hook before the specified step + hammer_vlsi.HammerTool.make_pre_insertion_hook("route_design", example_add_fillers), # SYNTAX: make_pre_insertion_hook("EXISTING_STEP", INSERTED_HOOK) + # make_post_insertion_hook will execute the custom hook after the specified step + hammer_vlsi.HammerTool.make_post_insertion_hook("init_design", example_tool_settings), + # make_replacement_hook will replace the specified step with a custom hook + hammer_vlsi.HammerTool.make_replacement_hook("place_tap_cells", example_place_tap_cells), + # make_removal_hook will remove the specified step from the flow + hammer_vlsi.HammerTool.make_removal_hook("place_bumps") + # The target step in any of the above calls may be a default step or another one of your custom hooks + ] + return extra_hooks if __name__ == '__main__': ExampleDriver().main()
Allow 0ms activity threshold and wake up time for ALRU
@@ -27,7 +27,7 @@ enum ocf_cleaning_alru_parameters { */ /** Wake up time minimum value */ -#define OCF_ALRU_MIN_WAKE_UP 1 +#define OCF_ALRU_MIN_WAKE_UP 0 /** Wake up time maximum value */ #define OCF_ALRU_MAX_WAKE_UP 3600 /** Wake up time default value */ @@ -60,7 +60,7 @@ enum ocf_cleaning_alru_parameters { */ /** Idle time before flushing thread can start minimum value */ -#define OCF_ALRU_MIN_ACTIVITY_THRESHOLD 500 +#define OCF_ALRU_MIN_ACTIVITY_THRESHOLD 0 /** Idle time before flushing thread can start maximum value */ #define OCF_ALRU_MAX_ACTIVITY_THRESHOLD 1000000 /** Idle time before flushing thread can start default value */
Remove some parens
@@ -28,7 +28,7 @@ install: - call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Community\VC\Auxiliary\Build\vcvars32.bat" - build_win test-install - set janet_outname=%appveyor_repo_tag_name% - - if %janet_outname%=="" (set janet_outname=v1.2.0) + - if %janet_outname%=="" set janet_outname=v1.2.0 - echo %janet_outname% build: off
OSSL_PROVIDER_set_default_search_path() return value CLA: trivial
@@ -18,7 +18,7 @@ OSSL_PROVIDER_self_test typedef struct ossl_provider_st OSSL_PROVIDER; - void OSSL_PROVIDER_set_default_search_path(OSSL_LIB_CTX *libctx, + int OSSL_PROVIDER_set_default_search_path(OSSL_LIB_CTX *libctx, const char *path); OSSL_PROVIDER *OSSL_PROVIDER_load(OSSL_LIB_CTX *libctx, const char *name); @@ -157,7 +157,8 @@ L<provider-base(7)/CAPABILTIIES>. =head1 RETURN VALUES -OSSL_PROVIDER_add(), OSSL_PROVIDER_unload(), OSSL_PROVIDER_get_params() and +OSSL_PROVIDER_set_default_search_path(), OSSL_PROVIDER_add(), +OSSL_PROVIDER_unload(), OSSL_PROVIDER_get_params() and OSSL_PROVIDER_get_capabilities() return 1 on success, or 0 on error. OSSL_PROVIDER_load() and OSSL_PROVIDER_try_load() return a pointer to a
Hexcolor Plugin: Add highlevel tests for expansion
@@ -46,6 +46,25 @@ kdb set user/tests/color/hex "#aabbccdd" kdb get user/tests/color/hex #> \xaa\xbb\xcc\xdd +kdb set user/tests/color/hex2 "#abc" +kdb setmeta user/tests/color/hex2 check/hexcolor any + +# Expanded to rgba: #aabbccff +kdb get user/tests/color/hex2 +#> \xaa\xbb\xcc\xff + +kdb set user/tests/color/hex2 "#abcd" + +# Expanded to rgba: #aabbccdd +kdb get user/tests/color/hex2 +#> \xaa\xbb\xcc\xdd + +kdb set user/tests/color/hex2 "#aabbcc" + +# Expanded to rgba: #aabbccff +kdb get user/tests/color/hex2 +#> \xaa\xbb\xcc\xff + # Try to set incorrect value kdb set user/tests/color/hex fff # RET: 5
btc-wallet: import pre-dist settings as needed If there are btc-wallet settings stored under the landscape desk, reimport them if we haven't written them to the bitcoin desk yet. Additionally, removes the bitcoin settings from the landscape desks.
$% state-0 state-1 state-2 + state-3 == :: +$ state-0 :: +$ state-1 [%1 base-state] +$ state-2 [%2 base-state] ++$ state-3 [%3 base-state] -- -=| state-2 +=| state-3 =* state - %- agent:dbug ^- agent:gall :- cards %_ this state - :* %2 + :* %3 ~ *(map xpub:bc walt) *^btc-state =| cards=(list card) |- ?- -.ver - %2 + %3 [cards this(state ver)] + :: + %2 + =- $(-.ver %3, cards (weld cards -)) + ^- (list card) + =/ bas=path /(scot %p our.bowl)/settings-store/(scot %da now.bowl) + ?. .^(? %gu bas) + ~& [dap.bowl %settings-store-mia] + ~ + ?. .^(? %gx (weld bas /has-bucket/landscape/btc-wallet/noun)) + ~ + =/ dat + .^(data:settings %gx (weld bas /bucket/landscape/btc-wallet/noun)) + ?> ?=(%bucket -.dat) + |^ :- =/ del=event:settings [%del-bucket %landscape %btc-wallet] + (poke-our:hc %settings-store %settings-event !>(del)) + %- zing + %+ turn ~(tap by bucket.dat) + (cork copy-if-missing drop) + :: + ++ copy-if-missing + |= [=key:settings =val:settings] + ^- (unit card) + =/ hav=? + .^(? %gx (weld bas /has-entry/[q.byk.bowl]/btc-wallet/[key]/noun)) + ?: hav ~ + ~& [dap.bowl %importing-previous-setting key] + =/ put=event:settings [%put-entry q.byk.bowl %btc-wallet key val] + `(poke-our:hc %settings-store %settings-event !>(put)) + -- :: %1 =? cards ?=(^ prov.ver) :: %tx-info :: TODO: why do we get a nest-fail when using =^ ? - =/ [cards=(list card) sty=state-2] + =/ [cards=(list card) sty=state-3] (handle-tx-info:hc info.p.upd) :_ sty :_ cards
libcupsfilters: In universal() error on invalid input/output formats If invalid input and/or output formats get provided as parameters, the universal() filter function simply silently chooses valid formats instead of erroring out. This is fixed with this commit.
@@ -30,6 +30,7 @@ universal(int inputfd, /* I - File descriptor input stream */ filter_input_output_format_t input_output_format; filter_logfunc_t log = data->logfunc; void *ld = data->logdata; + int ret = 0; input_output_format = *(filter_input_output_format_t *)parameters; input = input_output_format.input_format; @@ -207,6 +208,12 @@ universal(int inputfd, /* I - File descriptor input stream */ if (log) log(ld, FILTER_LOGLEVEL_DEBUG, "universal: Adding %s to chain", filter->name); } + else if (!strstr(input_type, "pdf")) + { + // Input format is not PDF and unknown -> Error + ret = 1; + goto out; + } } if (((strcmp(input_super, "image") && strcmp(input_type, "vnd.adobe-reader-postscript")) || @@ -294,11 +301,25 @@ universal(int inputfd, /* I - File descriptor input stream */ "universal: Adding %s to chain", filter->name); cupsArrayAdd(filter_chain, filter); } + else + { + // Output format is not PDF and unknown -> Error + ret = 1; + goto out; + } } } } - int ret = filterChain(inputfd, outputfd, inputseekable, data, filter_chain); + out: + + if (ret) { + if (log) log(ld, FILTER_LOGLEVEL_ERROR, + "universal: Unsupported combination of input and output formats: %s -> %s", + input, output); + } + else + ret = filterChain(inputfd, outputfd, inputseekable, data, filter_chain); free(input_super); free(input_type);
Fix path separators.
(redef "ext" "posix/ext") (decl-sep "posix" "/") -(decl-delim "posix" ";") +(decl-delim "posix" ":") (decl-last-sep "posix" "/") (decl-basename "posix") (decl-parts "posix" "/") (redef "ext" "win32/ext") (decl-sep "win32" "\\") -(decl-delim "win32" ":") +(decl-delim "win32" ";") (decl-last-sep "win32" "\\") (decl-basename "win32") (decl-parts "win32" "\\")
vere: drop bail:evil events without error notifications
@@ -565,6 +565,14 @@ _serf_work(u3_serf* sef_u, c3_w mil_w, u3_noun job) sef_u->mug_l, vir)); } + // event rejected -- bad ciphertext + // + else if ( c3__evil == u3h(gon) ) { + sef_u->sen_d = sef_u->dun_d; + + u3z(job); + return u3nt(c3__bail, gon, u3_nul); + } // event rejected // else {
Add permissions to clear script in dockerfile.
@@ -32,7 +32,8 @@ WORKDIR $METACALL_PATH COPY . $METACALL_PATH # Configure MetaCall build tool script -RUN chmod 500 $METACALL_PATH/tools/metacall-build.sh +RUN chmod 500 $METACALL_PATH/tools/metacall-build.sh \ + && chmod 500 $METACALL_PATH/tools/metacall-clear.sh # Build and install MetaCall libraries then run tests RUN mkdir -p $METACALL_PATH/build \
Add test of multi-byte encoding without a conversion function At this commit, the tests seg fault
@@ -5266,6 +5266,44 @@ START_TEST(test_comment_handled_in_default) #undef COMMENT_TEXT END_TEST +/* Test that the unknown encoding handler with map entries that expect + * conversion but no conversion function is faulted + */ +static int XMLCALL +BadEncodingHandler(void *data, + const XML_Char *UNUSED_P(encoding), + XML_Encoding *info) +{ + int i; + + for (i = 0; i < 128; ++i) + info->map[i] = i; + for (; i < 256; ++i) + info->map[i] = -2; /* A 2-byte sequence */ + info->data = NULL; + info->convert = (int (XMLCALL *)(void *, const char *))data; + info->release = NULL; + return XML_STATUS_OK; +} + +START_TEST(test_missing_encoding_conversion_fn) +{ + const char *text = + "<?xml version='1.0' encoding='experimental'?>\n" + "<doc>\x81</doc>"; + + XML_SetUnknownEncodingHandler(parser, BadEncodingHandler, NULL); + /* BadEncodingHandler sets up an encoding with every top-bit-set + * character introducing a two-byte sequence. For this, it + * requires a convert function. The above function call doesn't + * pass one through, so when BadEncodingHandler actually gets + * called it should supply an invalid encoding. + */ + expect_failure(text, XML_ERROR_UNKNOWN_ENCODING, + "Encoding with missing convert() not faulted"); +} +END_TEST + /* * Namespaces tests. @@ -10523,6 +10561,7 @@ make_suite(void) tcase_add_test(tc_basic, test_invalid_character_entity); tcase_add_test(tc_basic, test_pi_handled_in_default); tcase_add_test(tc_basic, test_comment_handled_in_default); + tcase_add_test(tc_basic, test_missing_encoding_conversion_fn); suite_add_tcase(s, tc_namespace); tcase_add_checked_fixture(tc_namespace,
Write-All Permissions for WAN Perf Merge Job
@@ -122,8 +122,7 @@ jobs: name: ${{ format('logs.{0}mbps.{1}ms', matrix.rate, matrix.rtt) }} path: artifacts/logs/wanperf/*.etl merge-data: - permissions: - contents: read # for actions/checkout to fetch code + permissions: write-all name: Merge Results runs-on: windows-2022 needs: wan-perf
components/confirm: improved centering Fix centering algorithm - before it would center the text in the whole screen. Now it is centering it in the body part, which is the full screen without the title bar. This leads to more natural looking positioning.
@@ -71,6 +71,12 @@ static const component_functions_t _component_functions = { .on_event = _on_event, }; +static const component_functions_t _body_container_functions = { + .cleanup = ui_util_component_cleanup, + .render = ui_util_component_render_subcomponents, + .on_event = ui_util_on_event_noop, +}; + /********************************** Create Instance **********************************/ component_t* confirm_create( @@ -108,15 +114,34 @@ component_t* confirm_create( } slider_location_t slider_position = top_slider; - // Create labels + // Create labels. We nest them in a body component that covers the screen minus the title bar, + // so that the CENTER positioning starts below the title bar. + component_t* body_container = malloc(sizeof(component_t)); + if (!body_container) { + Abort("Error: malloc confirm"); + } + memset(body_container, 0, sizeof(component_t)); + + component_t* title_component = label_create(params->title, NULL, CENTER_TOP, confirm); + ui_util_add_sub_component(confirm, title_component); + + body_container->position.left = 0; + // title bar height plus small padding + body_container->position.top = title_component->dimension.height + 1; + body_container->dimension.width = SCREEN_WIDTH; + body_container->dimension.height = SCREEN_HEIGHT - body_container->position.top; + body_container->f = &_body_container_functions; + ui_util_add_sub_component(confirm, body_container); + if (params->scrollable) { ui_util_add_sub_component( - confirm, label_create_scrollable(params->body, params->font, CENTER, confirm)); + body_container, + label_create_scrollable(params->body, params->font, CENTER, body_container)); } else { ui_util_add_sub_component( - confirm, label_create(params->body, params->font, CENTER, confirm)); + body_container, label_create(params->body, params->font, CENTER, body_container)); } - ui_util_add_sub_component(confirm, label_create(params->title, NULL, CENTER_TOP, confirm)); + // Create buttons if (!params->accept_only) { ui_util_add_sub_component(
Return a non-zero value if some of the segments fail to do incremental recovery in gprecoverseg. This helps script handling, since scripts can check the return value.
@@ -304,6 +304,7 @@ class GpMirrorListToBuild: # Disable Ctrl-C, going to save metadata in database and transition segments signal.signal(signal.SIGINT, signal.SIG_IGN) + rewindFailedSegments = [] try: self.__logger.info("Updating configuration with new mirrors") configInterface.getConfigurationProvider().updateSystemConfig( @@ -329,6 +330,9 @@ class GpMirrorListToBuild: # Re-enable Ctrl-C signal.signal(signal.SIGINT, signal.default_int_handler) + if len(rewindFailedSegments) != 0: + return False + return start_all_successful def run_pg_rewind(self, rewindInfo):
Changed signature of function
@@ -81,7 +81,7 @@ static void sut_pubSet(void *handle, void *service) { } -static int tst_receive(void *handle, const char *msgType, unsigned int msgTypeId, void * voidMsg, celix_properties_t *metadata, bool *release) { +static int tst_receive(void *handle, const char *msgType, unsigned int msgTypeId, void * voidMsg, const celix_properties_t *metadata, bool *release) { struct activator *act =handle; msg_t *msg = voidMsg; msg_t send_msg = *msg;
[bsp][stm32] Add HAL_TIM_Base_Init
@@ -327,6 +327,12 @@ static rt_err_t stm32_hw_pwm_init(struct stm32_pwm *device) tim->Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_DISABLE; #endif + if (HAL_TIM_Base_Init(tim) != HAL_OK) + { + LOG_E("%s pwm init failed", device->name); + result = -RT_ERROR; + goto __exit; + } if (HAL_TIM_PWM_Init(tim) != HAL_OK) { LOG_E("%s pwm init failed", device->name);
interface: handle undefined group metadata correctly
@@ -56,8 +56,11 @@ export function parentPath(path: string) { * string -> enabled feed */ export function getFeedPath(association: Association): string | null | undefined { - const { metadata = { config: {} } } = association; - if (metadata.config && 'group' in metadata?.config && metadata.config?.group) { + const metadata = association?.metadata; + if(!metadata) { + return undefined; + } + if (metadata?.config && 'group' in metadata?.config && metadata.config?.group) { if ('resource' in metadata.config.group) { return metadata.config.group.resource; }
papi: truncate long logger messages Dumping the whole cli_inband output causes huge unformatted messages to be written to the logger, so truncate these to avoid that. Type: fix
@@ -665,7 +665,10 @@ class VPPApiClient(object): self.transport.resume() - self.logger.debug('Return from {!r}'.format(r)) + s = 'Return value: {!r}'.format(r) + if len(s) > 80: + s = s[:80] + "..." + self.logger.debug(s) return rl def _call_vpp_async(self, i, msg, **kwargs):
Remove esp_set_default_server_callback function. Set the callback function as a parameter to the esp_set_server function instead.
@@ -215,19 +215,6 @@ esp_set_server(esp_port_t port, uint16_t max_conn, uint16_t timeout, esp_cb_fn c return espi_send_msg_to_producer_mbox(&ESP_MSG_VAR_REF(msg), espi_initiate_cmd, blocking, 1000); /* Send message to producer queue */ } -/** - * \brief Set default callback function for incoming server connections - * \param[in] cb_func: Callback function. Set to NULL to use default ESP callback function - * \return \ref espOK on success, member of \ref espr_t enumeration otherwise - */ -espr_t -esp_set_default_server_callback(esp_cb_fn cb_func) { - ESP_CORE_PROTECT(); /* Protect system */ - esp.cb_server = cb_func != NULL ? cb_func : esp.cb_func->fn;/* Set default callback */ - ESP_CORE_UNPROTECT(); /* Unprotect system */ - return espOK; -} - #if ESP_CFG_DNS || __DOXYGEN__ /**
fix coverity warning in bier
@@ -236,11 +236,11 @@ vl_api_bier_route_add_del_t_handler (vl_api_bier_route_add_del_t * mp) if (mp->br_is_add) { - bier_table_route_add(&bti, ntohs(mp->br_bp), brpaths); + bier_table_route_add(&bti, bp, brpaths); } else { - bier_table_route_remove(&bti, ntohs(mp->br_bp), brpaths); + bier_table_route_remove(&bti, bp, brpaths); } done:
BugID:18494941: fix compile app@esp8266 error on windows
EXTRA_POST_BUILD_TARGETS += gen_crc_bin OTA_BIN_OUTPUT_FILE := $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINTYPE_LOWER)$(BIN_OUTPUT_SUFFIX) + +ifeq ($(HOST_OS),Win32) + SETENV = set +else + SETENV = export +endif + gen_crc_bin: - cd $(OUTPUT_DIR)/binary/; \ - $(OBJCOPY) --only-section .text -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.text.bin; \ - $(OBJCOPY) --only-section .data -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.data.bin; \ - $(OBJCOPY) --only-section .rodata -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.rodata.bin; \ - $(OBJCOPY) --only-section .irom0.text -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.irom0text.bin; \ - export ESP8266_NM=${NM}; \ - python2 ../../../platform/mcu/esp8266/tools/gen_appbin.py $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) 2 1 15 5; \ - unset ESP8266_NM; \ - mv eagle.app.flash.bin $(OTA_BIN_OUTPUT_FILE); \ - rm eagle.app.v6.text.bin eagle.app.v6.data.bin eagle.app.v6.rodata.bin eagle.app.v6.irom0text.bin; \ + cd $(OUTPUT_DIR)/binary/ && \ + $(OBJCOPY) --only-section .text -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.text.bin && \ + $(OBJCOPY) --only-section .data -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.data.bin && \ + $(OBJCOPY) --only-section .rodata -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.rodata.bin && \ + $(OBJCOPY) --only-section .irom0.text -O binary $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) eagle.app.v6.irom0text.bin && \ + $(SETENV) ESP8266_NM=${NM} && \ + python ../../../platform/mcu/esp8266/tools/gen_appbin.py $(CLEANED_BUILD_STRING)$(RADIXPOINT)$(BINSTYPE_LOWER)$(LINK_OUTPUT_SUFFIX) 2 1 15 5 && \ + $(CP) eagle.app.flash.bin $(OTA_BIN_OUTPUT_FILE) && \ + $(RM) eagle.app.flash.bin eagle.app.v6.text.bin eagle.app.v6.data.bin eagle.app.v6.rodata.bin eagle.app.v6.irom0text.bin
Fix leftover tabs and casing
@@ -189,13 +189,13 @@ To iterate over tabix files, use :func:`~pysam.tabix_iterator`: :members: -Fasta files +FASTA files =========== .. autoclass:: pysam.FastaFile :members: -Fastq files +FASTQ files =========== .. autoclass:: pysam.FastxFile @@ -206,8 +206,8 @@ Fastq files :members: -VCF files -========= +VCF/BCF files +============= .. autoclass:: pysam.VariantFile :members:
fix compiler family
@@ -77,7 +77,7 @@ module load scalapack export SHARED_OPT=-shared -%if %{compiler_family} == gnu +%if %{compiler_family} == gnu7 export PIC_OPT=-fPIC export SONAME_OPT="-Wl,-soname" %endif
feat(mainloop): set mainloop to process only one task per frame
@@ -374,7 +374,6 @@ typedef void (*FuncPtr)(void *); LCUI_END_HEADER #include <LCUI/util.h> -#include <LCUI/worker.h> #include <LCUI/main.h> #endif /* LCUI_H */
bring fakeMutex back
#include <util/stream/file.h> namespace NCudaLib { - //TODO(noxoomo): check different locks. By design MPI should be able to work with multiple threads, but - //existing implementation could work bad if MPI is accessed from multiple threads - using TMpiLock = TMutex; + //TODO(noxoomo): check different locks performance + using TMpiLock = TFakeMutex; TMpiLock& GetMpiLock(); #define MPI_SAFE_CALL(cmd) \
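The diff above swaps the lock type through a single alias, which is the whole point: one line decides whether MPI calls are serialized by a real mutex or by a do-nothing placeholder. A minimal sketch of that pattern, assuming a BasicLockable-style interface; TFakeMutexSketch and TMpiLockSketch are illustrative names, not the actual util/CatBoost types:

#include <mutex>

// No-op lock: safe only while MPI is driven from a single thread.
struct TFakeMutexSketch {
    void lock() {}                   // intentionally does nothing
    void unlock() {}                 // intentionally does nothing
    bool try_lock() { return true; }
};

// Flip this alias back to std::mutex if multi-threaded MPI access is ever needed.
using TMpiLockSketch = TFakeMutexSketch;

void MpiSafeCallSketch() {
    static TMpiLockSketch lock;
    std::lock_guard<TMpiLockSketch> guard(lock);  // compiles against either lock type
    // ... issue the MPI call here ...
}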
bsp: wifire: Implement hal_bsp_hw_id. This function reads the DEVID register, which contains the revision (upper 4 bits) and the device id (lower 28 bits).
#include "hal/hal_bsp.h" #include "bsp/bsp.h" #include <assert.h> +#include <xc.h> const struct hal_flash * hal_bsp_flash_dev(uint8_t id) { return 0; } + +int +hal_bsp_hw_id(uint8_t *id, int max_len) +{ + if (max_len > sizeof(DEVID)) { + max_len = sizeof(DEVID); + } + + memcpy(id, &DEVID, max_len); + return max_len; +} \ No newline at end of file
Ignore TCOD_NODISCARD in document generation.
@@ -2126,6 +2126,7 @@ PREDEFINED = TCODLIB_API= \ "TCODLIB_FORMAT(int,int)= " \ "TCOD_DEPRECATED(msg)= " \ TCOD_DEPRECATED_NOMESSAGE= \ + TCOD_NODISCARD= \ __cplusplus # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
cms: remove most references to EVP_sha1()
@@ -169,6 +169,10 @@ CMS_ContentInfo *CMS_digest_create_ex(BIO *in, const EVP_MD *md, { CMS_ContentInfo *cms; + /* + * Because the EVP_MD is cached and can be a legacy algorithm, we + * cannot fetch the algorithm if it isn't supplied. + */ if (md == NULL) md = EVP_sha1(); cms = ossl_cms_DigestedData_create(md, ctx, propq);
VERSION bump to version 0.10.6
@@ -28,7 +28,7 @@ set(CMAKE_C_FLAGS_DEBUG "-g -O0") # set version set(LIBNETCONF2_MAJOR_VERSION 0) set(LIBNETCONF2_MINOR_VERSION 10) -set(LIBNETCONF2_MICRO_VERSION 5) +set(LIBNETCONF2_MICRO_VERSION 6) set(LIBNETCONF2_VERSION ${LIBNETCONF2_MAJOR_VERSION}.${LIBNETCONF2_MINOR_VERSION}.${LIBNETCONF2_MICRO_VERSION}) set(LIBNETCONF2_SOVERSION ${LIBNETCONF2_MAJOR_VERSION}.${LIBNETCONF2_MINOR_VERSION})
some django-related excludes for import-tests. Note: mandatory check (NEED_CHECK) was skipped
@@ -117,6 +117,10 @@ def check_imports(no_check=None, extra=[], skip_func=None): "django.db.backends.*", "django.db.migrations.*", "django.template.backends.jinja2", + "django_template_common.templatetags.*", + "django_template_common.utils", + "django_yauth.authentication_mechanisms.*", + "django_yauth.urls", "pytest_django.compat",
VERSION bump to version 1.4.125
@@ -46,7 +46,7 @@ endif() # micro version is changed with a set of small changes or bugfixes anywhere in the project. set(SYSREPO_MAJOR_VERSION 1) set(SYSREPO_MINOR_VERSION 4) -set(SYSREPO_MICRO_VERSION 124) +set(SYSREPO_MICRO_VERSION 125) set(SYSREPO_VERSION ${SYSREPO_MAJOR_VERSION}.${SYSREPO_MINOR_VERSION}.${SYSREPO_MICRO_VERSION}) # Version of the library
doxygen MAINTENANCE it is redundant to search for input recursively
@@ -853,7 +853,7 @@ FILE_PATTERNS = *.c \ # be searched for input files as well. # The default value is: NO. -RECURSIVE = YES +RECURSIVE = NO # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a
Conservative commit without time increase.
:> :> mold generator: produces a mold of a null-terminated list of the :> homogeneous type {a}. - |*(a/$-(* *) $@($~ {i/a t/(list a)})) + |*(a/mold $@($~ {i/a t/(list a)})) :: ++ lone :> single item tuple :: 6; if-then-else :: {$6 b/* c/* d/*} - :: semantic expansion - :: - %+ require - $(fol b.fol) - |= :: fig: boolean - :: - fig/noun - :: apply proper booleans - :: - ?: =(& fig) ^$(fol c.fol) - ?: =(| fig) ^$(fol d.fol) - :: stop on bad test + :: use standard macro expansion (slow) :: - ~ + $(fol =>(fol [2 [0 1] 2 [1 c d] [1 0] 2 [1 2 3] [1 0] 4 4 b])) :: :: 7; composition :: :: this performance fix should unify a bunch of trivial formulas, :: but breaks on certain hacks in ++raid:zuse. :: - ?. ?=(?($axil $leaf) -.sec) raw - [%tsgr [%rock %n ~] raw] + :: ?. ?=(?($axil $leaf) -.sec) raw + :: [%tsgr [%rock %n ~] raw] + raw :: ++ whip |= axe/axis == :: {$bckt *} [%vine boil(gen p.gen) boil(gen q.gen)] - {$bchp *} - :- %weed - :+ %tsgr - [p.gen q.gen] - :^ %brsg [~ ~] - [%bcsm [%$ 2]] - [%tsgr [%$ 15] [%limb %$]] - :: + {$bchp *} [%weed [%brsg [~ ~] p.gen [%bunt [%tsgr [%$ 7] q.gen]]]] {$halo *} [%plow p.gen boil(gen q.gen)] {$bcts *} [%bark p.gen boil(gen q.gen)] {$bcwt *} =+ (turn p.gen |=(a/hoon boil(gen a))) |- ^- hoon ?~ r.gen p.q.a [%tstr [~ p.i.r.gen] q.i.r.gen $(r.gen t.r.gen)] - {$brcl *} [%tsls q.gen [%brdt p.gen r.gen]] + {$brcl *} [%tsls [%cold q.gen] [%brdt p.gen r.gen]] {$brdt *} :+ %brcn p.gen =- [[0 [~ ~] -] ~ ~] (~(put by *(map term (pair what foot))) %$ ~ [%ash q.gen]) == :: {$bckt *} [%vine boil(gen p.gen) boil(gen q.gen)] - {$bchp *} - :- %weed - :+ %tsgr - [p.gen q.gen] - :^ %brsg [~ ~] - [%bcsm [%$ 2]] - [%tsgr [%$ 15] [%limb %$]] - :: + {$bchp *} [%weed [%brsg [~ ~] p.gen [%bunt [%tsgr [%$ 7] q.gen]]]] {$halo *} [%plow p.gen boil(gen q.gen)] {$bcts *} [%bark p.gen boil(gen q.gen)] {$bcwt *} =+ (turn p.gen |=(a/hoon boil(gen a))) ?: fab pro ~| %musk-blocked - ~| [%subject bus ~] - ~| [%formula q.pro ~] !! [p.pro [%1 p.u.jon]] ::
feat: bot-reddit-search.c can search in auto-mode
"C Discord library", "Segfault" ], - "restrict_sr": false, "subreddits": [ "discordapp", "c_programming" - ] + ], + "before":null }
add test for bufr_dump -w (strict mode)
@@ -64,5 +64,10 @@ do ${tools_dir}/bufr_dump -S1 ${data_dir}/bufr/$file >/dev/null done +# Check strict option with 'where' clause +input=${data_dir}/bufr/tropical_cyclone.bufr +ce=`${tools_dir}/bufr_dump -p -w count=3 $input | grep -c 'edition='` +[ $ce -eq 1 ] +# Clean up rm -f $temp1 $temp2
Update documentation of py port.
@@ -21,11 +21,11 @@ Install MetaCall binaries first: curl -sL https://raw.githubusercontent.com/metacall/install/master/install.sh | bash -Then install Python package: +Then install Python package through MetaCall: .. code:: console - pip install metacall + metacall install py metacall Example ========
openssl-cmp.pod.in: Update Insta Demo CA port number in case needed
@@ -1027,7 +1027,7 @@ to issue the following shell commands. cd /path/to/openssl export OPENSSL_CONF=openssl.cnf =begin comment - wget 'http://pki.certificate.fi:8080/install-ca-cert.html/ca-certificate.crt\ + wget 'http://pki.certificate.fi:8081/install-ca-cert.html/ca-certificate.crt\ ?ca-id=632&download-certificate=1' -O insta.ca.crt =end comment openssl genrsa -out insta.priv.pem
.travis.yml: Explicitly install python-serial for ESP32 builds. It's a dependency of esptool.py; don't rely on it being preinstalled.
@@ -355,7 +355,7 @@ jobs: - stage: test name: "esp32 ESP-IDFv3 port build" install: - - sudo apt-get install python3-pip + - sudo apt-get install python3-pip python-serial - sudo pip3 install 'pyparsing<2.4' - curl -L https://dl.espressif.com/dl/xtensa-esp32-elf-linux64-1.22.0-80-g6c4433a-5.2.0.tar.gz | tar zxf - - export PATH=$(pwd)/xtensa-esp32-elf/bin:$PATH @@ -375,7 +375,7 @@ jobs: - stage: test name: "esp32 ESP-IDFv4 port build" install: - - sudo apt-get install python3-pip + - sudo apt-get install python3-pip python-serial - sudo pip3 install 'pyparsing<2.4' - curl -L https://dl.espressif.com/dl/xtensa-esp32-elf-gcc8_2_0-esp-2019r2-linux-amd64.tar.gz | tar zxf - - export PATH=$(pwd)/xtensa-esp32-elf/bin:$PATH
[Components][SDIO]fix 'rocr' declared without an initial value
@@ -198,7 +198,7 @@ static int mmc_get_ext_csd(struct rt_mmcsd_card *card, rt_uint8_t **new_ext_csd) */ static int mmc_parse_ext_csd(struct rt_mmcsd_card *card, rt_uint8_t *ext_csd) { - if(RT_NULL == card || RT_NULL == ext_csd) + if(card == RT_NULL || ext_csd == RT_NULL) { LOG_E("emmc parse ext csd fail, invaild args"); return -1; @@ -430,7 +430,7 @@ static rt_int32_t mmcsd_mmc_init_card(struct rt_mmcsd_host *host, { rt_int32_t err; rt_uint32_t resp[4]; - rt_uint32_t rocr; + rt_uint32_t rocr = 0; rt_uint32_t max_data_rate; rt_uint8_t *ext_csd = RT_NULL; struct rt_mmcsd_card *card = RT_NULL;
tree schema BUGFIX top-level schema-only nodes Fixes
@@ -718,7 +718,7 @@ char * lysc_path_until(const struct lysc_node *node, const struct lysc_node *parent, LYSC_PATH_TYPE pathtype, char *buffer, size_t buflen) { - const struct lysc_node *iter; + const struct lysc_node *iter, *par; char *path = NULL; int len = 0; @@ -746,7 +746,14 @@ lysc_path_until(const struct lysc_node *node, const struct lysc_node *parent, LY } else { slash = "/"; } - if (!iter->parent || (iter->parent->module != iter->module)) { + + if (pathtype == LYSC_PATH_DATA) { + par = lysc_data_parent(iter); + } else { + par = iter->parent; + } + + if (!par || (par->module != iter->module)) { /* print prefix */ if (buffer) { len = snprintf(buffer, buflen, "%s%s:%s%s", slash, iter->module->name, id, s ? s : "");
do the don trick for jam
static u3_noun _jam_cap(u3_atom a) { + u3p(jamframe) empty = u3R->cap_p; u3p(u3h_root) har_p = u3h_new(); c3_o nor_o = u3a_is_north(u3R); c3_y wis_y = c3_wiseof(jamframe); c3_ys mov = ( c3y == nor_o ? -wis_y : wis_y ); c3_ys off = ( c3y == nor_o ? 0 : -wis_y ); jamframe* fam = _jam_push(mov, off); + jamframe* don = u3to(jamframe, empty + off); fam->sat_y = JAM_NONE; fam->nun = a; fam->lis = u3_nul; u3_noun q, r = u3_none; - c3_w dep_w; - for ( dep_w = 1; dep_w > 0; ) { + while ( don != fam ) { switch ( fam->sat_y ) { case JAM_NONE: { u3_noun nun = fam->nun; if ( c3n == u3du(nun) ) { r = _jam_flat(nun, lis); fam = _jam_pop(mov, off); - --dep_w; u3z(len); } else { fam->nun = u3h(nun); fam->len = u3qa_add(2, len); fam->lis = u3nc(u3nc(2, 1), lis); - ++dep_w; } } else { r = _jam_ptr(got, lis); } fam = _jam_pop(mov, off); - --dep_w; u3z(len); } break; fam->len = u3qa_add(z, p_r); fam->lis = u3k(q_r); u3z(z); - ++dep_w; break; } case JAM_TAIL: { u3_noun len = fam->len; r = _jam_pair(u3qa_add(2, len), fam->hed, r); fam = _jam_pop(mov, off); - --dep_w; u3z(len); break; }
Update: Cycle.c: improvement in implication processing
@@ -408,27 +408,21 @@ void Cycle_Prediction(long currentTime) } for(int k=0; k<c->precondition_beliefs[0].itemsAmount; k++) { - Implication imp = c->precondition_beliefs[0].array[k]; - Term subject = Term_ExtractSubterm(&imp.term, 1); - if(Variable_Unify(&subject, &e->term).success) + if(!Memory_ImplicationValid(&c->precondition_beliefs[0].array[k])) { - for(int i=0; i<c->precondition_beliefs[0].itemsAmount; i++) - { - if(!Memory_ImplicationValid(&c->precondition_beliefs[0].array[i])) - { - Table_Remove(&c->precondition_beliefs[0], i--); + Table_Remove(&c->precondition_beliefs[0], k--); continue; } - Implication *imp = &c->precondition_beliefs[0].array[i]; + Implication *imp = &c->precondition_beliefs[0].array[k]; + Term precondition = Term_ExtractSubterm(&imp->term, 1); + Substitution subs = Variable_Unify(&precondition, &e->term); + if(subs.success) + { assert(Narsese_copulaEquals(imp->term.atoms[0],'$'), "Not a valid implication term!"); - Term precondition_with_op = Term_ExtractSubterm(&imp->term, 1); - Term precondition = Narsese_GetPreconditionWithoutOp(&precondition_with_op); Concept *c_pre = Memory_FindConceptByTerm(&precondition); if(c_pre != NULL) { Substitution subs = Variable_Unify(&precondition, &e->term); - if(subs.success) - { Implication updated_imp = *imp; bool success; updated_imp.term = Variable_ApplySubstitute(updated_imp.term, subs, &success); @@ -443,9 +437,6 @@ void Cycle_Prediction(long currentTime) } } } - break; - } - } } } }
make-release: get m4 macros so dist won't fail
@@ -89,6 +89,7 @@ pushd "${CHANGES}"/build || die git clone "${SRCDIR}" carbon-c-relay pushd carbon-c-relay git checkout "v${NEXTRELEASE}" || die +libtoolize || glibtoolize # get m4 macros for dist ./configure make dist mv carbon-c-relay-${NEXTRELEASE}.tar.* "${SRCDIR}"/ || die
[chainmaker][#436]modify test 0005 name
@@ -170,7 +170,7 @@ START_TEST(test_001CreateWallet_0004CreateLoadWalletSuccess) } END_TEST -START_TEST(test_001CreateWallet_0005CreateLoadWalletFailureNoExist) +START_TEST(test_001CreateWallet_0005CreateLoadWalletFailurePersistWalletNoExist) { BSINT32 rtnVal; extern BoatIotSdkContext g_boat_iot_sdk_context; @@ -391,7 +391,7 @@ Suite *make_wallet_suite(void) tcase_add_test(tc_wallet_api, test_001CreateWallet_0002CreateOneTimeWalletFailureNullConfig); tcase_add_test(tc_wallet_api, test_001CreateWallet_0003CreatePersistWalletSuccess); tcase_add_test(tc_wallet_api, test_001CreateWallet_0004CreateLoadWalletSuccess); - tcase_add_test(tc_wallet_api, test_001CreateWallet_0005CreateLoadWalletFailureNoExist); + tcase_add_test(tc_wallet_api, test_001CreateWallet_0005CreateLoadWalletFailurePersistWalletNoExist); tcase_add_test(tc_wallet_api, test_001CreateWallet_0006_CreateOneTimeWalletFailureShortSize); tcase_add_test(tc_wallet_api, test_001CreateWallet_0007_CreateOneTimeWalletSucessLongSize); tcase_add_test(tc_wallet_api, test_001CreateWallet_0008_CreateOneTimeWalletFailureUrlFormatError);
web ui jenkins: temporarily enable deploying on non-master, disable full build
@@ -138,12 +138,7 @@ stage("Main builds") { parallel generateMainBuildStages() } -stage("Full builds") { - milestone label: "Full builds" - parallel generateFullBuildStages() -} - -maybeStage("Build artifacts", isMaster()) { +maybeStage("Build artifacts", true) { milestone label: "artifacts" parallel generateArtifactStages() } @@ -152,7 +147,7 @@ maybeStage("Deploy Homepage", isMaster()) { deployHomepage() } -maybeStage("Deploy Web UI", isMaster()) { +maybeStage("Deploy Web UI", true) { deployWebUI() }
client session CHANGE continue receiving notifications after an error. Just because one notification was invalid, others do not have to be. Fixes cesnet/netopeer2#178
@@ -1715,8 +1715,6 @@ nc_recv_notif_thread(void *arg) break; } nc_notif_free(notif); - } else if (msgtype == NC_MSG_ERROR) { - break; } usleep(NC_CLIENT_NOTIF_THREAD_SLEEP);
Fix inconsistent markups in catalogs.sgml. Some fields related to pg_opclass and pg_opfamily were using incorrect markups, listing them as structname instead of structfield. Author: Fabien Coelho Discussion:
<para> An entry's <structfield>amopmethod</structfield> must match the - <structname>opfmethod</structname> of its containing operator family (including + <structfield>opfmethod</structfield> of its containing operator family (including <structfield>amopmethod</structfield> here is an intentional denormalization of the catalog structure for performance reasons). Also, <structfield>amoplefttype</structfield> and <structfield>amoprighttype</structfield> must match @@ -5064,10 +5064,10 @@ SCRAM-SHA-256$<replaceable>&lt;iteration count&gt;</replaceable>:<replaceable>&l <para> An operator class's <structfield>opcmethod</structfield> must match the - <structname>opfmethod</structname> of its containing operator family. + <structfield>opfmethod</structfield> of its containing operator family. Also, there must be no more than one <structname>pg_opclass</structname> - row having <structname>opcdefault</structname> true for any given combination of - <structname>opcmethod</structname> and <structname>opcintype</structname>. + row having <structfield>opcdefault</structfield> true for any given combination of + <structfield>opcmethod</structfield> and <structfield>opcintype</structfield>. </para> </sect1>
Remove OPENSSL_assert() from crypto/asn1/bio_asn1.c
#include <string.h> #include <internal/bio.h> #include <openssl/asn1.h> +#include "internal/cryptlib.h" /* Must be large enough for biggest tag+length */ #define DEFAULT_ASN1_BUF_SIZE 20 @@ -181,7 +182,8 @@ static int asn1_bio_write(BIO *b, const char *in, int inl) case ASN1_STATE_HEADER: ctx->buflen = ASN1_object_size(0, inl, ctx->asn1_tag) - inl; - OPENSSL_assert(ctx->buflen <= ctx->bufsize); + if (!ossl_assert(ctx->buflen <= ctx->bufsize)) + return 0; p = ctx->buf; ASN1_put_object(&p, 0, inl, ctx->asn1_tag, ctx->asn1_class); ctx->copylen = inl;
Document CentOS yum install New!
@@ -80,6 +80,10 @@ or zypper install the_silver_searcher +* CentOS: + + yum install the_silver_searcher + * SUSE Linux Enterprise: Follow [these simple instructions](https://software.opensuse.org/download.html?project=utilities&package=the_silver_searcher).
host_exerciser: fix build on g++ 11.2.1. Assign bitfields to a temporary variable before logging in order to appease spdlog.
@@ -66,11 +66,17 @@ public: he_status0.value = host_exe_->read64(HE_STATUS0); he_status1.value = host_exe_->read64(HE_STATUS1); - host_exe_->logger_->info("Host Exerciser numReads: {0}", he_status0.numReads); - host_exe_->logger_->info("Host Exerciser numWrites: {0}", he_status0.numWrites); + uint64_t tmp; - host_exe_->logger_->info("Host Exerciser numPendReads: {0}", he_status1.numPendReads); - host_exe_->logger_->info("Host Exerciser numPendWrites: {0}", he_status1.numPendWrites); + tmp = he_status0.numReads; + host_exe_->logger_->info("Host Exerciser numReads: {0}", tmp); + tmp = he_status0.numWrites; + host_exe_->logger_->info("Host Exerciser numWrites: {0}", tmp); + + tmp = he_status1.numPendReads; + host_exe_->logger_->info("Host Exerciser numPendReads: {0}", tmp); + tmp = he_status1.numPendWrites; + host_exe_->logger_->info("Host Exerciser numPendWrites: {0}", tmp); } void host_exerciser_errors() @@ -139,9 +145,14 @@ public: host_exerciser_status(); } - host_exe_->logger_->info("Number of clocks: {0}", dsm_status->num_ticks); - host_exe_->logger_->info("Total number of Reads sent: {0}", dsm_status->num_reads); - host_exe_->logger_->info("Total number of Writes sent: {0}", dsm_status->num_writes); + uint64_t tmp; + + tmp = dsm_status->num_ticks; + host_exe_->logger_->info("Number of clocks: {0}", tmp); + tmp = dsm_status->num_reads; + host_exe_->logger_->info("Total number of Reads sent: {0}", tmp); + tmp = dsm_status->num_writes; + host_exe_->logger_->info("Total number of Writes sent: {0}", tmp); // print bandwidth if (dsm_status->num_ticks > 0) {
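The commit message hints at the likely mechanism: fmt-style logging APIs take their arguments by forwarding reference, and an lvalue bitfield member cannot bind to the deduced uint64_t&, so g++ 11 rejects the call until the value is copied into an ordinary variable. A self-contained sketch of the failure mode and the fix; status0 and log_info are stand-ins, not the real OPAE register definition or the spdlog API:

#include <cstdint>
#include <cstdio>

// Packed status register with bitfields (field names are illustrative).
struct status0 {
    uint64_t numReads  : 32;
    uint64_t numWrites : 32;
};

// fmt/spdlog-style sink: the argument is taken by forwarding reference.
template <typename T>
void log_info(const char *fmt, T&& value) {
    std::printf("%s -> %llu\n", fmt, (unsigned long long)value);
}

int main() {
    status0 s{};
    s.numReads = 42;

    // log_info("numReads: {0}", s.numReads);  // error: cannot bind bitfield
    //                                         // to 'uint64_t&' (seen on g++ 11)
    uint64_t tmp = s.numReads;                 // copy into an ordinary object first
    log_info("numReads: {0}", tmp);            // fine, mirrors the fix above
    return 0;
}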
Disallow init.lua in binary form (small oversight)
@@ -15,12 +15,12 @@ Sandbox::Sandbox(Scripting* apScripting, sol::environment aBaseEnvironment, cons sol::protected_function_result Sandbox::ExecuteFile(const std::string& acPath) const { - return m_pScripting->GetState().Get().script_file(acPath, m_env); + return m_pScripting->GetState().Get().script_file(acPath, m_env, sol::load_mode::text); } sol::protected_function_result Sandbox::ExecuteString(const std::string& acString) const { - return m_pScripting->GetState().Get().script(acString, m_env); + return m_pScripting->GetState().Get().script(acString, m_env, sol:: detail::default_chunk_name(), sol::load_mode::text); } sol::environment& Sandbox::GetEnvironment()
phb4: Move code around to avoid indenting. No functional change.
@@ -3179,8 +3179,8 @@ static int64_t phb4_eeh_next_error(struct phb *phb, } } - /* Mapping errors */ - if (phb4_err_pending(p)) { + if (!phb4_err_pending(p)) + return OPAL_SUCCESS; /* * If the frozen PE is caused by a malfunctioning TLP, we * need reset the PHB. So convert ER to PHB-fatal error @@ -3243,8 +3243,6 @@ static int64_t phb4_eeh_next_error(struct phb *phb, *severity = OPAL_EEH_SEV_NO_ERROR; phb4_set_err_pending(p, false); } - } - return OPAL_SUCCESS; }
ames: scry for sponsor and don't crash on jael response
=| =point =. life.point life =. keys.point (my [life crypto-suite public-key]~) - =. sponsor.point `(^sein:title ship) + =. sponsor.point `(scry-for-sponsor ship) :: (on-publ-full (my [ship point]~)) :: :: =+ ^- [=ship =point] i.points :: + ?. (~(has by keys.point) life.point) + $(points t.points) + :: =/ old-ship-state (~(get by peers.ames-state) ship) :: =. event-core (insert-peer-state ship point) =. life.peer-state life.point =. public-key.peer-state public-key =. symmetric-key.peer-state symmetric-key - =. sponsor.peer-state (fall sponsor.point (^sein:title ship)) + =. sponsor.peer-state + ?^ sponsor.point + u.sponsor.point + (scry-for-sponsor ship) :: automatically set galaxy route, since unix handles lookup :: =? route.peer-state ?=(%czar (clan:title ship)) (~(put by peers.ames-state) ship %known peer-state) :: event-core + :: +scry-for-sponsor: ask jael for .who's sponsoring ship + :: + ++ scry-for-sponsor + |= who=ship + ^- ship + ;; ship + =< q.q %- need %- need + %- scry-gate + [[%141 %noun] ~ %j `beam`[[our %sein %da now] /(scot %p who)]] -- :: +on-take-turf: relay %turf move from jael to unix ::
bump nrpe to 3.0.1
%define nsport 5666 Name: %{pname}%{PROJ_DELIM} -Version: 2.15 +Version: 3.0.1 Release: 2%{?dist} Summary: Host/service/network monitoring agent for Nagios @@ -29,7 +29,7 @@ Group: %{PROJ_NAME}/admin License: GPLv2+ URL: http://www.nagios.org DocDir: %{OHPC_PUB}/doc/contrib -Source0: http://downloads.sourceforge.net/nagios/nrpe-%{version}.tar.gz +Source0: https://github.com/NagiosEnterprises/nrpe/archive/%{version}.tar.gz#/%{pname}-%{version}.tar.gz Source1: nrpe.sysconfig Source2: nrpe-tmpfiles.conf Source3: nrpe.service
added folders that are generated by tests to .gitignore
*.Inc.Eigen SeasonalClimateFiles +#files created from running tests +tests/Multi-PlanetCheckpoint/MP_Checkpoint/ +tests/Multi-PlanetCheckpoint/.MP_Checkpoint +tests/Multi-PlanetMpStatus/MP_Status/ +tests/Multi-PlanetMpStatus/.MP_Status +tests/Multi-PlanetParallel/MP_Parallel/ +tests/Multi-PlanetParallel/.MP_Parallel +tests/Multi-PlanetSerial/MP_Serial/ +tests/Multi-PlanetSerial/.MP_Serial +tests/Vspace_Explicit/Explict_Test/ +tests/Vspace_Linear/Linear_Test/ +tests/Vspace_Log/Log_Test/ + # Aux files / build info / garbage merge-from-public.sh merge-from-private.sh