max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
634
<filename>modules/base/lang-impl/src/main/java/com/intellij/codeInsight/editorActions/AbstractIndentingBackspaceHandler.java /* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.editorActions; import com.intellij.codeInsight.CodeInsightSettings; import com.intellij.lang.Language; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiFile; abstract class AbstractIndentingBackspaceHandler extends BackspaceHandlerDelegate { private final SmartBackspaceMode myMode; private boolean myEnabled; AbstractIndentingBackspaceHandler(SmartBackspaceMode mode) { myMode = mode; } @Override public void beforeCharDeleted(char c, PsiFile file, Editor editor) { myEnabled = false; if (editor.isColumnMode() || !StringUtil.isWhiteSpace(c)) { return; } SmartBackspaceMode mode = getBackspaceMode(file.getLanguage()); if (mode != myMode) { return; } doBeforeCharDeleted(c, file, editor); myEnabled = true; } @Override public boolean charDeleted(char c, PsiFile file, Editor editor) { if (!myEnabled) { return false; } return doCharDeleted(c, file, editor); } protected abstract void doBeforeCharDeleted(char c, PsiFile file, Editor editor); protected abstract boolean doCharDeleted(char c, PsiFile file, Editor editor); private static SmartBackspaceMode getBackspaceMode(Language language) { SmartBackspaceMode mode = CodeInsightSettings.getInstance().getBackspaceMode(); 
BackspaceModeOverride override = LanguageBackspaceModeOverride.INSTANCE.forLanguage(language); if (override != null) { mode = override.getBackspaceMode(mode); } return mode; } }
721
690
<filename>chrome/browser/webapps/android/java/src/org/chromium/chrome/browser/webapps/AddToHomescreenIPHController.java // Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.chrome.browser.webapps; import android.app.Activity; import android.content.Context; import android.content.pm.ResolveInfo; import android.os.Bundle; import android.os.Handler; import android.view.View; import androidx.annotation.IdRes; import androidx.vectordrawable.graphics.drawable.VectorDrawableCompat; import org.chromium.base.metrics.RecordUserAction; import org.chromium.base.supplier.Supplier; import org.chromium.chrome.browser.banners.AppMenuVerbiage; import org.chromium.chrome.browser.feature_engagement.TrackerFactory; import org.chromium.chrome.browser.flags.ChromeFeatureList; import org.chromium.chrome.browser.profiles.Profile; import org.chromium.chrome.browser.tab.Tab; import org.chromium.chrome.browser.ui.appmenu.AppMenuHandler; import org.chromium.chrome.browser.user_education.IPHCommandBuilder; import org.chromium.chrome.browser.user_education.UserEducationHelper; import org.chromium.components.embedder_support.util.UrlConstants; import org.chromium.components.feature_engagement.EventConstants; import org.chromium.components.feature_engagement.FeatureConstants; import org.chromium.components.feature_engagement.Tracker; import org.chromium.components.messages.MessageBannerProperties; import org.chromium.components.messages.MessageDispatcher; import org.chromium.components.messages.MessageIdentifier; import org.chromium.components.messages.MessageScopeType; import org.chromium.components.webapk.lib.client.WebApkValidator; import org.chromium.components.webapps.AddToHomescreenCoordinator; import org.chromium.components.webapps.AppBannerManager; import org.chromium.components.webapps.WebappsUtils; import org.chromium.content_public.browser.WebContents; 
import org.chromium.ui.base.WindowAndroid; import org.chromium.ui.modaldialog.ModalDialogManager; import org.chromium.ui.modelutil.PropertyModel; import org.chromium.url.GURL; /** * This class is responsible for rendering the 'Add to home screen' IPH, such as IPH bubble/message * creation, metrics logging etc. */ public class AddToHomescreenIPHController { private static final String VARIATION_KEY_USE_TEXT_BUBBLE = "use_text_bubble"; private static final String VARIATION_KEY_USE_MESSAGE = "use_message"; private Activity mActivity; private AppMenuHandler mAppMenuHandler; private final WindowAndroid mWindowAndroid; private final ModalDialogManager mModalDialogManager; private final @IdRes int mHighlightMenuItemId; private final Supplier<View> mMenuButtonView; private final MessageDispatcher mMessageDispatcher; private final UserEducationHelper mUserEducationHelper; private final Tracker mTracker; private final Handler mHandler = new Handler(); private GURL mLastClickedMostVisitedTileUrl; /** * Creates an {@link AddToHomescreenIPHController}. * @param activity The associated activity. * @param windowAndroid The associated {@link WindowAndroid}. * @param modalDialogManager The {@link ModalDialogManager} for showing the dialog. * @param appMenuHandler The {@link AppMenuHandler}. * @param highlightMenuItemId The resource id of 'Add to Home screen' in the app menu. * @param menuButtonView The view representing the menu button. * @param messageDispatcher The {@link MessageDispatcher} for displaying messages. 
*/ public AddToHomescreenIPHController(Activity activity, WindowAndroid windowAndroid, ModalDialogManager modalDialogManager, AppMenuHandler appMenuHandler, @IdRes int highlightMenuItemId, Supplier<View> menuButtonView, MessageDispatcher messageDispatcher) { mActivity = activity; mWindowAndroid = windowAndroid; mModalDialogManager = modalDialogManager; mAppMenuHandler = appMenuHandler; mHighlightMenuItemId = highlightMenuItemId; mMenuButtonView = menuButtonView; mMessageDispatcher = messageDispatcher; mTracker = TrackerFactory.getTrackerForProfile(Profile.getLastUsedRegularProfile()); mUserEducationHelper = new UserEducationHelper(mActivity, mHandler); } /** * Called to show in-product-help message. * @param tab The current tab. */ public void showAddToHomescreenIPH(Tab tab) { if (mActivity == null) return; if (!canShowAddToHomescreenMenuItem(mActivity, tab)) return; if (ChromeFeatureList.getFieldTrialParamByFeatureAsBoolean( ChromeFeatureList.ADD_TO_HOMESCREEN_IPH, VARIATION_KEY_USE_TEXT_BUBBLE, false)) { showTextBubbleIPH(); } else if (ChromeFeatureList.getFieldTrialParamByFeatureAsBoolean( ChromeFeatureList.ADD_TO_HOMESCREEN_IPH, VARIATION_KEY_USE_MESSAGE, false)) { showMessageIPH(tab); } } /** * Called to notify that the activity is in the process of being destroyed. 
*/ public void destroy() { mActivity = null; mAppMenuHandler = null; } private static boolean canShowAddToHomescreenMenuItem(Context context, Tab tab) { if (tab.isIncognito()) return false; // Always disable can add to home screen for now, to-do add a ContextUtils check if (true) return false; GURL url = tab.getUrl(); if (url.isEmpty() || !url.isValid()) return false; String urlString = url.getSpec(); boolean isChromeScheme = urlString.startsWith(UrlConstants.CHROME_URL_PREFIX) || urlString.startsWith(UrlConstants.CHROME_NATIVE_URL_PREFIX); boolean isFileScheme = urlString.startsWith(UrlConstants.FILE_URL_PREFIX); boolean isContentScheme = urlString.startsWith(UrlConstants.CONTENT_URL_PREFIX); boolean urlSchemeSupported = !isChromeScheme && !isFileScheme && !isContentScheme; if (!urlSchemeSupported) return false; if (!WebappsUtils.isAddToHomeIntentSupported()) return false; // If it is a web apk, don't show the IPH. ResolveInfo resolveInfo = WebApkValidator.queryFirstWebApkResolveInfo(context, urlString); boolean isInstalledAsPwa = resolveInfo != null && resolveInfo.activityInfo.packageName != null; if (isInstalledAsPwa) return false; // If it can be installed as a PWA, don't show the IPH. WebContents webContents = tab.getWebContents(); AppBannerManager manager = webContents != null ? 
AppBannerManager.forWebContents(webContents) : null; boolean canInstallAsPwa = manager != null && manager.getIsPwa(webContents); if (canInstallAsPwa) return false; return true; } private void showTextBubbleIPH() { mUserEducationHelper.requestShowIPH( new IPHCommandBuilder(mActivity.getResources(), FeatureConstants.ADD_TO_HOMESCREEN_TEXT_BUBBLE_FEATURE, R.string.iph_bubble_add_to_home_screen, R.string.iph_bubble_add_to_home_screen_accessibility) .setAnchorView(mMenuButtonView.get()) .setOnShowCallback(this::turnOnTextBubbleHighlightForMenuItem) .setOnDismissCallback(this::turnOffTextBubbleHighlightForMenuItem) .build()); } private void showMessageIPH(Tab tab) { if (!ChromeFeatureList.isEnabled(ChromeFeatureList.MESSAGES_FOR_ANDROID_INFRASTRUCTURE)) { return; } if (!mTracker.shouldTriggerHelpUI(FeatureConstants.ADD_TO_HOMESCREEN_MESSAGE_FEATURE)) { return; } PropertyModel model = new PropertyModel.Builder(MessageBannerProperties.ALL_KEYS) .with(MessageBannerProperties.MESSAGE_IDENTIFIER, MessageIdentifier.ADD_TO_HOMESCREEN_IPH) .with(MessageBannerProperties.ICON, VectorDrawableCompat.create(mActivity.getResources(), R.drawable.ic_apps_blue_24dp, mActivity.getTheme())) .with(MessageBannerProperties.TITLE, mActivity.getResources().getString( R.string.iph_message_add_to_home_screen_title)) .with(MessageBannerProperties.DESCRIPTION, mActivity.getResources().getString( R.string.iph_message_add_to_home_screen_description)) .with(MessageBannerProperties.PRIMARY_BUTTON_TEXT, mActivity.getResources().getString( R.string.iph_message_add_to_home_screen_action)) .with(MessageBannerProperties.ON_DISMISSED, this::onMessageDismissed) .with(MessageBannerProperties.ON_PRIMARY_ACTION, () -> onMessageAddButtonClicked(tab)) .build(); mMessageDispatcher.enqueueMessage( model, tab.getWebContents(), MessageScopeType.NAVIGATION, false); RecordUserAction.record("Android.AddToHomescreenIPH.Message.Shown"); } private void onMessageAddButtonClicked(Tab tab) { if (tab.isDestroyed() || mActivity 
== null) return; Bundle menuItemData = new Bundle(); // Used for UMA. menuItemData.putInt( AppBannerManager.MENU_TITLE_KEY, AppMenuVerbiage.APP_MENU_OPTION_ADD_TO_HOMESCREEN); AddToHomescreenCoordinator.showForAppMenu( mActivity, mWindowAndroid, mModalDialogManager, tab.getWebContents(), menuItemData); mTracker.notifyEvent(EventConstants.ADD_TO_HOMESCREEN_DIALOG_SHOWN); RecordUserAction.record("Android.AddToHomescreenIPH.Message.Clicked"); } private void onMessageDismissed(Integer dismissReason) { // TODO(shaktisahu): Record metrics for explicit dismiss vs timeout. mTracker.dismissed(FeatureConstants.ADD_TO_HOMESCREEN_MESSAGE_FEATURE); } private void turnOnTextBubbleHighlightForMenuItem() { if (mAppMenuHandler == null) return; mAppMenuHandler.setMenuHighlight(mHighlightMenuItemId); } private void turnOffTextBubbleHighlightForMenuItem() { if (mAppMenuHandler == null) return; mAppMenuHandler.clearMenuHighlight(); } }
4,396
507
// // Copyright 2020 Autodesk // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #include "plugRegistryHelper.h" #include <mayaUsd/base/debugCodes.h> #include <pxr/base/js/json.h> #include <pxr/base/js/value.h> #include <pxr/base/plug/registry.h> #include <pxr/base/tf/getenv.h> #include <pxr/base/tf/pathUtils.h> #include <pxr/base/tf/staticTokens.h> #include <pxr/base/tf/stringUtils.h> #include <fstream> #include <string> #if !defined(MAYAUSD_VERSION) #error "MAYAUSD_VERSION is not defined" #endif #if !defined(MAYA_PY_VERSION) #error "MAYA_PY_VERSION is not defined" #endif #define STRINGIFY(x) #x #define TOSTRING(x) STRINGIFY(x) PXR_NAMESPACE_USING_DIRECTIVE namespace { // clang-format off TF_DEFINE_PRIVATE_TOKENS(_Tokens, // Filename tokens ((MayaUsdPlugInfoName, "mayaUsdPlugInfo.json")) // Top level key ((IncludesKey, "MayaUsdIncludes")) // Include keys ((PlugPathKey, "PlugPath")) ((VersionCheckKey, "VersionCheck")) // VersionCheck keys ((CheckPythonKey, "Python")) ((CheckUsdKey, "USD")) ((CheckMayaUsdKey, "MayaUsd")) ); // clang-format on /*! \brief Read the mayaUsd plug info in pathname into result. \note Heavily inspired by /pxr/base/plug/info.cpp \return true if the file could be opened. */ bool readPlugInfoObject(const std::string& pathname, JsObject& result) { result.clear(); // The file may not exist or be readable. 
std::ifstream ifs; ifs.open(pathname.c_str()); if (!ifs.is_open()) { TF_RUNTIME_ERROR("Plugin info file %s couldn't be read", pathname.c_str()); return false; } // The Js library doesn't allow comments, but we'd like to allow them. // Strip comments, retaining empty lines so line numbers reported in parse // errors match line numbers in the original file content. // NOTE: Joining a vector of strings and calling JsParseString() // is *much* faster than writing to a stringstream and // calling JsParseStream() as of this writing. std::string line; std::vector<std::string> filtered; while (getline(ifs, line)) { if (line.find('#') < line.find_first_not_of(" \t#")) line.clear(); filtered.push_back(line); } // Read JSON. JsParseError error; JsValue plugInfo = JsParseString(TfStringJoin(filtered, "\n"), &error); // Validate. if (plugInfo.IsNull()) { TF_RUNTIME_ERROR( "Plugin info file %s couldn't be read " "(line %d, col %d): %s", pathname.c_str(), error.line, error.column, error.reason.c_str()); } else if (!plugInfo.IsObject()) { // The contents didn't evaluate to a json object.... TF_RUNTIME_ERROR("Plugin info file %s did not contain a JSON object", pathname.c_str()); } else { result = plugInfo.GetJsObject(); } return true; } /*! 
\brief Perform version check for given MayaUsd plug info \return true if requested version are valid for current MayaUsd runtime configuration */ bool checkPluginVersions( const JsObject& plugInfo, const std::string& pythonVersion, const std::string& usdVersion, const std::string& mayaUsdVersion, const std::string& debugLocation) { JsObject::const_iterator checkIt = plugInfo.find(_Tokens->VersionCheckKey); if (checkIt == plugInfo.end()) { // Version check wasn't requested return true; } if (!checkIt->second.IsObject()) { TF_RUNTIME_ERROR( "Plugin info %s key '%s' doesn't hold an object", debugLocation.c_str(), checkIt->first.c_str()); return false; } JsObject versionCheckObject = checkIt->second.GetJsObject(); auto checkFn = [&versionCheckObject, &debugLocation](const std::string& key, const std::string& versionValue) { JsObject::const_iterator checkIt = versionCheckObject.find(key); if (checkIt == versionCheckObject.end()) { // version check for this key was not requested TF_DEBUG(USDMAYA_PLUG_INFO_VERSION) .Msg( "Plugin info %s version check '%s' not requested\n", debugLocation.c_str(), key.c_str()); return true; } if (!checkIt->second.IsString()) { TF_RUNTIME_ERROR( "Plugin info %s key '%s' doesn't hold a string", debugLocation.c_str(), key.c_str()); return false; } const std::string& requestedVersion = checkIt->second.GetString(); if (versionValue == requestedVersion) return true; else { TF_DEBUG(USDMAYA_PLUG_INFO_VERSION) .Msg( "Plugin info %s version check '%s' NOT match. " "Requested '%s' but run under '%s'\n", debugLocation.c_str(), key.c_str(), requestedVersion.c_str(), versionValue.c_str()); return false; } }; if (!checkFn(_Tokens->CheckPythonKey, pythonVersion)) { return false; } if (!checkFn(_Tokens->CheckUsdKey, usdVersion)) { return false; } if (!checkFn(_Tokens->CheckMayaUsdKey, mayaUsdVersion)) { return false; } return true; } /*! \brief Extract plugin path from MayaUsd plug info \return Valid absolute path for properly configured json file. 
Empty string otherwise. */ std::string getPluginPath( const JsObject& plugInfo, const std::string& mayaUsdPluginInfoPath, const std::string& debugLocation) { std::string pluginIncludePath; JsObject::const_iterator pluginPathIt = plugInfo.find(_Tokens->PlugPathKey); if (pluginPathIt != plugInfo.end()) { if (!pluginPathIt->second.IsString()) { TF_RUNTIME_ERROR( "Plugin info %s key %s doesn't hold a string", debugLocation.c_str(), pluginPathIt->first.c_str()); } else { const std::string& includePath = pluginPathIt->second.GetString(); if (TfIsRelativePath(includePath)) { pluginIncludePath = TfStringCatPaths(mayaUsdPluginInfoPath, includePath); } else { pluginIncludePath = includePath; } } } return pluginIncludePath; } } // namespace namespace MAYAUSD_NS_DEF { void registerVersionedPlugins() { static std::once_flag once; std::call_once(once, []() { const std::string pythonVersion = TOSTRING(MAYA_PY_VERSION); const std::string usdVersion = TOSTRING(MAYA_USD_VERSION); const std::string mayaUsdVersion = TOSTRING(MAYAUSD_VERSION); std::vector<std::string> pluginsToRegister; const std::string paths = TfGetenv("MAYA_PXR_PLUGINPATH_NAME"); for (const auto& path : TfStringSplit(paths, ARCH_PATH_LIST_SEP)) { if (path.empty()) { continue; } if (TfIsRelativePath(path)) { TF_CODING_ERROR( "Relative paths are unsupported for MAYA_PXR_PLUGINPATH_NAME: '%s'", path.c_str()); continue; } // Append the maya usd plug info file name std::string plugInfoPath = TfStringCatPaths(path, _Tokens->MayaUsdPlugInfoName.GetString()); JsObject plugInfoObject; if (!readPlugInfoObject(plugInfoPath, plugInfoObject)) { continue; } JsObject::const_iterator topIncludesIt = plugInfoObject.find(_Tokens->IncludesKey); if (topIncludesIt == plugInfoObject.end() || !topIncludesIt->second.IsArray()) { TF_RUNTIME_ERROR( "Plugin info file %s key '%s' doesn't hold an object", plugInfoPath.c_str(), _Tokens->IncludesKey.GetString().c_str()); continue; } const JsArray& pluginIncludes = 
topIncludesIt->second.GetJsArray(); for (size_t i = 0, n = pluginIncludes.size(); i != n; ++i) { const std::string debugLocation = TfStringPrintf( "file %s %s[%zd]", plugInfoPath.c_str(), topIncludesIt->first.c_str(), i); if (!pluginIncludes[i].IsObject()) { TF_RUNTIME_ERROR( "Plugin info %s key '%s' doesn't hold an object", debugLocation.c_str(), topIncludesIt->first.c_str()); continue; } const JsObject& topPluginObject = pluginIncludes[i].GetJsObject(); if (!checkPluginVersions( topPluginObject, pythonVersion, usdVersion, mayaUsdVersion, debugLocation)) { // skipping plugin because it didn't pass version check continue; } std::string pluginPath = getPluginPath(topPluginObject, path, debugLocation); if (!pluginPath.empty()) { TF_DEBUG(USDMAYA_PLUG_INFO_VERSION) .Msg( "Plugin info %s. Will request registration for '%s'\n", debugLocation.c_str(), pluginPath.c_str()); pluginsToRegister.push_back(pluginPath); } } } PlugRegistry::GetInstance().RegisterPlugins(pluginsToRegister); }); } } // namespace MAYAUSD_NS_DEF
4,795
5,169
{ "name": "BookReader", "version": "0.1.0", "summary": "Sample code for PDFKit on iOS 11, clone of iBooks.app built on top of PDFKit.", "description": "https://github.com/kishikawakatsumi/BookReader\nUsage\nImport Your Own PDFs\nThe easiest way to import your PDFs is to email your PDF file to your iOS device. Navigate to the email and ensure that the attachment is there. Tap and hold the document attachment icon. This should open a popover on the iPad, or an action sheet on the iPhone, that shows all of the apps that open your document type. BookReader app should show up in the list. Tap BookReader app icon and BookReader app should launch and receive the document from the email.", "homepage": "https://github.com/iOSDevLog/BookReader", "license": { "type": "MIT", "file": "LICENSE" }, "screenshots": [ "https://raw.githubusercontent.com/iOSDevLog/BookReader/master/Screenshot/0.png", "https://raw.githubusercontent.com/iOSDevLog/BookReader/master/Screenshot/1.png", "https://raw.githubusercontent.com/iOSDevLog/BookReader/master/Screenshot/2.png", "https://raw.githubusercontent.com/iOSDevLog/BookReader/master/Screenshot/3.png", { "type": "MIT", "file": "LICENSE" } ], "authors": { "iosdevlog": "<EMAIL>" }, "source": { "git": "https://github.com/iOSDevLog/BookReader.git", "tag": "0.1.0" }, "social_media_url": "https://twitter.com/iosdevlog", "platforms": { "ios": "11.0" }, "source_files": "BookReader/Classes/**/*", "resource_bundles": { "BookReader": [ "BookReader/Assets/*.png", "BookReader/Assets/*.xib", "BookReader/Assets/*.storyboard" ] }, "frameworks": [ "UIKit", "PDFKit" ], "swift_version": "4.1" }
640
637
<reponame>1st/jenkins<gh_stars>100-1000 package jenkins; import hudson.FilePath.FileCallable; import hudson.remoting.VirtualChannel; import jenkins.security.Roles; import jenkins.slaves.RemotingVersionInfo; import org.jenkinsci.remoting.RoleChecker; import java.io.File; /** * {@link FileCallable}s that are meant to be only used on the master. * * Note that the logic within {@link #invoke(File, VirtualChannel)} should use API of a minimum supported Remoting version. * See {@link RemotingVersionInfo#getMinimumSupportedVersion()}. * * @since 1.587 / 1.580.1 * @param <T> the return type; note that this must either be defined in your plugin or included in the stock JEP-200 whitelist */ public abstract class MasterToSlaveFileCallable<T> implements FileCallable<T> { @Override public void checkRoles(RoleChecker checker) throws SecurityException { checker.check(this, Roles.SLAVE); } private static final long serialVersionUID = 1L; }
313
2,109
<reponame>zaza568/yo /* * This file is part of John the Ripper password cracker, * Copyright (c) 2014 by <NAME> * * Redistribution and use in source and binary forms, with or without * modification, are permitted. * * There's ABSOLUTELY NO WARRANTY, express or implied. */ #include "mask_ext.h" #include "misc.h" // error() #include "options.h" #include "memory.h" #include "memdbg.h" int *mask_skip_ranges = NULL; int mask_max_skip_loc = -1; int mask_int_cand_target = 0; int mask_gpu_is_static = 0; mask_int_cand_ctx mask_int_cand = {NULL, NULL, 1}; static void combination_util(int *data, int start, int end, int index, int r, mask_cpu_context *ptr, int *delta) { int i; if (index == r) { int tmp = 1; for (i = 0; i < r; i++) tmp *= ptr->ranges[data[i]].count; tmp -= mask_int_cand_target; tmp = tmp < 0 ? -tmp : tmp; if (tmp < *delta) { for (i = 0; i < r; i++) mask_skip_ranges[i] = data[i]; mask_max_skip_loc = mask_skip_ranges[i-1]; *delta = tmp; } return; } for (i = start; i <= end && end - i + 1 >= r - index; i++) { data[index] = i; combination_util(data, i + 1, end, index + 1, r, ptr, delta); } } static void generate_int_keys(mask_cpu_context *ptr) { int i, repeat = 1, modulo; #ifdef MASK_DEBUG fprintf(stderr, "%s()\n", __FUNCTION__); #endif #define fill_cand(t) \ for (i = 0; i < mask_int_cand.num_int_cand; i++) \ mask_int_cand.int_cand[i].x[t] = \ ptr->ranges[mask_skip_ranges[t]].chars \ [(i/repeat) % modulo]; #define cond(t) t < MASK_FMT_INT_PLHDR && mask_skip_ranges[t] != -1 for (i = 1; i < MASK_FMT_INT_PLHDR && mask_skip_ranges[i] != -1; i++) repeat *= ptr->ranges[mask_skip_ranges[i]].count; modulo = ptr->ranges[mask_skip_ranges[0]].count; for (i = 0; i < mask_int_cand.num_int_cand; i++) mask_int_cand.int_cand[i].i = 0; fill_cand(0); if (cond(1)) { modulo = ptr->ranges[mask_skip_ranges[1]].count; repeat /= modulo; fill_cand(1); } if (cond(2)) { modulo = ptr->ranges[mask_skip_ranges[2]].count; repeat /= modulo; fill_cand(2); } if (cond(3)) { repeat = 1; modulo = 
ptr->ranges[mask_skip_ranges[3]].count; fill_cand(3); } #undef fill_cand #undef cond } static void check_static_gpu_mask(int max_static_range) { unsigned int i; mask_gpu_is_static = 1; #ifdef MASK_DEBUG fprintf(stderr, "%s()\n", __FUNCTION__); #endif for (i = 0; i < MASK_FMT_INT_PLHDR; i++) if (max_static_range <= mask_skip_ranges[i]) { mask_gpu_is_static = 0; break; } mask_gpu_is_static |= !(options.flags & FLG_MASK_STACKED); } void mask_calc_combination(mask_cpu_context *ptr, int max_static_range) { int *data, i, n; int delta_to_target = 0x7fffffff; #ifdef MASK_DEBUG fprintf(stderr, "%s()\n", __FUNCTION__); #endif mask_int_cand.num_int_cand = 1; mask_int_cand.int_cpu_mask_ctx = NULL; mask_int_cand.int_cand = NULL; if (!mask_int_cand_target) return; if (MASK_FMT_INT_PLHDR > 4) { fprintf(stderr, "MASK_FMT_INT_PLHDR value must not exceed 4.\n"); error(); } n = ptr->count; data = (int*) mem_alloc(n * sizeof(int)); mask_skip_ranges = (int*) mem_alloc(MASK_FMT_INT_PLHDR * sizeof(int)); for (i = 0; i < MASK_FMT_INT_PLHDR; i++) mask_skip_ranges[i] = -1; /* Fix the maximum number of ranges that can be calculated on GPU to 3 */ for (i = 1; i <= MASK_FMT_INT_PLHDR; i++) combination_util(data, 0, n - 1, 0, i, ptr, &delta_to_target); if (mask_skip_ranges[0] != -1) { mask_int_cand.num_int_cand = 1; for (i = 0; i < MASK_FMT_INT_PLHDR && mask_skip_ranges[i] != -1; i++) mask_int_cand.num_int_cand *= ptr-> ranges[mask_skip_ranges[i]].count; } if (mask_int_cand.num_int_cand > 1) { mask_int_cand.int_cpu_mask_ctx = ptr; mask_int_cand.int_cand = (mask_char4 *) mem_alloc(mask_int_cand.num_int_cand * sizeof(mask_char4)); generate_int_keys(ptr); } check_static_gpu_mask(max_static_range); /*for (i = 0; i < mask_int_cand.num_int_cand && mask_int_cand.int_cand; i++) fprintf(stderr, "%c%c%c%c\n", mask_int_cand.int_cand[i].x[0], mask_int_cand.int_cand[i].x[1], mask_int_cand.int_cand[i].x[2], mask_int_cand.int_cand[i].x[3]);*/ MEM_FREE(data); }
1,931
484
from pathlib import Path import pytest from ansiblelint.rules import RulesCollection from ansiblelint.runner import Runner IMPORTED_PLAYBOOK = '''\ - hosts: all tasks: - name: success fail: msg="fail" when: false ''' MAIN_PLAYBOOK = '''\ - hosts: all tasks: - name: should be shell # noqa command-instead-of-shell no-changed-when shell: echo lol - import_playbook: imported_playbook.yml ''' @pytest.fixture def playbook(tmp_path: Path) -> str: """Create a reusable per-test playbook.""" playbook_path = tmp_path / 'playbook.yml' playbook_path.write_text(MAIN_PLAYBOOK) (tmp_path / 'imported_playbook.yml').write_text(IMPORTED_PLAYBOOK) return str(playbook_path) def test_skip_import_playbook( default_rules_collection: RulesCollection, playbook: str ) -> None: """Verify that a playbook import is skipped after a failure.""" runner = Runner(playbook, rules=default_rules_collection) results = runner.run() assert len(results) == 0
365
1,448
<filename>MaixPy/components/micropython/port/src/nes/mapper/InfoNES_Mapper_099.h /*===================================================================*/ /* */ /* Mapper 099 VS-Unisystem */ /* */ /*===================================================================*/ BYTE Map99_Coin; /*-------------------------------------------------------------------*/ /* Initialize Mapper 099 */ /*-------------------------------------------------------------------*/ void Map99_Init() { /* Initialize Mapper */ MapperInit = Map99_Init; /* Write to Mapper */ MapperWrite = Map0_Write; /* Write to SRAM */ MapperSram = Map0_Sram; /* Write to APU */ MapperApu = Map99_Apu; /* Read from APU */ MapperReadApu = Map99_ReadApu; /* Callback at VSync */ MapperVSync = Map0_VSync; /* Callback at HSync */ MapperHSync = Map0_HSync; /* Callback at PPU */ MapperPPU = Map0_PPU; /* Callback at Rendering Screen ( 1:BG, 0:Sprite ) */ MapperRenderScreen = Map0_RenderScreen; /* Set SRAM Banks */ SRAMBANK = SRAM; /* Set ROM Banks */ if ( NesHeader.byRomSize > 1 ) { ROMBANK0 = ROMPAGE( 0 ); ROMBANK1 = ROMPAGE( 1 ); ROMBANK2 = ROMPAGE( 2 ); ROMBANK3 = ROMPAGE( 3 ); } else if ( NesHeader.byRomSize > 0 ) { ROMBANK0 = ROMPAGE( 0 ); ROMBANK1 = ROMPAGE( 1 ); ROMBANK2 = ROMPAGE( 0 ); ROMBANK3 = ROMPAGE( 1 ); } else { ROMBANK0 = ROMPAGE( 0 ); ROMBANK1 = ROMPAGE( 0 ); ROMBANK2 = ROMPAGE( 0 ); ROMBANK3 = ROMPAGE( 0 ); } /* Set PPU Banks */ if ( NesHeader.byVRomSize > 0 ) { for ( int nPage = 0; nPage < 8; ++nPage ) PPUBANK[ nPage ] = VROMPAGE( nPage ); InfoNES_SetupChr(); } Map99_Coin = 0; /* Set up wiring of the interrupt pin */ K6502_Set_Int_Wiring( 1, 1 ); } /*-------------------------------------------------------------------*/ /* Mapper 99 Write to APU Function */ /*-------------------------------------------------------------------*/ void Map99_Apu( WORD wAddr, BYTE byData ) { if( wAddr == 0x4016 ) { if( byData & 0x04 ) { PPUBANK[ 0 ] = VROMPAGE( 8 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 1 ] = VROMPAGE( 9 % ( 
NesHeader.byVRomSize << 3 ) ); PPUBANK[ 2 ] = VROMPAGE( 10 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 3 ] = VROMPAGE( 11 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 4 ] = VROMPAGE( 12 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 5 ] = VROMPAGE( 13 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 6 ] = VROMPAGE( 14 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 7 ] = VROMPAGE( 15 % ( NesHeader.byVRomSize << 3 ) ); InfoNES_SetupChr(); } else { PPUBANK[ 0 ] = VROMPAGE( 0 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 1 ] = VROMPAGE( 1 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 2 ] = VROMPAGE( 2 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 3 ] = VROMPAGE( 3 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 4 ] = VROMPAGE( 4 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 5 ] = VROMPAGE( 5 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 6 ] = VROMPAGE( 6 % ( NesHeader.byVRomSize << 3 ) ); PPUBANK[ 7 ] = VROMPAGE( 7 % ( NesHeader.byVRomSize << 3 ) ); InfoNES_SetupChr(); } } if( wAddr == 0x4020 ) { Map99_Coin = byData; } } /*-------------------------------------------------------------------*/ /* Mapper 99 Read from APU Function */ /*-------------------------------------------------------------------*/ BYTE Map99_ReadApu( WORD wAddr ) { if( wAddr == 0x4020 ) { return Map99_Coin; } return ( wAddr >> 8 ); }
1,730
669
<reponame>Dig-Doug/runtime<filename>include/tfrt/bef/bef_buffer.h /* * Copyright 2020 The TensorFlow Runtime Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // This file declares a buffer type for storing BEF binary. #ifndef TFRT_SUPPORT_BEF_BUFFER_H_ #define TFRT_SUPPORT_BEF_BUFFER_H_ #include "tfrt/bef/bef_encoding.h" #include "tfrt/support/aligned_buffer.h" namespace tfrt { // A BEF file should be loaded to a buffer aligned by kAttributeMaxAlignment. constexpr size_t GetRequiredBefAlignment() { return kAttributeMaxAlignment; } // Buffer for storing BEF binary. using BefBuffer = AlignedBuffer<GetRequiredBefAlignment()>; } // namespace tfrt #endif // TFRT_SUPPORT_BEF_BUFFER_H_
386
1,766
<filename>packages/demo/locales/en.json { "title": "Demo System Status", "description": "A marvelous open source Status Page system", "systems": { "items": { "cdn": { "title": "CDN" }, "conversions": { "title": "Conversions", "description": "Conversion from one file to another" }, "site-delivery": { "title": "Site delivery" }, "api": { "title": "API", "description": "The API Endpoints any developers can access to" } } } }
240
373
/*
 * #%L
 * ACS AEM Commons Bundle
 * %%
 * Copyright (C) 2016 Adobe
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.adobe.acs.commons.redirects.models;

import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ValueMap;
import org.apache.sling.models.annotations.Model;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.inject.Inject;
import java.lang.invoke.MethodHandles;
import java.time.ZonedDateTime;
import java.util.Calendar;
import java.util.Objects;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;

/**
 * A single redirect rule: maps a {@code source} path (optionally a regex or a
 * trailing-wildcard pattern) to a {@code target}, with an HTTP status code, an
 * optional expiration date and a free-form note.
 */
@Model(adaptables = Resource.class)
public class RedirectRule {

    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    public static final String SOURCE_PROPERTY_NAME = "source";
    public static final String TARGET_PROPERTY_NAME = "target";
    public static final String STATUS_CODE_PROPERTY_NAME = "statusCode";
    public static final String UNTIL_DATE_PROPERTY_NAME = "untilDate";
    public static final String NOTE_PROPERTY_NAME = "note";

    @Inject
    private String source;

    @Inject
    private String target;

    @Inject
    private int statusCode;

    @Inject
    private String note;

    // Expiration instant derived from the stored Calendar; null means "never expires".
    private ZonedDateTime untilDate;

    // Compiled regex form of the source, or null when the source is a plain path
    // or an invalid/group-less pattern (see toRegex).
    private Pattern ptrn;

    // Pre-parsed target template; evaluated against regex capture groups.
    private SubstitutionElement[] substitutions;

    /**
     * @param source     source path; a trailing {@code *} is rewritten to the
     *                   capturing group {@code (.*)}
     * @param target     redirect target, may contain substitution placeholders
     * @param statusCode HTTP status code to respond with (e.g. 301/302)
     * @param calendar   optional expiration date, or {@code null}
     * @param note       optional free-form note, or {@code null}
     */
    public RedirectRule(String source, String target, int statusCode, Calendar calendar, String note) {
        this.source = source.trim();
        this.target = target.trim();
        this.statusCode = statusCode;
        this.note = note;

        String regex = this.source;
        if (regex.endsWith("*")) {
            // Convert the trailing wildcard into a regex capture group so the
            // matched suffix can be referenced from the target template.
            regex = regex.replaceAll("\\*$", "(.*)");
        }
        ptrn = toRegex(regex);
        substitutions = SubstitutionElement.parse(this.target);
        if (calendar != null) {
            untilDate = ZonedDateTime.ofInstant(
                    calendar.toInstant(), calendar.getTimeZone().toZoneId());
        }
    }

    /**
     * Builds a rule from the properties of a redirect resource.
     * Missing properties fall back to empty strings / zero.
     */
    public static RedirectRule from(ValueMap resource) {
        String source = resource.get(SOURCE_PROPERTY_NAME, "");
        String target = resource.get(TARGET_PROPERTY_NAME, "");
        String note = resource.get(NOTE_PROPERTY_NAME, "");
        int statusCode = resource.get(STATUS_CODE_PROPERTY_NAME, 0);
        // Typed get returns null when the property is absent or not a Calendar,
        // replacing the previous containsKey + instanceof dance.
        Calendar calendar = resource.get(UNTIL_DATE_PROPERTY_NAME, Calendar.class);
        return new RedirectRule(source, target, statusCode, calendar, note);
    }

    public String getSource() {
        return source;
    }

    public String getTarget() {
        return target;
    }

    public String getNote() {
        return note;
    }

    public int getStatusCode() {
        return statusCode;
    }

    public Pattern getRegex() {
        return ptrn;
    }

    public ZonedDateTime getUntilDate() {
        return untilDate;
    }

    @Override
    public String toString() {
        return String.format("RedirectRule{source='%s', target='%s', statusCode=%s, untilDate=%s, note=%s}",
                source, target, statusCode, untilDate, note);
    }

    /**
     * Equality is intentionally based on {@code source} only: two rules with the
     * same source address the same incoming path.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        RedirectRule that = (RedirectRule) o;
        return Objects.equals(source, that.source);
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): hash the source only.
        return Objects.hashCode(source);
    }

    /**
     * Expands the target template using the capture groups of {@code matcher}.
     */
    public String evaluate(Matcher matcher) {
        StringBuilder buf = new StringBuilder();
        for (SubstitutionElement substitution : substitutions) {
            buf.append(substitution.evaluate(matcher));
        }
        return buf.toString();
    }

    /**
     * Compiles {@code src} into a {@link Pattern}, or returns {@code null} when
     * the string is not a usable regex: either it fails to compile, or it has no
     * capturing groups (presumably such sources are matched as plain paths
     * elsewhere -- confirm with callers).
     */
    static Pattern toRegex(String src) {
        Pattern compiled;
        try {
            compiled = Pattern.compile(src);
            int groupCount = compiled.matcher("").groupCount();
            if (groupCount == 0) {
                compiled = null;
            }
        } catch (PatternSyntaxException e) {
            // Include the parse error so the broken rule can be diagnosed from logs
            // (previously the exception detail was silently dropped).
            log.info("invalid regex: {}: {}", src, e.getMessage());
            compiled = null;
        }
        return compiled;
    }
}
2,147
375
/*
 * Copyright 2018 Nokia Solutions and Networks
 * Licensed under the Apache License, Version 2.0,
 * see license.txt file for details.
 */
package org.robotframework.ide.eclipse.main.plugin.tableeditor.tasks;

import static com.google.common.collect.Lists.newArrayList;

import java.util.List;
import java.util.Optional;

import org.eclipse.jface.viewers.ISelectionProvider;
import org.robotframework.ide.eclipse.main.plugin.RedImages;
import org.robotframework.ide.eclipse.main.plugin.hyperlink.detectors.ITableHyperlinksDetector;
import org.robotframework.ide.eclipse.main.plugin.model.RobotElement;
import org.robotframework.ide.eclipse.main.plugin.model.RobotSuiteFile;
import org.robotframework.ide.eclipse.main.plugin.model.RobotSuiteFileSection;
import org.robotframework.ide.eclipse.main.plugin.model.RobotTasksSection;
import org.robotframework.ide.eclipse.main.plugin.tableeditor.DISectionEditorPart;
import org.robotframework.ide.eclipse.main.plugin.tableeditor.ISectionFormFragment;
import org.robotframework.ide.eclipse.main.plugin.tableeditor.SectionEditorPart;
import org.robotframework.ide.eclipse.main.plugin.tableeditor.SelectionLayerAccessor;
import org.robotframework.ide.eclipse.main.plugin.tableeditor.TreeLayerAccessor;
import org.robotframework.ide.eclipse.main.plugin.tableeditor.tasks.TasksEditorPart.TasksEditor;
import org.robotframework.red.graphics.ImagesManager;

/**
 * Editor part hosting the Tasks section of a Robot suite file.
 */
public class TasksEditorPart extends DISectionEditorPart<TasksEditor> {

    public TasksEditorPart() {
        super(TasksEditor.class);
        setTitleImage(ImagesManager.getImage(RedImages.getRobotImage()));
    }

    /**
     * The concrete section editor. Everything table-related is delegated to a
     * single {@link TasksFormFragment} instance created in
     * {@link #createFormFragments()}.
     */
    public static class TasksEditor extends SectionEditorPart {

        private static final String CONTEXT_ID = "org.robotframework.ide.eclipse.tableeditor.tasks.context";

        private TasksFormFragment fragment;

        @Override
        protected String getContextId() {
            return CONTEXT_ID;
        }

        @Override
        public String getId() {
            return "red.tasks";
        }

        @Override
        protected String getTitle() {
            return "Tasks";
        }

        @Override
        protected String getSectionName() {
            return RobotTasksSection.SECTION_NAME;
        }

        @Override
        public boolean isPartFor(final RobotSuiteFileSection section) {
            return section instanceof RobotTasksSection;
        }

        @Override
        public void revealElement(final RobotElement robotElement) {
            fragment.revealElement(robotElement, false);
        }

        @Override
        public void revealElementAndFocus(final RobotElement robotElement) {
            fragment.revealElement(robotElement, true);
        }

        @Override
        public Optional<? extends RobotSuiteFileSection> provideSection(final RobotSuiteFile suite) {
            return suite.findSection(RobotTasksSection.class);
        }

        @Override
        protected List<? extends ISectionFormFragment> createFormFragments() {
            fragment = new TasksFormFragment();
            return newArrayList(fragment);
        }

        @Override
        protected ISelectionProvider getSelectionProvider() {
            return fragment.getSelectionProvider();
        }

        @Override
        public SelectionLayerAccessor getSelectionLayerAccessor() {
            return fragment.getSelectionLayerAccessor();
        }

        @Override
        public Optional<TreeLayerAccessor> getTreeLayerAccessor() {
            return Optional.of(fragment.getTreeLayerAccessor());
        }

        @Override
        public void aboutToChangeToOtherPage() {
            fragment.aboutToChangeToOtherPage();
        }

        @Override
        public void waitForPendingJobs() {
            // nothing to wait for here
        }

        @Override
        public List<ITableHyperlinksDetector> getDetectors() {
            return fragment.getDetectors();
        }
    }
}
1,623
968
/*
 * Copyright MapStruct Authors.
 *
 * Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
 */
package org.mapstruct.example;

import org.junit.Test;
import org.mapstruct.example.dto.FishTankDto;
import org.mapstruct.example.dto.FishTankWithNestedDocumentDto;
import org.mapstruct.example.mapper.FishTankMapper;
import org.mapstruct.example.mapper.FishTankMapperConstant;
import org.mapstruct.example.mapper.FishTankMapperExpression;
import org.mapstruct.example.mapper.FishTankMapperWithDocument;
import org.mapstruct.example.model.Fish;
import org.mapstruct.example.model.FishTank;
import org.mapstruct.example.model.Interior;
import org.mapstruct.example.model.MaterialType;
import org.mapstruct.example.model.Ornament;
import org.mapstruct.example.model.WaterPlant;
import org.mapstruct.example.model.WaterQuality;
import org.mapstruct.example.model.WaterQualityReport;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Exercises MapStruct's nested-property mapping (asymmetric source/target
 * nesting, constants, expressions and intermediate documents) using the
 * FishTank example mappers.
 *
 * @author <NAME>
 */
public class FishTanksMappersTest {

    @Test
    public void shouldAutomapAndHandleSourceAndTargetPropertyNesting() {
        // -- prepare
        FishTank source = createFishTank();

        // -- action
        FishTankDto target = FishTankMapper.INSTANCE.map( source );

        // -- result
        assertThat( target.getName() ).isEqualTo( source.getName() );

        // fish and fishDto can be automapped; the dto name has no source, so it stays null
        assertThat( target.getFish() ).isNotNull();
        assertThat( target.getFish().getKind() ).isEqualTo( source.getFish().getType() );
        assertThat( target.getFish().getName() ).isNull();

        // automapping takes care of mapping property "waterPlant".
        assertThat( target.getPlant() ).isNotNull();
        assertThat( target.getPlant().getKind() ).isEqualTo( source.getPlant().getKind() );

        // ornament (nested asymmetric source: source.interior.ornament -> target.ornament)
        assertThat( target.getOrnament() ).isNotNull();
        assertThat( target.getOrnament().getType() ).isEqualTo( source.getInterior().getOrnament().getType() );

        // material (nested asymmetric target: source.material -> target.material.materialType)
        assertThat( target.getMaterial() ).isNotNull();
        assertThat( target.getMaterial().getManufacturer() ).isNull();
        assertThat( target.getMaterial().getMaterialType() ).isNotNull();
        assertThat( target.getMaterial().getMaterialType().getType() ).isEqualTo( source.getMaterial().getType() );

        // first symmetric then asymmetric: quality.report maps 1:1, organisation is synthesized
        assertThat( target.getQuality() ).isNotNull();
        assertThat( target.getQuality().getReport() ).isNotNull();
        assertThat( target.getQuality().getReport().getVerdict() )
            .isEqualTo( source.getQuality().getReport().getVerdict() );
        assertThat( target.getQuality().getReport().getOrganisation().getApproval() ).isNull();
        assertThat( target.getQuality().getReport().getOrganisation() ).isNotNull();
        assertThat( target.getQuality().getReport().getOrganisation().getName() )
            .isEqualTo( source.getQuality().getReport().getOrganisationName() );
    }

    @Test
    public void shouldAutomapAndHandleSourceAndTargetPropertyNestingReverse() {
        // -- prepare
        FishTank source = createFishTank();

        // -- action: round-trip model -> dto -> model
        FishTankDto target = FishTankMapper.INSTANCE.map( source );
        FishTank source2 = FishTankMapper.INSTANCE.map( target );

        // -- result
        assertThat( source2.getName() ).isEqualTo( source.getName() );

        // fish
        assertThat( source2.getFish() ).isNotNull();
        assertThat( source2.getFish().getType() ).isEqualTo( source.getFish().getType() );

        // interior, designer will not be mapped (asymmetric) to target. Here it shows:
        // the designer is lost on the round trip.
        assertThat( source2.getInterior() ).isNotNull();
        assertThat( source2.getInterior().getDesigner() ).isNull();
        assertThat( source2.getInterior().getOrnament() ).isNotNull();
        assertThat( source2.getInterior().getOrnament().getType() )
            .isEqualTo( source.getInterior().getOrnament().getType() );

        // material
        assertThat( source2.getMaterial() ).isNotNull();
        assertThat( source2.getMaterial().getType() ).isEqualTo( source.getMaterial().getType() );

        // plant
        assertThat( source2.getPlant().getKind() ).isEqualTo( source.getPlant().getKind() );

        // quality
        assertThat( source2.getQuality().getReport() ).isNotNull();
        assertThat( source2.getQuality().getReport().getOrganisationName() )
            .isEqualTo( source.getQuality().getReport().getOrganisationName() );
        assertThat( source2.getQuality().getReport().getVerdict() )
            .isEqualTo( source.getQuality().getReport().getVerdict() );
    }

    @Test
    public void shouldAutomapAndHandleSourceAndTargetPropertyNestingAndConstant() {
        // -- prepare
        FishTank source = createFishTank();

        // -- action
        FishTankDto target = FishTankMapperConstant.INSTANCE.map( source );

        // -- result

        // fixed value supplied by the mapper, not by the source
        assertThat( target.getFish().getName() ).isEqualTo( "Nemo" );

        // automapping takes care of mapping property "waterPlant".
        assertThat( target.getPlant() ).isNotNull();
        assertThat( target.getPlant().getKind() ).isEqualTo( source.getPlant().getKind() );

        // non-nested and constant: manufacturer is the constant, type comes from the source
        assertThat( target.getMaterial() ).isNotNull();
        assertThat( target.getMaterial().getManufacturer() ).isEqualTo( "MMM" );
        assertThat( target.getMaterial().getMaterialType() ).isNotNull();
        assertThat( target.getMaterial().getMaterialType().getType() ).isEqualTo( source.getMaterial().getType() );

        // properties this mapper does not map at all
        assertThat( target.getOrnament() ).isNull();
        assertThat( target.getQuality() ).isNull();
    }

    @Test
    public void shouldAutomapAndHandleSourceAndTargetPropertyNestingAndExpresion() {
        // -- prepare
        FishTank source = createFishTank();

        // -- action
        FishTankDto target = FishTankMapperExpression.INSTANCE.map( source );

        // -- result: name and organisation name are produced by mapper expressions
        assertThat( target.getFish().getName() ).isEqualTo( "Jaws" );
        assertThat( target.getMaterial() ).isNull();
        assertThat( target.getOrnament() ).isNull();
        assertThat( target.getPlant() ).isNull();
        assertThat( target.getQuality() ).isNotNull();
        assertThat( target.getQuality().getReport() ).isNotNull();
        assertThat( target.getQuality().getReport().getVerdict() )
            .isEqualTo( source.getQuality().getReport().getVerdict() );
        assertThat( target.getQuality().getReport().getOrganisation() ).isNotNull();
        assertThat( target.getQuality().getReport().getOrganisation().getApproval() ).isNull();
        assertThat( target.getQuality().getReport().getOrganisation().getName() ).isEqualTo( "Dunno" );
    }

    @Test
    public void shouldAutomapIntermediateLevelAndMapConstant() {
        // -- prepare
        FishTank source = createFishTank();

        // -- action
        FishTankWithNestedDocumentDto target = FishTankMapperWithDocument.INSTANCE.map( source );

        // -- result: report is mapped through the intermediate "document" level
        assertThat( target.getFish().getName() ).isEqualTo( "Jaws" );
        assertThat( target.getMaterial() ).isNull();
        assertThat( target.getOrnament() ).isNull();
        assertThat( target.getPlant() ).isNull();
        assertThat( target.getQuality() ).isNotNull();
        assertThat( target.getQuality().getDocument() ).isNotNull();
        assertThat( target.getQuality().getDocument().getVerdict() )
            .isEqualTo( source.getQuality().getReport().getVerdict() );
        assertThat( target.getQuality().getDocument().getOrganisation() ).isNotNull();
        assertThat( target.getQuality().getDocument().getOrganisation().getApproval() ).isNull();
        assertThat( target.getQuality().getDocument().getOrganisation().getName() ).isEqualTo( "NoIdeaInc" );
    }

    /**
     * Builds the shared source fixture. Note: interior.designer and
     * report.organisationName are set so the asymmetric mappings above have
     * something to (not) map.
     */
    private FishTank createFishTank() {
        FishTank fishTank = new FishTank();

        Fish fish = new Fish();
        fish.setType( "Carp" );

        WaterPlant waterplant = new WaterPlant();
        waterplant.setKind( "Water Hyacinth" );

        Interior interior = new Interior();
        interior.setDesigner( "MrVeryFamous" );
        Ornament ornament = new Ornament();
        ornament.setType( "castle" );
        interior.setOrnament( ornament );

        WaterQuality quality = new WaterQuality();
        WaterQualityReport report = new WaterQualityReport();
        report.setVerdict( "PASSED" );
        report.setOrganisationName( "ACME" );
        quality.setReport( report );

        MaterialType materialType = new MaterialType();
        materialType.setType( "myMaterialType" );

        fishTank.setName( "MyLittleFishTank" );
        fishTank.setFish( fish );
        fishTank.setPlant( waterplant );
        fishTank.setInterior( interior );
        fishTank.setMaterial( materialType );
        fishTank.setQuality( quality );

        return fishTank;
    }
}
3,410
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_UPDATER_WIN_SETUP_SETUP_UTIL_H_ #define CHROME_UPDATER_WIN_SETUP_SETUP_UTIL_H_ #include <guiddef.h> #include <string> #include <vector> #include "base/win/windows_types.h" class WorkItemList; namespace base { class CommandLine; class FilePath; } // namespace base namespace updater { enum class UpdaterScope; bool RegisterWakeTask(const base::CommandLine& run_command, UpdaterScope scope); void UnregisterWakeTask(UpdaterScope scope); std::wstring GetComServerClsidRegistryPath(REFCLSID clsid); std::wstring GetComServerAppidRegistryPath(REFGUID appid); std::wstring GetComIidRegistryPath(REFIID iid); std::wstring GetComTypeLibRegistryPath(REFIID iid); // Returns the resource index for the type library where the interface specified // by the `iid` is defined. For encapsulation reasons, the updater interfaces // are segregated in multiple IDL files, which get compiled to multiple type // libraries. The type libraries are inserted in the compiled binary as // resources with different resource indexes. The resource index becomes a // suffix of the path to where the type library exists, such as // `...\updater.exe\\1`. See the Windows SDK documentation for LoadTypeLib for // details. std::wstring GetComTypeLibResourceIndex(REFIID iid); // Returns the interfaces ids of all interfaces declared in IDL of the updater // that can be installed side-by-side with other instances of the updater. std::vector<IID> GetSideBySideInterfaces(); // Returns the interfaces ids of all interfaces declared in IDL of the updater // that can only be installed for the active instance of the updater. std::vector<IID> GetActiveInterfaces(); // Returns the CLSIDs of servers that can be installed side-by-side with other // instances of the updater. 
std::vector<CLSID> GetSideBySideServers(UpdaterScope scope); // Returns the CLSIDs of servers that can only be installed for the active // instance of the updater. std::vector<CLSID> GetActiveServers(UpdaterScope scope); // Helper function that joins two vectors and returns the resultant vector. template <typename T> std::vector<T> JoinVectors(const std::vector<T>& vector1, const std::vector<T>& vector2) { std::vector<T> joined_vector = vector1; joined_vector.insert(joined_vector.end(), vector2.begin(), vector2.end()); return joined_vector; } // Adds work items to `list` to install the interface `iid`. void AddInstallComInterfaceWorkItems(HKEY root, const base::FilePath& typelib_path, GUID iid, WorkItemList* list); // Adds work items to `list` to install the server `iid`. void AddInstallServerWorkItems(HKEY root, CLSID iid, const base::FilePath& executable_path, bool internal_service, WorkItemList* list); // Adds work items to `list` to install the COM service. void AddComServiceWorkItems(const base::FilePath& com_service_path, bool internal_service, WorkItemList* list); // Parses the run time dependency file which contains all dependencies of // the `updater` target. This file is a text file, where each line of // text represents a single dependency. Some dependencies are not needed for // updater to run, and are filtered out from the return value of this function. std::vector<base::FilePath> ParseFilesFromDeps(const base::FilePath& deps); } // namespace updater #endif // CHROME_UPDATER_WIN_SETUP_SETUP_UTIL_H_
1,336
2,326
#include <Python.h>
#include "server.h"
#include "wsgi.h"
#include "filewrapper.h"
#ifdef WANT_STATSD
#include "statsd-client.h"
#endif

/* _bjoern.server_run(socket, wsgi_app[, statsd args...]):
 * parse arguments, fill a ServerInfo and hand control to the event loop.
 * Returns None when the server loop exits. */
static PyObject* run(PyObject* self, PyObject* args)
{
    ServerInfo info;
    PyObject* socket;

#ifdef WANT_STATSD
    info.statsd = NULL;
    int statsd_enabled;
    char* statsd_host;
    int statsd_port;
    char* statsd_ns;
    char* statsd_tags = NULL;
    /* statsd build: statsd_enabled/host/port/ns are required, tags optional. */
    if(!PyArg_ParseTuple(args, "OOiziz|z:server_run", &socket, &info.wsgi_app,
                         &statsd_enabled, &statsd_host, &statsd_port, &statsd_ns,
                         &statsd_tags)) {
        return NULL;
    }
#else
    /* non-statsd build: accept (and then reject) the statsd arguments so the
     * Python-side call signature stays the same regardless of build flags. */
    char* ignored_str = NULL;
    int ignored_int = 0;
    if(!PyArg_ParseTuple(args, "OO|izizz:server_run", &socket, &info.wsgi_app,
                         &ignored_int, &ignored_str, &ignored_int, &ignored_str,
                         &ignored_str)) {
        return NULL;
    }
    /* NOTE(review): only the last value parsed into each ignored_* variable is
     * checked here, so a caller passing e.g. only statsd_enabled=1 is still
     * detected via ignored_int, but some combinations of empty/zero statsd
     * arguments may slip through -- confirm intended strictness. */
    if (ignored_str != NULL || ignored_int != 0) {
        PyErr_Format(PyExc_TypeError, "Unexpected statsd_* arguments (forgot to compile with statsd support?)");
        return NULL;
    }
#endif

    /* Accepts any object usable as a file descriptor (socket, int, ...). */
    info.sockfd = PyObject_AsFileDescriptor(socket);
    if (info.sockfd < 0) {
        return NULL;
    }

    info.host = NULL;
    if (PyObject_HasAttrString(socket, "getsockname")) {
        PyObject* sockname = PyObject_CallMethod(socket, "getsockname", NULL);
        if (sockname == NULL) {
            return NULL;
        }
        if (PyTuple_CheckExact(sockname) && PyTuple_GET_SIZE(sockname) == 2) {
            /* Standard (ipaddress, port) case */
            /* NOTE(review): info.host/info.port borrow references out of
             * `sockname`, which is never decref'd; the "leak" keeps those
             * borrowed references alive for the server's lifetime -- confirm
             * this is intentional before "fixing" it. */
            info.host = PyTuple_GET_ITEM(sockname, 0);
            info.port = PyTuple_GET_ITEM(sockname, 1);
        }
    }

#ifdef WANT_STATSD
    if (statsd_enabled) {
        /* Fall back to localhost / no namespace when unset or empty. */
        if (statsd_host == NULL || *statsd_host == '\0') {
            statsd_host = "127.0.0.1";
        }

        if (statsd_ns == NULL || *statsd_ns == '\0') {
            info.statsd = statsd_init(statsd_host, statsd_port);
        } else {
            info.statsd = statsd_init_with_namespace(statsd_host, statsd_port, statsd_ns);
        }

#ifdef WANT_STATSD_TAGS
        info.statsd_tags = statsd_tags;
        DBG("Statsd: host=%s, port=%d, ns=%s, tags=%s",
            statsd_host, statsd_port, statsd_ns, statsd_tags);
#else
        DBG("Statsd: host=%s, port=%d, ns=%s",
            statsd_host, statsd_port, statsd_ns);
#endif
    } else {
        DBG("Statsd disabled");
    }
#endif

    _initialize_request_module(&info);
    /* Blocks until the server loop terminates. */
    server_run(&info);

#ifdef WANT_STATSD
    statsd_finalize(info.statsd);
#endif

    Py_RETURN_NONE;
}

static PyMethodDef Bjoern_FunctionTable[] = {
    {"server_run", (PyCFunction) run, METH_VARARGS, NULL},
    {NULL, NULL, 0, NULL}
};

#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef module = {
    PyModuleDef_HEAD_INIT,
    "bjoern",
    NULL,
    -1, /* size of per-interpreter state of the module,
           or -1 if the module keeps state in global variables. */
    Bjoern_FunctionTable,
    NULL, NULL, NULL, NULL,
};
#endif

/* Single init entry point for both Python 2 and 3. */
#if PY_MAJOR_VERSION >= 3
#define INIT_BJOERN PyInit__bjoern
#else
#define INIT_BJOERN init_bjoern
#endif

PyMODINIT_FUNC INIT_BJOERN(void)
{
    _init_common();
    _init_filewrapper();

    PyType_Ready(&FileWrapper_Type);
    assert(FileWrapper_Type.tp_flags & Py_TPFLAGS_READY);
    PyType_Ready(&StartResponse_Type);
    assert(StartResponse_Type.tp_flags & Py_TPFLAGS_READY);
    Py_INCREF(&FileWrapper_Type);
    Py_INCREF(&StartResponse_Type);

    /* Expose the compile-time feature flags to Python as a dict of bools. */
    PyObject* features = PyDict_New();
#ifdef WANT_SIGNAL_HANDLING
    PyDict_SetItemString(features, "has_signal_handling", Py_True);
#else
    PyDict_SetItemString(features, "has_signal_handling", Py_False);
#endif
#ifdef WANT_SIGINT_HANDLING
    PyDict_SetItemString(features, "has_sigint_handling", Py_True);
#else
    PyDict_SetItemString(features, "has_sigint_handling", Py_False);
#endif
#ifdef WANT_STATSD
    PyDict_SetItemString(features, "has_statsd", Py_True);
#else
    PyDict_SetItemString(features, "has_statsd", Py_False);
#endif
#ifdef WANT_STATSD_TAGS
    PyDict_SetItemString(features, "has_statsd_tags", Py_True);
#else
    PyDict_SetItemString(features, "has_statsd_tags", Py_False);
#endif

#if PY_MAJOR_VERSION >= 3
    PyObject* bjoern_module = PyModule_Create(&module);
    if (bjoern_module == NULL) {
        return NULL;
    }
#else
    PyObject* bjoern_module = Py_InitModule("_bjoern", Bjoern_FunctionTable);
#endif

    PyModule_AddObject(bjoern_module, "features", features);
    PyModule_AddObject(bjoern_module, "version", Py_BuildValue("(iii)", 3, 1, 0));
#if PY_MAJOR_VERSION >= 3
    return bjoern_module;
#endif
}
1,857
796
package net.zhuoweizhang.mcpelauncher.ui; import net.zhuoweizhang.mcpelauncher.R; import android.app.Activity; import android.content.*; import android.os.Bundle; import android.widget.*; public class MinecraftNotSupportedActivity extends Activity { public TextView theText; public void onCreate(Bundle icicle) { super.onCreate(icicle); setContentView(R.layout.no_minecraft); theText = (TextView) findViewById(R.id.no_minecraft_text); Intent intent = getIntent(); String minecraftVersion = intent.getStringExtra("minecraftVersion"); String supportedVersion = intent.getStringExtra("supportedVersion"); String textToDisplay = getResources().getString(R.string.minecraft_version_not_supported). toString(); String textWithReplacements = textToDisplay.replaceAll("MINECRAFT_VERSION", minecraftVersion). replaceAll("SUPPORTED_VERSION", supportedVersion); theText.setText(textWithReplacements); } }
288
1,127
// Copyright (C) 2018-2022 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // #include <gtest/gtest.h> #include <broadcast_shape_inference.hpp> #include <openvino/op/broadcast.hpp> #include <openvino/op/ops.hpp> #include <openvino/op/parameter.hpp> #include <utils/shape_inference/shape_inference.hpp> #include <utils/shape_inference/static_shape.hpp> using namespace ov; using namespace ov::intel_cpu; TEST(StaticShapeInferenceTest, BroadcastBidirectionalTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1, -1}); auto target_shape = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, op::BroadcastType::BIDIRECTIONAL); int32_t target_shape_val[] = {1, 16, 50, 50}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{4}, target_shape_val); std::vector<StaticShape> static_input_shapes = {StaticShape{16, 1, 1}, StaticShape{4}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({1, 16, 50, 50})); static_input_shapes = {StaticShape{16, 1, 1}, StaticShape{4}}; static_output_shapes = {StaticShape{}}; EXPECT_THROW(shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); } TEST(StaticShapeInferenceTest, BroadcastBidirectionalConstantTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1, -1, -1}); auto target_shape = std::make_shared<ov::op::v0::Constant>(element::i32, ov::Shape{3}, std::vector<int32_t>{16, 1, 40}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, op::BroadcastType::BIDIRECTIONAL); std::vector<StaticShape> static_input_shapes = 
{StaticShape{1, 16, 50, 1}, StaticShape{3}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}); ASSERT_EQ(static_output_shapes[0], StaticShape({1, 16, 50, 40})); } TEST(StaticShapeInferenceTest, BroadcastPDPDTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1}); auto target_shape = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, op::BroadcastModeSpec(op::BroadcastType::PDPD, 1)); int32_t target_shape_val[] = {2, 3, 6}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{3}, target_shape_val); std::vector<StaticShape> static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({2, 3, 6})); static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}; static_output_shapes = {StaticShape{}}; EXPECT_THROW(shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); } TEST(StaticShapeInferenceTest, BroadcastPDPDConstantTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1}); auto target_shape = std::make_shared<ov::op::v0::Constant>(element::i32, ov::Shape{3}, std::vector<int32_t>{2, 3, 6}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, op::BroadcastModeSpec(op::BroadcastType::PDPD, 1)); std::vector<StaticShape> static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}); ASSERT_EQ(static_output_shapes[0], 
StaticShape({2, 3, 6})); } TEST(StaticShapeInferenceTest, BroadcastNumpyTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1, -1}); auto target_shape = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, op::BroadcastType::NUMPY); int32_t target_shape_val[] = {1, 16, 50, 50}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{4}, target_shape_val); std::vector<StaticShape> static_input_shapes = {StaticShape{16, 1, 1}, StaticShape{4}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({1, 16, 50, 50})); static_input_shapes = {StaticShape{16, 1, 1}, StaticShape{4}}; static_output_shapes = {StaticShape{}}; EXPECT_THROW(shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); } TEST(StaticShapeInferenceTest, BroadcastNumpyConstantTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1, -1}); auto target_shape = std::make_shared<ov::op::v0::Constant>(element::i32, ov::Shape{4}, std::vector<int32_t>{1, 16, 50, 50}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, op::BroadcastType::NUMPY); std::vector<StaticShape> static_input_shapes = {StaticShape{16, 1, 1}, StaticShape{4}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}); ASSERT_EQ(static_output_shapes[0], StaticShape({1, 16, 50, 50})); } TEST(StaticShapeInferenceTest, BroadcastExplicitTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1}); auto target_shape = 
std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto axes_mapping = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, axes_mapping, op::BroadcastType::EXPLICIT); int32_t target_shape_val[] = {1, 16, 50, 50}; int32_t axes_mapping_val[] = {1}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{4}, target_shape_val); constant_data[2] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{1}, axes_mapping_val); std::vector<StaticShape> static_input_shapes = {StaticShape{16}, StaticShape{4}, StaticShape{1}}; std::vector<StaticShape> static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({1, 16, 50, 50})); constant_data.erase(1); EXPECT_THROW(shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, constant_data), NodeValidationFailure); EXPECT_THROW(shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); } TEST(StaticShapeInferenceTest, BroadcastExplicitConstantTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1}); auto target_shape = std::make_shared<ov::op::v0::Constant>(element::i32, ov::Shape{4}, std::vector<int32_t>{1, 16, 50, 50}); auto axes_mapping = std::make_shared<ov::op::v0::Constant>(element::i32, ov::Shape{1}, std::vector<int32_t>{1}); auto broadcast_v3 = std::make_shared<op::v3::Broadcast>(input, target_shape, axes_mapping, op::BroadcastType::EXPLICIT); std::vector<StaticShape> static_input_shapes = {StaticShape{16}, StaticShape{4}, StaticShape{1}}; std::vector<StaticShape> static_output_shapes = {StaticShape{}}; 
shape_inference(broadcast_v3.get(), static_input_shapes, static_output_shapes, {}); ASSERT_EQ(static_output_shapes[0], StaticShape({1, 16, 50, 50})); } // BroadcastV1 test TEST(StaticShapeInferenceTest, BroadcastV1PDPDTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1}); auto target_shape = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v1 = std::make_shared<op::v1::Broadcast>(input, target_shape, op::AutoBroadcastSpec(op::AutoBroadcastType::PDPD, 1)); int32_t target_shape_val[] = {2, 3, 6}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{3}, target_shape_val); std::vector<StaticShape> static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v1.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({2, 3, 6})); static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}; static_output_shapes = {StaticShape{}}; EXPECT_THROW(shape_inference(broadcast_v1.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); } TEST(StaticShapeInferenceTest, BroadcastV1NumpyTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1}); auto target_shape = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v1 = std::make_shared<op::v1::Broadcast>(input, target_shape); int32_t target_shape_val[] = {2, 3, 6}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{3}, target_shape_val); std::vector<StaticShape> static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}, static_output_shapes = {StaticShape{}}; 
shape_inference(broadcast_v1.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({2, 3, 6})); static_input_shapes = {StaticShape{3, 1}, StaticShape{3}}; static_output_shapes = {StaticShape{}}; EXPECT_THROW(shape_inference(broadcast_v1.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); } TEST(StaticShapeInferenceTest, BroadcastV1ExplicitTest) { auto input = std::make_shared<ov::op::v0::Parameter>(element::f32, PartialShape{-1, -1}); auto target_shape = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto axes_mapping = std::make_shared<ov::op::v0::Parameter>(element::i32, PartialShape{-1}); auto broadcast_v1 = std::make_shared<op::v1::Broadcast>(input, target_shape, axes_mapping); int32_t target_shape_val[] = {2, 3, 1}; int32_t axes_mapping_val[] = {1, 2}; std::map<size_t, std::shared_ptr<ngraph::runtime::HostTensor>> constant_data; constant_data[1] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{3}, target_shape_val); constant_data[2] = std::make_shared<ngraph::runtime::HostTensor>(ngraph::element::Type_t::i32, ov::Shape{2}, axes_mapping_val); std::vector<StaticShape> static_input_shapes = {StaticShape{3, 1}, StaticShape{3}, StaticShape{2}}, static_output_shapes = {StaticShape{}}; shape_inference(broadcast_v1.get(), static_input_shapes, static_output_shapes, constant_data); ASSERT_EQ(static_output_shapes[0], StaticShape({2, 3, 1})); static_input_shapes = {StaticShape{3, 1}, StaticShape{3}, StaticShape{2}}; static_output_shapes = {StaticShape{}}; EXPECT_THROW(shape_inference(broadcast_v1.get(), static_input_shapes, static_output_shapes, {}), NodeValidationFailure); }
4,996
190,993
<filename>tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h /* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_COMPILER_MLIR_LITE_TF_TFL_TRANSLATE_CL_H_ #define TENSORFLOW_COMPILER_MLIR_LITE_TF_TFL_TRANSLATE_CL_H_ // This file contains command-line options aimed to provide the parameters // required by the TensorFlow Graph(Def) to TF Lite Flatbuffer conversion. It is // only intended to be included by binaries. #include <string> #include "llvm/Support/CommandLine.h" // The commandline options are defined in LLVM style, so the caller should // use llvm::InitLLVM to initialize the options. // // Please see the implementation file for documentation of details of these // options. // TODO(jpienaar): Revise the command line option parsing here. 
extern llvm::cl::opt<std::string> input_file_name; extern llvm::cl::opt<std::string> output_file_name; extern llvm::cl::opt<bool> use_splatted_constant; extern llvm::cl::opt<bool> input_mlir; extern llvm::cl::opt<bool> output_mlir; extern llvm::cl::list<std::string> custom_opdefs; extern llvm::cl::opt<bool> emit_quant_adaptor_ops; extern llvm::cl::opt<std::string> quant_stats_file_name; extern llvm::cl::opt<bool> convert_tf_while_to_tfl_while; extern llvm::cl::opt<std::string> select_user_tf_ops; extern llvm::cl::opt<bool> allow_all_select_tf_ops; extern llvm::cl::opt<bool> unfold_batchmatmul; extern llvm::cl::opt<bool> unfold_large_splat_constant; extern llvm::cl::opt<bool> guarantee_all_funcs_one_use; // Import saved model. extern llvm::cl::opt<bool> import_saved_model_object_graph; extern llvm::cl::opt<bool> import_saved_model_signature_defs; extern llvm::cl::opt<std::string> saved_model_tags; extern llvm::cl::opt<std::string> saved_model_exported_names; // Import HLO. enum HloImportType { proto, hlotxt, mlir_text }; extern llvm::cl::opt<bool> import_hlo; extern llvm::cl::opt<HloImportType> hlo_import_type; extern llvm::cl::opt<bool> enable_hlo_to_tf_conversion; #endif // TENSORFLOW_COMPILER_MLIR_LITE_TF_TFL_TRANSLATE_CL_H_
901
381
from prices import Money, MoneyRange, TaxedMoney, TaxedMoneyRange from unittest import TestCase from . import (ClassifyingPartitioner, InsufficientStock, Item, ItemLine, ItemList, ItemRange, Partitioner, StockedItem, partition) class Swallow(Item): def get_price_per_item(self, sale=False): if sale: return Money(1, currency='USD') return Money(5, currency='USD') class Robin(Item): def get_price_per_item(self): return TaxedMoney(Money(8, currency='USD'), Money(10, currency='USD')) class RareRobin(Item): def get_price_per_item(self): return TaxedMoney(Money(12, currency='USD'), Money(15, currency='USD')) class SpanishInquisition(Item): def get_price_per_item(self): return Money(15, currency='BTC') class FetchezLaVache(Item): def get_price_per_item(self): return Money(5, currency='BTC') class EmptyRange(ItemRange): def __iter__(self): return iter([]) class ThingsNobodyExpects(ItemRange): def __iter__(self): yield SpanishInquisition() yield FetchezLaVache() class TaxedThings(ItemRange): def __iter__(self): yield Robin() yield RareRobin() class SwallowLine(ItemLine): def get_quantity(self): return 2 def get_price_per_item(self): return Money(5, currency='EUR') class CoconutLine(ItemLine): def get_price_per_item(self): return Money(15, currency='EUR') class LimitedShrubbery(StockedItem): def get_stock(self): return 1 class SwallowPartitioner(ClassifyingPartitioner): def classify(self, item): if isinstance(item, Swallow): return 'swallow' return 'unknown' class ItemTest(TestCase): def test_get_price(self): 'Item.get_price() works' swallow = Swallow() self.assertEqual(swallow.get_price(), Money(5, currency='USD')) self.assertEqual(swallow.get_price(sale=True), Money(1, currency='USD')) robin = Robin() self.assertEqual(robin.get_price(), TaxedMoney( Money(8, currency='USD'), Money(10, currency='USD'))) class ItemRangeTest(TestCase): def test_get_price_range(self): 'ItemRange.get_price_range() works and calls its items' unexpected = ThingsNobodyExpects() 
self.assertEqual(unexpected.get_price_range(), MoneyRange(Money(5, currency='BTC'), Money(15, currency='BTC'))) taxed = TaxedThings() self.assertEqual(taxed.get_price_range(), TaxedMoneyRange( TaxedMoney(Money(8, currency='USD'), Money(10, currency='USD')), TaxedMoney(Money(12, currency='USD'), Money(15, currency='USD')))) def test_get_price_range_on_empty(self): 'ItemRange.get_price_range() raises an exception on an empty range' empty = EmptyRange() self.assertRaises(AttributeError, empty.get_price_range) class ItemListTest(TestCase): def test_repr(self): 'ItemList.__repr__() returns valid code' item_list = ItemList([1]) self.assertEqual(item_list.__repr__(), 'ItemList([1])') def test_get_total(self): 'ItemSet.get_total() works and calls its lines' coconut_delivery = ItemList([SwallowLine(), CoconutLine()]) self.assertEqual(coconut_delivery.get_total(), Money(25, currency='EUR')) def test_get_total_on_empty(self): 'ItemSet.get_total() raises an exception on an empty cart' empty = ItemList() self.assertRaises(AttributeError, empty.get_total) class PartitionerTest(TestCase): def test_default_is_all_items(self): 'Default implementation returns a single group with all items' fake_cart = ['one', 'two', 'five'] partitioner = Partitioner(fake_cart) self.assertEqual(list(partitioner), [ItemList(fake_cart)]) def test_total_works(self): 'Partitioner returns the same price the cart does' item_set = ItemList([SwallowLine()]) partitioner = Partitioner(item_set) self.assertEqual(partitioner.get_total(), Money(10, currency='EUR')) def test_truthiness(self): 'bool(partitioner) is only true if the set contains items' item_set = ItemList() partitioner = Partitioner(item_set) self.assertFalse(partitioner) item_set = ItemList([SwallowLine()]) partitioner = Partitioner(item_set) self.assertTrue(partitioner) def test_repr(self): 'Partitioner.__repr__() returns valid code' partitioner = Partitioner([1]) self.assertEqual(partitioner.__repr__(), 'Partitioner([1])') class 
ClassifyingPartitionerTest(TestCase): def test_classification(self): 'Partitions should be split according to the classifying key' swallow = Swallow() inquisition = SpanishInquisition() cow = FetchezLaVache() fake_cart = [inquisition, swallow, cow] partitioner = SwallowPartitioner(fake_cart) self.assertEqual(list(partitioner), [ItemList([swallow]), ItemList([inquisition, cow])]) class PartitionTest(TestCase): def test_basic_classification(self): def keyfunc(item): if item > 5: return 'more' return 'less' partitioner = partition([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], keyfunc) self.assertEqual(list(partitioner), [ItemList([1, 2, 3, 4, 5]), ItemList([6, 7, 8, 9, 10])]) def test_custom_class(self): def keyfunc(item): if item > 5: return 'more' return 'less' partitioner = partition([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], keyfunc, partition_class=list) self.assertEqual(list(partitioner), [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]]) class StockedItemTest(TestCase): def test_check_valid_quantity(self): 'StockedItem.get_quantity() allows smaller quantities to be used' item = LimitedShrubbery() item.check_quantity(0) item.check_quantity(1) def test_check_negative_quantity(self): 'StockedItem.get_quantity() disallows negative quantities' item = LimitedShrubbery() self.assertRaises(ValueError, lambda: item.check_quantity(-1)) def test_check_excessive_quantity(self): 'StockedItem.get_quantity() disallows excessive quantities' item = LimitedShrubbery() self.assertRaises(InsufficientStock, lambda: item.check_quantity(2))
2,987
785
<filename>servicetalk-transport-api/src/main/java/io/servicetalk/transport/api/AbstractSslConfig.java /* * Copyright © 2021 Apple Inc. and the ServiceTalk project authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.servicetalk.transport.api; import java.io.InputStream; import java.util.List; import java.util.function.Supplier; import javax.annotation.Nullable; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; abstract class AbstractSslConfig implements SslConfig { @Nullable private final TrustManagerFactory trustManagerFactory; @Nullable private final Supplier<InputStream> trustCertChainSupplier; @Nullable private final KeyManagerFactory keyManagerFactory; @Nullable private final Supplier<InputStream> keyCertChainSupplier; @Nullable private final Supplier<InputStream> keySupplier; @Nullable private final String keyPassword; @Nullable private final List<String> sslProtocols; @Nullable private final List<String> alpnProtocols; @Nullable private final List<String> ciphers; private final long sessionCacheSize; private final long sessionTimeout; @Nullable private final SslProvider provider; AbstractSslConfig(@Nullable final TrustManagerFactory trustManagerFactory, @Nullable final Supplier<InputStream> trustCertChainSupplier, @Nullable final KeyManagerFactory keyManagerFactory, @Nullable final Supplier<InputStream> keyCertChainSupplier, @Nullable final Supplier<InputStream> keySupplier, @Nullable final String keyPassword, @Nullable final 
List<String> sslProtocols, @Nullable final List<String> alpnProtocols, @Nullable final List<String> ciphers, final long sessionCacheSize, final long sessionTimeout, @Nullable final SslProvider provider) { this.trustManagerFactory = trustManagerFactory; this.trustCertChainSupplier = trustCertChainSupplier; this.keyManagerFactory = keyManagerFactory; this.keyCertChainSupplier = keyCertChainSupplier; this.keySupplier = keySupplier; this.keyPassword = keyPassword; this.sslProtocols = sslProtocols; this.alpnProtocols = alpnProtocols; this.ciphers = ciphers; this.sessionCacheSize = sessionCacheSize; this.sessionTimeout = sessionTimeout; this.provider = provider; } @Nullable @Override public final TrustManagerFactory trustManagerFactory() { return trustManagerFactory; } @Nullable @Override public final Supplier<InputStream> trustCertChainSupplier() { return trustCertChainSupplier; } @Nullable @Override public final KeyManagerFactory keyManagerFactory() { return keyManagerFactory; } @Nullable @Override public final Supplier<InputStream> keyCertChainSupplier() { return keyCertChainSupplier; } @Nullable @Override public final Supplier<InputStream> keySupplier() { return keySupplier; } @Nullable @Override public final String keyPassword() { return keyPassword; } @Nullable @Override public final List<String> sslProtocols() { return sslProtocols; } @Nullable @Override public final List<String> alpnProtocols() { return alpnProtocols; } @Nullable @Override public final List<String> ciphers() { return ciphers; } @Override public long sessionCacheSize() { return sessionCacheSize; } @Override public final long sessionTimeout() { return sessionTimeout; } @Nullable @Override public final SslProvider provider() { return provider; } }
1,621
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Cardonnette","circ":"4ème circonscription","dpt":"Somme","inscrits":406,"abs":181,"votants":225,"blancs":2,"nuls":0,"exp":223,"res":[{"nuance":"REM","nom":"<NAME>","voix":92},{"nuance":"LR","nom":"<NAME>","voix":44},{"nuance":"FN","nom":"<NAME>","voix":25},{"nuance":"FI","nom":"M. <NAME>","voix":24},{"nuance":"DLF","nom":"Mme <NAME>","voix":13},{"nuance":"ECO","nom":"Mme <NAME>","voix":12},{"nuance":"UDI","nom":"Mme <NAME>","voix":5},{"nuance":"EXG","nom":"M. <NAME>","voix":4},{"nuance":"DIV","nom":"M. <NAME>","voix":3},{"nuance":"EXD","nom":"Mme <NAME>","voix":1}]}
255
2,494
/* ******************************************************************************* * * Copyright (C) 2009-2013, International Business Machines * Corporation and others. All Rights Reserved. * ******************************************************************************* * file name: normalizer2.cpp * encoding: US-ASCII * tab size: 8 (not used) * indentation:4 * * created on: 2009nov22 * created by: <NAME> */ #include "unicode/utypes.h" #if !UCONFIG_NO_NORMALIZATION #include "unicode/localpointer.h" #include "unicode/normalizer2.h" #include "unicode/unistr.h" #include "unicode/unorm.h" #include "cpputils.h" #include "cstring.h" #include "mutex.h" #include "normalizer2impl.h" #include "uassert.h" #include "ucln_cmn.h" #include "uhash.h" U_NAMESPACE_BEGIN // Public API dispatch via Normalizer2 subclasses -------------------------- *** Normalizer2::~Normalizer2() {} UBool Normalizer2::getRawDecomposition(UChar32, UnicodeString &) const { return FALSE; } UChar32 Normalizer2::composePair(UChar32, UChar32) const { return U_SENTINEL; } uint8_t Normalizer2::getCombiningClass(UChar32 /*c*/) const { return 0; } // Normalizer2 implementation for the old UNORM_NONE. 
class NoopNormalizer2 : public Normalizer2 { virtual ~NoopNormalizer2(); virtual UnicodeString & normalize(const UnicodeString &src, UnicodeString &dest, UErrorCode &errorCode) const { if(U_SUCCESS(errorCode)) { if(&dest!=&src) { dest=src; } else { errorCode=U_ILLEGAL_ARGUMENT_ERROR; } } return dest; } virtual UnicodeString & normalizeSecondAndAppend(UnicodeString &first, const UnicodeString &second, UErrorCode &errorCode) const { if(U_SUCCESS(errorCode)) { if(&first!=&second) { first.append(second); } else { errorCode=U_ILLEGAL_ARGUMENT_ERROR; } } return first; } virtual UnicodeString & append(UnicodeString &first, const UnicodeString &second, UErrorCode &errorCode) const { if(U_SUCCESS(errorCode)) { if(&first!=&second) { first.append(second); } else { errorCode=U_ILLEGAL_ARGUMENT_ERROR; } } return first; } virtual UBool getDecomposition(UChar32, UnicodeString &) const { return FALSE; } // No need to override the default getRawDecomposition(). virtual UBool isNormalized(const UnicodeString &, UErrorCode &) const { return TRUE; } virtual UNormalizationCheckResult quickCheck(const UnicodeString &, UErrorCode &) const { return UNORM_YES; } virtual int32_t spanQuickCheckYes(const UnicodeString &s, UErrorCode &) const { return s.length(); } virtual UBool hasBoundaryBefore(UChar32) const { return TRUE; } virtual UBool hasBoundaryAfter(UChar32) const { return TRUE; } virtual UBool isInert(UChar32) const { return TRUE; } }; NoopNormalizer2::~NoopNormalizer2() {} // Intermediate class: // Has Normalizer2Impl and does boilerplate argument checking and setup. 
class Normalizer2WithImpl : public Normalizer2 { public: Normalizer2WithImpl(const Normalizer2Impl &ni) : impl(ni) {} virtual ~Normalizer2WithImpl(); // normalize virtual UnicodeString & normalize(const UnicodeString &src, UnicodeString &dest, UErrorCode &errorCode) const { if(U_FAILURE(errorCode)) { dest.setToBogus(); return dest; } const UChar *sArray=src.getBuffer(); if(&dest==&src || sArray==NULL) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; dest.setToBogus(); return dest; } dest.remove(); ReorderingBuffer buffer(impl, dest); if(buffer.init(src.length(), errorCode)) { normalize(sArray, sArray+src.length(), buffer, errorCode); } return dest; } virtual void normalize(const UChar *src, const UChar *limit, ReorderingBuffer &buffer, UErrorCode &errorCode) const = 0; // normalize and append virtual UnicodeString & normalizeSecondAndAppend(UnicodeString &first, const UnicodeString &second, UErrorCode &errorCode) const { return normalizeSecondAndAppend(first, second, TRUE, errorCode); } virtual UnicodeString & append(UnicodeString &first, const UnicodeString &second, UErrorCode &errorCode) const { return normalizeSecondAndAppend(first, second, FALSE, errorCode); } UnicodeString & normalizeSecondAndAppend(UnicodeString &first, const UnicodeString &second, UBool doNormalize, UErrorCode &errorCode) const { uprv_checkCanGetBuffer(first, errorCode); if(U_FAILURE(errorCode)) { return first; } const UChar *secondArray=second.getBuffer(); if(&first==&second || secondArray==NULL) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; return first; } int32_t firstLength=first.length(); UnicodeString safeMiddle; { ReorderingBuffer buffer(impl, first); if(buffer.init(firstLength+second.length(), errorCode)) { normalizeAndAppend(secondArray, secondArray+second.length(), doNormalize, safeMiddle, buffer, errorCode); } } // The ReorderingBuffer destructor finalizes the first string. if(U_FAILURE(errorCode)) { // Restore the modified suffix of the first string. 
first.replace(firstLength-safeMiddle.length(), 0x7fffffff, safeMiddle); } return first; } virtual void normalizeAndAppend(const UChar *src, const UChar *limit, UBool doNormalize, UnicodeString &safeMiddle, ReorderingBuffer &buffer, UErrorCode &errorCode) const = 0; virtual UBool getDecomposition(UChar32 c, UnicodeString &decomposition) const { UChar buffer[4]; int32_t length; const UChar *d=impl.getDecomposition(c, buffer, length); if(d==NULL) { return FALSE; } if(d==buffer) { decomposition.setTo(buffer, length); // copy the string (Jamos from Hangul syllable c) } else { decomposition.setTo(FALSE, d, length); // read-only alias } return TRUE; } virtual UBool getRawDecomposition(UChar32 c, UnicodeString &decomposition) const { UChar buffer[30]; int32_t length; const UChar *d=impl.getRawDecomposition(c, buffer, length); if(d==NULL) { return FALSE; } if(d==buffer) { decomposition.setTo(buffer, length); // copy the string (algorithmic decomposition) } else { decomposition.setTo(FALSE, d, length); // read-only alias } return TRUE; } virtual UChar32 composePair(UChar32 a, UChar32 b) const { return impl.composePair(a, b); } virtual uint8_t getCombiningClass(UChar32 c) const { return impl.getCC(impl.getNorm16(c)); } // quick checks virtual UBool isNormalized(const UnicodeString &s, UErrorCode &errorCode) const { if(U_FAILURE(errorCode)) { return FALSE; } const UChar *sArray=s.getBuffer(); if(sArray==NULL) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; return FALSE; } const UChar *sLimit=sArray+s.length(); return sLimit==spanQuickCheckYes(sArray, sLimit, errorCode); } virtual UNormalizationCheckResult quickCheck(const UnicodeString &s, UErrorCode &errorCode) const { return Normalizer2WithImpl::isNormalized(s, errorCode) ? 
UNORM_YES : UNORM_NO; } virtual int32_t spanQuickCheckYes(const UnicodeString &s, UErrorCode &errorCode) const { if(U_FAILURE(errorCode)) { return 0; } const UChar *sArray=s.getBuffer(); if(sArray==NULL) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } return (int32_t)(spanQuickCheckYes(sArray, sArray+s.length(), errorCode)-sArray); } virtual const UChar * spanQuickCheckYes(const UChar *src, const UChar *limit, UErrorCode &errorCode) const = 0; virtual UNormalizationCheckResult getQuickCheck(UChar32) const { return UNORM_YES; } const Normalizer2Impl &impl; }; Normalizer2WithImpl::~Normalizer2WithImpl() {} class DecomposeNormalizer2 : public Normalizer2WithImpl { public: DecomposeNormalizer2(const Normalizer2Impl &ni) : Normalizer2WithImpl(ni) {} virtual ~DecomposeNormalizer2(); private: virtual void normalize(const UChar *src, const UChar *limit, ReorderingBuffer &buffer, UErrorCode &errorCode) const { impl.decompose(src, limit, &buffer, errorCode); } using Normalizer2WithImpl::normalize; // Avoid warning about hiding base class function. virtual void normalizeAndAppend(const UChar *src, const UChar *limit, UBool doNormalize, UnicodeString &safeMiddle, ReorderingBuffer &buffer, UErrorCode &errorCode) const { impl.decomposeAndAppend(src, limit, doNormalize, safeMiddle, buffer, errorCode); } virtual const UChar * spanQuickCheckYes(const UChar *src, const UChar *limit, UErrorCode &errorCode) const { return impl.decompose(src, limit, NULL, errorCode); } using Normalizer2WithImpl::spanQuickCheckYes; // Avoid warning about hiding base class function. virtual UNormalizationCheckResult getQuickCheck(UChar32 c) const { return impl.isDecompYes(impl.getNorm16(c)) ? 
UNORM_YES : UNORM_NO; } virtual UBool hasBoundaryBefore(UChar32 c) const { return impl.hasDecompBoundary(c, TRUE); } virtual UBool hasBoundaryAfter(UChar32 c) const { return impl.hasDecompBoundary(c, FALSE); } virtual UBool isInert(UChar32 c) const { return impl.isDecompInert(c); } }; DecomposeNormalizer2::~DecomposeNormalizer2() {} class ComposeNormalizer2 : public Normalizer2WithImpl { public: ComposeNormalizer2(const Normalizer2Impl &ni, UBool fcc) : Normalizer2WithImpl(ni), onlyContiguous(fcc) {} virtual ~ComposeNormalizer2(); private: virtual void normalize(const UChar *src, const UChar *limit, ReorderingBuffer &buffer, UErrorCode &errorCode) const { impl.compose(src, limit, onlyContiguous, TRUE, buffer, errorCode); } using Normalizer2WithImpl::normalize; // Avoid warning about hiding base class function. virtual void normalizeAndAppend(const UChar *src, const UChar *limit, UBool doNormalize, UnicodeString &safeMiddle, ReorderingBuffer &buffer, UErrorCode &errorCode) const { impl.composeAndAppend(src, limit, doNormalize, onlyContiguous, safeMiddle, buffer, errorCode); } virtual UBool isNormalized(const UnicodeString &s, UErrorCode &errorCode) const { if(U_FAILURE(errorCode)) { return FALSE; } const UChar *sArray=s.getBuffer(); if(sArray==NULL) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; return FALSE; } UnicodeString temp; ReorderingBuffer buffer(impl, temp); if(!buffer.init(5, errorCode)) { // small destCapacity for substring normalization return FALSE; } return impl.compose(sArray, sArray+s.length(), onlyContiguous, FALSE, buffer, errorCode); } virtual UNormalizationCheckResult quickCheck(const UnicodeString &s, UErrorCode &errorCode) const { if(U_FAILURE(errorCode)) { return UNORM_MAYBE; } const UChar *sArray=s.getBuffer(); if(sArray==NULL) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; return UNORM_MAYBE; } UNormalizationCheckResult qcResult=UNORM_YES; impl.composeQuickCheck(sArray, sArray+s.length(), onlyContiguous, &qcResult); return qcResult; } virtual const UChar * 
spanQuickCheckYes(const UChar *src, const UChar *limit, UErrorCode &) const { return impl.composeQuickCheck(src, limit, onlyContiguous, NULL); } using Normalizer2WithImpl::spanQuickCheckYes; // Avoid warning about hiding base class function. virtual UNormalizationCheckResult getQuickCheck(UChar32 c) const { return impl.getCompQuickCheck(impl.getNorm16(c)); } virtual UBool hasBoundaryBefore(UChar32 c) const { return impl.hasCompBoundaryBefore(c); } virtual UBool hasBoundaryAfter(UChar32 c) const { return impl.hasCompBoundaryAfter(c, onlyContiguous, FALSE); } virtual UBool isInert(UChar32 c) const { return impl.hasCompBoundaryAfter(c, onlyContiguous, TRUE); } const UBool onlyContiguous; }; ComposeNormalizer2::~ComposeNormalizer2() {} class FCDNormalizer2 : public Normalizer2WithImpl { public: FCDNormalizer2(const Normalizer2Impl &ni) : Normalizer2WithImpl(ni) {} virtual ~FCDNormalizer2(); private: virtual void normalize(const UChar *src, const UChar *limit, ReorderingBuffer &buffer, UErrorCode &errorCode) const { impl.makeFCD(src, limit, &buffer, errorCode); } using Normalizer2WithImpl::normalize; // Avoid warning about hiding base class function. virtual void normalizeAndAppend(const UChar *src, const UChar *limit, UBool doNormalize, UnicodeString &safeMiddle, ReorderingBuffer &buffer, UErrorCode &errorCode) const { impl.makeFCDAndAppend(src, limit, doNormalize, safeMiddle, buffer, errorCode); } virtual const UChar * spanQuickCheckYes(const UChar *src, const UChar *limit, UErrorCode &errorCode) const { return impl.makeFCD(src, limit, NULL, errorCode); } using Normalizer2WithImpl::spanQuickCheckYes; // Avoid warning about hiding base class function. 
virtual UBool hasBoundaryBefore(UChar32 c) const { return impl.hasFCDBoundaryBefore(c); } virtual UBool hasBoundaryAfter(UChar32 c) const { return impl.hasFCDBoundaryAfter(c); } virtual UBool isInert(UChar32 c) const { return impl.isFCDInert(c); } }; FCDNormalizer2::~FCDNormalizer2() {} // instance cache ---------------------------------------------------------- *** struct Norm2AllModes : public UMemory { static Norm2AllModes *createInstance(const char *packageName, const char *name, UErrorCode &errorCode); Norm2AllModes() : comp(impl, FALSE), decomp(impl), fcd(impl), fcc(impl, TRUE) {} Normalizer2Impl impl; ComposeNormalizer2 comp; DecomposeNormalizer2 decomp; FCDNormalizer2 fcd; ComposeNormalizer2 fcc; }; Norm2AllModes * Norm2AllModes::createInstance(const char *packageName, const char *name, UErrorCode &errorCode) { if(U_FAILURE(errorCode)) { return NULL; } LocalPointer<Norm2AllModes> allModes(new Norm2AllModes); if(allModes.isNull()) { errorCode=U_MEMORY_ALLOCATION_ERROR; return NULL; } allModes->impl.load(packageName, name, errorCode); return U_SUCCESS(errorCode) ? 
allModes.orphan() : NULL; } U_CDECL_BEGIN static UBool U_CALLCONV uprv_normalizer2_cleanup(); U_CDECL_END static Norm2AllModes *nfcSingleton; static Norm2AllModes *nfkcSingleton; static Norm2AllModes *nfkc_cfSingleton; static Normalizer2 *noopSingleton; static UHashtable *cache=NULL; static icu::UInitOnce nfcInitOnce = U_INITONCE_INITIALIZER; static icu::UInitOnce nfkcInitOnce = U_INITONCE_INITIALIZER; static icu::UInitOnce nfkc_cfInitOnce = U_INITONCE_INITIALIZER; static icu::UInitOnce noopInitOnce = U_INITONCE_INITIALIZER; // UInitOnce singleton initialization function static void U_CALLCONV initSingletons(const char *what, UErrorCode &errorCode) { if (uprv_strcmp(what, "nfc") == 0) { nfcSingleton = Norm2AllModes::createInstance(NULL, "nfc", errorCode); } else if (uprv_strcmp(what, "nfkc") == 0) { nfkcSingleton = Norm2AllModes::createInstance(NULL, "nfkc", errorCode); } else if (uprv_strcmp(what, "nfkc_cf") == 0) { nfkc_cfSingleton = Norm2AllModes::createInstance(NULL, "nfkc_cf", errorCode); } else if (uprv_strcmp(what, "noop") == 0) { noopSingleton = new NoopNormalizer2; } else { U_ASSERT(FALSE); // Unknown singleton } ucln_common_registerCleanup(UCLN_COMMON_NORMALIZER2, uprv_normalizer2_cleanup); } U_CDECL_BEGIN static void U_CALLCONV deleteNorm2AllModes(void *allModes) { delete (Norm2AllModes *)allModes; } static UBool U_CALLCONV uprv_normalizer2_cleanup() { delete nfcSingleton; nfcSingleton = NULL; delete nfkcSingleton; nfkcSingleton = NULL; delete nfkc_cfSingleton; nfkc_cfSingleton = NULL; delete noopSingleton; noopSingleton = NULL; uhash_close(cache); cache=NULL; nfcInitOnce.reset(); nfkcInitOnce.reset(); nfkc_cfInitOnce.reset(); noopInitOnce.reset(); return TRUE; } U_CDECL_END const Normalizer2 *Normalizer2Factory::getNFCInstance(UErrorCode &errorCode) { umtx_initOnce(nfcInitOnce, &initSingletons, "nfc", errorCode); return nfcSingleton!=NULL ? 
&nfcSingleton->comp : NULL; } const Normalizer2 *Normalizer2Factory::getNFDInstance(UErrorCode &errorCode) { umtx_initOnce(nfcInitOnce, &initSingletons, "nfc", errorCode); return nfcSingleton!=NULL ? &nfcSingleton->decomp : NULL; } const Normalizer2 *Normalizer2Factory::getFCDInstance(UErrorCode &errorCode) { umtx_initOnce(nfcInitOnce, &initSingletons, "nfc", errorCode); return nfcSingleton!=NULL ? &nfcSingleton->fcd : NULL; } const Normalizer2 *Normalizer2Factory::getFCCInstance(UErrorCode &errorCode) { umtx_initOnce(nfcInitOnce, &initSingletons, "nfc", errorCode); return nfcSingleton!=NULL ? &nfcSingleton->fcc : NULL; } const Normalizer2 *Normalizer2Factory::getNFKCInstance(UErrorCode &errorCode) { umtx_initOnce(nfkcInitOnce, &initSingletons, "nfkc", errorCode); return nfkcSingleton!=NULL ? &nfkcSingleton->comp : NULL; } const Normalizer2 *Normalizer2Factory::getNFKDInstance(UErrorCode &errorCode) { umtx_initOnce(nfkcInitOnce, &initSingletons, "nfkc", errorCode); return nfkcSingleton!=NULL ? &nfkcSingleton->decomp : NULL; } const Normalizer2 *Normalizer2Factory::getNFKC_CFInstance(UErrorCode &errorCode) { umtx_initOnce(nfkc_cfInitOnce, &initSingletons, "nfkc_cf", errorCode); return nfkc_cfSingleton!=NULL ? 
&nfkc_cfSingleton->comp : NULL; } const Normalizer2 *Normalizer2Factory::getNoopInstance(UErrorCode &errorCode) { umtx_initOnce(noopInitOnce, &initSingletons, "noop", errorCode); return noopSingleton; } const Normalizer2 * Normalizer2Factory::getInstance(UNormalizationMode mode, UErrorCode &errorCode) { if(U_FAILURE(errorCode)) { return NULL; } switch(mode) { case UNORM_NFD: return getNFDInstance(errorCode); case UNORM_NFKD: return getNFKDInstance(errorCode); case UNORM_NFC: return getNFCInstance(errorCode); case UNORM_NFKC: return getNFKCInstance(errorCode); case UNORM_FCD: return getFCDInstance(errorCode); default: // UNORM_NONE return getNoopInstance(errorCode); } } const Normalizer2Impl * Normalizer2Factory::getNFCImpl(UErrorCode &errorCode) { umtx_initOnce(nfcInitOnce, &initSingletons, "nfc", errorCode); return nfcSingleton!=NULL ? &nfcSingleton->impl : NULL; } const Normalizer2Impl * Normalizer2Factory::getNFKCImpl(UErrorCode &errorCode) { umtx_initOnce(nfkcInitOnce, &initSingletons, "nfkc", errorCode); return nfkcSingleton!=NULL ? &nfkcSingleton->impl : NULL; } const Normalizer2Impl * Normalizer2Factory::getNFKC_CFImpl(UErrorCode &errorCode) { umtx_initOnce(nfkc_cfInitOnce, &initSingletons, "nfkc_cf", errorCode); return nfkc_cfSingleton!=NULL ? 
&nfkc_cfSingleton->impl : NULL; } const Normalizer2Impl * Normalizer2Factory::getImpl(const Normalizer2 *norm2) { return &((Normalizer2WithImpl *)norm2)->impl; } const Normalizer2 * Normalizer2::getNFCInstance(UErrorCode &errorCode) { return Normalizer2Factory::getNFCInstance(errorCode); } const Normalizer2 * Normalizer2::getNFDInstance(UErrorCode &errorCode) { return Normalizer2Factory::getNFDInstance(errorCode); } const Normalizer2 * Normalizer2::getNFKCInstance(UErrorCode &errorCode) { return Normalizer2Factory::getNFKCInstance(errorCode); } const Normalizer2 * Normalizer2::getNFKDInstance(UErrorCode &errorCode) { return Normalizer2Factory::getNFKDInstance(errorCode); } const Normalizer2 * Normalizer2::getNFKCCasefoldInstance(UErrorCode &errorCode) { return Normalizer2Factory::getNFKC_CFInstance(errorCode); } const Normalizer2 * Normalizer2::getInstance(const char *packageName, const char *name, UNormalization2Mode mode, UErrorCode &errorCode) { if(U_FAILURE(errorCode)) { return NULL; } if(name==NULL || *name==0) { errorCode=U_ILLEGAL_ARGUMENT_ERROR; return NULL; } Norm2AllModes *allModes=NULL; if(packageName==NULL) { if(0==uprv_strcmp(name, "nfc")) { umtx_initOnce(nfcInitOnce, &initSingletons, "nfc", errorCode); allModes=nfcSingleton; } else if(0==uprv_strcmp(name, "nfkc")) { umtx_initOnce(nfkcInitOnce, &initSingletons, "nfkc", errorCode); allModes=nfkcSingleton; } else if(0==uprv_strcmp(name, "nfkc_cf")) { umtx_initOnce(nfkc_cfInitOnce, &initSingletons, "nfkc_cf", errorCode); allModes=nfkc_cfSingleton; } } if(allModes==NULL && U_SUCCESS(errorCode)) { { Mutex lock; if(cache!=NULL) { allModes=(Norm2AllModes *)uhash_get(cache, name); } } if(allModes==NULL) { LocalPointer<Norm2AllModes> localAllModes( Norm2AllModes::createInstance(packageName, name, errorCode)); if(U_SUCCESS(errorCode)) { Mutex lock; if(cache==NULL) { cache=uhash_open(uhash_hashChars, uhash_compareChars, NULL, &errorCode); if(U_FAILURE(errorCode)) { return NULL; } uhash_setKeyDeleter(cache, 
uprv_free); uhash_setValueDeleter(cache, deleteNorm2AllModes); } void *temp=uhash_get(cache, name); if(temp==NULL) { int32_t keyLength=uprv_strlen(name)+1; char *nameCopy=(char *)uprv_malloc(keyLength); if(nameCopy==NULL) { errorCode=U_MEMORY_ALLOCATION_ERROR; return NULL; } uprv_memcpy(nameCopy, name, keyLength); uhash_put(cache, nameCopy, allModes=localAllModes.orphan(), &errorCode); } else { // race condition allModes=(Norm2AllModes *)temp; } } } } if(allModes!=NULL && U_SUCCESS(errorCode)) { switch(mode) { case UNORM2_COMPOSE: return &allModes->comp; case UNORM2_DECOMPOSE: return &allModes->decomp; case UNORM2_FCD: return &allModes->fcd; case UNORM2_COMPOSE_CONTIGUOUS: return &allModes->fcc; default: break; // do nothing } } return NULL; } U_NAMESPACE_END // C API ------------------------------------------------------------------- *** U_NAMESPACE_USE U_CAPI const UNormalizer2 * U_EXPORT2 unorm2_getNFCInstance(UErrorCode *pErrorCode) { return (const UNormalizer2 *)Normalizer2::getNFCInstance(*pErrorCode); } U_CAPI const UNormalizer2 * U_EXPORT2 unorm2_getNFDInstance(UErrorCode *pErrorCode) { return (const UNormalizer2 *)Normalizer2::getNFDInstance(*pErrorCode); } U_CAPI const UNormalizer2 * U_EXPORT2 unorm2_getNFKCInstance(UErrorCode *pErrorCode) { return (const UNormalizer2 *)Normalizer2::getNFKCInstance(*pErrorCode); } U_CAPI const UNormalizer2 * U_EXPORT2 unorm2_getNFKDInstance(UErrorCode *pErrorCode) { return (const UNormalizer2 *)Normalizer2::getNFKDInstance(*pErrorCode); } U_CAPI const UNormalizer2 * U_EXPORT2 unorm2_getNFKCCasefoldInstance(UErrorCode *pErrorCode) { return (const UNormalizer2 *)Normalizer2::getNFKCCasefoldInstance(*pErrorCode); } U_CAPI const UNormalizer2 * U_EXPORT2 unorm2_getInstance(const char *packageName, const char *name, UNormalization2Mode mode, UErrorCode *pErrorCode) { return (const UNormalizer2 *)Normalizer2::getInstance(packageName, name, mode, *pErrorCode); } U_CAPI void U_EXPORT2 unorm2_close(UNormalizer2 *norm2) { delete 
(Normalizer2 *)norm2; } U_CAPI int32_t U_EXPORT2 unorm2_normalize(const UNormalizer2 *norm2, const UChar *src, int32_t length, UChar *dest, int32_t capacity, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return 0; } if( (src==NULL ? length!=0 : length<-1) || (dest==NULL ? capacity!=0 : capacity<0) || (src==dest && src!=NULL) ) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } UnicodeString destString(dest, 0, capacity); // length==0: Nothing to do, and n2wi->normalize(NULL, NULL, buffer, ...) would crash. if(length!=0) { const Normalizer2 *n2=(const Normalizer2 *)norm2; const Normalizer2WithImpl *n2wi=dynamic_cast<const Normalizer2WithImpl *>(n2); if(n2wi!=NULL) { // Avoid duplicate argument checking and support NUL-terminated src. ReorderingBuffer buffer(n2wi->impl, destString); if(buffer.init(length, *pErrorCode)) { n2wi->normalize(src, length>=0 ? src+length : NULL, buffer, *pErrorCode); } } else { UnicodeString srcString(length<0, src, length); n2->normalize(srcString, destString, *pErrorCode); } } return destString.extract(dest, capacity, *pErrorCode); } static int32_t normalizeSecondAndAppend(const UNormalizer2 *norm2, UChar *first, int32_t firstLength, int32_t firstCapacity, const UChar *second, int32_t secondLength, UBool doNormalize, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return 0; } if( (second==NULL ? secondLength!=0 : secondLength<-1) || (first==NULL ? (firstCapacity!=0 || firstLength!=0) : (firstCapacity<0 || firstLength<-1)) || (first==second && first!=NULL) ) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } UnicodeString firstString(first, firstLength, firstCapacity); firstLength=firstString.length(); // In case it was -1. // secondLength==0: Nothing to do, and n2wi->normalizeAndAppend(NULL, NULL, buffer, ...) would crash. 
if(secondLength!=0) { const Normalizer2 *n2=(const Normalizer2 *)norm2; const Normalizer2WithImpl *n2wi=dynamic_cast<const Normalizer2WithImpl *>(n2); if(n2wi!=NULL) { // Avoid duplicate argument checking and support NUL-terminated src. UnicodeString safeMiddle; { ReorderingBuffer buffer(n2wi->impl, firstString); if(buffer.init(firstLength+secondLength+1, *pErrorCode)) { // destCapacity>=-1 n2wi->normalizeAndAppend(second, secondLength>=0 ? second+secondLength : NULL, doNormalize, safeMiddle, buffer, *pErrorCode); } } // The ReorderingBuffer destructor finalizes firstString. if(U_FAILURE(*pErrorCode) || firstString.length()>firstCapacity) { // Restore the modified suffix of the first string. // This does not restore first[] array contents between firstLength and firstCapacity. // (That might be uninitialized memory, as far as we know.) if(first!=NULL) { /* don't dereference NULL */ safeMiddle.extract(0, 0x7fffffff, first+firstLength-safeMiddle.length()); if(firstLength<firstCapacity) { first[firstLength]=0; // NUL-terminate in case it was originally. 
} } } } else { UnicodeString secondString(secondLength<0, second, secondLength); if(doNormalize) { n2->normalizeSecondAndAppend(firstString, secondString, *pErrorCode); } else { n2->append(firstString, secondString, *pErrorCode); } } } return firstString.extract(first, firstCapacity, *pErrorCode); } U_CAPI int32_t U_EXPORT2 unorm2_normalizeSecondAndAppend(const UNormalizer2 *norm2, UChar *first, int32_t firstLength, int32_t firstCapacity, const UChar *second, int32_t secondLength, UErrorCode *pErrorCode) { return normalizeSecondAndAppend(norm2, first, firstLength, firstCapacity, second, secondLength, TRUE, pErrorCode); } U_CAPI int32_t U_EXPORT2 unorm2_append(const UNormalizer2 *norm2, UChar *first, int32_t firstLength, int32_t firstCapacity, const UChar *second, int32_t secondLength, UErrorCode *pErrorCode) { return normalizeSecondAndAppend(norm2, first, firstLength, firstCapacity, second, secondLength, FALSE, pErrorCode); } U_CAPI int32_t U_EXPORT2 unorm2_getDecomposition(const UNormalizer2 *norm2, UChar32 c, UChar *decomposition, int32_t capacity, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return 0; } if(decomposition==NULL ? capacity!=0 : capacity<0) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } UnicodeString destString(decomposition, 0, capacity); if(reinterpret_cast<const Normalizer2 *>(norm2)->getDecomposition(c, destString)) { return destString.extract(decomposition, capacity, *pErrorCode); } else { return -1; } } U_CAPI int32_t U_EXPORT2 unorm2_getRawDecomposition(const UNormalizer2 *norm2, UChar32 c, UChar *decomposition, int32_t capacity, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return 0; } if(decomposition==NULL ? 
capacity!=0 : capacity<0) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } UnicodeString destString(decomposition, 0, capacity); if(reinterpret_cast<const Normalizer2 *>(norm2)->getRawDecomposition(c, destString)) { return destString.extract(decomposition, capacity, *pErrorCode); } else { return -1; } } U_CAPI UChar32 U_EXPORT2 unorm2_composePair(const UNormalizer2 *norm2, UChar32 a, UChar32 b) { return reinterpret_cast<const Normalizer2 *>(norm2)->composePair(a, b); } U_CAPI uint8_t U_EXPORT2 unorm2_getCombiningClass(const UNormalizer2 *norm2, UChar32 c) { return reinterpret_cast<const Normalizer2 *>(norm2)->getCombiningClass(c); } U_CAPI UBool U_EXPORT2 unorm2_isNormalized(const UNormalizer2 *norm2, const UChar *s, int32_t length, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return 0; } if((s==NULL && length!=0) || length<-1) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } UnicodeString sString(length<0, s, length); return ((const Normalizer2 *)norm2)->isNormalized(sString, *pErrorCode); } U_CAPI UNormalizationCheckResult U_EXPORT2 unorm2_quickCheck(const UNormalizer2 *norm2, const UChar *s, int32_t length, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return UNORM_NO; } if((s==NULL && length!=0) || length<-1) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return UNORM_NO; } UnicodeString sString(length<0, s, length); return ((const Normalizer2 *)norm2)->quickCheck(sString, *pErrorCode); } U_CAPI int32_t U_EXPORT2 unorm2_spanQuickCheckYes(const UNormalizer2 *norm2, const UChar *s, int32_t length, UErrorCode *pErrorCode) { if(U_FAILURE(*pErrorCode)) { return 0; } if((s==NULL && length!=0) || length<-1) { *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR; return 0; } UnicodeString sString(length<0, s, length); return ((const Normalizer2 *)norm2)->spanQuickCheckYes(sString, *pErrorCode); } U_CAPI UBool U_EXPORT2 unorm2_hasBoundaryBefore(const UNormalizer2 *norm2, UChar32 c) { return ((const Normalizer2 *)norm2)->hasBoundaryBefore(c); } U_CAPI UBool U_EXPORT2 
unorm2_hasBoundaryAfter(const UNormalizer2 *norm2, UChar32 c) { return ((const Normalizer2 *)norm2)->hasBoundaryAfter(c); } U_CAPI UBool U_EXPORT2 unorm2_isInert(const UNormalizer2 *norm2, UChar32 c) { return ((const Normalizer2 *)norm2)->isInert(c); } // Some properties APIs ---------------------------------------------------- *** U_CAPI uint8_t U_EXPORT2 u_getCombiningClass(UChar32 c) { UErrorCode errorCode=U_ZERO_ERROR; const Normalizer2 *nfd=Normalizer2Factory::getNFDInstance(errorCode); if(U_SUCCESS(errorCode)) { return nfd->getCombiningClass(c); } else { return 0; } } U_CFUNC UNormalizationCheckResult unorm_getQuickCheck(UChar32 c, UNormalizationMode mode) { if(mode<=UNORM_NONE || UNORM_FCD<=mode) { return UNORM_YES; } UErrorCode errorCode=U_ZERO_ERROR; const Normalizer2 *norm2=Normalizer2Factory::getInstance(mode, errorCode); if(U_SUCCESS(errorCode)) { return ((const Normalizer2WithImpl *)norm2)->getQuickCheck(c); } else { return UNORM_MAYBE; } } U_CFUNC uint16_t unorm_getFCD16(UChar32 c) { UErrorCode errorCode=U_ZERO_ERROR; const Normalizer2Impl *impl=Normalizer2Factory::getNFCImpl(errorCode); if(U_SUCCESS(errorCode)) { return impl->getFCD16(c); } else { return 0; } } #endif // !UCONFIG_NO_NORMALIZATION
16,039
652
<filename>src/redis_ai_types/model_type.c #include "model_type.h" #include "redis_ai_objects/model.h" #include "serialization/AOF/rai_aof_rewrite.h" #include "serialization/RDB/encoder/rai_rdb_encode.h" #include "serialization/RDB/decoder/rai_rdb_decoder.h" #include "serialization/RDB/decoder/decode_previous.h" extern RedisModuleType *RedisAI_ModelType; static void *RAI_Model_RdbLoad(struct RedisModuleIO *io, int encver) { if (encver > REDISAI_ENC_VER) { RedisModule_LogIOError( io, "error", "Failed loading model, RedisAI version (%d) is not forward compatible.\n", REDISAI_MODULE_VERSION); return NULL; } else if (encver < REDISAI_ENC_VER) { return Decode_PreviousModel(io, encver); } else { return RAI_RDBLoadModel(io); } } static void RAI_Model_RdbSave(RedisModuleIO *io, void *value) { RAI_RDBSaveModel(io, value); } static void RAI_Model_AofRewrite(RedisModuleIO *aof, RedisModuleString *key, void *value) { RAI_AOFRewriteModel(aof, key, value); } static void RAI_Model_DTFree(void *value) { RAI_Error err = {0}; RAI_ModelFree(value, &err); if (err.code != RAI_OK) { printf("ERR: %s\n", err.detail); RAI_ClearError(&err); } } int ModelType_Register(RedisModuleCtx *ctx) { RedisModuleTypeMethods tmModel = {.version = REDISMODULE_TYPE_METHOD_VERSION, .rdb_load = RAI_Model_RdbLoad, .rdb_save = RAI_Model_RdbSave, .aof_rewrite = RAI_Model_AofRewrite, .mem_usage = NULL, .free = RAI_Model_DTFree, .digest = NULL}; RedisAI_ModelType = RedisModule_CreateDataType(ctx, "AI__MODEL", REDISAI_ENC_VER, &tmModel); return RedisAI_ModelType != NULL; }
955
402
<reponame>xdenser/flow package com.vaadin.flow.uitest.ui.littemplate; public class SimpleLitTemplateNoShadowRootIT extends SimpleLitTemplateShadowRootIT { protected String getTemplateTag() { return "simple-lit-template-no-shadow-root"; } @Override protected boolean shouldHaveShadowRoot() { return false; } }
133
4,551
<filename>integration_tests/ctesque/src/test/java/android/graphics/PathTest.java package android.graphics; import static com.google.common.truth.Truth.assertThat; import androidx.test.runner.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.annotation.internal.DoNotInstrument; /** Compatibility test for {@link Path} */ @DoNotInstrument @RunWith(AndroidJUnit4.class) public class PathTest { // Test constants private static final float LEFT = 10.0f; private static final float RIGHT = 50.0f; private static final float TOP = 10.0f; private static final float BOTTOM = 50.0f; private static final float XCOORD = 40.0f; private static final float YCOORD = 40.0f; @Test public void moveTo() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); path.moveTo(0, 0); assertThat(path.isEmpty()).isFalse(); } @Test public void lineTo() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); path.lineTo(XCOORD, YCOORD); assertThat(path.isEmpty()).isFalse(); } @Test public void quadTo() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); path.quadTo(20.0f, 20.0f, 40.0f, 40.0f); assertThat(path.isEmpty()).isFalse(); } @Test public void addRect1() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); RectF rect = new RectF(LEFT, TOP, RIGHT, BOTTOM); path.addRect(rect, Path.Direction.CW); assertThat(path.isEmpty()).isFalse(); } @Test public void addRect2() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); path.addRect(LEFT, TOP, RIGHT, BOTTOM, Path.Direction.CW); assertThat(path.isEmpty()).isFalse(); } @Test public void getFillType() { Path path = new Path(); path.setFillType(Path.FillType.EVEN_ODD); assertThat(path.getFillType()).isEqualTo(Path.FillType.EVEN_ODD); } @Test public void transform() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); Path dst = new Path(); path.addRect(new RectF(LEFT, TOP, RIGHT, BOTTOM), Path.Direction.CW); path.transform(new Matrix(), dst); 
assertThat(dst.isEmpty()).isFalse(); } @Test public void testAddCircle() { // new the Path instance Path path = new Path(); assertThat(path.isEmpty()).isTrue(); path.addCircle(XCOORD, YCOORD, 10.0f, Path.Direction.CW); assertThat(path.isEmpty()).isFalse(); } @Test public void arcTo1() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); RectF oval = new RectF(LEFT, TOP, RIGHT, BOTTOM); path.arcTo(oval, 0.0f, 30.0f, true); assertThat(path.isEmpty()).isFalse(); } @Test public void arcTo2() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); RectF oval = new RectF(LEFT, TOP, RIGHT, BOTTOM); path.arcTo(oval, 0.0f, 30.0f); assertThat(path.isEmpty()).isFalse(); } @Test public void close() { Path path = new Path(); assertThat(path.isEmpty()).isTrue(); path.close(); } }
1,188
678
/** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/IMCore.framework/Frameworks/IMFoundation.framework/IMFoundation */ #import <IMFoundation/XXUnknownSuperclass.h> @interface IMNetworkManager : XXUnknownSuperclass { } + (id)sharedInstance; // 0x237f9 + (id)alloc; // 0x237cd - (BOOL)retainWeakReference; // 0x238a9 - (BOOL)allowsWeakReference; // 0x238a5 @end
148
341
package com.aventstack.extentreports;

import java.util.stream.IntStream;

import org.testng.annotations.Test;

import com.aventstack.extentreports.reporter.ExtentSparkReporter;

/**
 * Thread-safety stress tests for {@link ExtentReports} and {@link ExtentTest}.
 *
 * <p>These tests intentionally contain no assertions: they hammer the API from a parallel stream
 * and rely on the test failing with an exception (e.g. ConcurrentModificationException) if the
 * implementation is not safe for concurrent use.
 */
public class ParallelTest {

    /** Creates 10,000 tests concurrently on a reporterless ExtentReports instance. */
    @Test
    public void parallelTests() {
        ExtentReports extent = new ExtentReports();
        IntStream.range(0, 10000).parallel().forEach(x -> extent.createTest("Test").info(String.valueOf(x)));
    }

    /**
     * Same as {@link #parallelTests()} but with a Spark reporter attached, so log events also
     * flow through the reporter pipeline concurrently.
     * NOTE(review): the reporter is constructed with an empty output path — presumably nothing is
     * flushed to disk during this test; confirm no file is written when flush() is never called.
     */
    @Test
    public void parallelTestsWithReporter() {
        ExtentReports extent = new ExtentReports();
        extent.attachReporter(new ExtentSparkReporter(""));
        IntStream.range(0, 10000).parallel().forEach(x -> extent.createTest("Test").info(String.valueOf(x)));
    }

    /** Appends 10,000 log entries concurrently to a single shared ExtentTest node. */
    @Test
    public void parallelLogs() {
        ExtentReports extent = new ExtentReports();
        ExtentTest test = extent.createTest("Test");
        IntStream.range(0, 10000).parallel().forEach(x -> test.info(String.valueOf(x)));
    }
}
362
352
# Copyright 2021 The NetKet Authors - All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import netket as nk
import jax.numpy as jnp

import pytest


# Smoke test for the Jastrow ansatz: the model must initialize and drive one
# VMC step for both a real and a complex parameter dtype.
@pytest.mark.parametrize("dtype", [jnp.float64, jnp.complex128])
def test_Jastrow(dtype):
    # Small spin-1/2 chain so the test stays fast.
    N = 8
    hi = nk.hilbert.Spin(1 / 2, N)
    g = nk.graph.Chain(N)

    # Initializing the parameters exercises the model's setup path; the
    # returned variables are discarded because VMC re-initializes internally.
    ma = nk.models.Jastrow(dtype=dtype)
    _ = ma.init(nk.jax.PRNGKey(), hi.random_state(nk.jax.PRNGKey()))

    # One optimization step on a transverse-field Ising Hamiltonian verifies
    # the model composes with the sampler/optimizer stack end to end.
    vmc = nk.VMC(
        nk.operator.Ising(hi, g, h=1.0),
        nk.optim.Sgd(0.1),
        nk.sampler.MetropolisLocal(hi),
        ma,
    )
    vmc.advance(1)
438
590
/*!
    @file
    @author   Generate utility by <NAME>
    @date     01/2009
    @module

    Auto-generated C ABI marshaling layer for MyGUI::Widget.

    Each ScopeWidgetEvent_* namespace bridges one MyGUI widget event to a
    flat C callback: a module-level function pointer (mExportHandle) is set
    via the Delegate* export, and the Advise* export attaches/detaches a
    trampoline (OnEvent) that forwards the event arguments through the
    Convert<> marshaling helpers. ScopeWidgetMethod_* / ScopeWidgetProperty_*
    namespaces expose plain method calls and property getters/setters.

    NOTE(review): this file is generator output — regenerate rather than
    hand-edit. Each scope holds a single global handler slot, so only one
    managed delegate per event type can be registered at a time (presumably
    the managed side multiplexes per-widget callbacks; confirm in the wrapper).
*/
#include "../ExportDefine.h"
#include "../ExportMarshaling.h"
#include "MyGUI_Export_MarshalingWidget.h"
#include "../ExportMarshalingType.h"
#include <MyGUI.h>

namespace Export
{
    //InsertPoint

    // --- Root focus change events (bool payload) ---

    namespace ScopeWidgetEvent_RootKeyChangeFocus
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<bool>::Type);
        ExportHandle mExportHandle = nullptr;

        // Trampoline: forwards the native event to the registered C callback.
        void OnEvent(
            MyGUI::Widget * _sender,
            bool _focus)
        {
            mExportHandle(
                _sender,
                Convert<bool>::To(_focus));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateRootKeyChangeFocus(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseRootKeyChangeFocus(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventRootKeyChangeFocus += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventRootKeyChangeFocus -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_RootMouseChangeFocus
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<bool>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            bool _focus)
        {
            mExportHandle(
                _sender,
                Convert<bool>::To(_focus));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateRootMouseChangeFocus(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseRootMouseChangeFocus(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventRootMouseChangeFocus += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventRootMouseChangeFocus -= MyGUI::newDelegate(OnEvent);
        }
    }

    // --- Keyboard events ---

    namespace ScopeWidgetEvent_KeyButtonReleased
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<MyGUI::KeyCode>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            MyGUI::KeyCode _key)
        {
            mExportHandle(
                _sender,
                Convert<MyGUI::KeyCode>::To(_key));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateKeyButtonReleased(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseKeyButtonReleased(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventKeyButtonReleased += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventKeyButtonReleased -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_KeyButtonPressed
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<MyGUI::KeyCode>::Type,
            Convert<unsigned int>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            MyGUI::KeyCode _key,
            unsigned int _char)
        {
            mExportHandle(
                _sender,
                Convert<MyGUI::KeyCode>::To(_key),
                Convert<unsigned int>::To(_char));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateKeyButtonPressed(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseKeyButtonPressed(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventKeyButtonPressed += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventKeyButtonPressed -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_KeySetFocus
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<MyGUI::Widget *>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            MyGUI::Widget * _old)
        {
            mExportHandle(
                _sender,
                Convert<MyGUI::Widget *>::To(_old));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateKeySetFocus(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseKeySetFocus(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventKeySetFocus += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventKeySetFocus -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_KeyLostFocus
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<MyGUI::Widget *>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            MyGUI::Widget * _new)
        {
            mExportHandle(
                _sender,
                Convert<MyGUI::Widget *>::To(_new));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateKeyLostFocus(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseKeyLostFocus(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventKeyLostFocus += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventKeyLostFocus -= MyGUI::newDelegate(OnEvent);
        }
    }

    // --- Mouse events ---

    namespace ScopeWidgetEvent_MouseButtonDoubleClick
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender)
        {
            mExportHandle(
                _sender);
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseButtonDoubleClick(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseButtonDoubleClick(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonDoubleClick += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonDoubleClick -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseButtonClick
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender)
        {
            mExportHandle(
                _sender);
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseButtonClick(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseButtonClick(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonClick += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonClick -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseButtonReleased
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<int>::Type,
            Convert<int>::Type,
            Convert<MyGUI::MouseButton>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            int _left,
            int _top,
            MyGUI::MouseButton _id)
        {
            mExportHandle(
                _sender,
                Convert<int>::To(_left),
                Convert<int>::To(_top),
                Convert<MyGUI::MouseButton>::To(_id));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseButtonReleased(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseButtonReleased(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonReleased += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonReleased -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseButtonPressed
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<int>::Type,
            Convert<int>::Type,
            Convert<MyGUI::MouseButton>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            int _left,
            int _top,
            MyGUI::MouseButton _id)
        {
            mExportHandle(
                _sender,
                Convert<int>::To(_left),
                Convert<int>::To(_top),
                Convert<MyGUI::MouseButton>::To(_id));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseButtonPressed(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseButtonPressed(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonPressed += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseButtonPressed -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseWheel
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<int>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            int _rel)
        {
            mExportHandle(
                _sender,
                Convert<int>::To(_rel));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseWheel(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseWheel(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseWheel += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseWheel -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseMove
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<int>::Type,
            Convert<int>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            int _left,
            int _top)
        {
            mExportHandle(
                _sender,
                Convert<int>::To(_left),
                Convert<int>::To(_top));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseMove(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseMove(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseMove += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseMove -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseDrag
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<int>::Type,
            Convert<int>::Type,
            Convert<MyGUI::MouseButton>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            int _left,
            int _top,
            MyGUI::MouseButton _value4)
        {
            mExportHandle(
                _sender,
                Convert<int>::To(_left),
                Convert<int>::To(_top),
                Convert<MyGUI::MouseButton>::To(_value4));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseDrag(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseDrag(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseDrag += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseDrag -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseSetFocus
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<MyGUI::Widget *>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            MyGUI::Widget * _old)
        {
            mExportHandle(
                _sender,
                Convert<MyGUI::Widget *>::To(_old));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseSetFocus(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseSetFocus(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseSetFocus += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseSetFocus -= MyGUI::newDelegate(OnEvent);
        }
    }

    namespace ScopeWidgetEvent_MouseLostFocus
    {
        typedef void (MYGUICALLBACK *ExportHandle)(
            MyGUI::Widget *,
            Convert<MyGUI::Widget *>::Type);
        ExportHandle mExportHandle = nullptr;

        void OnEvent(
            MyGUI::Widget * _sender,
            MyGUI::Widget * _new)
        {
            mExportHandle(
                _sender,
                Convert<MyGUI::Widget *>::To(_new));
        }

        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_DelegateMouseLostFocus(ExportHandle _delegate)
        {
            mExportHandle = _delegate;
        }
        MYGUIEXPORT void MYGUICALL ExportWidgetEvent_AdviseMouseLostFocus(MyGUI::Widget* _widget, bool _advise)
        {
            if (_advise)
                static_cast<MyGUI::Widget*>(_widget)->eventMouseLostFocus += MyGUI::newDelegate(OnEvent);
            else
                static_cast<MyGUI::Widget*>(_widget)->eventMouseLostFocus -= MyGUI::newDelegate(OnEvent);
        }
    }

    // --- Method exports ---

    namespace ScopeWidgetMethod_IsMaskPickInside
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_IsMaskPickInside__point__coord(MyGUI::Widget* _native,
            Convert<const MyGUI::types::TPoint < int > &>::Type _point,
            Convert<const MyGUI::types::TCoord < int > &>::Type _coord)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->isMaskPickInside(
                Convert<const MyGUI::types::TPoint < int > &>::From(_point),
                Convert<const MyGUI::types::TCoord < int > &>::From(_coord)));
        }
    }

    // --- Property exports (getters first; setters where the property is writable) ---

    namespace ScopeWidgetProperty_RootKeyFocus
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_GetRootKeyFocus(MyGUI::Widget* _native)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->getRootKeyFocus());
        }
    }

    namespace ScopeWidgetProperty_RootMouseFocus
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_GetRootMouseFocus(MyGUI::Widget* _native)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->getRootMouseFocus());
        }
    }

    namespace ScopeWidgetProperty_InheritsPick
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_GetInheritsPick(MyGUI::Widget* _native)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->getInheritsPick());
        }
        MYGUIEXPORT void MYGUICALL ExportWidget_SetInheritsPick(MyGUI::Widget* _native, Convert<bool>::Type _value)
        {
            static_cast<MyGUI::Widget*>(_native)->setInheritsPick(Convert<bool>::From(_value));
        }
    }

    namespace ScopeWidgetProperty_NeedMouseFocus
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_GetNeedMouseFocus(MyGUI::Widget* _native)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->getNeedMouseFocus());
        }
        MYGUIEXPORT void MYGUICALL ExportWidget_SetNeedMouseFocus(MyGUI::Widget* _native, Convert<bool>::Type _value)
        {
            static_cast<MyGUI::Widget*>(_native)->setNeedMouseFocus(Convert<bool>::From(_value));
        }
    }

    namespace ScopeWidgetProperty_NeedKeyFocus
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_GetNeedKeyFocus(MyGUI::Widget* _native)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->getNeedKeyFocus());
        }
        MYGUIEXPORT void MYGUICALL ExportWidget_SetNeedKeyFocus(MyGUI::Widget* _native, Convert<bool>::Type _value)
        {
            static_cast<MyGUI::Widget*>(_native)->setNeedKeyFocus(Convert<bool>::From(_value));
        }
    }

    namespace ScopeWidgetProperty_Pointer
    {
        MYGUIEXPORT Convert<const std::string &>::Type MYGUICALL ExportWidget_GetPointer(MyGUI::Widget* _native)
        {
            return Convert<const std::string &>::To(static_cast<MyGUI::Widget*>(_native)->getPointer());
        }
        MYGUIEXPORT void MYGUICALL ExportWidget_SetPointer(MyGUI::Widget* _native, Convert<const std::string &>::Type _value)
        {
            static_cast<MyGUI::Widget*>(_native)->setPointer(Convert<const std::string &>::From(_value));
        }
    }

    namespace ScopeWidgetProperty_NeedToolTip
    {
        MYGUIEXPORT Convert<bool>::Type MYGUICALL ExportWidget_GetNeedToolTip(MyGUI::Widget* _native)
        {
            return Convert<bool>::To(static_cast<MyGUI::Widget*>(_native)->getNeedToolTip());
        }
        MYGUIEXPORT void MYGUICALL ExportWidget_SetNeedToolTip(MyGUI::Widget* _native, Convert<bool>::Type _value)
        {
            static_cast<MyGUI::Widget*>(_native)->setNeedToolTip(Convert<bool>::From(_value));
        }
    }
}
6,394
6,132
from typing import List

import claripy

from . import MemoryMixin
from ... import sim_options as options
from ... import concretization_strategies
from ...sim_state_options import SimStateOptions
from ...state_plugins.inspect import BP_BEFORE, BP_AFTER
from ...errors import SimMergeError, SimUnsatError, SimMemoryAddressError, SimMemoryError
from ...storage import DUMMY_SYMBOLIC_READ_VALUE


class MultiwriteAnnotation(claripy.Annotation):
    """Marks an AST as eligible for multi-target symbolic writes."""
    @property
    def eliminatable(self):
        # Must survive simplification so the write-strategy filter can see it.
        return False
    @property
    def relocateable(self):
        return True


def _multiwrite_filter(mem, ast): #pylint:disable=unused-argument
    # this is a huge hack, but so is the whole multiwrite crap
    return any(isinstance(a, MultiwriteAnnotation) for a in ast._uneliminatable_annotations)


SimStateOptions.register_option("symbolic_ip_max_targets", int,
    default=256,
    description="The maximum number of concrete addresses a symbolic instruction pointer "
                "can be concretized to."
)
SimStateOptions.register_option("jumptable_symbolic_ip_max_targets", int,
    default=16384,
    description="The maximum number of concrete addresses a symbolic instruction pointer "
                "can be concretized to if it is part of a jump table."
)


class AddressConcretizationMixin(MemoryMixin):
    """
    The address concretization mixin allows symbolic reads and writes to be handled sanely by dispatching them as
    a number of conditional concrete reads/writes. It provides a "concretization strategies" interface allowing the
    process of serializing symbolic addresses into concrete ones to be specified.
    """
    def __init__(self, read_strategies=None, write_strategies=None, **kwargs):
        super().__init__(**kwargs)

        # If None, defaults are installed lazily in set_state (they depend on state options).
        self.read_strategies = read_strategies
        self.write_strategies = write_strategies

    def set_state(self, state):
        super().set_state(state)

        if self.state is not None:
            if self.read_strategies is None:
                self._create_default_read_strategies()
            if self.write_strategies is None:
                self._create_default_write_strategies()

    @MemoryMixin.memo
    def copy(self, memo):
        o = super().copy(memo)
        # Shallow-copy the lists; strategy objects themselves are shared between copies.
        o.read_strategies = list(self.read_strategies)
        o.write_strategies = list(self.write_strategies)
        return o

    def merge(self, others, merge_conditions, common_ancestor=None) -> bool:
        r = super().merge(others, merge_conditions, common_ancestor=common_ancestor)
        self.read_strategies = self._merge_strategies(self.read_strategies, *[
            o.read_strategies for o in others
        ])
        self.write_strategies = self._merge_strategies(self.write_strategies, *[
            o.write_strategies for o in others
        ])
        return r

    def _create_default_read_strategies(self):
        """
        This function is used to populate `self.read_strategies` if by set-state time none have been provided
        It uses state options to pick defaults.
        """
        self.read_strategies = [ ]
        if options.APPROXIMATE_MEMORY_INDICES in self.state.options:
            # first, we try to resolve the read address by approximation
            self.read_strategies.append(
                concretization_strategies.SimConcretizationStrategyRange(1024, exact=False),
            )

        # then, we try symbolic reads, with a maximum width of a kilobyte
        self.read_strategies.append(
            concretization_strategies.SimConcretizationStrategyRange(1024)
        )

        if options.CONSERVATIVE_READ_STRATEGY not in self.state.options:
            # finally, we concretize to any one solution
            self.read_strategies.append(
                concretization_strategies.SimConcretizationStrategyAny(),
            )

    def _create_default_write_strategies(self):
        """
        This function is used to populate `self.write_strategies` if by set-state time none have been provided.
        It uses state options to pick defaults.
        """
        self.write_strategies = [ ]
        if options.APPROXIMATE_MEMORY_INDICES in self.state.options:
            if options.SYMBOLIC_WRITE_ADDRESSES not in self.state.options:
                # we try to resolve a unique solution by approximation
                self.write_strategies.append(
                    concretization_strategies.SimConcretizationStrategySingle(exact=False),
                )
            else:
                # we try a solution range by approximation
                self.write_strategies.append(
                    concretization_strategies.SimConcretizationStrategyRange(128, exact=False)
                )

        if options.SYMBOLIC_WRITE_ADDRESSES in self.state.options:
            # we try to find a range of values
            self.write_strategies.append(
                concretization_strategies.SimConcretizationStrategyRange(128)
            )
        else:
            # we try to find a range of values, but only for ASTs annotated with the multiwrite annotation
            self.write_strategies.append(concretization_strategies.SimConcretizationStrategyRange(
                128,
                filter=_multiwrite_filter
            ))

        # finally, we just grab the maximum solution
        if options.CONSERVATIVE_WRITE_STRATEGY not in self.state.options:
            self.write_strategies.append(
                concretization_strategies.SimConcretizationStrategyMax()
            )

    @staticmethod
    def _merge_strategies(*strategy_lists):
        """
        Utility function for merging. Does the merge operation on lists of strategies
        """
        if len(set(len(sl) for sl in strategy_lists)) != 1:
            # Fixed error message: previously read "with amounts of strategies" (missing "different").
            raise SimMergeError("unable to merge memories with different amounts of strategies")

        merged_strategies = [ ]
        for strategies in zip(*strategy_lists):
            if len(set(s.__class__ for s in strategies)) != 1:
                raise SimMergeError("unable to merge memories with different types of strategies")

            unique = list(set(strategies))
            if len(unique) > 1:
                unique[0].merge(unique[1:])

            merged_strategies.append(unique[0])
        return merged_strategies

    def _apply_concretization_strategies(self, addr, strategies, action):
        """
        Applies concretization strategies on the address until one of them succeeds.

        :param addr:       The symbolic address expression to concretize.
        :param strategies: The ordered list of strategies to try.
        :param action:     'load' or 'store'; passed to inspect breakpoints and error messages.
        :returns:          A list of concrete addresses.
        :raises SimMemoryAddressError: if every strategy fails.
        """
        # we try all the strategies in order
        for s in strategies:
            # first, we trigger the SimInspect breakpoint and give it a chance to intervene
            e = addr
            self.state._inspect(
                'address_concretization', BP_BEFORE,
                address_concretization_strategy=s,
                address_concretization_action=action,
                address_concretization_memory=self,
                address_concretization_expr=e,
                address_concretization_add_constraints=True
            )
            s = self.state._inspect_getattr('address_concretization_strategy', s)
            e = self.state._inspect_getattr('address_concretization_expr', addr)

            # if the breakpoint None'd out the strategy, we skip it
            if s is None:
                continue

            # let's try to apply it!
            try:
                a = s.concretize(self, e)
            except SimUnsatError:
                a = None

            # trigger the AFTER breakpoint and give it a chance to intervene
            self.state._inspect(
                'address_concretization', BP_AFTER,
                address_concretization_result=a
            )
            a = self.state._inspect_getattr('address_concretization_result', a)

            # return the result if not None!
            if a is not None:
                return a

        # well, we tried
        raise SimMemoryAddressError(
            "Unable to concretize address for %s with the provided strategies." % action
        )

    def concretize_write_addr(self, addr, strategies=None):
        """
        Concretizes an address meant for writing.

        :param addr:        An expression for the address.
        :param strategies:  A list of concretization strategies (to override the default).
        :returns:           A list of concrete addresses.
        """
        if isinstance(addr, int):
            return [ addr ]
        elif not self.state.solver.symbolic(addr):
            return [ self.state.solver.eval(addr) ]

        strategies = self.write_strategies if strategies is None else strategies
        return self._apply_concretization_strategies(addr, strategies, 'store')

    def concretize_read_addr(self, addr, strategies=None):
        """
        Concretizes an address meant for reading.

        :param addr:        An expression for the address.
        :param strategies:  A list of concretization strategies (to override the default).
        :returns:           A list of concrete addresses.
        """
        if isinstance(addr, int):
            return [ addr ]
        elif not self.state.solver.symbolic(addr):
            return [ self.state.solver.eval(addr) ]

        strategies = self.read_strategies if strategies is None else strategies
        return self._apply_concretization_strategies(addr, strategies, 'load')

    #
    # Symbolic-address load/store dispatch
    #

    @staticmethod
    def _interleave_ints(addrs: List[int]) -> List[int]:
        """
        Take a list of integers and return a new list of integers where front and back integers interleave.
        """
        lst = [None] * len(addrs)
        front, back = 0, len(addrs) - 1
        i = 0
        while front <= back:
            lst[i] = addrs[front]
            i += 1
            front += 1
            if front < back:
                lst[i] = addrs[back]
                i += 1
                back -= 1
        return lst

    def _load_one_addr(self, concrete_addr: int, trivial: bool, addr, condition, size, read_value=None, **kwargs):
        # Perform one conditional concrete load; chain the result into an ite with the
        # previously accumulated read_value (if any) keyed on addr == concrete_addr.
        if trivial:
            sub_condition = condition
        else:
            sub_condition = addr == concrete_addr
            if condition is not None:
                sub_condition = condition & sub_condition

        sub_value = super().load(concrete_addr, size=size, condition=sub_condition, **kwargs)

        if read_value is None:
            return sub_value
        else:
            return self.state.solver.If(addr == concrete_addr, sub_value, read_value)

    def load(self, addr, size=None, condition=None, **kwargs):
        if type(size) is not int:
            raise TypeError("Size must have been specified as an int before reaching address concretization")

        # Fast path
        if type(addr) is int:
            return self._load_one_addr(addr, True, addr, condition, size, read_value=None, **kwargs)
        elif not self.state.solver.symbolic(addr):
            return self._load_one_addr(self.state.solver.eval(addr), True, addr, condition, size, read_value=None, **kwargs)

        if self.state.solver.symbolic(addr) and options.AVOID_MULTIVALUED_READS in self.state.options:
            return self._default_value(None, size, name='symbolic_read_unconstrained', **kwargs)

        try:
            concrete_addrs = self._interleave_ints(sorted(self.concretize_read_addr(addr)))
        except SimMemoryError:
            if options.CONSERVATIVE_READ_STRATEGY in self.state.options:
                return self._default_value(None, size, name='symbolic_read_unconstrained', **kwargs)
            else:
                raise

        # quick optimization so as to not involve the solver if not necessary
        trivial = len(concrete_addrs) == 1 and (addr == concrete_addrs[0]).is_true()
        if not trivial:
            # apply the concretization results to the state
            constraint_options = [addr == concrete_addr for concrete_addr in concrete_addrs]
            conditional_constraint = self.state.solver.Or(*constraint_options)
            self._add_constraints(conditional_constraint, condition=condition, **kwargs)

        # quick optimization to not introduce the DUMMY value if there's only one loop
        if len(concrete_addrs) == 1:
            read_value = None
        else:
            read_value = DUMMY_SYMBOLIC_READ_VALUE  # this is a sentinel value and should never be touched

        for concrete_addr in concrete_addrs:
            # perform each of the loads
            # the implementation of the "fallback" value ought to be implemented above this in the stack!!
            read_value = self._load_one_addr(concrete_addr, trivial, addr, condition, size, read_value=read_value, **kwargs)

        return read_value

    def _store_one_addr(self, concrete_addr: int, data, trivial: bool, addr, condition, size, **kwargs):
        # Perform one conditional concrete store, guarded by addr == concrete_addr
        # (and the caller's condition, if any) unless the address was trivially unique.
        if trivial:
            sub_condition = condition
        else:
            sub_condition = addr == concrete_addr
            if condition is not None:
                sub_condition = condition & sub_condition

        super().store(concrete_addr, data, size=size, condition=sub_condition, **kwargs)

    def store(self, addr, data, size=None, condition=None, **kwargs):
        # Fast path
        if type(addr) is int:
            self._store_one_addr(addr, data, True, addr, condition, size, **kwargs)
            return
        elif not self.state.solver.symbolic(addr):
            self._store_one_addr(self.state.solver.eval(addr), data, True, addr, condition, size, **kwargs)
            return

        if self.state.solver.symbolic(addr) and options.AVOID_MULTIVALUED_WRITES in self.state.options:
            # not completed
            return

        try:
            concrete_addrs = self._interleave_ints(sorted(self.concretize_write_addr(addr)))
        except SimMemoryError:
            if options.CONSERVATIVE_WRITE_STRATEGY in self.state.options:
                return # not completed
            else:
                raise

        # quick optimization so as to not involve the solver if not necessary
        trivial = len(concrete_addrs) == 1 and (addr == concrete_addrs[0]).is_true()
        if not trivial:
            # apply the concretization results to the state
            constraint_options = [addr == concrete_addr for concrete_addr in concrete_addrs]
            conditional_constraint = self.state.solver.Or(*constraint_options)
            self._add_constraints(conditional_constraint, condition=condition, **kwargs)

            if len(concrete_addrs) == 1:
                # simple case: avoid conditional write since the address has been concretized to one solution
                super().store(concrete_addrs[0], data, size=size, **kwargs)
                return

        for concrete_addr in concrete_addrs:
            # perform each of the stores as conditional
            # the implementation of conditionality must be at the bottom of the stack
            self._store_one_addr(concrete_addr, data, trivial, addr, condition, size, **kwargs)

    def permissions(self, addr, permissions=None, **kwargs):
        if type(addr) is int:
            pass
        elif getattr(addr, 'op', None) == 'BVV':
            addr = addr.args[0]
        else:
            raise SimMemoryAddressError("Cannot get/set permissions for a symbolic address")

        return super().permissions(addr, permissions=permissions, **kwargs)

    def map_region(self, addr, length, permissions, **kwargs):
        if type(addr) is int:
            pass
        elif getattr(addr, 'op', None) == 'BVV':
            addr = addr.args[0]
        else:
            raise SimMemoryAddressError("Cannot map a region for a symbolic address")

        return super().map_region(addr, length, permissions, **kwargs)

    def unmap_region(self, addr, length, **kwargs):
        if type(addr) is int:
            pass
        elif getattr(addr, 'op', None) == 'BVV':
            addr = addr.args[0]
        else:
            raise SimMemoryAddressError("Cannot unmap a region for a symbolic address")

        return super().unmap_region(addr, length, **kwargs)

    def concrete_load(self, addr, size, *args, **kwargs):
        if type(addr) is int:
            pass
        elif getattr(addr, 'op', None) == 'BVV':
            addr = addr.args[0]
        else:
            # Fixed error message: previously said "Cannot unmap a region" (copy-pasted
            # from unmap_region); this method performs a concrete load, not an unmap.
            raise SimMemoryAddressError("Cannot perform a concrete load for a symbolic address")

        return super().concrete_load(addr, size, *args, **kwargs)
7,340
1,127
// Copyright (C) 2018-2022 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // /** * @brief A header file for definition of abstraction over platform specific shared memory map objects * @file mmap_object.hpp */ #pragma once #include <memory> #include "ngraph/runtime/aligned_buffer.hpp" namespace ov { std::shared_ptr<ngraph::runtime::AlignedBuffer> load_mmap_object(const std::string& path); #ifdef OPENVINO_ENABLE_UNICODE_PATH_SUPPORT std::shared_ptr<ngraph::runtime::AlignedBuffer> load_mmap_object(const std::wstring& path); #endif // OPENVINO_ENABLE_UNICODE_PATH_SUPPORT } // namespace ov
212
3,246
<filename>extensions/config/portability-config-yaml/src/test/java/org/datatransferproject/config/yaml/YamlSettingsExtensionTest.java
/*
 * Copyright 2018 The Data Transfer Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.datatransferproject.config.yaml;

import static com.google.common.truth.Truth.assertThat;

import com.google.common.collect.ImmutableList;
import java.io.InputStream;
import org.datatransferproject.config.ConfigUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;

/**
 * Tests that {@link YamlSettingsExtension} parses YAML settings files and that later files in the
 * combined stream override values from earlier ones.
 */
@RunWith(MockitoJUnitRunner.class)
public class YamlSettingsExtensionTest {

  /**
   * Test resource containing:
   * baseUrl: https://localhost:3000
   * baseApiUrl: https://localhost:8080
   */
  private static final String API_SETTINGS_1 = "api-1.yaml";

  /**
   * Test resource containing:
   * baseUrl: www.aBaseUrl.com
   */
  private static final String API_SETTINGS_2 = "api-2.yaml";

  @Test
  public void parse() {
    ImmutableList<String> settingsFiles = ImmutableList.of(API_SETTINGS_1);
    InputStream in = ConfigUtils.getCombinedInputStream(settingsFiles);
    YamlSettingsExtension settingsExtension = new YamlSettingsExtension();
    settingsExtension.parseSimple(in);
    assertThat((String) settingsExtension.getSetting("baseUrl", null))
        .isEqualTo("https://localhost:3000");
    assertThat((String) settingsExtension.getSetting("baseApiUrl", null))
        .isEqualTo("https://localhost:8080");
  }

  @Test
  public void parse_override() {
    // www.aBaseUrl.com should override https://localhost:3000 because it comes later in the stream
    ImmutableList<String> settingsFiles = ImmutableList.of(API_SETTINGS_1, API_SETTINGS_2);
    YamlSettingsExtension settingsExtension = new YamlSettingsExtension();
    InputStream in = ConfigUtils.getCombinedInputStream(settingsFiles);
    settingsExtension.parseSimple(in);
    assertThat((String) settingsExtension.getSetting("baseUrl", null))
        .isEqualTo("www.aBaseUrl.com");

    // reorder settings files - now https://localhost:3000 should override www.aBaseUrl.com
    settingsFiles = ImmutableList.of(API_SETTINGS_2, API_SETTINGS_1);
    in = ConfigUtils.getCombinedInputStream(settingsFiles);
    settingsExtension.parseSimple(in);
    assertThat((String) settingsExtension.getSetting("baseUrl", null))
        .isEqualTo("https://localhost:3000");
  }
}
906
2,151
<filename>third_party/openscreen/src/third_party/chromium_quic/build/base/synchronization/synchronization_buildflags.h
// Generated by build/write_buildflag_header.py
// From "//base:synchronization_buildflags"
// NOTE: machine-generated build-flag header; do not edit by hand --
// regenerate through the build instead.

#ifndef BASE_SYNCHRONIZATION_SYNCHRONIZATION_BUILDFLAGS_H_
#define BASE_SYNCHRONIZATION_SYNCHRONIZATION_BUILDFLAGS_H_

#include "build/buildflag.h"

// Queried via BUILDFLAG(ENABLE_MUTEX_PRIORITY_INHERITANCE); hard-wired to 0
// (disabled) in this build configuration.
#define BUILDFLAG_INTERNAL_ENABLE_MUTEX_PRIORITY_INHERITANCE() (0)

#endif  // BASE_SYNCHRONIZATION_SYNCHRONIZATION_BUILDFLAGS_H_
195
1,056
<reponame>timfel/netbeans /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.debugger.jpda.ui.models; import com.sun.jdi.AbsentInformationException; import com.sun.jdi.IncompatibleThreadStateException; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.lang.ref.WeakReference; import java.util.Collection; import java.util.HashSet; import org.netbeans.api.debugger.jpda.CallStackFrame; import org.netbeans.api.debugger.jpda.JPDAThread; import org.netbeans.spi.debugger.ContextProvider; import org.netbeans.api.debugger.jpda.JPDADebugger; import org.netbeans.spi.viewmodel.ModelEvent; import org.netbeans.spi.viewmodel.TreeModel; import org.netbeans.spi.viewmodel.ModelListener; import org.netbeans.spi.viewmodel.UnknownTypeException; import org.netbeans.modules.debugger.jpda.JPDADebuggerImpl; import org.netbeans.modules.debugger.jpda.models.JPDAThreadImpl; import org.netbeans.spi.debugger.DebuggerServiceRegistration; import org.openide.util.RequestProcessor.Task; import org.openide.util.WeakListeners; /** * This tree model provides an array of CallStackFrame objects. 
* * @author <NAME>, <NAME> */ @DebuggerServiceRegistration(path="netbeans-JPDASession/CallStackView", types={TreeModel.class}) public class CallStackTreeModel implements TreeModel { private static boolean verbose = (System.getProperty ("netbeans.debugger.viewrefresh") != null) && (System.getProperty ("netbeans.debugger.viewrefresh").indexOf ('c') >= 0); private JPDADebuggerImpl debugger; private Collection<ModelListener> listeners = new HashSet<ModelListener>(); private Listener listener; public CallStackTreeModel (ContextProvider lookupProvider) { debugger = (JPDADebuggerImpl) lookupProvider. lookupFirst (null, JPDADebugger.class); } /** * * @return threads contained in this group of threads */ public Object[] getChildren (Object parent, int from, int to) throws UnknownTypeException { if ( parent.equals (ROOT) || (parent instanceof JPDAThread) ) { // 1) get Thread JPDAThread thread; if (parent.equals (ROOT)) { thread = debugger.getCurrentThread (); } else { thread = (JPDAThread) parent; } if (thread == null) { return new String[] {"No current thread"}; // TODO make localizable!!! } // 2) get StackFrames for this Thread try { CallStackFrame[] sfs = thread.getCallStack(from, to); return sfs; } catch (AbsentInformationException aiex) { if (aiex.getCause() instanceof IncompatibleThreadStateException) { return new String[] {"Thread is running"}; // TODO make localizable!!! } else { return new String[] {"No call stack information available."}; // TODO make localizable!!! } } } else throw new UnknownTypeException (parent); } /** * Returns number of children for given node. * * @param node the parent node * @throws UnknownTypeException if this TreeModel implementation is not * able to resolve children for given node type * * @return true if node is leaf */ public int getChildrenCount (Object parent) throws UnknownTypeException { if ( parent.equals (ROOT) || (parent instanceof JPDAThread) ) { // Performance, see issue #59058. 
return Integer.MAX_VALUE; /* // 1) get Thread JPDAThread thread; if (parent.equals (ROOT)) { thread = debugger.getCurrentThread (); } else { thread = (JPDAThread) parent; } if (thread == null) { return 1; //new String [] {"No current thread"}; } return thread.getStackDepth(); */ } else throw new UnknownTypeException (parent); } /** * * @return threads contained in this group of threads */ public Object getRoot () { return ROOT; } public boolean isLeaf (Object node) throws UnknownTypeException { if (node.equals (ROOT)) return false; if (node instanceof CallStackFrame) return true; throw new UnknownTypeException (node); } /** * * @param l the listener to add */ public void addModelListener (ModelListener l) { synchronized (listeners) { listeners.add (l); if (listener == null) { listener = new Listener (this, debugger); } } } /** * * @param l the listener to remove */ public void removeModelListener (ModelListener l) { synchronized (listeners) { listeners.remove (l); if (listeners.size () == 0) { listener.destroy (); listener = null; } } } public void fireTreeChanged () { ModelListener[] ls; synchronized (listeners) { ls = listeners.toArray(new ModelListener[0]); } ModelEvent ev = new ModelEvent.TreeChanged(this); for (int i = 0; i < ls.length; i++) { ls[i].modelChanged (ev); } } /** * Listens on JPDADebugger on PROP_STATE */ private static class Listener implements PropertyChangeListener { private JPDADebugger debugger; private WeakReference<CallStackTreeModel> model; public Listener ( CallStackTreeModel tm, JPDADebugger debugger ) { this.debugger = debugger; model = new WeakReference<CallStackTreeModel>(tm); debugger.addPropertyChangeListener (this); JPDAThreadImpl lastCurrentThread = (JPDAThreadImpl) debugger.getCurrentThread(); if (lastCurrentThread != null) { lastCurrentThread.addPropertyChangeListener( WeakListeners.propertyChange(this, lastCurrentThread)); } } private CallStackTreeModel getModel () { CallStackTreeModel tm = model.get (); if (tm == null) { 
destroy (); } return tm; } void destroy () { debugger.removePropertyChangeListener (this); if (task != null) { // cancel old task task.cancel (); if (verbose) System.out.println("CSTM cancel old task " + task); task = null; } } // currently waiting / running refresh task // there is at most one private Task task; // check also whether the current thread was resumed/suspended // the call stack needs to be refreshed after invokeMethod() which resumes the thread public synchronized void propertyChange (PropertyChangeEvent e) { boolean refresh = false; String propertyName = e.getPropertyName(); if (propertyName == debugger.PROP_CURRENT_THREAD) { JPDAThreadImpl lastCurrentThread = (JPDAThreadImpl) debugger.getCurrentThread(); if (lastCurrentThread != null) { lastCurrentThread.addPropertyChangeListener( WeakListeners.propertyChange(this, lastCurrentThread)); refresh = true; } } if (propertyName == JPDAThread.PROP_SUSPENDED && Boolean.TRUE.equals(e.getNewValue())) { if (e.getSource() == debugger.getCurrentThread()) { refresh = true; } } if ((propertyName == debugger.PROP_STATE) && (debugger.getState() == debugger.STATE_STOPPED) ) { refresh = true; } if (refresh) { synchronized (this) { if (task == null) { task = ((JPDADebuggerImpl) debugger).getRequestProcessor().create(new Refresher()); } task.schedule(200); } } } private class Refresher extends Object implements Runnable { public void run() { if (debugger.getState () == debugger.STATE_STOPPED) { CallStackTreeModel tm = getModel (); if (tm != null) { tm.fireTreeChanged(); } } } } } }
4,372
1,338
<gh_stars>1000+
/*
 * Copyright 2009, <NAME>, <EMAIL>.
 * Distributed under the terms of the MIT License.
 */

#include <ToolTip.h>

#include <new>

#include <Message.h>
#include <TextView.h>
#include <ToolTipManager.h>


// #pragma mark - BToolTip


BToolTip::BToolTip()
{
	_InitData();
}


// Archive constructor: restores the "sticky" flag; the remaining state is
// not yet archived (see TODO).
BToolTip::BToolTip(BMessage* archive)
{
	_InitData();
	bool sticky;
	if (archive->FindBool("sticky", &sticky) == B_OK)
		fIsSticky = sticky;
	// TODO!
}


BToolTip::~BToolTip()
{
}


// Stores the "sticky" flag (only when set); other fields are not archived
// yet (see TODO).
status_t
BToolTip::Archive(BMessage* archive, bool deep) const
{
	status_t status = BArchivable::Archive(archive, deep);
	if (fIsSticky)
		status = archive->AddBool("sticky", fIsSticky);

	// TODO!
	return status;
}


void
BToolTip::SetSticky(bool enable)
{
	fIsSticky = enable;
}


bool
BToolTip::IsSticky() const
{
	return fIsSticky;
}


// Sets the tool tip's offset relative to the mouse position.
void
BToolTip::SetMouseRelativeLocation(BPoint location)
{
	fRelativeLocation = location;
}


BPoint
BToolTip::MouseRelativeLocation() const
{
	return fRelativeLocation;
}


void
BToolTip::SetAlignment(BAlignment alignment)
{
	fAlignment = alignment;
}


BAlignment
BToolTip::Alignment() const
{
	return fAlignment;
}


void
BToolTip::AttachedToWindow()
{
}


void
BToolTip::DetachedFromWindow()
{
}


// Acquires a lock suitable for mutating the tool tip's view: preferably the
// view's looper lock; when the view has no looper, the global
// BToolTipManager lock is taken instead. If the view gets attached to a
// window between the failed LockLooper() and taking the manager lock, the
// manager lock is released and the looper lock is retried. Unlock() releases
// whichever lock was taken, recorded in fLockedLooper.
bool
BToolTip::Lock()
{
	bool lockedLooper;
	while (true) {
		lockedLooper = View()->LockLooper();
		if (!lockedLooper) {
			BToolTipManager* manager = BToolTipManager::Manager();
			manager->Lock();
			if (View()->Window() != NULL) {
				// Attached in the meantime -- retry the looper lock.
				manager->Unlock();
				continue;
			}
		}
		break;
	}
	fLockedLooper = lockedLooper;
	return true;
}


void
BToolTip::Unlock()
{
	if (fLockedLooper)
		View()->UnlockLooper();
	else
		BToolTipManager::Manager()->Unlock();
}


// Shared field initialization for all constructors.
void
BToolTip::_InitData()
{
	fIsSticky = false;
	fRelativeLocation = BPoint(20, 20);
	fAlignment = BAlignment(B_ALIGN_RIGHT, B_ALIGN_BOTTOM);
}


// #pragma mark -


BTextToolTip::BTextToolTip(const char* text)
{
	_InitData(text);
}


// NOTE(review): this archive constructor is unimplemented (TODO) and does
// not call _InitData(), so fTextView stays uninitialized while ~BTextToolTip
// deletes it -- confirm before relying on unarchiving.
BTextToolTip::BTextToolTip(BMessage* archive)
{
	// TODO!
}


BTextToolTip::~BTextToolTip()
{
	delete fTextView;
}


/*static*/ BTextToolTip*
BTextToolTip::Instantiate(BMessage* archive)
{
	if (!validate_instantiation(archive, "BTextToolTip"))
		return NULL;

	return new(std::nothrow) BTextToolTip(archive);
}


status_t
BTextToolTip::Archive(BMessage* archive, bool deep) const
{
	status_t status = BToolTip::Archive(archive, deep);
	// TODO!
	return status;
}


BView*
BTextToolTip::View() const
{
	return fTextView;
}


const char*
BTextToolTip::Text() const
{
	return fTextView->Text();
}


// Replaces the displayed text under the lock obtained via Lock().
void
BTextToolTip::SetText(const char* text)
{
	if (!Lock())
		return;

	fTextView->SetText(text);
	fTextView->InvalidateLayout();

	Unlock();
}


// Creates and configures the read-only text view showing the tip text.
void
BTextToolTip::_InitData(const char* text)
{
	fTextView = new BTextView("tool tip text");
	fTextView->SetText(text);
	fTextView->MakeEditable(false);
	fTextView->SetViewUIColor(B_TOOL_TIP_BACKGROUND_COLOR);
	rgb_color color = ui_color(B_TOOL_TIP_TEXT_COLOR);
	fTextView->SetFontAndColor(NULL, 0, &color);
	fTextView->SetWordWrap(false);
}
1,197
710
package me.panavtec.cleancontacts.data.repository.contacts.datasources.api.entities.mapper;

import me.panavtec.cleancontacts.data.repository.contacts.datasources.api.entities.ApiName;
import me.panavtec.cleancontacts.domain.mappers.Mapper;
import me.panavtec.cleancontacts.domain.model.Name;

/** Converts the API-layer {@link ApiName} entity into the domain {@link Name} model. */
public class ApiNameMapper implements Mapper<ApiName, Name> {

  /**
   * Maps {@code model} to a domain {@link Name}, copying title, first and last name.
   *
   * @return the mapped name, or {@code null} when the input is {@code null}
   */
  @Override
  public Name map(ApiName model) {
    if (model == null) {
      return null;
    }
    Name domainName = new Name();
    domainName.setTitle(model.getTitle());
    domainName.setFirst(model.getFirst());
    domainName.setLast(model.getLast());
    return domainName;
  }
}
233
4,126
// ByteBufferTest.cpp // Implements the main app entrypoint for the cByteBuffer class test #include "Globals.h" #include "BoundingBox.h" /** Runs the tests, returns the number of failed tests. */ static int Test(void) { int NumFailed = 0; Vector3d Min(1, 1, 1); Vector3d Max(2, 2, 2); Vector3d LineDefs[] = { Vector3d(1.5, 4, 1.5), Vector3d(1.5, 3, 1.5), // Should intersect at 2, face 1 (YP) Vector3d(1.5, 0, 1.5), Vector3d(1.5, 4, 1.5), // Should intersect at 0.25, face 0 (YM) Vector3d(0, 0, 0), Vector3d(2, 2, 2), // Should intersect at 0.5, face 0, 3 or 5 (anyM) Vector3d(0.999, 0, 1.5), Vector3d(0.999, 4, 1.5), // Should not intersect Vector3d(1.999, 0, 1.5), Vector3d(1.999, 4, 1.5), // Should intersect at 0.25, face 0 (YM) Vector3d(2.001, 0, 1.5), Vector3d(2.001, 4, 1.5), // Should not intersect } ; bool Results[] = {true, true, true, false, true, false}; double LineCoeffs[] = {2, 0.25, 0.5, 0, 0.25, 0}; for (size_t i = 0; i < ARRAYCOUNT(LineDefs) / 2; i++) { double LineCoeff; eBlockFace Face; Vector3d Line1 = LineDefs[2 * i]; Vector3d Line2 = LineDefs[2 * i + 1]; bool res = cBoundingBox::CalcLineIntersection(Min, Max, Line1, Line2, LineCoeff, Face); if (res != Results[i]) { LOGERROR("LineIntersection({%.02f, %.02f, %.02f}, {%.02f, %.02f, %.02f}) -> %d, %.05f, %d", Line1.x, Line1.y, Line1.z, Line2.x, Line2.y, Line2.z, res ? 1 : 0, LineCoeff, Face ); NumFailed += 1; } if (res) { if (std::abs(LineCoeff - LineCoeffs[i]) > 0.0000001) { LOGERROR("LineIntersection({%.02f, %.02f, %.02f}, {%.02f, %.02f, %.02f}) -> %d, %.05f, %d", Line1.x, Line1.y, Line1.z, Line2.x, Line2.y, Line2.z, res ? 1 : 0, LineCoeff, Face ); NumFailed += 1; } } } // for i - LineDefs[] return 0; } int main(int argc, char * argv[]) { LOGD("Test started"); LOGD("Running test"); auto NumFailed = Test(); LOG("BoundingBox test finished, number of failed tests: %d", NumFailed); return NumFailed; }
986
5,316
package com.airbnb.aerosolve.core.transforms;

import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Tests the approximate_percentile transform: it maps the value F/foo onto [0, 1] relative to the
 * DECILES 10th/90th bounds, and abstains when the bounds are closer than minDiff.
 *
 * @author <NAME>
 */
public class ApproximatePercentileTransformTest {
  private static final Logger log = LoggerFactory.getLogger(ApproximatePercentileTransformTest.class);

  /**
   * Builds a feature vector with DECILES {10th: low, 90th: high} and F {foo: val}, plus an
   * unrelated string feature that the transform must leave untouched.
   */
  public FeatureVector makeFeatureVector(double low, double high, double val) {
    Map<String, Set<String>> stringFeatures = new HashMap<>();
    Map<String, Map<String, Double>> floatFeatures = new HashMap<>();

    // Fixed the raw type here: Set -> Set<String> (was an unchecked assignment).
    Set<String> list = new HashSet<>();
    list.add("aaa");
    list.add("bbb");
    stringFeatures.put("strFeature1", list);

    Map<String, Double> map = new HashMap<>();
    map.put("10th", low);
    map.put("90th", high);
    floatFeatures.put("DECILES", map);

    Map<String, Double> map2 = new HashMap<>();
    map2.put("foo", val);
    floatFeatures.put("F", map2);

    FeatureVector featureVector = new FeatureVector();
    featureVector.setStringFeatures(stringFeatures);
    featureVector.setFloatFeatures(floatFeatures);
    return featureVector;
  }

  /** Transform config under test; note "low"/"upper" are the config's own key names. */
  public String makeConfig() {
    return "test_approximate_percentile {\n" +
           " transform : approximate_percentile\n" +
           " field1 : DECILES\n" +
           " low : 10th\n" +
           " upper : 90th\n" +
           " minDiff : 10 \n" +
           " field2 : F\n" +
           " key2 : foo\n" +
           " output : PERCENTILE\n" +
           " outputKey : percentile\n" +
           "}";
  }

  @Test
  public void testEmptyFeatureVector() {
    Config config = ConfigFactory.parseString(makeConfig());
    Transform transform = TransformFactory.createTransform(config, "test_approximate_percentile");
    FeatureVector featureVector = new FeatureVector();
    // An empty vector must pass through without the transform creating features.
    transform.doTransform(featureVector);

    assertTrue(featureVector.getStringFeatures() == null);
  }

  @Test
  public void testTransform() {
    Config config = ConfigFactory.parseString(makeConfig());
    Transform transform = TransformFactory.createTransform(config, "test_approximate_percentile");

    double[] values = { -1.0, 10.0, 15.0, 20.0, 50.0, 60.0, 100.0, 200.0 };
    // Expected percentile for each value, clamped to [0, 1] outside the 10..100 range.
    double[] expected = { 0.0, 0.0, 0.05, 0.11, 0.44, 0.55, 1.0, 1.0 };

    for (int i = 0; i < values.length; i++) {
      double val = values[i];
      FeatureVector featureVector = makeFeatureVector(10.0, 100.0, val);
      transform.doTransform(featureVector);

      Map<String, Set<String>> stringFeatures = featureVector.getStringFeatures();
      assertTrue(stringFeatures.size() == 1);

      Map<String, Double> out = featureVector.floatFeatures.get("PERCENTILE");
      assertTrue(out.size() == 1);
      assertEquals(expected[i], out.get("percentile"), 0.01);
    }
  }

  @Test
  public void testAbstain() {
    Config config = ConfigFactory.parseString(makeConfig());
    Transform transform = TransformFactory.createTransform(config, "test_approximate_percentile");

    // 10th/90th bounds differ by less than minDiff (10), so no output is produced.
    FeatureVector featureVector = makeFeatureVector(10.0, 11.0, 1.0);
    transform.doTransform(featureVector);

    assertTrue(featureVector.floatFeatures.get("PERCENTILE") == null);
  }
}
1,221
6,989
<reponame>jochenater/catboost
#include <library/cpp/testing/gtest/gtest.h>

#include <library/cpp/yt/misc/enum.h>

namespace NYT {
namespace {

////////////////////////////////////////////////////////////////////////////////
// Test fixtures: enums declared through the DEFINE_*ENUM macro family.

// Plain enum with implicit consecutive values 0, 1, 2.
DEFINE_ENUM(ESimple,
    (X)
    (Y)
    (Z)
);

// Mix of explicit values and values auto-assigned after the last explicit one.
DEFINE_ENUM(EColor,
    ((Red)  (10))
    ((Green)(20))
    ((Blue) (30))
    (Black)
    (White)
);

// Bit-flag enum; values must be distinct powers of two.
DEFINE_BIT_ENUM(EFlag,
    ((_1)(0x0001))
    ((_2)(0x0002))
    ((_3)(0x0004))
    ((_4)(0x0008))
);

// Ambiguous enum: A1/A2 and D1/D2 are aliases sharing the same value.
DEFINE_AMBIGUOUS_ENUM_WITH_UNDERLYING_TYPE(EMultipleNames, int,
    (A1)
    ((A2)(0))
    (B)
    (C)
    ((D1)(100))
    ((D2)(100))
);

////////////////////////////////////////////////////////////////////////////////

// Helper: turns the std::array returned by GetDomainValues() into a vector
// for easy comparison with expected value lists.
template <class T, size_t N>
std::vector<T> ToVector(std::array<T, N> array)
{
    return std::vector<T>(array.begin(), array.end());
}

TEST(TEnumTest, Domain)
{
    EXPECT_EQ(3, TEnumTraits<ESimple>::DomainSize);
    std::vector<ESimple> v { ESimple::X, ESimple::Y, ESimple::Z };
    EXPECT_EQ(v, ToVector(TEnumTraits<ESimple>::GetDomainValues()));
    EXPECT_EQ(ESimple::X, TEnumTraits<ESimple>::GetMinValue());
    EXPECT_EQ(ESimple::Z, TEnumTraits<ESimple>::GetMaxValue());
}

// Verifies explicit values, auto-continuation after them, and construction
// from out-of-domain integers.
TEST(TEnumTest, Basic)
{
    EXPECT_EQ(0, static_cast<int>(ESimple::X));
    EXPECT_EQ(1, static_cast<int>(ESimple::Y));
    EXPECT_EQ(2, static_cast<int>(ESimple::Z));

    EXPECT_EQ(0, static_cast<int>(EColor( )));
    EXPECT_EQ(5, static_cast<int>(EColor(5)));

    EXPECT_EQ(10, static_cast<int>(EColor::Red  ));
    EXPECT_EQ(20, static_cast<int>(EColor::Green));
    EXPECT_EQ(30, static_cast<int>(EColor::Blue ));
    EXPECT_EQ(31, static_cast<int>(EColor::Black));
    EXPECT_EQ(32, static_cast<int>(EColor::White));
}

// Out-of-domain values stringify as "EColor(<value>)".
TEST(TEnumTest, ToString)
{
    EXPECT_EQ("EColor(0)", ToString(EColor( )));
    EXPECT_EQ("EColor(5)", ToString(EColor(5)));

    EXPECT_EQ("Red",   ToString(EColor(EColor::Red  )));
    EXPECT_EQ("Green", ToString(EColor::Green));
    EXPECT_EQ("Blue",  ToString(EColor(EColor::Blue )));
    EXPECT_EQ("Black", ToString(EColor::Black));
    EXPECT_EQ("White", ToString(EColor::White));
}

// FromString throws on unknown literals; FindValueByLiteral reports failure
// without touching the out-parameter.
TEST(TEnumTest, FromString)
{
    EXPECT_EQ(EColor::Red  , TEnumTraits<EColor>::FromString("Red"  ));
    EXPECT_EQ(EColor::Green, TEnumTraits<EColor>::FromString("Green"));
    EXPECT_EQ(EColor::Blue , TEnumTraits<EColor>::FromString("Blue" ));
    EXPECT_EQ(EColor::Black, TEnumTraits<EColor>::FromString("Black"));
    EXPECT_EQ(EColor::White, TEnumTraits<EColor>::FromString("White"));
    EXPECT_THROW(TEnumTraits<EColor>::FromString("Pink"), std::exception);

    EColor color;
    bool returnValue;

    returnValue = TEnumTraits<EColor>::FindValueByLiteral("Red", &color);
    EXPECT_EQ(EColor::Red, color);
    EXPECT_TRUE(returnValue);

    returnValue = TEnumTraits<EColor>::FindValueByLiteral("Pink", &color);
    EXPECT_EQ(EColor::Red, color);
    EXPECT_FALSE(returnValue);
}

// Exhaustive pairwise comparison checks for the generated relational
// operators (a < b == c < d).
TEST(TEnumTest, Ordering)
{
    ESimple a(ESimple::X);
    ESimple b(ESimple::Y);
    ESimple c(ESimple::Y);
    ESimple d(ESimple::Z);

    EXPECT_FALSE(a < a); EXPECT_FALSE(a > a);
    EXPECT_TRUE (a < b); EXPECT_TRUE (b > a);
    EXPECT_TRUE (a < c); EXPECT_TRUE (c > a);
    EXPECT_TRUE (a < d); EXPECT_TRUE (d > a);

    EXPECT_FALSE(b < a); EXPECT_FALSE(a > b);
    EXPECT_FALSE(b < b); EXPECT_FALSE(b > b);
    EXPECT_FALSE(b < c); EXPECT_FALSE(c > b);
    EXPECT_TRUE (b < d); EXPECT_TRUE (d > b);

    EXPECT_FALSE(c < a); EXPECT_FALSE(a > c);
    EXPECT_FALSE(c < b); EXPECT_FALSE(b > c);
    EXPECT_FALSE(c < c); EXPECT_FALSE(c > c);
    EXPECT_TRUE (c < d); EXPECT_TRUE (d > c);

    EXPECT_FALSE(d < a); EXPECT_FALSE(a > d);
    EXPECT_FALSE(d < b); EXPECT_FALSE(b > d);
    EXPECT_FALSE(d < c); EXPECT_FALSE(c > d);
    EXPECT_FALSE(d < d); EXPECT_FALSE(d > d);

    EXPECT_TRUE (a <= b);
    EXPECT_TRUE (b <= c);
    EXPECT_TRUE (c <= d);

    EXPECT_TRUE (a == a);
    EXPECT_FALSE(a == b);
    EXPECT_TRUE (b == c);
    EXPECT_FALSE(c == d);
    EXPECT_FALSE(d == a);

    EXPECT_FALSE(a != a);
    EXPECT_TRUE (a != b);
    EXPECT_FALSE(b != c);
    EXPECT_TRUE (c != d);
    EXPECT_TRUE (d != a);
}

// Comparisons must also work with enumerator constants on either side.
TEST(TEnumTest, OrderingWithDomainValues)
{
    EColor color(EColor::Black);

    EXPECT_LT(EColor::Red, color);
    EXPECT_LT(color, EColor::White);

    EXPECT_GT(color, EColor::Red);
    EXPECT_GT(EColor::White, color);

    EXPECT_LE(EColor::Red, color);
    EXPECT_LE(color, EColor::White);

    EXPECT_GE(EColor::White, color);
    EXPECT_GE(color, EColor::Red);

    EXPECT_EQ(color, EColor::Black);
    EXPECT_EQ(EColor::Black, color);

    EXPECT_NE(color, EColor::Blue);
    EXPECT_NE(EColor::Blue, color);
}

TEST(TEnumTest, DomainSize)
{
    EXPECT_EQ(3, TEnumTraits<ESimple>::DomainSize);
    EXPECT_EQ(5, TEnumTraits<EColor>::DomainSize);
}

TEST(TEnumTest, DomainValues)
{
    std::vector<ESimple> simpleValues;
    simpleValues.push_back(ESimple::X);
    simpleValues.push_back(ESimple::Y);
    simpleValues.push_back(ESimple::Z);
    EXPECT_EQ(simpleValues, ToVector(TEnumTraits<ESimple>::GetDomainValues()));

    std::vector<EColor> colorValues;
    colorValues.push_back(EColor::Red);
    colorValues.push_back(EColor::Green);
    colorValues.push_back(EColor::Blue);
    colorValues.push_back(EColor::Black);
    colorValues.push_back(EColor::White);
    EXPECT_EQ(colorValues, ToVector(TEnumTraits<EColor>::GetDomainValues()));
}

// Decompose splits a bit-enum value into its constituent flags.

TEST(TEnumTest, Decompose1)
{
    auto f = EFlag(0);
    std::vector<EFlag> ff { };
    EXPECT_EQ(TEnumTraits<EFlag>::Decompose(f), ff);
}

TEST(TEnumTest, Decompose2)
{
    auto f = EFlag::_1;
    std::vector<EFlag> ff {EFlag::_1};
    EXPECT_EQ(TEnumTraits<EFlag>::Decompose(f), ff);
}

TEST(TEnumTest, Decompose3)
{
    auto f = EFlag(EFlag::_1|EFlag::_2);
    std::vector<EFlag> ff{EFlag::_1, EFlag::_2};
    EXPECT_EQ(TEnumTraits<EFlag>::Decompose(f), ff);
}

TEST(TEnumTest, Decompose4)
{
    auto f = EFlag(EFlag::_2|EFlag::_4);
    std::vector<EFlag> ff{EFlag::_2, EFlag::_4};
    EXPECT_EQ(TEnumTraits<EFlag>::Decompose(f), ff);
}

// Aliases parse to the canonical value; stringification always yields the
// first-declared name for that value.
TEST(TEnumTest, MultipleNames)
{
    EXPECT_EQ(EMultipleNames::A1, TEnumTraits<EMultipleNames>::FromString("A1"));
    EXPECT_EQ(EMultipleNames::A1, TEnumTraits<EMultipleNames>::FromString("A2"));
    EXPECT_EQ(EMultipleNames::B,  TEnumTraits<EMultipleNames>::FromString("B"));
    EXPECT_EQ(EMultipleNames::C,  TEnumTraits<EMultipleNames>::FromString("C"));
    EXPECT_EQ(EMultipleNames::D1, TEnumTraits<EMultipleNames>::FromString("D1"));
    EXPECT_EQ(EMultipleNames::D1, TEnumTraits<EMultipleNames>::FromString("D2"));

    EXPECT_EQ("A1", ToString(EMultipleNames::A1));
    EXPECT_EQ("A1", ToString(EMultipleNames::A2));
    EXPECT_EQ("B",  ToString(EMultipleNames::B));
    EXPECT_EQ("C",  ToString(EMultipleNames::C));
    EXPECT_EQ("D1", ToString(EMultipleNames::D1));
    EXPECT_EQ("D1", ToString(EMultipleNames::D2));
}

////////////////////////////////////////////////////////////////////////////////

} // namespace
} // namespace NYT
3,235
842
<gh_stars>100-1000
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.

#pragma once

// Internal helpers for registering/unregistering file-type and protocol
// associations in the Windows registry on behalf of the app.
namespace winrt::Microsoft::Windows::AppLifecycle::implementation
{
    // Association registry key values.
    static PCWSTR c_openWithProgIdsKeyName{ L"OpenWithProgids" };
    static PCWSTR c_softwareClassesKeyPath{ LR"(Software\Classes\)" };
    static PCWSTR c_applicationKeyName{ L"Application" };
    static PCWSTR c_applicationNameValueName{ L"ApplicationName" };
    static PCWSTR c_defaultIconKeyName{ L"DefaultIcon" };
    static PCWSTR c_appUserModelIdValueName{ L"AppUserModelId" };
    static PCWSTR c_applicationsKeyPath{ LR"(Software\Microsoft\WindowsAppRuntimeApplications\)" };
    // NOTE(review): "Capabilties" looks misspelled ("Capabilities"), but this
    // is a live registry path -- changing it would orphan existing
    // registrations; confirm before fixing.
    static PCWSTR c_capabilitiesKeyPath{ LR"(\Capabilties)" };
    static PCWSTR c_registeredApplicationsKeyPath{ LR"(Software\RegisteredApplications\)" };
    static PCWSTR c_shellKeyName{ L"shell" };
    static PCWSTR c_commandKeyName{ L"command" };
    static PCWSTR c_delegateExecuteValueName{ L"DelegateExecute" };
    static PCWSTR c_urlProtocolValueName{ L"URL Protocol" };
    static PCWSTR c_urlDefaultValuePrefix{ L"URL:" };
    static PCWSTR c_openVerbName{ L"open" };
    static PCWSTR c_commandLineArgumentFormat{ L"%1" };

    // ProgId generation values.
    static PCWSTR c_progIdPrefix{ L"App." };
    static PCWSTR c_fileTypeProgIdSuffix{ L".File" };
    static PCWSTR c_protocolProgIdSuffix{ L".Protocol" };

    // Kind of association a ProgId represents.
    enum AssociationType
    {
        File,
        Protocol
    };

    // Returns true when `extension` denotes a file extension (as opposed to
    // a protocol scheme).
    bool IsFileExtension(const std::wstring& extension);

    // Package-identity helpers.
    std::wstring GetFullIdentityString();
    bool HasIdentity();
    std::wstring GetModulePath();

    // AppId/ProgId computation; `customSeed` lets callers vary the AppId.
    std::wstring ComputeAppId(const std::wstring& customSeed = L"");
    std::wstring ComputeProgId(AssociationType type);
    std::wstring ComputeProgId(const std::wstring& appId, AssociationType type);

    // Low-level association (Software\Classes) key manipulation.
    std::wstring CreateAssocKeyPath(const std::wstring& assoc);
    wil::unique_hkey CreateAssocKey(const std::wstring& assoc, REGSAM samDesired = KEY_WRITE);
    wil::unique_hkey OpenAssocKey(const std::wstring& assoc, REGSAM samDesired = KEY_READ);
    void DeleteAssocKey(const std::wstring& assoc);

    // ProgId registration.
    wil::unique_hkey RegisterProgId(const std::wstring& progId, const std::wstring& defaultValue = L"",
        const std::wstring& appUserModelId = L"", const std::wstring& applicationDisplayName = L"",
        const std::wstring& logo = L"");
    void UnregisterProgId(const std::wstring& progId);

    // Application registration.
    std::wstring CreateApplicationKeyPath();
    wil::unique_hkey CreateApplicationKey(const std::wstring& progId, REGSAM samDesired = KEY_WRITE);
    wil::unique_hkey OpenApplicationKey(const std::wstring& progId, REGSAM samDesired = KEY_READ);
    void RegisterApplication(const std::wstring& appId);
    void UnregisterApplication(const std::wstring& appId);

    // Shell verb registration; `delegateExecute` optionally names a COM
    // handler class.
    void RegisterVerb(const std::wstring& progId, const std::wstring& verb, const std::wstring& command,
        _In_opt_ const GUID* delegateExecute = nullptr);
    void UnregisterVerb(const std::wstring& progId, const std::wstring& verb);

    // Protocol / file-extension registration.
    void RegisterProtocol(const std::wstring& scheme);
    void UnregisterProtocol(const std::wstring& scheme);
    void RegisterFileExtension(const std::wstring& extension);
    void UnregisterFileExtension(const std::wstring& extension);

    // Handler <-> association binding.
    void RegisterAssociationHandler(const std::wstring& handlerAppId, const std::wstring& association,
        AssociationType type);
    void UnregisterAssociationHandler(const std::wstring& handlerAppId, const std::wstring& association,
        AssociationType type);

    // Tells the shell that association data changed.
    void NotifyShellAssocChanged();
}
1,349
1,724
<reponame>GeGuNa/skift_oss2
#include "system/devices/Device.h"
#include "system/devices/Devices.h"

// Binds the device to its bus address and device class; the device name is
// obtained from the class via device_claim_name().
Device::Device(DeviceAddress address, DeviceClass klass)
    : _address(address),
      _klass(klass),
      _name(device_claim_name(klass))
{
}
100
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef OOX_HELPER_TEXTINPUTSTREAM_HXX
#define OOX_HELPER_TEXTINPUTSTREAM_HXX

#include <com/sun/star/uno/Reference.hxx>
#include <rtl/ustring.hxx>

namespace com { namespace sun { namespace star {
    namespace io { class XInputStream; }
    namespace io { class XTextInputStream; }
    namespace uno { class XComponentContext; }
} } }

namespace oox {

class BinaryInputStream;

// ============================================================================

/** Wraps a (UNO or binary) input stream and decodes it as text in the given
    encoding, providing line- and separator-based reading. */
class TextInputStream
{
public:
    /** Creates the text stream on top of a UNO input stream, decoding with
        the passed text encoding. */
    explicit            TextInputStream(
                            const ::com::sun::star::uno::Reference< ::com::sun::star::uno::XComponentContext >& rxContext,
                            const ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream >& rxInStrm,
                            rtl_TextEncoding eTextEnc );

    /** Creates the text stream on top of a binary input stream, decoding with
        the passed text encoding. */
    explicit            TextInputStream(
                            const ::com::sun::star::uno::Reference< ::com::sun::star::uno::XComponentContext >& rxContext,
                            BinaryInputStream& rInStrm,
                            rtl_TextEncoding eTextEnc );

                        ~TextInputStream();

    /** Returns true, if no more text is available in the stream.
     */
    bool                isEof() const;

    /** Reads a text line from the stream.

        If the last line in the stream is not terminated with line-end
        character(s), the stream will immediately go into EOF state and return
        the text line. Otherwise, if the last character in the stream is a
        line-end character, the next call to this function will turn the
        stream into EOF state and return an empty string.
     */
    ::rtl::OUString     readLine();

    /** Reads a text portion from the stream until the specified character is
        found.

        If the end of the stream is not terminated with the specified
        character, the stream will immediately go into EOF state and return
        the remaining text portion. Otherwise, if the last character in the
        stream is the specified character (and caller specifies to read and
        return it, see parameter bIncludeChar), the next call to this function
        will turn the stream into EOF state and return an empty string.

        @param cChar
            The separator character to be read to.

        @param bIncludeChar
            True = if found, the specified character will be read from stream
            and included in the returned string.
            False = the specified character will neither be read from the
            stream nor included in the returned string, but will be returned
            as first character in the next call of this function or readLine().
     */
    ::rtl::OUString     readToChar( sal_Unicode cChar, bool bIncludeChar );

    // ------------------------------------------------------------------------

    /** Creates a UNO text input stream object from the passed UNO input
        stream.
     */
    static ::com::sun::star::uno::Reference< ::com::sun::star::io::XTextInputStream >
                        createXTextInputStream(
                            const ::com::sun::star::uno::Reference< ::com::sun::star::uno::XComponentContext >& rxContext,
                            const ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream >& rxInStrm,
                            rtl_TextEncoding eTextEnc );

    // ------------------------------------------------------------------------
private:
    /** Shared constructor body: wraps the UNO input stream in an
        XTextInputStream with the requested encoding. */
    void                init(
                            const ::com::sun::star::uno::Reference< ::com::sun::star::uno::XComponentContext >& rxContext,
                            const ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream >& rxInStrm,
                            rtl_TextEncoding eTextEnc );

    /** Adds the pending character in front of the passed string, if existing. */
    ::rtl::OUString     createFinalString( const ::rtl::OUString& rString );

private:
    // Underlying UNO text stream doing the actual decoding.
    ::com::sun::star::uno::Reference< ::com::sun::star::io::XTextInputStream >
                        mxTextStrm;
    // Separator character found but not yet consumed by readToChar()
    // (returned by the next read call); 0 when none is pending.
    sal_Unicode         mcPendingChar;
};

// ============================================================================

} // namespace oox

#endif
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.j2ee.persistence.action;

import org.netbeans.api.java.source.TreeUtilities;
import org.netbeans.modules.j2ee.persistence.spi.entitymanagergenerator.EntityManagerGenerationStrategyResolver;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.Tree;
import java.io.IOException;
import org.netbeans.api.java.source.JavaSource;
import org.netbeans.api.java.source.JavaSource.Phase;
import org.netbeans.api.java.source.Task;
import org.netbeans.api.java.source.TreeMaker;
import org.netbeans.api.java.source.WorkingCopy;
import org.netbeans.api.project.FileOwnerQuery;
import org.netbeans.api.project.Project;
import org.netbeans.modules.j2ee.persistence.spi.entitymanagergenerator.ApplicationManagedResourceTransactionInJ2SE;
import org.netbeans.modules.j2ee.persistence.spi.entitymanagergenerator.EntityManagerGenerationStrategy;
import org.netbeans.modules.j2ee.persistence.api.PersistenceScope;
import org.netbeans.modules.j2ee.persistence.dd.PersistenceMetadata;
import org.netbeans.modules.j2ee.persistence.dd.common.Persistence;
import org.netbeans.modules.j2ee.persistence.dd.common.PersistenceUnit;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.filesystems.FileObject;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.openide.util.Parameters;

/**
 * Generates appropriate code for retrieving and invoking <code>javax.persistence.EntityManager</code>.
 * The generated code depends on the target class' enviroment.
 *
 * TODO: move this class to different package if anybody else wants to use it
 * @author <NAME>, <NAME>
 */
public final class EntityManagerGenerator {

    /**
     * The fully qualified name of the target class.
     */
    private final String fqn;
    /**
     * The target java source file.
     */
    private final JavaSource targetSource;
    /**
     * The file object of the target source file.
     */
    private final FileObject targetFo;
    /**
     * The project to which the target file belongs.
     */
    private final Project project;

    /**
     * Creates a new EntityManagerGenerator.
     * @param targetFo the file object of the target java source file.
     * @param fqn the fully qualified name of the target java class.
     */
    public EntityManagerGenerator(FileObject targetFo, String fqn) {
        this.fqn = fqn;
        this.targetFo = targetFo;
        this.targetSource = JavaSource.forFileObject(targetFo);
        this.project = FileOwnerQuery.getOwner(targetFo);
    }

    /**
     * Generates the code needed for retrieving and invoking
     * an instance of <code>javax.persistence.EntityManager</code>. The generated
     * code depends on the environment of the target class (e.g. whether
     * it supports injection or not).
     *
     * @param options the options for the generation. Must not be null.
     * @return the modified file object of the target java class.
     */
    public FileObject generate(final GenerationOptions options) throws IOException{
        final Class<? extends EntityManagerGenerationStrategy> strategyClass = getStrategy();
        // No resolvable strategy means the environment is not supported:
        // inform the user and return the file unchanged.
        if (strategyClass == null){
            NotifyDescriptor d = new NotifyDescriptor.Message(
                    NbBundle.getMessage(EntityManagerGenerator.class, "ERR_NotSupported"),
                    NotifyDescriptor.INFORMATION_MESSAGE);
            DialogDisplayer.getDefault().notify(d);
            return targetFo;
        }
        return generate(options, strategyClass);
    }

    /**
     * Generates the code needed for retrieving and invoking
     * an instance of <code>javax.persistence.EntityManager</code>. The generated
     * code depends on the given <code>strategyClass</code>.
     *
     * @param options the options for the generation. Must not be null.
     * @param strategyClass the generation strategy that should be used. Must not be null.
     * @return the modified file object of the target java class.
     */
    public FileObject generate(final GenerationOptions options,
            final Class<? extends EntityManagerGenerationStrategy> strategyClass) throws IOException{

        Parameters.notNull("options", options); //NOI18N
        Parameters.notNull("strategyClass", strategyClass); //NOI18N

        Task task = new Task<WorkingCopy>() {

            public void run(WorkingCopy workingCopy) throws Exception {
                workingCopy.toPhase(Phase.RESOLVED);
                CompilationUnitTree cut = workingCopy.getCompilationUnit();
                TreeMaker make = workingCopy.getTreeMaker();
                // Rewrite every class declared in the compilation unit with the
                // strategy-generated version (skips non-class type declarations).
                for (Tree typeDeclaration : cut.getTypeDecls()){
                    if (TreeUtilities.CLASS_TREE_KINDS.contains(typeDeclaration.getKind())){
                        ClassTree clazz = (ClassTree) typeDeclaration;
                        EntityManagerGenerationStrategy strategy = instantiateStrategy(strategyClass, workingCopy, make, clazz, options);
                        workingCopy.rewrite(clazz, strategy.generate());
                    }
                }
            }
        };

        targetSource.runModificationTask(task).commit();
        return targetFo;
    }

    /**
     * Resolves the generation strategy for the target file via the project's
     * lookup, falling back to the Java SE application-managed strategy.
     *
     * @return the strategy class, never null.
     */
    private Class<? extends EntityManagerGenerationStrategy> getStrategy(){
        EntityManagerGenerationStrategyResolver resolver = project.getLookup().lookup(EntityManagerGenerationStrategyResolver.class);
        if (resolver != null){
            return resolver.resolveStrategy(targetFo);
        }
        // must be a java se project (we don't want it to implement the EntityManagerGenerationStrategyResolver SPI)
        return ApplicationManagedResourceTransactionInJ2SE.class;
    }

    /**
     * Instantiates the given strategy class reflectively and initializes it
     * with the working copy, tree maker, target class and generation options.
     *
     * @return the configured strategy instance.
     */
    private EntityManagerGenerationStrategy instantiateStrategy(Class<? extends EntityManagerGenerationStrategy> strategy, WorkingCopy workingCopy,
            TreeMaker make, ClassTree clazz, GenerationOptions options){

        EntityManagerGenerationStrategy result = null;

        try{
            result = strategy.newInstance();
            result.setClassTree(clazz);
            result.setWorkingCopy(workingCopy);
            result.setGenerationOptions(options);
            result.setTreeMaker(make);
            result.setPersistenceUnit(getPersistenceUnit());
        } catch (IllegalAccessException iae){
            throw new RuntimeException(iae); //TODO
        } catch (InstantiationException ie){
            throw new RuntimeException(ie); //TODO
        }

        return result;
    }

    /**
     * Picks the persistence unit that best matches the target class:
     * a unit explicitly listing the class wins, then the first unit that
     * applies to all entities, then the first unit as a last resort.
     *
     * @return the matching persistence unit, or null if none could be read.
     */
    private PersistenceUnit getPersistenceUnit() {
        PersistenceScope persistenceScope = PersistenceScope.getPersistenceScope(targetFo);
        if (persistenceScope == null){
            return null;
        }

        try {
            Persistence persistence = PersistenceMetadata.getDefault().getRoot(persistenceScope.getPersistenceXml());
            if(persistence != null){
                PersistenceUnit[] pus=persistence.getPersistenceUnit();
                PersistenceUnit ret=pus.length>0 ? pus[0] : null;//if there is only one pu, return in any case (even if do not contain fqn)
                if(pus.length>1) {//search for best match
                    PersistenceUnit forAll=null;
                    PersistenceUnit forOne=null;
                    for(int i=0;i<pus.length && forOne==null;i++) {
                        PersistenceUnit tmp=pus[i];
                        if(forAll ==null && !tmp.isExcludeUnlistedClasses()) forAll=tmp;//first match suitable for all entities in the project
                        if(tmp.isExcludeUnlistedClasses()) {
                            // Unit only covers explicitly listed classes: check for fqn.
                            String []classes = tmp.getClass2();
                            for(String clas:classes){
                                if(fqn.equals(clas)) {
                                    forOne = tmp;
                                    break;
                                }
                            }
                        }
                    }
                    ret = forOne != null ? forOne : (forAll != null ? forAll : ret);
                }
                return ret;
            }
        } catch (IOException ex) {
            Exceptions.printStackTrace(ex);
        }
        return null;
    }
}
3,738
1,828
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alipay.sofa.jraft.rhea.chaos;

import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.alipay.sofa.jraft.conf.Configuration;
import com.alipay.sofa.jraft.entity.PeerId;
import com.alipay.sofa.jraft.rhea.JRaftHelper;
import com.alipay.sofa.jraft.rhea.StoreEngine;
import com.alipay.sofa.jraft.rhea.TestUtil;
import com.alipay.sofa.jraft.rhea.client.DefaultRheaKVStore;
import com.alipay.sofa.jraft.rhea.client.RheaKVStore;
import com.alipay.sofa.jraft.rhea.client.pd.PlacementDriverClient;
import com.alipay.sofa.jraft.rhea.errors.NotLeaderException;
import com.alipay.sofa.jraft.rhea.metadata.Peer;
import com.alipay.sofa.jraft.rhea.options.BatchingOptions;
import com.alipay.sofa.jraft.rhea.options.PlacementDriverOptions;
import com.alipay.sofa.jraft.rhea.options.RheaKVStoreOptions;
import com.alipay.sofa.jraft.rhea.options.StoreEngineOptions;
import com.alipay.sofa.jraft.rhea.options.configured.PlacementDriverOptionsConfigured;
import com.alipay.sofa.jraft.rhea.options.configured.RheaKVStoreOptionsConfigured;
import com.alipay.sofa.jraft.rhea.options.configured.RocksDBOptionsConfigured;
import com.alipay.sofa.jraft.rhea.options.configured.StoreEngineOptionsConfigured;
import com.alipay.sofa.jraft.rhea.storage.StorageType;
import com.alipay.sofa.jraft.rhea.util.Constants;
import com.alipay.sofa.jraft.util.Endpoint;

/**
 * In-process RheaKV cluster harness for chaos tests: boots one store per
 * peer (with a fake placement driver), and supports adding/removing peers,
 * transferring leadership and looking up stores by peer or role.
 *
 * All public methods are synchronized: the harness is driven from multiple
 * test threads.
 *
 * @author jiachun.fjc
 */
public class ChaosTestCluster {

    private static final Logger      LOG            = LoggerFactory.getLogger(ChaosTestCluster.class);

    public static String             CLUSTER_NAME   = "chaos_test";
    // On-disk scratch locations; wiped on start() and stopAll().
    public static String             DB_PATH        = "chaos_db" + File.separator;
    public static String             RAFT_DATA_PATH = "chaos_raft_data" + File.separator;

    private final List<PeerId>       peerIds;
    private final StorageType        storageType;
    private final boolean            allowBatching;
    private final boolean            onlyLeaderRead;

    // One RheaKVStore per peer; CopyOnWriteArrayList so readers never block.
    private final List<RheaKVStore>  stores         = new CopyOnWriteArrayList<>();

    public ChaosTestCluster(List<PeerId> peerIds, StorageType storageType, boolean allowBatching,
                            boolean onlyLeaderRead) {
        this.peerIds = peerIds;
        this.storageType = storageType;
        this.allowBatching = allowBatching;
        this.onlyLeaderRead = onlyLeaderRead;
    }

    /** Wipes old data, starts one store per peer and blocks until a leader is elected. */
    public synchronized void start() {
        deleteFiles();
        final Configuration conf = new Configuration(this.peerIds);
        final String initialServerList = conf.toString();
        for (final PeerId p : conf.listPeers()) {
            final PlacementDriverOptions pdOpts = PlacementDriverOptionsConfigured.newConfigured().withFake(true) // use a fake pd
                .config();
            final StoreEngineOptions storeOpts = StoreEngineOptionsConfigured.newConfigured() //
                .withStorageType(this.storageType) //
                .withRocksDBOptions(RocksDBOptionsConfigured.newConfigured().withDbPath(DB_PATH).config()) //
                .withRaftDataPath(RAFT_DATA_PATH) //
                .withServerAddress(p.getEndpoint()) //
                .withLeastKeysOnSplit(10) //
                .config();
            final RheaKVStoreOptions opts = RheaKVStoreOptionsConfigured.newConfigured() //
                .withClusterName(CLUSTER_NAME) //
                .withInitialServerList(initialServerList).withOnlyLeaderRead(this.onlyLeaderRead) //
                .withStoreEngineOptions(storeOpts) //
                .withPlacementDriverOptions(pdOpts) //
                .withFailoverRetries(30) //
                .withFutureTimeoutMillis(TimeUnit.SECONDS.toMillis(60)) //
                .config();
            BatchingOptions batchingOptions = opts.getBatchingOptions();
            if (batchingOptions == null) {
                batchingOptions = new BatchingOptions();
            }
            batchingOptions.setAllowBatching(this.allowBatching);
            opts.setBatchingOptions(batchingOptions);

            final RheaKVStore store = new DefaultRheaKVStore();
            if (!store.init(opts)) {
                throw new IllegalStateException("fail to init store with options: " + opts);
            }
            this.stores.add(store);
        }
        awaitLeader();
    }

    /** Shuts every store down and deletes the on-disk data. */
    public synchronized void stopAll() {
        for (final RheaKVStore store : this.stores) {
            store.shutdown();
        }
        deleteFiles();
    }

    /** Best-effort removal of the db and raft data directories; IO errors are only printed. */
    private void deleteFiles() {
        final File dbFile = new File(DB_PATH);
        if (dbFile.exists()) {
            try {
                FileUtils.forceDelete(dbFile);
                LOG.info("delete db file: {}", dbFile.getAbsolutePath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        final File raftFile = new File(RAFT_DATA_PATH);
        if (raftFile.exists()) {
            try {
                FileUtils.forceDelete(raftFile);
                LOG.info("remove raft data: {}", raftFile.getAbsolutePath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /** Returns the store currently acting as leader of the default region. */
    public synchronized RheaKVStore getLeaderStore() {
        awaitLeader();
        for (final RheaKVStore store : this.stores) {
            if (((DefaultRheaKVStore) store).isLeader(Constants.DEFAULT_REGION_ID)) {
                return store;
            }
        }
        throw new NotLeaderException("no leader");
    }

    /** Returns the store listening on the given peer's endpoint. */
    public synchronized RheaKVStore getByStorePeer(final PeerId peerId) {
        awaitLeader();
        final Endpoint endpoint = JRaftHelper.toPeer(peerId).getEndpoint();
        for (final RheaKVStore store : this.stores) {
            if (endpoint.equals(getSelfEndpoint(store))) {
                return store;
            }
        }
        throw new RuntimeException("fail to get peer: " + peerId);
    }

    /** Removes the replica for peerId from the region, then shuts its store down. */
    public synchronized void removePeer(final PeerId peerId) {
        for (int i = this.stores.size() - 1; i >= 0; i--) {
            final RheaKVStore store = this.stores.get(i);
            if (peerId.getEndpoint().equals(getSelfEndpoint(store))) {
                final PlacementDriverClient pdClient = store.getPlacementDriverClient();
                if (!pdClient.removeReplica(Constants.DEFAULT_REGION_ID, JRaftHelper.toPeer(peerId), true)) {
                    throw new RuntimeException("fail to remove peer: " + peerId);
                }
                store.shutdown();
                // stores and peerIds are kept index-aligned, so remove both at i.
                this.stores.remove(i);
                this.peerIds.remove(i);
                LOG.info("Shutdown and remove peer: {}", peerId);
                return;
            }
        }
        LOG.info("Could not find peer: {}", peerId);
    }

    /** Starts a new store for peerId and adds it as a replica of the region. */
    public synchronized void addPeer(final PeerId peerId) {
        if (this.peerIds.contains(peerId)) {
            throw new RuntimeException("peerId is exist: " + peerId);
        }
        this.peerIds.add(peerId);
        final Configuration conf = new Configuration(this.peerIds);
        final String initialServerList = conf.toString();
        final PlacementDriverOptions pdOpts = PlacementDriverOptionsConfigured.newConfigured().withFake(true) // use a fake pd
            .config();
        final StoreEngineOptions storeOpts = StoreEngineOptionsConfigured.newConfigured() //
            .withStorageType(this.storageType) //
            .withRocksDBOptions(RocksDBOptionsConfigured.newConfigured().withDbPath(DB_PATH).config()) //
            .withRaftDataPath(RAFT_DATA_PATH) //
            .withServerAddress(peerId.getEndpoint()) //
            .config();
        // NOTE(review): uses the literal "chaos_test" instead of CLUSTER_NAME
        // (start() uses the constant) — keep the two in sync.
        final RheaKVStoreOptions opts = RheaKVStoreOptionsConfigured.newConfigured() //
            .withClusterName("chaos_test") //
            .withInitialServerList(initialServerList).withStoreEngineOptions(storeOpts) //
            .withPlacementDriverOptions(pdOpts) //
            .config();
        BatchingOptions batchingOptions = opts.getBatchingOptions();
        if (batchingOptions == null) {
            batchingOptions = new BatchingOptions();
        }
        batchingOptions.setAllowBatching(this.allowBatching);
        opts.setBatchingOptions(batchingOptions);

        final RheaKVStore store = new DefaultRheaKVStore();
        if (!store.init(opts)) {
            throw new IllegalStateException("fail to init store with options: " + opts);
        }
        final RheaKVStore leader = getLeaderStore();
        final PlacementDriverClient pdClient = leader.getPlacementDriverClient();
        if (!pdClient.addReplica(Constants.DEFAULT_REGION_ID, JRaftHelper.toPeer(peerId), true)) {
            throw new RuntimeException("fail to add peer: " + peerId);
        }
        this.stores.add(store);
        awaitLeader();
    }

    /** Returns a uniformly random store. */
    public synchronized RheaKVStore getRandomStore() {
        final ThreadLocalRandom random = ThreadLocalRandom.current();
        return this.stores.get(random.nextInt(this.stores.size()));
    }

    /** Returns a uniformly random peer id. */
    public synchronized PeerId getRandomPeer() {
        final ThreadLocalRandom random = ThreadLocalRandom.current();
        return this.peerIds.get(random.nextInt(this.peerIds.size()));
    }

    /** Transfers leadership of the default region to a randomly chosen peer. */
    public synchronized void randomTransferLeader() {
        final RheaKVStore leader = getLeaderStore();
        final Endpoint leaderEndpoint = getSelfEndpoint(leader);
        final PlacementDriverClient pdClient = leader.getPlacementDriverClient();
        final Peer randomPeer = JRaftHelper.toPeer(getRandomPeer());
        boolean result = pdClient.transferLeader(Constants.DEFAULT_REGION_ID, randomPeer, false);
        if (!result) {
            throw new RuntimeException("fail to transfer leader [" + leaderEndpoint + " --> " + randomPeer);
        }
        LOG.info("Transfer leader from {} to {}", leaderEndpoint, randomPeer.getEndpoint());
    }

    /** Reads the store's self endpoint via reflection on its internal StoreEngine. */
    public synchronized Endpoint getSelfEndpoint(final RheaKVStore store) {
        final StoreEngine storeEngine = TestUtil.getByName(store, "storeEngine", StoreEngine.class);
        return storeEngine.getSelfEndpoint();
    }

    /** Polls (100 x 100ms, ~10s total) until some store is leader of the default region. */
    public synchronized void awaitLeader() {
        for (int i = 0; i < 100; i++) {
            for (final RheaKVStore store : this.stores) {
                if (((DefaultRheaKVStore) store).isLeader(Constants.DEFAULT_REGION_ID)) {
                    return;
                }
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException ignored) {
                // ignored
            }
        }
        throw new NotLeaderException("wait leader timeout");
    }
}
4,839
3,269
# Time:  O(n)
# Space: O(1)


class Solution(object):
    def findMaxConsecutiveOnes(self, nums):
        """Return the length of the longest run of consecutive 1s in nums.

        :type nums: List[int]
        :rtype: int
        """
        best = 0
        run = 0
        for value in nums:
            if value:
                # Extend the current run of ones.
                run += 1
                if run > best:
                    best = run
            else:
                # A zero terminates the run.
                run = 0
        return best
217
860
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package groovy.json; import java.io.File; import java.io.InputStream; import java.io.Reader; /** * This is the parser interface that backs the new JsonSlurper. * It was derived from the Boon JSON parser. * @author <NAME> * @since 2.3.0 */ public interface JsonParser { Object parse(String jsonString); Object parse(byte[] bytes); Object parse(byte[] bytes, String charset); Object parse(CharSequence charSequence); Object parse(char[] chars); Object parse(Reader reader); Object parse(InputStream input); Object parse(InputStream input, String charset); Object parse(File file, String charset); }
414
2,003
// Copyright (c) 2015-2018, Baidu.com, Inc. All Rights Reserved
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#pragma once

#include <atomic>
#include <string>
#include <vector>
#include <queue>
#include <memory>

#include "common/thread_pool.h"
#include "proto/master_client.h"
#include "proto/stat_table.pb.h"
#include "proto/table_meta.pb.h"
#include "sdk/table_impl.h"
#include "tera.h"
#include "types.h"

namespace tera {
namespace sdk {

// Who is using the stat table; affects behavior of the owning component.
enum class StatTableCustomer {
  kMaster = 0,
  kTabletNode = 1,
  kClient = 2,
};

// Phase in which a tablet corruption was detected.
enum class CorruptPhase {
  kLoading = 0,
  kCompacting = 1,
  kUnknown = 10,
};

// Records and queries tablet-corruption messages in a dedicated stat table.
class StatTable {
 public:
  // Kind of file found corrupted.
  enum class CorruptType {
    kUnknown = 0,
    kSst = 1,
    kCurrent = 2,
    kManifest = 3,
    kLoadlock = 4,
  };

  // Master and tabletnode need to set the customer explicitly; clients can
  // use the default.
  explicit StatTable(ThreadPool* thread_pool, std::shared_ptr<auth::AccessBuilder> access_builder,
                     const StatTableCustomer& c = StatTableCustomer::kClient,
                     const std::string& local_addr = "");

  // Selects fail messages matching the given filter strings.
  void SelectTabletsFailMessages(const std::vector<std::string>& filters, bool is_detail);

  // default select all fail msg
  // set args to limit ts/tablet/timerange
  void SelectTabletsFailMessages(const CorruptPhase& phase = CorruptPhase::kUnknown,
                                 const std::string& ts_addr = "", const std::string& tablename = "",
                                 const std::string& tablet = "", int64_t start_ts = kOldestTs,
                                 int64_t end_ts = kLatestTs, bool is_detail = false);

  // record by tabletserver
  void RecordTabletCorrupt(const std::string& tablet, const std::string& fail_msg);

  // Removes the corruption record for the given tablet.
  void ErasureTabletCorrupt(const std::string& tablet);

  // Serializes the load request + session id into a single context string.
  static std::string SerializeLoadContext(const LoadTabletRequest& request,
                                          const std::string& tabletnode_session_id);

  // Serializes one corruption event (phase, node, tablet, context, message).
  static std::string SerializeCorrupt(CorruptPhase phase, const std::string& tabletnode,
                                      const std::string& tablet, const std::string& context_str,
                                      const std::string& msg);

  // Parses a serialized corruption string back into a protobuf message.
  void DeserializeCorrupt(const string& corrupt_str, tera::TabletCorruptMessage* corrupt_msg);

  // Opens (and lazily creates) the underlying stat table.
  bool OpenStatTable();

 private:
  bool CreateStatTable();
  // Async mutation completion callback.
  static void RecordStatTableCallBack(RowMutation* mutation);

 private:
  std::shared_ptr<TableImpl> stat_table_;
  std::atomic<bool> created_;  // stat table creation attempted/succeeded
  std::atomic<bool> opened_;   // stat table opened
  std::string local_addr_;
  StatTableCustomer customer_type_;
  mutable Mutex mutex_;
  ThreadPool* thread_pool_;
  std::shared_ptr<auth::AccessBuilder> access_builder_;
};

}  // namespace sdk
}  // namespace tera
1,068
314
//
//  NSMutableDictionary+ZJHelperKit.h
//  ZJUIKit
//
//  Created by dzj on 2018/1/19.
//  Copyright © 2018年 kapokcloud. All rights reserved.
//

@interface NSMutableDictionary (ZJHelperKit)

/**
 *  It is safe to use this to set an object for a key.
 *  NOTE(review): presumably returns NO instead of throwing when anObject or
 *  aKey is nil — confirm against the category implementation.

 @param anObject the object to store
 @param aKey the key
 @return BOOL — whether the object was set
 */
- (BOOL)zj_setObject:(id)anObject forKey:(id<NSCopying>)aKey;

/**
 *  It is safe to use this to set a value for a key.
 *  NOTE(review): presumably returns NO instead of throwing when value or
 *  key is nil — confirm against the category implementation.

 @param value the value to store
 @param key the key
 @return bool — whether the value was set
 */
- (BOOL)zj_setValue:(id)value forKey:(NSString *)key;

@end
269
734
/*
    ,--.                     ,--.     ,--.  ,--.
  ,-'  '-.,--.--.,--,--.,---.|  |,-.,-'  '-.`--' ,---. ,--,--,      Copyright 2018
  '-.  .-'|  .--' ,-.  | .--'|     /'-.  .-',--.| .-. ||      \   Tracktion Software
    |  |  |  |  \ '-'  \ `--.|  \  \  |  |  |  |' '-' '|  ||  |       Corporation
    `---' `--'   `--`--'`---'`--'`--' `---' `--' `---' `--''--'    www.tracktion.com

    Tracktion Engine uses a GPL/commercial licence - see LICENCE.md for details.
*/

namespace tracktion_engine
{

/** Defines the place to insert Track[s].
    @see Edit::insertNewTrack, Edit::moveTrack
*/
struct TrackInsertPoint
{
    /** Creates an insertion point with a parent and preceeding track.
        @param parent       The parent tracks should be nested in. nullptr means a top-level track
        @param preceding    The track before the insertion point. nullptr means tracks should be inserted at the start of the list.
    */
    TrackInsertPoint (Track* parent, Track* preceding);

    /** Creates an insertion point with a parent and preceeding track.
        @param parentTrackID    The ID of the parent tracks should be nested in. An invalid ID means a top-level track
        @param precedingTrackID The ID of the track before the insertion point. An invalid ID means tracks should be inserted at the start of the list.
    */
    TrackInsertPoint (EditItemID parentTrackID, EditItemID precedingTrackID);

    /** Creates an insertion point around a Track.
        @param currentPos   The track to base insertion around.
        @param insertBefore Whether new tracks should go before or after the currentPos.
    */
    TrackInsertPoint (Track& currentPos, bool insertBefore);

    /** Creates an insertion point after a given Track state. */
    TrackInsertPoint (const juce::ValueTree&);

    EditItemID parentTrackID, precedingTrackID;
};

//==============================================================================
/** An iterable list of Track[s] that live either in an Edit or as subtracks of a Track.
    @see Edit::getTrackList, Track::getSubTrackList
*/
struct TrackList  : public ValueTreeObjectList<Track>,
                    private juce::AsyncUpdater
{
    /** Creates a TrackList for a parent state. */
    TrackList (Edit&, const juce::ValueTree& parent);

    /** Destructor. */
    ~TrackList() override;

    //==============================================================================
    /** Returns a Track for a given state. */
    Track* getTrackFor (const juce::ValueTree&) const;

    /** Calls the given function on all Track[s].
        Return false from the function to stop the traversal.
        @returns true if all tracks were visited, false otherwise
    */
    bool visitAllRecursive (const std::function<bool(Track&)>&) const;

    /** Calls the given function on all top-level Track[s].
        Return false from the function to stop the traversal.
        @returns true if all tracks were visited, false otherwise
    */
    void visitAllTopLevel (const std::function<bool(Track&)>&) const;

    /** Calls the given function on all Track[s], optionally recursively.
        Return false from the function to stop the traversal.
        @param recursive Whether nested tracks should be visited
        @returns true if all tracks were visited, false otherwise
    */
    void visitAllTracks (const std::function<bool(Track&)>&, bool recursive) const;

    //==============================================================================
    /** Returns true if the track is movable. I.e. not a global track. */
    static bool isMovableTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the state is for an ArrangerTrack. */
    static bool isArrangerTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the state is for a ChordTrack. */
    static bool isChordTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the state is for a MarkerTrack. */
    static bool isMarkerTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the state is for a TempoTrack. */
    static bool isTempoTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the state is for a MasterTrack. */
    static bool isMasterTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the track is fixed. I.e. a global track.
        @see TempoTrack, MarkerTrack, ChordTrack, ArrangerTrack
    */
    static bool isFixedTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the given ValueTree is for a known Track type. */
    static bool isTrack (const juce::ValueTree&) noexcept;

    /** Returns true if the given Identifier is for a known Track type. */
    static bool isTrack (const juce::Identifier&) noexcept;

    /** Returns true if the track has any sub tracks.
        @see FolderTrack, AutomationTrack
    */
    static bool hasAnySubTracks (const juce::ValueTree&);

    //==============================================================================
    /** Sorts a list of tracks by their type, placing global tracks at the top. */
    static void sortTracksByType (juce::ValueTree& editState, juce::UndoManager*);

    //==============================================================================
    /** @internal */
    bool isSuitableType (const juce::ValueTree&) const override;
    /** @internal */
    Track* createNewObject (const juce::ValueTree&) override;
    /** @internal */
    void deleteObject (Track* t) override;
    /** @internal */
    void newObjectAdded (Track* t) override;
    /** @internal */
    void objectRemoved (Track*) override;
    /** @internal */
    void objectOrderChanged() override;

    Edit& edit;
    bool rebuilding = true;

private:
    void handleAsyncUpdate() override;

    JUCE_DECLARE_NON_COPYABLE_WITH_LEAK_DETECTOR (TrackList)
};

//==============================================================================
/** Defines a time range section of a Track. */
struct TrackSection
{
    Track* track = nullptr;     /**< The Track this section refers to. */
    EditTimeRange range;        /**< The time range this section refers to. */

    /** Merges an overlapping TrackItem track/time range with this section.
        @returns true if this item was from the same track and overlapping time range
                 and could be merged. false if it didn't intersect and should be its
                 own section.
    */
    bool merge (const TrackItem& c)
    {
        // The small expansion treats near-adjacent items as overlapping.
        if (c.getTrack() == track
             && c.getEditTimeRange().overlaps (range.expanded (0.0001)))
        {
            range = range.getUnionWith (c.getEditTimeRange());
            return true;
        }

        return false;
    }

    /** Returns a set of TrackSections for the given TrackItems. */
    template <typename TrackItemArray>
    static juce::Array<TrackSection> findSections (const TrackItemArray& trackItems)
    {
        juce::Array<TrackSection> result;

        for (auto&& c : trackItems)
        {
            bool segFound = false;

            // Try to fold the item into an existing section first.
            for (auto& dstSeg : result)
            {
                if (dstSeg.merge (*c))
                {
                    segFound = true;
                    break;
                }
            }

            if (! segFound)
            {
                TrackSection cs;
                cs.range = c->getEditTimeRange();
                cs.track = c->getTrack();

                if (cs.track != nullptr)
                    result.add (cs);
            }
        }

        return result;
    }
};

//==============================================================================
/** Holds a reference to a section of automation for a given Track. */
struct TrackAutomationSection
{
    /** Constructs an empty section. */
    TrackAutomationSection() noexcept = default;

    /** Constructs a section for a given TrackItem. */
    TrackAutomationSection (TrackItem&);

    EditTimeRange position;  /** The time range of the automation section. */
    Track::Ptr src,          /** The source Track. */
               dst;          /** The destination Track. */

    /** Merges another TrackAutomationSection with this one. */
    void mergeIn (const TrackAutomationSection&);

    /** Tests whether another section overlaps with this one. */
    bool overlaps (const TrackAutomationSection&) const;

    /** Tests whether this section contains a given parameter. */
    bool containsParameter (AutomatableParameter*) const;

    /** Holds a parameter and curve section. */
    struct ActiveParameters
    {
        AutomatableParameter::Ptr param;  /**< The parameter. */
        AutomationCurve curve;            /**< The curve section of this parameter. */
    };

    juce::Array<ActiveParameters> activeParameters;  /**< A list of parameters and their curves. */
};

/** Moves a set of automation optionally applying an offset and copying the automation
    (rather than moving it).
*/
void moveAutomation (const juce::Array<TrackAutomationSection>&, double offset, bool copy);

//==============================================================================
/** Returns the index of the next item after the given time. */
template <typename ArrayType>
int findIndexOfNextItemAt (const ArrayType& items, double time)
{
    // Scans backwards; items are assumed ordered by start time.
    for (int i = items.size(); --i >= 0;)
    {
        auto pos = items.getUnchecked(i)->getPosition().time;

        if (pos.getStart() < time)
        {
            if (pos.getEnd() > time)
                return i;

            return i + 1;
        }
    }

    return 0;
}

/** Returns the time range that covers all the given TrackItems. */
template <typename ArrayType>
EditTimeRange findUnionOfEditTimeRanges (const ArrayType& items)
{
    EditTimeRange total;
    bool first = true;

    for (auto& item : items)
    {
        auto time = item->getEditTimeRange();

        // Seed with the first range so an empty default range is never unioned in.
        if (first)
        {
            first = false;
            total = time;
        }
        else
        {
            total = total.getUnionWith (time);
        }
    }

    return total;
}

} // namespace tracktion_engine
3,718
1,772
import re
import hashlib
import logging

from defusedxml.ElementTree import parse

from dojo.models import Endpoint, Finding

logger = logging.getLogger(__name__)


class WapitiParser(object):
    """The web-application vulnerability scanner

    see: https://wapiti.sourceforge.io/
    """

    def get_scan_types(self):
        """Internal scan-type identifiers handled by this parser."""
        return ["Wapiti Scan"]

    def get_label_for_scan_types(self, scan_type):
        """Human-readable label shown for this scan type."""
        return "Wapiti Scan"

    def get_description_for_scan_types(self, scan_type):
        """Short description of the accepted report format."""
        return "Import XML report"

    def get_findings(self, file, test):
        """Parse a Wapiti XML report into a list of deduplicated Findings.

        Findings that share description, title and severity are merged:
        endpoints and request/response pairs are combined and the
        occurrence counter is incremented.

        :param file: file-like object containing the XML report
        :param test: the Test the findings belong to (parser API; unused here)
        :raises ValueError: if the XML root does not look like a Wapiti report
        """
        tree = parse(file)
        # get root of tree.
        root = tree.getroot()
        # check if it is a Wapiti report: the root tag contains "report".
        if 'report' not in root.tag:
            raise ValueError("This doesn't seem to be a valid Wapiti XML file.")

        # Wapiti numeric levels mapped to DefectDojo severities.
        severity_mapping = {
            '4': 'Critical',
            '3': 'High',
            '2': 'Medium',
            '1': 'Low',
            '0': 'Info',
        }

        url = root.findtext('report_infos/info[@name="target"]')

        dupes = dict()
        for vulnerability in root.findall('vulnerabilities/vulnerability'):
            category = vulnerability.attrib['name']
            description = vulnerability.findtext('description')
            mitigation = vulnerability.findtext('solution')

            # manage references: the first CWE reference also sets the CWE id.
            cwe = None
            references = []
            for reference in vulnerability.findall('references/reference'):
                reference_title = reference.findtext('title')
                if not reference_title:
                    # <title> may be absent; findtext() then returns None,
                    # which would crash startswith() below.
                    continue
                if reference_title.startswith("CWE"):
                    cwe = self.get_cwe(reference_title)
                references.append(f"* [{reference_title}]({reference.findtext('url')})")
            references = "\n".join(references)

            for entry in vulnerability.findall('entries/entry'):
                # <info> may be absent; fall back to an empty suffix instead
                # of raising TypeError on string concatenation.
                title = category + ": " + (entry.findtext('info') or "")
                # get numerical severity; unknown/missing levels become "Info".
                severity = severity_mapping.get(entry.findtext('level'), "Info")
                finding = Finding(
                    title=title,
                    description=description,
                    severity=severity,
                    mitigation=mitigation,
                    references=references,
                    dynamic_finding=True,
                    static_finding=False,
                    nb_occurences=1,
                )
                if cwe:
                    finding.cwe = cwe
                finding.unsaved_endpoints = [Endpoint.from_uri(url)]
                finding.unsaved_req_resp = [{"req": entry.findtext('http_request'), "resp": ""}]

                # make dupe hash key. str() on description guards against a
                # missing (None) <description>, which would otherwise raise a
                # TypeError; for present descriptions the key is unchanged.
                dupe_key = hashlib.sha256(
                    (str(description) + title + severity).encode('utf-8')
                ).hexdigest()
                # check if dupes are present.
                if dupe_key in dupes:
                    find = dupes[dupe_key]
                    find.unsaved_endpoints.extend(finding.unsaved_endpoints)
                    find.unsaved_req_resp.extend(finding.unsaved_req_resp)
                    find.nb_occurences += finding.nb_occurences
                else:
                    dupes[dupe_key] = finding
        return list(dupes.values())

    @staticmethod
    def get_cwe(val):
        """Return the first CWE number found in val, or None."""
        # Match only the first CWE!
        cweSearch = re.search(r"CWE-(\d+)", val, re.IGNORECASE)
        if cweSearch:
            return int(cweSearch.group(1))
        return None
1,804
23,220
package com.alibaba.otter.canal.common.zookeeper.running;

import java.io.Serializable;

import org.apache.commons.lang.builder.ToStringBuilder;

import com.alibaba.otter.canal.common.utils.CanalToStringStyle;

/**
 * Running-state information for a canal server node (published to ZooKeeper).
 *
 * @author jianghang 2012-11-22 下午03:11:30
 * @version 1.0.0
 */
public class ServerRunningData implements Serializable {

    private static final long serialVersionUID = 92260481691855281L;

    // Legacy canal id; kept only for serialized wire compatibility.
    @Deprecated
    private Long              cid;
    // host:port address of the server currently holding the running state.
    private String            address;
    // Whether this server is actively running; defaults to true.
    private boolean           active           = true;

    public ServerRunningData(){
    }

    public ServerRunningData(String address){
        this.address = address;
    }

    public Long getCid() {
        return cid;
    }

    public void setCid(Long cid) {
        this.cid = cid;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public boolean isActive() {
        return active;
    }

    public void setActive(boolean active) {
        this.active = active;
    }

    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this, CanalToStringStyle.DEFAULT_STYLE);
    }
}
518
1,350
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.cosmos.implementation.query.metrics;

import com.azure.cosmos.implementation.apachecommons.lang.time.StopWatch;

import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * Accumulator that acts as a builder of FetchExecutionRanges.
 *
 * Timestamps are derived from a single construction-time Instant plus a
 * continuously running stopwatch delta, so all ranges share one time base.
 * Not thread-safe: callers are expected to serialize access externally
 * (TODO confirm against call sites).
 */
public class FetchExecutionRangeAccumulator {
    private final String partitionKeyRangeId;
    private final Instant constructionTime;
    private final StopWatch stopwatch;
    private List<FetchExecutionRange> fetchExecutionRanges;
    private Instant startTime;
    private Instant endTime;
    private boolean isFetching;

    public FetchExecutionRangeAccumulator(String partitionKeyRangeId) {
        this.partitionKeyRangeId = partitionKeyRangeId;
        this.constructionTime = Instant.now();
        // This stopwatch is always running and is only used to calculate deltas that are synchronized with the construction time.
        this.stopwatch = new StopWatch();
        stopwatch.start();
        this.fetchExecutionRanges = new ArrayList<FetchExecutionRange>();
    }

    /**
     * Gets the FetchExecutionRanges accumulated so far and resets the list.
     *
     * NOTE(review): only the list is reset; {@code isFetching} is left as-is,
     * so an in-flight range survives the reset — confirm this is intended.
     *
     * @return the FetchExecutionRanges recorded since the last call.
     */
    public List<FetchExecutionRange> getExecutionRanges() {
        List<FetchExecutionRange> returnValue = this.fetchExecutionRanges;
        this.fetchExecutionRanges = new ArrayList<>();
        return returnValue;
    }

    /**
     * Updates the most recent start time internally.
     * No-op if a fetch is already in progress.
     */
    public void beginFetchRange() {
        if (!this.isFetching) {
            // Calculating the start time as the construction time and the stopwatch as a delta.
            this.startTime = this.constructionTime.plus(Duration.ofMillis(this.stopwatch.getTime(TimeUnit.MILLISECONDS)));
            this.isFetching = true;
        }
    }

    /**
     * Updates the most recent end time internally and constructs a new FetchExecutionRange.
     * No-op if no fetch is currently in progress.
     *
     * @param activityId The activity id associated with this fetch.
     * @param numberOfDocuments The number of documents that were fetched for this range.
     * @param retryCount The number of times we retried for this fetch execution range.
     */
    public void endFetchRange(String activityId, long numberOfDocuments, long retryCount) {
        if (this.isFetching) {
            // Calculating the end time as the construction time and the stopwatch as a delta.
            this.endTime = this.constructionTime.plus(Duration.ofMillis(this.stopwatch.getTime(TimeUnit.MILLISECONDS)));
            FetchExecutionRange fetchExecutionRange = new FetchExecutionRange(
                    activityId,
                    this.startTime,
                    this.endTime,
                    this.partitionKeyRangeId,
                    numberOfDocuments,
                    retryCount);
            this.fetchExecutionRanges.add(fetchExecutionRange);
            this.isFetching = false;
        }
    }
}
1,153
956
<filename>src/dpdk/drivers/common/dpaax/caamflib/desc/algo.h /* SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0) * * Copyright 2008-2016 Freescale Semiconductor Inc. * Copyright 2016,2019-2021 NXP * */ #ifndef __DESC_ALGO_H__ #define __DESC_ALGO_H__ #include "rta.h" #include "common.h" /** * DOC: Algorithms - Shared Descriptor Constructors * * Shared descriptors for algorithms (i.e. not for protocols). */ /** * cnstr_shdsc_zuce - ZUC Enc (EEA2) as a shared descriptor * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @cipherdata: pointer to block cipher transform definitions * @dir: Cipher direction (DIR_ENC/DIR_DEC) * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_zuce(uint32_t *descbuf, bool ps, bool swap, struct alginfo *cipherdata, uint8_t dir) { struct program prg; struct program *p = &prg; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); KEY(p, KEY1, cipherdata->key_enc_flags, cipherdata->key, cipherdata->keylen, INLINE_KEY(cipherdata)); SEQLOAD(p, CONTEXT1, 0, 16, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQOUTSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_ZUCE, OP_ALG_AAI_F8, OP_ALG_AS_INITFINAL, 0, dir); SEQFIFOLOAD(p, MSG1, 0, VLF | LAST1); SEQFIFOSTORE(p, MSG, 0, 0, VLF); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_zuca - ZUC Auth (EIA2) as a shared descriptor * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @authdata: pointer to authentication transform definitions * @chk_icv: Whether to compare and verify ICV (true/false) * @authlen: size of digest * * The IV prepended 
before hmac payload must be 8 bytes consisting * of COUNT||BEAERER||DIR. The COUNT is of 32-bits, bearer is of 5 bits and * direction is of 1 bit - totalling to 38 bits. * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_zuca(uint32_t *descbuf, bool ps, bool swap, struct alginfo *authdata, uint8_t chk_icv, uint32_t authlen) { struct program prg; struct program *p = &prg; int dir = chk_icv ? DIR_DEC : DIR_ENC; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); KEY(p, KEY2, authdata->key_enc_flags, authdata->key, authdata->keylen, INLINE_KEY(authdata)); SEQLOAD(p, CONTEXT2, 0, 8, 0); if (chk_icv == ICV_CHECK_ENABLE) MATHB(p, SEQINSZ, SUB, authlen, VSEQINSZ, 4, IMMED2); else MATHB(p, SEQINSZ, SUB, ZERO, VSEQINSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_ZUCA, OP_ALG_AAI_F9, OP_ALG_AS_INITFINAL, chk_icv, dir); SEQFIFOLOAD(p, MSG2, 0, VLF | CLASS2 | LAST2); if (chk_icv == ICV_CHECK_ENABLE) SEQFIFOLOAD(p, ICV2, authlen, LAST2); else /* Save lower half of MAC out into a 32-bit sequence */ SEQSTORE(p, CONTEXT2, 0, authlen, 0); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_snow_f8 - SNOW/f8 (UEA2) as a shared descriptor * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @cipherdata: pointer to block cipher transform definitions * @dir: Cipher direction (DIR_ENC/DIR_DEC) * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_snow_f8(uint32_t *descbuf, bool ps, bool swap, struct alginfo *cipherdata, uint8_t dir) { struct program prg; struct program *p = &prg; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); KEY(p, KEY1, cipherdata->key_enc_flags, 
cipherdata->key, cipherdata->keylen, INLINE_KEY(cipherdata)); SEQLOAD(p, CONTEXT1, 0, 16, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQOUTSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_SNOW_F8, OP_ALG_AAI_F8, OP_ALG_AS_INITFINAL, 0, dir); SEQFIFOLOAD(p, MSG1, 0, VLF | LAST1); SEQFIFOSTORE(p, MSG, 0, 0, VLF); return PROGRAM_FINALIZE(p); } /** * conv_to_zuc_eia_iv - ZUCA IV 16-byte to 8-byte convert * function for 3G. * @iv: 16 bytes of original IV data. * * From the original IV, we extract 32-bits of COUNT, * 5-bits of bearer and 1-bit of direction. * Refer to CAAM refman for ZUCA IV format. Then these values are * appended as COUNT||BEARER||DIR continuously to make a 38-bit block. * This 38-bit block is copied left justified into 8-byte array used as * converted IV. * * Return: 8-bytes of IV data as understood by SEC HW */ static inline uint8_t *conv_to_zuc_eia_iv(uint8_t *iv) { uint8_t dir = (iv[14] & 0x80) ? 4 : 0; iv[12] = iv[4] | dir; iv[13] = 0; iv[14] = 0; iv[15] = 0; iv[8] = iv[0]; iv[9] = iv[1]; iv[10] = iv[2]; iv[11] = iv[3]; return (iv + 8); } /** * conv_to_snow_f9_iv - SNOW/f9 (UIA2) IV 16 byte to 12 byte convert * function for 3G. * @iv: 16 byte original IV data * * Return: 12 byte IV data as understood by SEC HW */ static inline uint8_t *conv_to_snow_f9_iv(uint8_t *iv) { uint8_t temp = (iv[8] == iv[0]) ? 
0 : 4; iv[12] = iv[4]; iv[13] = iv[5]; iv[14] = iv[6]; iv[15] = iv[7]; iv[8] = temp; iv[9] = 0x00; iv[10] = 0x00; iv[11] = 0x00; iv[4] = iv[0]; iv[5] = iv[1]; iv[6] = iv[2]; iv[7] = iv[3]; return (iv + 4); } /** * cnstr_shdsc_snow_f9 - SNOW/f9 (UIA2) as a shared descriptor * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @authdata: pointer to authentication transform definitions * @chk_icv: check or generate ICV value * @authlen: size of digest * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_snow_f9(uint32_t *descbuf, bool ps, bool swap, struct alginfo *authdata, uint8_t chk_icv, uint32_t authlen) { struct program prg; struct program *p = &prg; int dir = chk_icv ? DIR_DEC : DIR_ENC; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); KEY(p, KEY2, authdata->key_enc_flags, authdata->key, authdata->keylen, INLINE_KEY(authdata)); SEQLOAD(p, CONTEXT2, 0, 12, 0); if (chk_icv == ICV_CHECK_ENABLE) MATHB(p, SEQINSZ, SUB, authlen, VSEQINSZ, 4, IMMED2); else MATHB(p, SEQINSZ, SUB, ZERO, VSEQINSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_SNOW_F9, OP_ALG_AAI_F9, OP_ALG_AS_INITFINAL, chk_icv, dir); SEQFIFOLOAD(p, MSG2, 0, VLF | CLASS2 | LAST2); if (chk_icv == ICV_CHECK_ENABLE) SEQFIFOLOAD(p, ICV2, authlen, LAST2); else /* Save lower half of MAC out into a 32-bit sequence */ SEQSTORE(p, CONTEXT2, 0, authlen, 0); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_blkcipher - block cipher transformation * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @share: sharing type of shared descriptor * @cipherdata: pointer to block cipher transform 
definitions * Valid algorithm values one of OP_ALG_ALGSEL_* {DES, 3DES, AES} * Valid modes for: * AES: OP_ALG_AAI_* {CBC, CTR} * DES, 3DES: OP_ALG_AAI_CBC * @iv: IV data; if NULL, "ivlen" bytes from the input frame will be read as IV * @ivlen: IV length * @dir: DIR_ENC/DIR_DEC * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_blkcipher(uint32_t *descbuf, bool ps, bool swap, enum rta_share_type share, struct alginfo *cipherdata, uint32_t ivlen, uint8_t dir) { struct program prg; struct program *p = &prg; uint32_t iv_off = 0, counter; const bool need_dk = (dir == DIR_DEC) && (cipherdata->algtype == OP_ALG_ALGSEL_AES) && (cipherdata->algmode == OP_ALG_AAI_CBC); LABEL(keyjmp); LABEL(skipdk); REFERENCE(pkeyjmp); REFERENCE(pskipdk); PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, share, 1, SC); pkeyjmp = JUMP(p, keyjmp, LOCAL_JUMP, ALL_TRUE, SHRD); /* Insert Key */ KEY(p, KEY1, cipherdata->key_enc_flags, cipherdata->key, cipherdata->keylen, INLINE_KEY(cipherdata)); if (need_dk) { ALG_OPERATION(p, cipherdata->algtype, cipherdata->algmode, OP_ALG_AS_INITFINAL, ICV_CHECK_DISABLE, dir); pskipdk = JUMP(p, skipdk, LOCAL_JUMP, ALL_TRUE, 0); } SET_LABEL(p, keyjmp); if (need_dk) { ALG_OPERATION(p, OP_ALG_ALGSEL_AES, cipherdata->algmode | OP_ALG_AAI_DK, OP_ALG_AS_INITFINAL, ICV_CHECK_DISABLE, dir); SET_LABEL(p, skipdk); } else { ALG_OPERATION(p, cipherdata->algtype, cipherdata->algmode, OP_ALG_AS_INITFINAL, ICV_CHECK_DISABLE, dir); } if (cipherdata->algmode == OP_ALG_AAI_CTR) iv_off = 16; /* IV is present first before the actual message */ SEQLOAD(p, CONTEXT1, iv_off, ivlen, 0); /* If IV len is less than 16 bytes, set 'counter' as 1 */ if (cipherdata->algmode == OP_ALG_AAI_CTR && ivlen < 16) { counter = 1; if (!swap) counter = swab32(1); LOAD(p, counter, CONTEXT1, (iv_off + ivlen), 16 - ivlen, IMMED); } MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); MATHB(p, 
SEQINSZ, SUB, MATH2, VSEQOUTSZ, 4, 0); /* Insert sequence load/store with VLF */ SEQFIFOLOAD(p, MSG1, 0, VLF | LAST1); SEQFIFOSTORE(p, MSG, 0, 0, VLF); PATCH_JUMP(p, pkeyjmp, keyjmp); if (need_dk) PATCH_JUMP(p, pskipdk, skipdk); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_hmac - HMAC shared * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @share: sharing type of shared descriptor * @authdata: pointer to authentication transform definitions; * message digest algorithm: OP_ALG_ALGSEL_MD5/ SHA1-512. * @do_icv: 0 if ICV checking is not desired, any other value if ICV checking * is needed for all the packets processed by this shared descriptor * @trunc_len: Length of the truncated ICV to be written in the output buffer, 0 * if no truncation is needed * * Note: There's no support for keys longer than the block size of the * underlying hash function, according to the selected algorithm. * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_hmac(uint32_t *descbuf, bool ps, bool swap, enum rta_share_type share, struct alginfo *authdata, uint8_t do_icv, uint8_t trunc_len) { struct program prg; struct program *p = &prg; uint8_t storelen, opicv, dir; LABEL(keyjmp); LABEL(jmpprecomp); REFERENCE(pkeyjmp); REFERENCE(pjmpprecomp); /* Compute fixed-size store based on alg selection */ switch (authdata->algtype) { case OP_ALG_ALGSEL_MD5: storelen = 16; break; case OP_ALG_ALGSEL_SHA1: storelen = 20; break; case OP_ALG_ALGSEL_SHA224: storelen = 28; break; case OP_ALG_ALGSEL_SHA256: storelen = 32; break; case OP_ALG_ALGSEL_SHA384: storelen = 48; break; case OP_ALG_ALGSEL_SHA512: storelen = 64; break; default: return -EINVAL; } trunc_len = trunc_len && (trunc_len < storelen) ? trunc_len : storelen; opicv = do_icv ? ICV_CHECK_ENABLE : ICV_CHECK_DISABLE; dir = do_icv ? 
DIR_DEC : DIR_ENC; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, share, 1, SC); pkeyjmp = JUMP(p, keyjmp, LOCAL_JUMP, ALL_TRUE, SHRD); KEY(p, KEY2, authdata->key_enc_flags, authdata->key, authdata->keylen, INLINE_KEY(authdata)); /* Do operation */ ALG_OPERATION(p, authdata->algtype, OP_ALG_AAI_HMAC, OP_ALG_AS_INITFINAL, opicv, dir); pjmpprecomp = JUMP(p, jmpprecomp, LOCAL_JUMP, ALL_TRUE, 0); SET_LABEL(p, keyjmp); ALG_OPERATION(p, authdata->algtype, OP_ALG_AAI_HMAC_PRECOMP, OP_ALG_AS_INITFINAL, opicv, dir); SET_LABEL(p, jmpprecomp); /* compute sequences */ if (opicv == ICV_CHECK_ENABLE) MATHB(p, SEQINSZ, SUB, trunc_len, VSEQINSZ, 4, IMMED2); else MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); /* Do load (variable length) */ SEQFIFOLOAD(p, MSG2, 0, VLF | LAST2); if (opicv == ICV_CHECK_ENABLE) SEQFIFOLOAD(p, ICV2, trunc_len, LAST2); else SEQSTORE(p, CONTEXT2, 0, trunc_len, 0); PATCH_JUMP(p, pkeyjmp, keyjmp); PATCH_JUMP(p, pjmpprecomp, jmpprecomp); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_hash - HASH shared * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @share: sharing type of shared descriptor * @authdata: pointer to authentication transform definitions; * message digest algorithm: OP_ALG_ALGSEL_MD5/ SHA1-512. * @do_icv: 0 if ICV checking is not desired, any other value if ICV checking * is needed for all the packets processed by this shared descriptor * @trunc_len: Length of the truncated ICV to be written in the output buffer, 0 * if no truncation is needed * * Note: There's no support for keys longer than the block size of the * underlying hash function, according to the selected algorithm. 
* * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_hash(uint32_t *descbuf, bool ps, bool swap, enum rta_share_type share, struct alginfo *authdata, uint8_t do_icv, uint8_t trunc_len) { struct program prg; struct program *p = &prg; uint8_t storelen, opicv, dir; /* Compute fixed-size store based on alg selection */ switch (authdata->algtype) { case OP_ALG_ALGSEL_MD5: storelen = 16; break; case OP_ALG_ALGSEL_SHA1: storelen = 20; break; case OP_ALG_ALGSEL_SHA224: storelen = 28; break; case OP_ALG_ALGSEL_SHA256: storelen = 32; break; case OP_ALG_ALGSEL_SHA384: storelen = 48; break; case OP_ALG_ALGSEL_SHA512: storelen = 64; break; default: return -EINVAL; } trunc_len = trunc_len && (trunc_len < storelen) ? trunc_len : storelen; opicv = do_icv ? ICV_CHECK_ENABLE : ICV_CHECK_DISABLE; dir = do_icv ? DIR_DEC : DIR_ENC; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, share, 1, SC); /* Do operation */ /* compute sequences */ if (opicv == ICV_CHECK_ENABLE) MATHB(p, SEQINSZ, SUB, trunc_len, VSEQINSZ, 4, IMMED2); else MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); ALG_OPERATION(p, authdata->algtype, OP_ALG_AAI_HASH, OP_ALG_AS_INITFINAL, opicv, dir); SEQFIFOLOAD(p, MSG2, 0, VLF | LAST2); if (opicv == ICV_CHECK_ENABLE) SEQFIFOLOAD(p, ICV2, trunc_len, LAST2); else SEQSTORE(p, CONTEXT2, 0, trunc_len, 0); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_kasumi_f8 - KASUMI F8 (Confidentiality) as a shared descriptor * (ETSI "Document 1: f8 and f9 specification") * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @cipherdata: pointer to block cipher transform definitions * @dir: cipher direction (DIR_ENC/DIR_DEC) * @count: count value (32 bits) * @bearer: bearer ID (5 bits) * @direction: direction (1 bit) * * Return: 
size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_kasumi_f8(uint32_t *descbuf, bool ps, bool swap, struct alginfo *cipherdata, uint8_t dir) { struct program prg; struct program *p = &prg; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); KEY(p, KEY1, cipherdata->key_enc_flags, cipherdata->key, cipherdata->keylen, INLINE_KEY(cipherdata)); SEQLOAD(p, CONTEXT1, 0, 8, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQOUTSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_KASUMI, OP_ALG_AAI_F8, OP_ALG_AS_INITFINAL, 0, dir); SEQFIFOLOAD(p, MSG1, 0, VLF | LAST1); SEQFIFOSTORE(p, MSG, 0, 0, VLF); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_kasumi_f9 - KASUMI F9 (Integrity) as a shared descriptor * (ETSI "Document 1: f8 and f9 specification") * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @authdata: pointer to authentication transform definitions * @chk_icv: check or generate ICV value * @authlen: size of digest * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_kasumi_f9(uint32_t *descbuf, bool ps, bool swap, struct alginfo *authdata, uint8_t chk_icv, uint32_t authlen) { struct program prg; struct program *p = &prg; int dir = chk_icv ? 
DIR_DEC : DIR_ENC; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); KEY(p, KEY2, authdata->key_enc_flags, authdata->key, authdata->keylen, INLINE_KEY(authdata)); SEQLOAD(p, CONTEXT2, 0, 12, 0); if (chk_icv == ICV_CHECK_ENABLE) MATHB(p, SEQINSZ, SUB, authlen, VSEQINSZ, 4, IMMED2); else MATHB(p, SEQINSZ, SUB, ZERO, VSEQINSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_KASUMI, OP_ALG_AAI_F9, OP_ALG_AS_INITFINAL, chk_icv, dir); SEQFIFOLOAD(p, MSG2, 0, VLF | CLASS2 | LAST2); if (chk_icv == ICV_CHECK_ENABLE) SEQFIFOLOAD(p, ICV2, authlen, LAST2); else /* Save lower half of MAC out into a 32-bit sequence */ SEQSTORE(p, CONTEXT2, 0, authlen, 0); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_crc - CRC32 Accelerator (IEEE 802 CRC32 protocol mode) * @descbuf: pointer to descriptor-under-construction buffer * @swap: must be true when core endianness doesn't match SEC endianness * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_crc(uint32_t *descbuf, bool swap) { struct program prg; struct program *p = &prg; PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); SHR_HDR(p, SHR_ALWAYS, 1, 0); MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0); ALG_OPERATION(p, OP_ALG_ALGSEL_CRC, OP_ALG_AAI_802 | OP_ALG_AAI_DOC, OP_ALG_AS_FINALIZE, 0, DIR_ENC); SEQFIFOLOAD(p, MSG2, 0, VLF | LAST2); SEQSTORE(p, CONTEXT2, 0, 4, 0); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_gcm_encap - AES-GCM encap as a shared descriptor * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @share: sharing type of shared descriptor * @cipherdata: pointer to block cipher transform definitions * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with * OP_ALG_AAI_GCM. 
* @ivlen: Initialization vector length * @icvsize: integrity check value (ICV) size (truncated or full) * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_gcm_encap(uint32_t *descbuf, bool ps, bool swap, enum rta_share_type share, struct alginfo *cipherdata, uint32_t ivlen, uint32_t icvsize) { struct program prg; struct program *p = &prg; LABEL(keyjmp); LABEL(zeroassocjump2); LABEL(zeroassocjump1); LABEL(zeropayloadjump); REFERENCE(pkeyjmp); REFERENCE(pzeroassocjump2); REFERENCE(pzeroassocjump1); REFERENCE(pzeropayloadjump); PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, share, 1, SC); pkeyjmp = JUMP(p, keyjmp, LOCAL_JUMP, ALL_TRUE, SELF | SHRD); /* Insert Key */ KEY(p, KEY1, cipherdata->key_enc_flags, cipherdata->key, cipherdata->keylen, INLINE_KEY(cipherdata)); SET_LABEL(p, keyjmp); /* class 1 operation */ ALG_OPERATION(p, cipherdata->algtype, cipherdata->algmode, OP_ALG_AS_INITFINAL, ICV_CHECK_DISABLE, DIR_ENC); MATHB(p, DPOVRD, AND, 0x7fffffff, MATH3, 4, IMMED2); /* if assoclen + cryptlen is ZERO, skip to ICV write */ MATHB(p, SEQINSZ, SUB, ivlen, VSEQOUTSZ, 4, IMMED2); pzeroassocjump2 = JUMP(p, zeroassocjump2, LOCAL_JUMP, ALL_TRUE, MATH_Z); SEQFIFOLOAD(p, IV1, ivlen, FLUSH1); /* if assoclen is ZERO, skip reading the assoc data */ MATHB(p, ZERO, ADD, MATH3, VSEQINSZ, 4, 0); pzeroassocjump1 = JUMP(p, zeroassocjump1, LOCAL_JUMP, ALL_TRUE, MATH_Z); /* cryptlen = seqinlen - assoclen */ MATHB(p, SEQINSZ, SUB, MATH3, VSEQOUTSZ, 4, 0); /* if cryptlen is ZERO jump to zero-payload commands */ pzeropayloadjump = JUMP(p, zeropayloadjump, LOCAL_JUMP, ALL_TRUE, MATH_Z); /* read assoc data */ SEQFIFOLOAD(p, AAD1, 0, CLASS1 | VLF | FLUSH1); SET_LABEL(p, zeroassocjump1); MATHB(p, SEQINSZ, SUB, MATH0, VSEQINSZ, 4, 0); /* write encrypted data */ SEQFIFOSTORE(p, MSG, 0, 0, VLF); /* read payload data */ SEQFIFOLOAD(p, MSG1, 0, CLASS1 | VLF | LAST1); /* jump 
the zero-payload commands */ JUMP(p, 4, LOCAL_JUMP, ALL_TRUE, 0); /* zero-payload commands */ SET_LABEL(p, zeropayloadjump); /* read assoc data */ SEQFIFOLOAD(p, AAD1, 0, CLASS1 | VLF | LAST1); JUMP(p, 2, LOCAL_JUMP, ALL_TRUE, 0); /* There is no input data */ SET_LABEL(p, zeroassocjump2); SEQFIFOLOAD(p, IV1, ivlen, FLUSH1 | LAST1); /* write ICV */ SEQSTORE(p, CONTEXT1, 0, icvsize, 0); PATCH_JUMP(p, pkeyjmp, keyjmp); PATCH_JUMP(p, pzeroassocjump2, zeroassocjump2); PATCH_JUMP(p, pzeroassocjump1, zeroassocjump1); PATCH_JUMP(p, pzeropayloadjump, zeropayloadjump); return PROGRAM_FINALIZE(p); } /** * cnstr_shdsc_gcm_decap - AES-GCM decap as a shared descriptor * @descbuf: pointer to descriptor-under-construction buffer * @ps: if 36/40bit addressing is desired, this parameter must be true * @swap: must be true when core endianness doesn't match SEC endianness * @share: sharing type of shared descriptor * @cipherdata: pointer to block cipher transform definitions * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with * OP_ALG_AAI_GCM. 
* @icvsize: integrity check value (ICV) size (truncated or full) * * Return: size of descriptor written in words or negative number on error */ static inline int cnstr_shdsc_gcm_decap(uint32_t *descbuf, bool ps, bool swap, enum rta_share_type share, struct alginfo *cipherdata, uint32_t ivlen, uint32_t icvsize) { struct program prg; struct program *p = &prg; LABEL(keyjmp); LABEL(zeroassocjump1); LABEL(zeropayloadjump); REFERENCE(pkeyjmp); REFERENCE(pzeroassocjump1); REFERENCE(pzeropayloadjump); PROGRAM_CNTXT_INIT(p, descbuf, 0); if (swap) PROGRAM_SET_BSWAP(p); if (ps) PROGRAM_SET_36BIT_ADDR(p); SHR_HDR(p, share, 1, SC); pkeyjmp = JUMP(p, keyjmp, LOCAL_JUMP, ALL_TRUE, SELF | SHRD); /* Insert Key */ KEY(p, KEY1, cipherdata->key_enc_flags, cipherdata->key, cipherdata->keylen, INLINE_KEY(cipherdata)); SET_LABEL(p, keyjmp); /* class 1 operation */ ALG_OPERATION(p, cipherdata->algtype, cipherdata->algmode, OP_ALG_AS_INITFINAL, ICV_CHECK_ENABLE, DIR_DEC); MATHB(p, DPOVRD, AND, 0x7fffffff, MATH3, 4, IMMED2); SEQFIFOLOAD(p, IV1, ivlen, FLUSH1); /* if assoclen is ZERO, skip reading the assoc data */ MATHB(p, ZERO, ADD, MATH3, VSEQINSZ, 4, 0); pzeroassocjump1 = JUMP(p, zeroassocjump1, LOCAL_JUMP, ALL_TRUE, MATH_Z); /* read assoc data */ SEQFIFOLOAD(p, AAD1, 0, CLASS1 | VLF | FLUSH1); SET_LABEL(p, zeroassocjump1); /* cryptlen = seqoutlen - assoclen */ MATHB(p, SEQOUTSZ, SUB, MATH0, VSEQINSZ, 4, 0); /* jump to zero-payload command if cryptlen is zero */ pzeropayloadjump = JUMP(p, zeropayloadjump, LOCAL_JUMP, ALL_TRUE, MATH_Z); MATHB(p, SEQOUTSZ, SUB, MATH0, VSEQOUTSZ, 4, 0); /* store encrypted data */ SEQFIFOSTORE(p, MSG, 0, 0, VLF); /* read payload data */ SEQFIFOLOAD(p, MSG1, 0, CLASS1 | VLF | FLUSH1); /* zero-payload command */ SET_LABEL(p, zeropayloadjump); /* read ICV */ SEQFIFOLOAD(p, ICV1, icvsize, CLASS1 | LAST1); PATCH_JUMP(p, pkeyjmp, keyjmp); PATCH_JUMP(p, pzeroassocjump1, zeroassocjump1); PATCH_JUMP(p, pzeropayloadjump, zeropayloadjump); return PROGRAM_FINALIZE(p); } 
/**
 * cnstr_shdsc_aes_mac - AES_XCBC_MAC, CMAC cases
 * @descbuf: pointer to descriptor-under-construction buffer
 * @ps: if 36/40bit addressing is desired, this parameter must be true
 * @swap: must be true when core endianness doesn't match SEC endianness
 * @share: sharing type of shared descriptor
 * @authdata: pointer to authentication transform definitions;
 *            message digest algorithm: OP_ALG_ALGSEL_AES.
 * @do_icv: 0 if ICV checking is not desired, any other value if ICV checking
 *          is needed for all the packets processed by this shared descriptor
 * @trunc_len: Length of the truncated ICV to be written in the output buffer,
 *             0 if no truncation is needed
 *
 * Note: There's no support for keys longer than the block size of the
 * underlying hash function, according to the selected algorithm.
 *
 * Return: size of descriptor written in words or negative number on error
 */
static inline int
cnstr_shdsc_aes_mac(uint32_t *descbuf, bool ps, bool swap,
		    enum rta_share_type share,
		    struct alginfo *authdata, uint8_t do_icv,
		    uint8_t trunc_len)
{
	struct program prg;
	struct program *p = &prg;
	uint8_t opicv, dir;

	/* ICV verification implies decrypt direction; generation implies
	 * encrypt direction.
	 */
	opicv = do_icv ? ICV_CHECK_ENABLE : ICV_CHECK_DISABLE;
	dir = do_icv ? DIR_DEC : DIR_ENC;

	PROGRAM_CNTXT_INIT(p, descbuf, 0);
	if (swap)
		PROGRAM_SET_BSWAP(p);
	if (ps)
		PROGRAM_SET_36BIT_ADDR(p);
	SHR_HDR(p, share, 1, SC);

	KEY(p, KEY2, authdata->key_enc_flags, authdata->key, authdata->keylen,
	    INLINE_KEY(authdata));

	/* compute sequences: when verifying, exclude the trailing ICV bytes
	 * from the variable-length message input.
	 */
	if (opicv == ICV_CHECK_ENABLE)
		MATHB(p, SEQINSZ, SUB, trunc_len, VSEQINSZ, 4, IMMED2);
	else
		MATHB(p, SEQINSZ, SUB, MATH2, VSEQINSZ, 4, 0);

	/* Do operation */
	ALG_OPERATION_NP(p, authdata->algtype, authdata->algmode,
			 OP_ALG_AS_INITFINAL, opicv, dir);

	/* Do load (variable length) */
	SEQFIFOLOAD(p, MSG2, 0, VLF | LAST2);

	if (opicv == ICV_CHECK_ENABLE) {
		/* Verify: load the expected (possibly truncated) ICV. */
		LOAD(p, trunc_len, ICV2SZ, 0, 4, IMMED);
		SEQFIFOLOAD(p, ICV2, trunc_len, LAST2);
	} else
		/* Generate: store the computed MAC from context. */
		SEQSTORE(p, CONTEXT2, 0, trunc_len, 0);

	return PROGRAM_FINALIZE(p);
}

#endif /* __DESC_ALGO_H__ */
11,709
361
from lark import Token


def find_name_token_among_children(tree):
    """Return the first direct child of ``tree`` that is a NAME token.

    Only immediate children are inspected (no recursion). Returns ``None``
    when no such token exists.
    """
    name_tokens = (
        child
        for child in tree.children
        if isinstance(child, Token) and child.type == "NAME"
    )
    return next(name_tokens, None)


def is_function_public(function_name: str) -> bool:
    """A function is public unless it follows the leading-underscore convention."""
    first_char = function_name[:1]
    return first_char != "_"
110
4,538
/*
 * Copyright (C) 2015-2017 Alibaba Group Holding Limited
 */

#ifndef AOS_DEBUG_H
#define AOS_DEBUG_H

#ifdef __cplusplus
extern "C" {
#endif

/* Short form of the current source file name used in assertion output. */
#define SHORT_FILE __FILENAME__

/* Default assertion reporter is a no-op; a platform may redefine it to
 * actually print (error code, expression, message, file, line, function). */
#define debug_print_assert(A,B,C,D,E,F)

/* Branch-prediction hint fallback; the parameter name typo (EXPRESSSION) is
 * kept as-is since it is purely internal to the macro. */
#if (!defined(unlikely))
#define unlikely(EXPRESSSION) !!(EXPRESSSION)
#endif

/*
 * Check that an expression is true (non-zero).
 * If the expression evaluates to false, this prints debugging information
 * (actual expression string, file, line number, function name, etc.) using
 * the default debugging output method.
 *
 * NOTE(review): unlike check_string() below, this variant does not invoke
 * AOS_ASSERTION_FAIL_ACTION() - confirm whether that asymmetry is intended.
 *
 * @param[in]   X   expression to be evaluated.
 */
#if (!defined(check))
#define check(X)                                                                                    \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            debug_print_assert(0, #X, NULL, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Check that an expression is true (non-zero), with an explanation.
 * If the expression evaluates to false, this prints debugging information
 * (actual expression string, file, line number, function name, etc.) using
 * the default debugging output method, then runs the platform assertion
 * failure action.
 *
 * @param[in]   X     expression to be evaluated.
 * @param[in]   STR   custom string to print when the expression is false.
 */
#if (!defined(check_string))
#define check_string(X, STR)                                                                        \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            debug_print_assert(0, #X, STR, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);              \
            AOS_ASSERTION_FAIL_ACTION();                                                            \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Requires that an expression evaluate to true.
 * If the expression evaluates to false, this prints debugging information
 * (actual expression string, file, line number, function name, etc.) using
 * the default debugging output method, then jumps to a label.
 *
 * @param[in]   X       expression to be evaluated.
 * @param[in]   LABEL   label jumped to when the expression is false.
 */
#if (!defined(require))
#define require(X, LABEL)                                                                           \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            debug_print_assert( 0, #X, NULL, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__ );           \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Requires that an expression evaluate to true, with an explanation.
 * Same as require(), but also prints a custom explanation string before
 * jumping to the label.
 *
 * @param[in]   X       expression to be evaluated.
 * @param[in]   LABEL   label jumped to when the expression is false.
 * @param[in]   STR     custom explanation string to print.
 */
#if (!defined(require_string))
#define require_string(X, LABEL, STR)                                                               \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            debug_print_assert(0, #X, STR, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);              \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Requires that an expression evaluate to true.
 * If the expression evaluates to false, this jumps to a label. No debugging
 * information is printed.
 *
 * @param[in]   X       expression to be evaluated.
 * @param[in]   LABEL   label jumped to when the expression is false.
 */
#if (!defined(require_quiet))
#define require_quiet(X, LABEL)                                                                     \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Require that an error code is noErr (0).
 * If the error code is non-0, this prints debugging information (file, line
 * number, function name, etc.) using the default debugging output method,
 * then jumps to a label.
 *
 * @param[in]   ERR     error to be evaluated (evaluated exactly once).
 * @param[in]   LABEL   label jumped to when the error code is non-0.
 */
#if (!defined(require_noerr))
#define require_noerr(ERR, LABEL)                                                                   \
    do {                                                                                            \
        int localErr;                                                                               \
                                                                                                    \
        localErr = (int)(ERR);                                                                      \
        if (unlikely(localErr != 0)) {                                                              \
            debug_print_assert(localErr, NULL, NULL, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);    \
            goto LABEL;                                                                             \
        }                                                                                           \
                                                                                                    \
    } while(1 == 0)
#endif

/*
 * Require that an error code is noErr (0), with an explanation.
 * Same as require_noerr(), but also prints a custom explanation string
 * before jumping to the label.
 *
 * @param[in]   ERR     error to be evaluated (evaluated exactly once).
 * @param[in]   LABEL   label jumped to when the error code is non-0.
 * @param[in]   STR     custom explanation string to print.
 */
#if (!defined(require_noerr_string))
#define require_noerr_string(ERR, LABEL, STR)                                                       \
    do {                                                                                            \
        int localErr;                                                                               \
                                                                                                    \
        localErr = (int)(ERR);                                                                      \
        if (unlikely(localErr != 0)) {                                                              \
            debug_print_assert(localErr, NULL, STR, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);     \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Require that an error code is noErr (0), with an explanation and an action
 * to execute otherwise.
 * If the error code is non-0, this prints debugging information and the
 * custom explanation string, executes ACTION, then jumps to the label.
 *
 * @param[in]   ERR      error to be evaluated (evaluated exactly once).
 * @param[in]   LABEL    label jumped to when the error code is non-0.
 * @param[in]   ACTION   custom code to execute when the error code is non-0.
 * @param[in]   STR      custom explanation string to print.
 */
#if (!defined(require_noerr_action_string))
#define require_noerr_action_string(ERR, LABEL, ACTION, STR)                                        \
    do {                                                                                            \
        int localErr;                                                                               \
                                                                                                    \
        localErr = (int)(ERR);                                                                      \
        if (unlikely(localErr != 0)) {                                                              \
            debug_print_assert(localErr, NULL, STR, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);     \
            { ACTION; }                                                                             \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Require that an error code is noErr (0).
 * If the error code is non-0, this jumps to a label. No debugging
 * information is printed.
 *
 * @param[in]   ERR     error to be evaluated.
 * @param[in]   LABEL   label jumped to when the error code is non-0.
 */
#if (!defined(require_noerr_quiet))
#define require_noerr_quiet(ERR, LABEL)                                                             \
    do {                                                                                            \
        if (unlikely((ERR) != 0)) {                                                                 \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Require that an error code is noErr (0), with an action to execute
 * otherwise.
 * If the error code is non-0, this prints debugging information, executes
 * ACTION, then jumps to the label.
 *
 * @param[in]   ERR      error to be evaluated (evaluated exactly once).
 * @param[in]   LABEL    label jumped to when the error code is non-0.
 * @param[in]   ACTION   custom code to execute when the error code is non-0.
 */
#if (!defined(require_noerr_action))
#define require_noerr_action(ERR, LABEL, ACTION)                                                    \
    do {                                                                                            \
        int localErr;                                                                               \
                                                                                                    \
        localErr = (int)(ERR);                                                                      \
        if (unlikely(localErr != 0)) {                                                              \
            debug_print_assert(localErr, NULL, NULL, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);    \
            { ACTION; }                                                                             \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Require that an error code is noErr (0), with an action to execute
 * otherwise.
 * If the error code is non-0, this executes ACTION and jumps to the label.
 * No debugging information is printed.
 *
 * @param[in]   ERR      error to be evaluated.
 * @param[in]   LABEL    label jumped to when the error code is non-0.
 * @param[in]   ACTION   custom code to execute when the error code is non-0.
 */
#if (!defined(require_noerr_action_quiet))
#define require_noerr_action_quiet(ERR, LABEL, ACTION)                                              \
    do {                                                                                            \
        if (unlikely((ERR) != 0)) {                                                                 \
            { ACTION; }                                                                             \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while(1 == 0)
#endif

/*
 * Requires that an expression evaluate to true, with an action to execute
 * otherwise.
 * If the expression evaluates to false, this prints debugging information,
 * executes ACTION, then jumps to the label.
 *
 * @param[in]   X        expression to be evaluated.
 * @param[in]   LABEL    label jumped to when the expression is false.
 * @param[in]   ACTION   custom code to execute when the expression is false.
 */
#if (!defined(require_action))
#define require_action(X, LABEL, ACTION)                                                            \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            debug_print_assert(0, #X, NULL, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);             \
            { ACTION; }                                                                             \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while (1 == 0)
#endif

/*
 * Requires that an expression evaluate to true, with an explanation and an
 * action to execute otherwise.
 * If the expression evaluates to false, this prints debugging information
 * and the custom explanation string, executes ACTION, then jumps to the
 * label.
 *
 * @param[in]   X        expression to be evaluated.
 * @param[in]   LABEL    label jumped to when the expression is false.
 * @param[in]   ACTION   custom code to execute when the expression is false.
 * @param[in]   STR      custom string to print when the expression is false.
 */
#if (!defined(require_action_string))
#define require_action_string(X, LABEL, ACTION, STR)                                                \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            debug_print_assert(0, #X, STR, SHORT_FILE, __LINE__, __PRETTY_FUNCTION__);              \
            { ACTION; }                                                                             \
            goto LABEL;                                                                             \
        }                                                                                           \
    } while (1 == 0)
#endif

/*
 * Requires that an expression evaluate to true, with an action to execute
 * otherwise.
 * If the expression evaluates to false, this executes ACTION and jumps to
 * the label. No debugging information is printed.
 *
 * @param[in]   X        expression to be evaluated.
 * @param[in]   LABEL    label jumped to when the expression is false.
 * @param[in]   ACTION   custom code to execute when the expression is false.
 */
#if (!defined(require_action_quiet))
#define require_action_quiet(X, LABEL, ACTION)                                                      \
    do {                                                                                            \
        if (unlikely(!(X))) {                                                                       \
            { ACTION; }                                                                             \
            goto LABEL;                                                                             \
        }                                                                                           \
                                                                                                    \
    } while(1 == 0)
#endif

#ifdef __cplusplus
}
#endif

#endif /* AOS_DEBUG_H */
9,452
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Carcheto-Brustico","circ":"2ème circonscription","dpt":"Haute-Corse","inscrits":53,"abs":24,"votants":29,"blancs":0,"nuls":0,"exp":29,"res":[{"nuance":"REG","nom":"<NAME>","voix":19},{"nuance":"REM","nom":"<NAME>","voix":10}]}
114
392
// ------------------------------------------------------------------------
//  Copyright (C)
//  Universitat Politecnica de Catalunya BarcelonaTech (UPC) - Spain
//  University of California Berkeley (UCB) - USA
//
//  <NAME> <<EMAIL>>
//  <NAME> <<EMAIL>>
//  June 2014
// ------------------------------------------------------------------------
// This file is part of the MCG package presented in:
//    <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
//    "Multiscale Combinatorial Grouping,"
//    Computer Vision and Pattern Recognition (CVPR) 2014.
// Please consider citing the paper if you use this code.
// ------------------------------------------------------------------------
#include "mex.h"
#include <set>
#include "matlab_multiarray.hpp"

// MEX entry point: per-mask overlap statistics against a labeled ground truth.
//
// Inputs (prhs):
//   0 - masks:        sx x sy x n_masks logical array of candidate masks
//   1 - ground_truth: sx x sy uint8 label image (0 = background, 255 = ignore)
//   2 - valid_pixels: sx x sy logical array; only true pixels are counted
//
// Outputs (plhs):
//   0 - out_areas: 1 x n_masks       - valid-pixel area of each mask
//   1 - out_int:   n_objs x n_masks  - per-object intersection counts
//   2 - out_fn:    n_objs x n_masks  - per-object false-negative counts
void mexFunction( int nlhs, mxArray *plhs[],
                  int nrhs, const mxArray*prhs[] )
{
    /* Check for proper number of arguments */
    // NOTE(review): error text says "Too many output arguments" for nlhs > 4,
    // but only 3 outputs are ever allocated below - confirm whether the
    // limit should be 3.
    if (nrhs != 3) {
        mexErrMsgTxt("Three input arguments required.");
    } else if (nlhs > 4) {
        mexErrMsgTxt("Too many output arguments.");
    }

    /* Input as a Multiarray (zero-copy views over the MATLAB arrays) */
    ConstMatlabMultiArray3<bool> masks(prhs[0]);           /* Object masks */
    ConstMatlabMultiArray<unsigned char> ground_truth(prhs[1]); /* Object ground truth */
    ConstMatlabMultiArray<bool> valid_pixels(prhs[2]);     /* Pixels to take into account */

    /* Input sizes and checks */
    size_t sx      = masks.shape()[0];
    size_t sy      = masks.shape()[1];
    size_t n_masks = masks.shape()[2];

    /* Sweep ground truth to get the number of objects.
     * Labels 0 (background) and 255 (ignore) are excluded. */
    std::set<unsigned char> obj_ids;
    for(std::size_t ii=0; ii<sx; ++ii)
        for(std::size_t jj=0; jj<sy; ++jj)
            obj_ids.insert(ground_truth[ii][jj]);
    obj_ids.erase(0);
    obj_ids.erase(255);
    std::size_t n_objs = obj_ids.size();
    // mexPrintf("%d\n", obj_ids.size());

    /* Allocate results (zero-initialized by mxCreateDoubleMatrix) */
    plhs[0] = mxCreateDoubleMatrix(1,n_masks,mxREAL);
    MatlabMultiArray<double> out_areas(plhs[0]);
    plhs[1] = mxCreateDoubleMatrix(n_objs,n_masks,mxREAL);
    MatlabMultiArray<double> out_int(plhs[1]);
    plhs[2] = mxCreateDoubleMatrix(n_objs,n_masks,mxREAL);
    MatlabMultiArray<double> out_fn(plhs[2]);

    /* Accumulate per-mask statistics over all valid pixels.
     * NOTE(review): rows are indexed as ground_truth - 1, which assumes
     * object labels are contiguous 1..n_objs; a valid pixel labeled 255 or a
     * non-contiguous label set would index out of bounds - verify against
     * the callers' label conventions. */
    for (std::size_t ii=0; ii<sx; ++ii)
    {
        for (std::size_t jj=0; jj<sy; ++jj)
        {
            if (valid_pixels[ii][jj]) // Consider only valid pixels
            {
                for(std::size_t kk=0; kk<n_masks; ++kk)
                {
                    if (masks[ii][jj][kk])
                    {
                        /* Pixel inside the mask: count area, and intersection
                         * when it also belongs to an object */
                        out_areas[0][kk]++;
                        if (ground_truth[ii][jj]>0)
                            out_int[ground_truth[ii][jj]-1][kk]++;
                    }
                    else if (ground_truth[ii][jj]>0)
                        /* Object pixel missed by the mask: false negative */
                        out_fn[ground_truth[ii][jj]-1][kk]++;
                }
            }
        }
    }
}
1,343
335
<gh_stars>100-1000 { "word": "Vocalist", "definitions": [ "A singer, typically one who regularly performs with a jazz or pop group." ], "parts-of-speech": "Noun" }
77
656
from selenium import webdriver
import time

# Shared browser session used by every helper below.
browser = webdriver.Chrome()

# Log in to Weibo.
# NOTE(review): the find_element_by_* APIs used throughout were removed in
# Selenium 4 - confirm the pinned selenium version before running.
def weibo_login(username, password):
    # Open the Weibo login page
    browser.get('https://passport.weibo.cn/signin/login')
    browser.implicitly_wait(5)
    time.sleep(1)
    # Fill in the credentials: username and password
    browser.find_element_by_id("loginName").send_keys(username)
    browser.find_element_by_id("loginPassword").send_keys(password)
    time.sleep(1)
    # Click the login button
    browser.find_element_by_id("loginAction").click()
    time.sleep(1)

# Set the username and password (placeholders - fill in real credentials)
username = 'XXXX'
password = "<PASSWORD>"
weibo_login(username, password)

# Follow the user with the given uid.
def add_follow(uid):
    browser.get('https://m.weibo.com/u/' + str(uid))
    time.sleep(1)
    # browser.find_element_by_id("follow").click()
    follow_button = browser.find_element_by_xpath('//div[@class="m-add-box m-followBtn"]')
    follow_button.click()
    time.sleep(1)
    # Pick the follow group in the popup
    group_button = browser.find_element_by_xpath('//div[@class="m-btn m-btn-white m-btn-text-black"]')
    group_button.click()
    time.sleep(1)

# UID of the "daily psychology" account
uid = '1890826225'
add_follow(uid)

# Post a comment on the given Weibo post.
# NOTE(review): defined but never called below - presumably kept for manual use.
def add_comment(weibo_url, content):
    browser.get(weibo_url)
    browser.implicitly_wait(5)
    # .clear()/.send_keys() return None, so these assignments are only
    # side-effecting calls
    content_textarea = browser.find_element_by_css_selector("textarea.W_input").clear()
    content_textarea = browser.find_element_by_css_selector("textarea.W_input").send_keys(content)
    time.sleep(2)
    comment_button = browser.find_element_by_css_selector(".W_btn_a").click()
    time.sleep(1)

# Publish a text-only Weibo post.
def post_weibo(content):
    # Go to the user's home page
    browser.get('https://weibo.com')
    browser.implicitly_wait(5)
    # Click the compose button in the top-right corner
    post_button = browser.find_element_by_css_selector("[node-type='publish']").click()
    # Type the content into the popup text box
    content_textarea = browser.find_element_by_css_selector("textarea.W_input").send_keys(content)
    time.sleep(2)
    # Click the submit button
    post_button = browser.find_element_by_css_selector("[node-type='submit']").click()
    time.sleep(1)

# Target post and comment text (unused: content is reassigned before posting)
weibo_url = 'https://weibo.com/1890826225/HjjqSahwl'
content = 'Gook Luck! 好运已上路!'

# Automatically publish a post
content = '每天学点心理学'
post_weibo(content)
1,047
1,085
/* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.sabot.exec; import java.util.List; import java.util.Optional; import com.dremio.exec.proto.CoordExecRPC.FragmentStatus; import com.dremio.sabot.exec.fragment.FragmentExecutor; import com.google.common.collect.Lists; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.protobuf.Empty; /** * Periodically gather current statistics. * * We use a thread that runs periodically to collect current statistics about RUNNING queries, * such as current memory consumption, number of rows processed, and so on. 
*/ public class FragmentStatusThread extends Thread implements AutoCloseable { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FragmentStatusThread.class); private final static int STATUS_PERIOD_SECONDS = 5; private final Iterable<FragmentExecutor> executors; private final QueriesClerk clerk; private final MaestroProxy maestroProxy; public FragmentStatusThread(Iterable<FragmentExecutor> executors, QueriesClerk clerk, MaestroProxy maestroProxy) { super(); setDaemon(true); setName("fragment-status-reporter"); this.executors = executors; this.clerk = clerk; this.maestroProxy = maestroProxy; } @Override public void run() { while (true) { final List<ListenableFuture<Empty>> futures = Lists.newArrayList(); try { refreshFragmentStatuses(); sendQueryProfiles(futures); } catch (Exception e) { // Exception ignored. Status sender thread should not die due to a random exception } // we'll wait to complete so we don't back up if the cluster is moving slowly. try { Futures.successfulAsList(futures).get(); } catch (final Exception ex) { logger.info("Failure while sending intermediate fragment status to AttemptManager", ex); } try { Thread.sleep(STATUS_PERIOD_SECONDS * 1000); } catch (final InterruptedException e) { logger.debug("Status thread exiting."); break; } } } /** * Refresh the status/metrics for all running fragments. 
*/ private void refreshFragmentStatuses() { for (final FragmentExecutor fragmentExecutor : executors) { final FragmentStatus status = fragmentExecutor.getStatus(); if (status == null) { continue; } maestroProxy.refreshFragmentStatus(status); } } /** * Send the profiles for all queries currently running on this executor, to the * coordinator that initiated the respective query */ private void sendQueryProfiles(List<ListenableFuture<Empty>> futures) { for (final WorkloadTicket workloadTicket : clerk.getWorkloadTickets()) { for (final QueryTicket queryTicket : workloadTicket.getActiveQueryTickets()) { Optional<ListenableFuture<Empty>> future = maestroProxy.sendQueryProfile(queryTicket.getQueryId()); future.ifPresent((x) -> futures.add(x)); } } } @Override public void close() { this.interrupt(); } }
1,214
550
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.forscience.whistlepunk.devicemanager; import android.app.Activity; import android.bluetooth.BluetoothAdapter; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.ActivityInfo; import android.os.Bundle; import androidx.annotation.NonNull; import androidx.fragment.app.Fragment; import androidx.fragment.app.FragmentManager; import androidx.fragment.app.FragmentTransaction; import androidx.appcompat.app.AppCompatActivity; import android.view.Menu; import com.google.android.apps.forscience.javalib.Success; import com.google.android.apps.forscience.whistlepunk.AppSingleton; import com.google.android.apps.forscience.whistlepunk.DataController; import com.google.android.apps.forscience.whistlepunk.LoggingConsumer; import com.google.android.apps.forscience.whistlepunk.R; import com.google.android.apps.forscience.whistlepunk.WhistlePunkApplication; import com.google.android.apps.forscience.whistlepunk.accounts.AppAccount; import com.google.android.apps.forscience.whistlepunk.analytics.TrackerConstants; import com.google.android.apps.forscience.whistlepunk.filemetadata.Experiment; /** Activity for managing devices. 
*/
public class ManageDevicesActivity extends AppCompatActivity
    implements DeviceOptionsListener {

  private static final String TAG = "ManageDevices";

  /** String extra which stores the account key that launched this activity. */
  public static final String EXTRA_ACCOUNT_KEY = "account_key";

  /** String extra which stores the experiment ID that launched this activity. */
  public static final String EXTRA_EXPERIMENT_ID = "experiment_id";

  // Registered while resumed so a Bluetooth state change refreshes the list.
  private BroadcastReceiver btReceiver;
  private DataController dataController;
  private ManageDevicesRecyclerFragment manageFragment;
  // Loaded asynchronously in onResume(); may still be null when callbacks fire.
  private Experiment currentExperiment;

  /**
   * Returns the hosting activity as a {@link DeviceOptionsListener} when it implements the
   * interface, or a no-op listener otherwise.
   */
  public static DeviceOptionsListener getOptionsListener(Activity activity) {
    if (activity instanceof DeviceOptionsListener) {
      return (DeviceOptionsListener) activity;
    } else {
      return DeviceOptionsDialog.NULL_LISTENER;
    }
  }

  /** Starts this activity for the given account and experiment. */
  public static void launch(Context context, AppAccount appAccount, String experimentId) {
    context.startActivity(launchIntent(context, appAccount, experimentId));
  }

  /**
   * Builds the launch intent.
   *
   * NOTE(review): when experimentId is null, neither extra is attached - including the account
   * key - yet onCreate() always reads the account extra. Confirm whether the account key should
   * be set unconditionally.
   */
  @NonNull
  public static Intent launchIntent(Context context, AppAccount appAccount, String experimentId) {
    Intent intent = new Intent(context, ManageDevicesActivity.class);
    if (experimentId != null) {
      intent.putExtra(EXTRA_ACCOUNT_KEY, appAccount.getAccountKey());
      intent.putExtra(EXTRA_EXPERIMENT_ID, experimentId);
    }
    return intent;
  }

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_manage_devices);
    boolean isTablet = getResources().getBoolean(R.bool.is_tablet);
    if (!isTablet) {
      // Phones are locked to portrait; tablets may rotate.
      setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    }
    AppAccount appAccount = WhistlePunkApplication.getAccount(this, getIntent(), EXTRA_ACCOUNT_KEY);
    dataController = AppSingleton.getInstance(this).getDataController(appAccount);
  }

  @Override
  protected void onStart() {
    super.onStart();
    WhistlePunkApplication.getUsageTracker(this)
        .trackScreenView(TrackerConstants.SCREEN_DEVICE_MANAGER);
  }

  @Override
  protected void onResume() {
    super.onResume();
    setupFragment();
    // Set up a broadcast receiver in case the adapter is disabled from the notification shade.
    registerBtReceiverIfNecessary();
    String experimentId = getIntent().getStringExtra(EXTRA_EXPERIMENT_ID);
    // Asynchronous load; currentExperiment stays null until success() runs.
    dataController.getExperimentById(
        experimentId,
        new LoggingConsumer<Experiment>(TAG, "load experiment with ID = " + experimentId) {
          @Override
          public void success(Experiment value) {
            currentExperiment = value;
          }
        });
  }

  @Override
  protected void onPause() {
    unregisterBtReceiverIfNecessary();
    super.onPause();
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    return true;
  }

  /** Attaches the device-list fragment, reusing an existing instance when present. */
  private void setupFragment() {
    FragmentManager fragmentManager = getSupportFragmentManager();
    Fragment fragmentById = fragmentManager.findFragmentById(R.id.fragment);
    if (fragmentById != null) {
      manageFragment = (ManageDevicesRecyclerFragment) fragmentById;
    } else {
      manageFragment = new ManageDevicesRecyclerFragment();
      Bundle args = new Bundle();
      args.putString(EXTRA_ACCOUNT_KEY, getIntent().getStringExtra(EXTRA_ACCOUNT_KEY));
      args.putString(EXTRA_EXPERIMENT_ID, getIntent().getStringExtra(EXTRA_EXPERIMENT_ID));
      manageFragment.setArguments(args);
      FragmentTransaction ft = fragmentManager.beginTransaction();
      ft.replace(R.id.fragment, manageFragment);
      ft.commitAllowingStateLoss();
    }
  }

  /** Registers a receiver that rebuilds the fragment whenever Bluetooth toggles. */
  private void registerBtReceiverIfNecessary() {
    if (btReceiver == null) {
      btReceiver =
          new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
              setupFragment();
            }
          };
      IntentFilter filter = new IntentFilter();
      filter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
      registerReceiver(btReceiver, filter);
    }
  }

  private void unregisterBtReceiverIfNecessary() {
    if (btReceiver != null) {
      unregisterReceiver(btReceiver);
      btReceiver = null;
    }
  }

  @Override
  public void onExperimentSensorReplaced(String oldSensorId, String newSensorId) {
    refreshAfterLoad();
  }

  @Override
  public void onRemoveSensorFromExperiment(String experimentId, final String sensorId) {
    // Only act when the callback targets the experiment loaded in onResume().
    if (currentExperiment != null && currentExperiment.getExperimentId().equals(experimentId)) {
      removeSensorFromExperiment(sensorId);
    }
  }

  private void removeSensorFromExperiment(String sensorId) {
    dataController.removeSensorFromExperiment(
        currentExperiment.getExperimentId(),
        sensorId,
        new LoggingConsumer<Success>(TAG, "remove sensor from experiment") {
          @Override
          public void success(Success value) {}
        });
  }

  private void refreshAfterLoad() {
    if (manageFragment != null) {
      manageFragment.refreshAfterLoad();
    }
  }
}
2,310
9,734
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

#include <gtest/gtest.h>

#include "arrow/memory_pool.h"
#include "gandiva/filter.h"
#include "gandiva/projector.h"
#include "gandiva/selection_vector.h"
#include "gandiva/tests/test_util.h"
#include "gandiva/tree_expr_builder.h"

namespace gandiva {

using arrow::boolean;
using arrow::float32;
using arrow::int32;

// Fixture for filter-then-project tests: a Filter produces a selection
// vector, and a Projector evaluates only the selected rows.
class TestFilterProject : public ::testing::Test {
 public:
  void SetUp() { pool_ = arrow::default_memory_pool(); }

 protected:
  arrow::MemoryPool* pool_;  // process-default pool used by all tests
};

// Filter rows where f0 < f1, then project f1 + f2 over the selected rows,
// using a 16-bit selection vector.
TEST_F(TestFilterProject, TestSimple16) {
  // schema for input fields
  auto field0 = field("f0", int32());
  auto field1 = field("f1", int32());
  auto field2 = field("f2", int32());
  auto resultField = field("result", int32());
  auto schema = arrow::schema({field0, field1, field2});

  // Build condition f0 < f1
  auto node_f0 = TreeExprBuilder::MakeField(field0);
  auto node_f1 = TreeExprBuilder::MakeField(field1);
  // NOTE(review): node_f2 is never used in this test (same in the 32/64-bit
  // variants below) - presumably copy-paste leftover.
  auto node_f2 = TreeExprBuilder::MakeField(field2);
  auto less_than_function =
      TreeExprBuilder::MakeFunction("less_than", {node_f0, node_f1}, arrow::boolean());
  auto condition = TreeExprBuilder::MakeCondition(less_than_function);
  auto sum_expr = TreeExprBuilder::MakeExpression("add", {field1, field2}, resultField);

  auto configuration = TestConfiguration();

  std::shared_ptr<Filter> filter;
  std::shared_ptr<Projector> projector;

  auto status = Filter::Make(schema, condition, configuration, &filter);
  EXPECT_TRUE(status.ok());

  // Projector must be built for the same selection-vector width that the
  // filter output will use (UINT16 here).
  status = Projector::Make(schema, {sum_expr}, SelectionVector::MODE_UINT16, configuration,
                           &projector);
  EXPECT_TRUE(status.ok());

  // Create a row-batch with some sample data
  int num_records = 5;
  auto array0 = MakeArrowArrayInt32({1, 2, 6, 40, 3}, {true, true, true, true, true});
  auto array1 = MakeArrowArrayInt32({5, 9, 3, 17, 6}, {true, true, true, true, true});
  auto array2 = MakeArrowArrayInt32({1, 2, 6, 40, 3}, {true, true, true, true, false});

  // expected output: rows 0, 1 and 4 pass the filter; row 4's f2 is null,
  // so the projected sum is null there
  auto result = MakeArrowArrayInt32({6, 11, 0}, {true, true, false});

  // prepare input record batch
  auto in_batch = arrow::RecordBatch::Make(schema, num_records, {array0, array1, array2});

  std::shared_ptr<SelectionVector> selection_vector;
  status = SelectionVector::MakeInt16(num_records, pool_, &selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate filter into the selection vector
  status = filter->Evaluate(*in_batch, selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate projection over the selected rows only
  arrow::ArrayVector outputs;
  status = projector->Evaluate(*in_batch, selection_vector.get(), pool_, &outputs);
  EXPECT_TRUE(status.ok());

  // Validate results
  EXPECT_ARROW_ARRAY_EQUALS(result, outputs.at(0));
}

// Same scenario as TestSimple16, with a 32-bit selection vector.
TEST_F(TestFilterProject, TestSimple32) {
  // schema for input fields
  auto field0 = field("f0", int32());
  auto field1 = field("f1", int32());
  auto field2 = field("f2", int32());
  auto resultField = field("result", int32());
  auto schema = arrow::schema({field0, field1, field2});

  // Build condition f0 < f1
  auto node_f0 = TreeExprBuilder::MakeField(field0);
  auto node_f1 = TreeExprBuilder::MakeField(field1);
  auto node_f2 = TreeExprBuilder::MakeField(field2);  // unused, see NOTE above
  auto less_than_function =
      TreeExprBuilder::MakeFunction("less_than", {node_f0, node_f1}, arrow::boolean());
  auto condition = TreeExprBuilder::MakeCondition(less_than_function);
  auto sum_expr = TreeExprBuilder::MakeExpression("add", {field1, field2}, resultField);

  auto configuration = TestConfiguration();

  std::shared_ptr<Filter> filter;
  std::shared_ptr<Projector> projector;

  auto status = Filter::Make(schema, condition, configuration, &filter);
  EXPECT_TRUE(status.ok());

  status = Projector::Make(schema, {sum_expr}, SelectionVector::MODE_UINT32, configuration,
                           &projector);
  EXPECT_TRUE(status.ok());

  // Create a row-batch with some sample data
  int num_records = 5;
  auto array0 = MakeArrowArrayInt32({1, 2, 6, 40, 3}, {true, true, true, true, true});
  auto array1 = MakeArrowArrayInt32({5, 9, 3, 17, 6}, {true, true, true, true, true});
  auto array2 = MakeArrowArrayInt32({1, 2, 6, 40, 3}, {true, true, true, true, false});

  // expected output
  auto result = MakeArrowArrayInt32({6, 11, 0}, {true, true, false});

  // prepare input record batch
  auto in_batch = arrow::RecordBatch::Make(schema, num_records, {array0, array1, array2});

  std::shared_ptr<SelectionVector> selection_vector;
  status = SelectionVector::MakeInt32(num_records, pool_, &selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate filter
  status = filter->Evaluate(*in_batch, selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate projection
  arrow::ArrayVector outputs;
  status = projector->Evaluate(*in_batch, selection_vector.get(), pool_, &outputs);
  // NOTE(review): this file mixes EXPECT_TRUE(status.ok()) and ASSERT_OK -
  // consider standardizing on ASSERT_OK, which aborts before dereferencing
  // results of a failed call.
  ASSERT_OK(status);

  // Validate results
  EXPECT_ARROW_ARRAY_EQUALS(result, outputs.at(0));
}

// Same scenario as TestSimple16, with a 64-bit selection vector.
TEST_F(TestFilterProject, TestSimple64) {
  // schema for input fields
  auto field0 = field("f0", int32());
  auto field1 = field("f1", int32());
  auto field2 = field("f2", int32());
  auto resultField = field("result", int32());
  auto schema = arrow::schema({field0, field1, field2});

  // Build condition f0 < f1
  auto node_f0 = TreeExprBuilder::MakeField(field0);
  auto node_f1 = TreeExprBuilder::MakeField(field1);
  auto node_f2 = TreeExprBuilder::MakeField(field2);  // unused, see NOTE above
  auto less_than_function =
      TreeExprBuilder::MakeFunction("less_than", {node_f0, node_f1}, arrow::boolean());
  auto condition = TreeExprBuilder::MakeCondition(less_than_function);
  auto sum_expr = TreeExprBuilder::MakeExpression("add", {field1, field2}, resultField);

  auto configuration = TestConfiguration();

  std::shared_ptr<Filter> filter;
  std::shared_ptr<Projector> projector;

  auto status = Filter::Make(schema, condition, configuration, &filter);
  EXPECT_TRUE(status.ok());

  status = Projector::Make(schema, {sum_expr}, SelectionVector::MODE_UINT64, configuration,
                           &projector);
  ASSERT_OK(status);

  // Create a row-batch with some sample data
  int num_records = 5;
  auto array0 = MakeArrowArrayInt32({1, 2, 6, 40, 3}, {true, true, true, true, true});
  auto array1 = MakeArrowArrayInt32({5, 9, 3, 17, 6}, {true, true, true, true, true});
  auto array2 = MakeArrowArrayInt32({1, 2, 6, 40, 3}, {true, true, true, true, false});

  // expected output
  auto result = MakeArrowArrayInt32({6, 11, 0}, {true, true, false});

  // prepare input record batch
  auto in_batch = arrow::RecordBatch::Make(schema, num_records, {array0, array1, array2});

  std::shared_ptr<SelectionVector> selection_vector;
  status = SelectionVector::MakeInt64(num_records, pool_, &selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate filter
  status = filter->Evaluate(*in_batch, selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate projection
  arrow::ArrayVector outputs;
  status = projector->Evaluate(*in_batch, selection_vector.get(), pool_, &outputs);
  EXPECT_TRUE(status.ok());

  // Validate results
  EXPECT_ARROW_ARRAY_EQUALS(result, outputs.at(0));
}

// Filter rows where a > b, then project min(b, c) via an if-expression.
TEST_F(TestFilterProject, TestSimpleIf) {
  // schema for input fields
  auto fielda = field("a", int32());
  auto fieldb = field("b", int32());
  auto fieldc = field("c", int32());
  auto schema = arrow::schema({fielda, fieldb, fieldc});

  // output fields
  auto field_result = field("res", int32());

  auto node_a = TreeExprBuilder::MakeField(fielda);
  auto node_b = TreeExprBuilder::MakeField(fieldb);
  auto node_c = TreeExprBuilder::MakeField(fieldc);

  // Filter: a > b
  auto greater_than_function =
      TreeExprBuilder::MakeFunction("greater_than", {node_a, node_b}, boolean());
  auto filter_condition = TreeExprBuilder::MakeCondition(greater_than_function);

  // Projection: if (b < c) b else c
  auto project_condition =
      TreeExprBuilder::MakeFunction("less_than", {node_b, node_c}, boolean());
  auto if_node = TreeExprBuilder::MakeIf(project_condition, node_b, node_c, int32());
  auto expr = TreeExprBuilder::MakeExpression(if_node, field_result);

  auto configuration = TestConfiguration();

  // Build a filter for the expressions.
  std::shared_ptr<Filter> filter;
  auto status = Filter::Make(schema, filter_condition, configuration, &filter);
  EXPECT_TRUE(status.ok());

  // Build a projector for the expressions.
  std::shared_ptr<Projector> projector;
  status = Projector::Make(schema, {expr}, SelectionVector::MODE_UINT32, configuration,
                           &projector);
  ASSERT_OK(status);

  // Create a row-batch with some sample data
  int num_records = 6;
  auto array0 =
      MakeArrowArrayInt32({10, 12, -20, 5, 21, 29}, {true, true, true, true, true, true});
  auto array1 =
      MakeArrowArrayInt32({5, 15, 15, 17, 12, 3}, {true, true, true, true, true, true});
  auto array2 =
      MakeArrowArrayInt32({1, 25, 11, 30, -21, 30}, {true, true, true, true, true, false});

  // Create a selection vector
  std::shared_ptr<SelectionVector> selection_vector;
  status = SelectionVector::MakeInt32(num_records, pool_, &selection_vector);
  EXPECT_TRUE(status.ok());

  // expected output: rows 0, 4 and 5 pass the filter; row 5's c is null
  auto exp = MakeArrowArrayInt32({1, -21, 0}, {true, true, false});

  // prepare input record batch
  auto in_batch = arrow::RecordBatch::Make(schema, num_records, {array0, array1, array2});

  // Evaluate filter
  status = filter->Evaluate(*in_batch, selection_vector);
  EXPECT_TRUE(status.ok());

  // Evaluate project
  arrow::ArrayVector outputs;
  status = projector->Evaluate(*in_batch, selection_vector.get(), pool_, &outputs);
  EXPECT_TRUE(status.ok());

  // Validate results
  EXPECT_ARROW_ARRAY_EQUALS(exp, outputs.at(0));
}

}  // namespace gandiva
3,554
12,651
import torch
import subprocess
import numpy as np
from torch_geometric.graphgym.config import cfg
import logging
import os


def get_gpu_memory_map():
    '''Return the current memory usage of every visible GPU.

    Returns:
        numpy.ndarray: Used memory per GPU in MiB, indexed by GPU id,
        as reported by ``nvidia-smi``.
    '''
    result = subprocess.check_output([
        'nvidia-smi', '--query-gpu=memory.used',
        '--format=csv,nounits,noheader'
    ], encoding='utf-8')
    gpu_memory = np.array([int(x) for x in result.strip().split('\n')])
    return gpu_memory


def get_current_gpu_usage():
    '''Return the GPU memory (MiB) used by the current process.

    Sums the usage of every compute app reported by ``nvidia-smi`` whose
    PID matches this process. Returns ``-1`` when GPU memory tracking is
    disabled (``cfg.gpu_mem``), the device is ``'cpu'``, or CUDA is
    unavailable.
    '''
    if cfg.gpu_mem and cfg.device != 'cpu' and torch.cuda.is_available():
        result = subprocess.check_output([
            'nvidia-smi', '--query-compute-apps=pid,used_memory',
            '--format=csv,nounits,noheader'
        ], encoding='utf-8')
        current_pid = os.getpid()
        used_memory = 0
        for line in result.strip().split('\n'):
            line = line.split(', ')
            if current_pid == int(line[0]):
                used_memory += int(line[1])
        return used_memory
    else:
        return -1


def auto_select_device(memory_max=8000, memory_bias=200, strategy='random'):
    r'''Auto select device for the experiment. Useful when having multiple GPUs.

    Sets ``cfg.device`` in place: to ``'cpu'`` when CUDA is unavailable,
    or to ``'cuda:<i>'`` for the selected GPU when ``cfg.device == 'auto'``.

    Args:
        memory_max (int): Threshold of existing GPU memory usage. GPUs with
            memory usage beyond this threshold will be deprioritized.
        memory_bias (int): A bias of GPU memory usage added to all the GPUs.
            Avoids divide-by-zero errors when a GPU is completely free.
        strategy (str, optional): ``'random'`` (randomly select a GPU with
            probability inversely proportional to current memory usage) or
            ``'greedy'`` (pick the GPU with the least used memory).
    '''
    if cfg.device != 'cpu' and torch.cuda.is_available():
        if cfg.device == 'auto':
            memory_raw = get_gpu_memory_map()
            # Fall back to greedy selection when every GPU exceeds the
            # threshold; otherwise the random branch below would build an
            # all-zero probability vector and divide by zero in `.sum()`.
            if strategy == 'greedy' or np.all(memory_raw > memory_max):
                cuda = np.argmin(memory_raw)
                logging.info('GPU Mem: %s', memory_raw)
                logging.info('Greedy select GPU, select GPU %s with mem: %s',
                             cuda, memory_raw[cuda])
            elif strategy == 'random':
                # Sample inversely proportional to used memory, excluding
                # GPUs above the threshold entirely.
                memory = 1 / (memory_raw + memory_bias)
                memory[memory_raw > memory_max] = 0
                gpu_prob = memory / memory.sum()
                cuda = np.random.choice(len(gpu_prob), p=gpu_prob)
                logging.info('GPU Mem: %s', memory_raw)
                logging.info('GPU Prob: %s', gpu_prob.round(2))
                logging.info('Random select GPU, select GPU %s with mem: %s',
                             cuda, memory_raw[cuda])
            cfg.device = 'cuda:{}'.format(cuda)
    else:
        cfg.device = 'cpu'
1,264
649
<filename>src/julia/ipy/monkeypatch_completer.py<gh_stars>100-1000
"""
Monkey-patch `IPCompleter` to make code completion work in ``%%julia``.

This is done by monkey-patching because it looks like there is no immediate
plan for an API to do this:
https://github.com/ipython/ipython/pull/10722
"""

from __future__ import absolute_import, print_function

import re

from IPython.core.completer import Completion, IPCompleter


class JuliaCompleter(object):
    """Produce IPython `Completion` objects for code inside ``%julia`` magics."""

    def __init__(self, julia=None):
        # Importing lazily here so that merely loading this module does not
        # start a Julia runtime.
        from julia import Julia
        self.julia = Julia() if julia is None else julia
        self.magic_re = re.compile(r".*(\s|^)%%?julia\s*")
        # With this regexp, "=%julia Cha<tab>" won't work.  But maybe
        # it's better to be conservative here.

    @property
    def jlcomplete(self):
        """Julia-side completion entry point (resolved lazily on each access)."""
        from julia.Main._PyJuliaHelper import completions
        return completions

    def julia_completions(self, full_text, offset):
        """Return completions for `full_text` if it is a ``%julia`` cell/line.

        `offset` is the cursor position within `full_text`.  Returns an empty
        list when the text is not a Julia magic invocation.
        """
        self.last_text = full_text  # kept for debugging/inspection
        match = self.magic_re.match(full_text)
        if not match:
            return []
        prefix_len = match.end()
        jl_pos = offset - prefix_len
        jl_code = full_text[prefix_len:]
        texts, (jl_start, jl_end), should_complete = self.jlcomplete(jl_code, jl_pos)
        # Julia ranges are 1-based and inclusive; shift the start back by one
        # to get a 0-based Python index (the end presumably needs no -1 since
        # Python ranges are exclusive — TODO confirm against _PyJuliaHelper).
        start = jl_start - 1 + prefix_len
        end = jl_end + prefix_len
        completions = [Completion(start, end, txt) for txt in texts]
        self.last_completions = completions  # kept for debugging/inspection
        # if not should_complete:
        #     return []
        return completions


class IPCompleterPatcher(object):
    """Install a Julia-aware `_completions` override on `IPCompleter`."""

    def __init__(self):
        from julia.Base import VERSION
        # The Julia-side completion helper requires Julia >= 0.7.
        if (VERSION.major, VERSION.minor) < (0, 7):
            return
        self.patch_ipcompleter(IPCompleter, JuliaCompleter())

    def patch_ipcompleter(self, IPCompleter, jlcompleter):
        # Keep a reference to the original method so non-Julia completions
        # still work (and so the patch could be undone).
        orig__completions = IPCompleter._completions

        def _completions(self, full_text, offset, **kwargs):
            completions = jlcompleter.julia_completions(full_text, offset)
            if completions:
                return completions
            else:
                return orig__completions(self, full_text, offset, **kwargs)

        IPCompleter._completions = _completions

        self.orig__completions = orig__completions
        self.patched__completions = _completions
        self.IPCompleter = IPCompleter


# Make it work with reload: keep the module-level PATCHER singleton across
# `importlib.reload` so the patch is never applied twice.
try:
    PATCHER
except NameError:
    PATCHER = None


def patch_ipcompleter():
    """Apply the IPCompleter patch once per process (idempotent)."""
    global PATCHER
    if PATCHER is not None:
        return
    PATCHER = IPCompleterPatcher()

# TODO: write `unpatch_ipcompleter`
1,114
1,391
<reponame>newluhux/plan9port
/*
 * On-disk directory-entry and meta-block structures shared by the
 * fossil/vac file formats.  Relies on plan9port's u.h/libc.h typedefs
 * (uchar, ushort, ulong, uvlong).
 */
typedef struct DirEntry DirEntry;
typedef struct MetaBlock MetaBlock;
typedef struct MetaEntry MetaEntry;

enum {
	MetaMagic = 0x5656fc7a,	/* magic number of a packed meta block */
	MetaHeaderSize = 12,	/* bytes of fixed header in a meta block */
	MetaIndexSize = 4,	/* bytes per index-table slot */
	IndexEntrySize = 8,
	DirMagic = 0x1c4d9072,	/* magic number of a packed DirEntry */
};

/*
 * Mode bits
 */
enum {
	ModeOtherExec = (1<<0),
	ModeOtherWrite = (1<<1),
	ModeOtherRead = (1<<2),
	ModeGroupExec = (1<<3),
	ModeGroupWrite = (1<<4),
	ModeGroupRead = (1<<5),
	ModeOwnerExec = (1<<6),
	ModeOwnerWrite = (1<<7),
	ModeOwnerRead = (1<<8),
	ModeSticky = (1<<9),
	ModeSetUid = (1<<10),
	ModeSetGid = (1<<11),
	ModeAppend = (1<<12),		/* append only file */
	ModeExclusive = (1<<13),	/* lock file - plan 9 */
	ModeLink = (1<<14),		/* sym link */
	ModeDir	= (1<<15),		/* duplicate of DirEntry */
	ModeHidden = (1<<16),		/* MS-DOS */
	ModeSystem = (1<<17),		/* MS-DOS */
	ModeArchive = (1<<18),		/* MS-DOS */
	ModeTemporary = (1<<19),	/* MS-DOS */
	ModeSnapshot = (1<<20),		/* read only snapshot */
};

/* optional directory entry fields */
enum {
	DePlan9 = 1,	/* not valid in version >= 9 */
	DeNT,		/* not valid in version >= 9 */
	DeQidSpace,
	DeGen,		/* not valid in version >= 9 */
};

struct DirEntry {
	char *elem;		/* path element */
	ulong entry;		/* entry in directory for data */
	ulong gen;		/* generation of data entry */
	ulong mentry;		/* entry in directory for meta */
	ulong mgen;		/* generation of meta entry */
	uvlong size;		/* size of file */
	uvlong qid;		/* unique file id */

	char *uid;		/* owner id */
	char *gid;		/* group id */
	char *mid;		/* last modified by */
	ulong mtime;		/* last modified time */
	ulong mcount;		/* number of modifications: can wrap! */
	ulong ctime;		/* directory entry last changed */
	ulong atime;		/* last time accessed */
	ulong mode;		/* various mode bits */

	/* plan 9 */
	int plan9;
	uvlong p9path;
	ulong p9version;

	/* sub space of qid */
	int qidSpace;
	uvlong qidOffset;	/* qid offset */
	uvlong qidMax;		/* qid maximum */
};

/* one packed entry inside a meta block: raw bytes and their length */
struct MetaEntry {
	uchar *p;
	ushort size;
};

struct MetaBlock {
	int maxsize;		/* size of block */
	int size;		/* size used */
	int free;		/* free space within used size */
	int maxindex;		/* entries allocated for table */
	int nindex;		/* amount of table used */
	int botch;		/* compensate for my stupidity */
	uchar *buf;
};

void deCleanup(DirEntry*);
void deCopy(DirEntry*, DirEntry*);
int deSize(DirEntry*);
void dePack(DirEntry*, MetaEntry*);
int deUnpack(DirEntry*, MetaEntry*);

void mbInit(MetaBlock*, uchar*, int, int);
int mbUnpack(MetaBlock*, uchar*, int);
void mbInsert(MetaBlock*, int, MetaEntry*);
void mbDelete(MetaBlock*, int);
void mbPack(MetaBlock*);
uchar *mbAlloc(MetaBlock*, int);
int mbResize(MetaBlock*, MetaEntry*, int);
int mbSearch(MetaBlock*, char*, int*, MetaEntry*);

void meUnpack(MetaEntry*, MetaBlock*, int);
1,077
1,056
<reponame>timfel/netbeans
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.versioning;

import org.netbeans.modules.versioning.core.Utils;
import java.io.IOException;
import java.io.File;
import java.lang.reflect.Field;
import java.security.Permission;
import java.util.LinkedList;
import java.util.List;
import org.netbeans.junit.NbTestCase;
import org.netbeans.modules.versioning.core.api.VCSFileProxy;
import org.netbeans.modules.versioning.core.VersioningManager;
import org.netbeans.modules.versioning.core.util.VCSSystemProvider;
import org.netbeans.modules.versioning.spi.VersioningSupport;
import org.netbeans.modules.versioning.spi.VersioningSystem;
import org.netbeans.modules.versioning.spi.testvcs.TestVCS;

/**
 * Tests VCS owner lookup ({@link VersioningSupport#getOwner}) and its caching:
 * that versioned/unversioned files resolve to the right versioning system,
 * that repeated lookups do not touch the disk again, and that the
 * "excluded folders" configuration is honored.
 *
 * Disk access is observed via a custom {@link SecurityManager} that records
 * every file path passed to {@code checkRead}.
 */
public class GetOwnerTest extends NbTestCase {
    protected File dataRootDir;
    // Records every file read; installed as the SecurityManager in setUp().
    private StatFiles accessMonitor;
    private SecurityManager defaultSecurityManager;
    protected File versionedFolder;
    protected File unversionedFolder;

    public GetOwnerTest(String testName) {
        super(testName);
        accessMonitor = new StatFiles();
    }

    /** Lazily creates the folder that TestVCS claims ownership of. */
    protected File getVersionedFolder() {
        if (versionedFolder == null) {
            versionedFolder = new File(dataRootDir, "workdir/root-test-versioned/");
            versionedFolder.mkdirs();
        }
        return versionedFolder;
    }

    /** Lazily creates a folder no versioning system owns. */
    protected File getUnversionedFolder() {
        if (unversionedFolder == null) {
            unversionedFolder = new File(dataRootDir, "workdir/unversioned/");
            unversionedFolder.mkdirs();
        }
        return unversionedFolder;
    }

    protected void setUp() throws Exception {
        super.setUp();
        dataRootDir = getWorkDir();
        File userdir = new File(getWorkDir(), "userdir");
        userdir.mkdirs();
        System.setProperty("netbeans.user", userdir.getAbsolutePath());
        if(accessMonitor != null) {
            if(defaultSecurityManager == null) {
                defaultSecurityManager = System.getSecurityManager();
            }
            // From here on every checkRead() is recorded in accessMonitor.files.
            System.setSecurityManager(accessMonitor);
        }
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        if(accessMonitor != null) {
            // Restore the original manager so other tests are unaffected.
            System.setSecurityManager(defaultSecurityManager);
        }
    }

    /**
     * When the caller already knows whether the path is a file or a folder,
     * owner lookup must not touch the disk at all — not even on first query.
     */
    public void testGetOwnerKnowFileType() throws IOException {
        assertTrue(VersioningSupport.getOwner(getVersionedFolder()).getClass() == getVCS());
        File f = new File(getVersionedFolder(), "file");
        f.createNewFile();
        testGetOwnerKnowFileType(f, true);
        f = new File(getVersionedFolder(), "folder");
        f.mkdirs();
        testGetOwnerKnowFileType(f, false);
    }

    /** The versioning system class expected to own the versioned folder. */
    protected Class getVCS() {
        return TestVCS.class;
    }

    private void testGetOwnerKnowFileType(File f, boolean isFile) throws IOException {
        accessMonitor.files.clear();
        VCSFileProxy proxy = VCSFileProxy.createFileProxy(f);
        VCSSystemProvider.VersioningSystem vs = VersioningManager.getInstance().getOwner(proxy, isFile); // true => its a file, no io.file.isFile() call needed
        assertNotNull(vs);
        // file wasn't accessed even on first shot
        assertFalse(accessMonitor.files.contains(f.getAbsolutePath()));

        accessMonitor.files.clear();
        vs = VersioningManager.getInstance().getOwner(proxy, isFile);
        assertNotNull(vs);
        // file wasn't accessed
        assertFalse(accessMonitor.files.contains(f.getAbsolutePath()));
    }

    /** Files, folders, and even nonexistent children of a versioned root are owned. */
    public void testGetOwnerVersioned() throws IOException {
        assertTrue(VersioningSupport.getOwner(getVersionedFolder()).getClass() == getVCS());
        File aRoot = new File(getVersionedFolder(), "a.txt");
        assertTrue(VersioningSupport.getOwner(aRoot).getClass() == getVCS());
        aRoot = new File(getVersionedFolder(), "b-folder");
        aRoot.mkdirs();
        assertTrue(VersioningSupport.getOwner(aRoot).getClass() == getVCS());
        aRoot = new File(aRoot, "deep-file");
        aRoot.createNewFile();
        assertTrue(VersioningSupport.getOwner(aRoot).getClass() == getVCS());
        aRoot = new File(getVersionedFolder(), "nonexistent-file");
        assertTrue(VersioningSupport.getOwner(aRoot).getClass() == getVCS());
    }

    /** Paths outside any versioned root have no owner. */
    public void testGetOwnerUnversioned() throws IOException {
        File aRoot = File.listRoots()[0];
        assertNull(VersioningSupport.getOwner(aRoot));
        aRoot = dataRootDir;
        assertNull(VersioningSupport.getOwner(aRoot));
        aRoot = new File(dataRootDir, "workdir");
        assertNull(VersioningSupport.getOwner(aRoot));

        assertNull(VersioningSupport.getOwner(getUnversionedFolder()));
        File f = new File(getUnversionedFolder(), "a.txt");
        f.createNewFile();
        assertNull(VersioningSupport.getOwner(f));

        f = new File(getUnversionedFolder(), "notexistent.txt");
        assertNull(VersioningSupport.getOwner(f));
    }

    public void testFileOwnerCache() throws IOException {
        testFileOwnerCache(true /* versioned */ , false /* file */);
        testFileOwnerCache(false/* versioned */ , false /* file */);
    }

    public void testFolderOwnerCache() throws IOException {
        testFileOwnerCache(true /* unversioned */ , true /* folder */);
        testFileOwnerCache(false/* unversioned */ , true /* folder */);
    }

    /**
     * Folders listed via the system property, the preferences key, or the
     * userdir are excluded from versioning; resetting the cached field via
     * reflection forces re-evaluation of the configuration.
     */
    public void testExcludedFolders () throws Exception {
        Field f = Utils.class.getDeclaredField("unversionedFolders");
        f.setAccessible(true);
        f.set(Utils.class, (File[]) null);

        File a = new File(getWorkDir(), "a");
        File b = new File(getWorkDir(), "b");
        System.setProperty("versioning.unversionedFolders", a.getAbsolutePath() + ";" + b.getAbsolutePath() + ";");
        File c = new File(getWorkDir(), "c");
        org.netbeans.modules.versioning.core.api.VersioningSupport.getPreferences().put("unversionedFolders", c.getAbsolutePath()); //NOI18N
        File userdir = new File(getWorkDir(), "userdir");
        System.setProperty("netbeans.user", userdir.getAbsolutePath());

        assertTrue(VersioningSupport.isExcluded(a));
        assertTrue(VersioningSupport.isExcluded(b));
        assertTrue(VersioningSupport.isExcluded(c));
        assertTrue(VersioningSupport.isExcluded(userdir));
        assertTrue(VersioningSupport.isExcluded(new File(userdir, "ffff")));
        assertFalse(VersioningSupport.isExcluded(userdir.getParentFile()));
        assertEquals(4, ((String[]) f.get(Utils.class)).length);

        // what if someone still wants to have userdir versioned?
        System.setProperty("versioning.netbeans.user.versioned", "true");
        f.set(Utils.class, (String[]) null);
        assertTrue(VersioningSupport.isExcluded(a));
        assertTrue(VersioningSupport.isExcluded(b));
        assertTrue(VersioningSupport.isExcluded(c));
        assertFalse(VersioningSupport.isExcluded(userdir));
        assertFalse(VersioningSupport.isExcluded(new File(userdir, "ffff")));
        assertFalse(VersioningSupport.isExcluded(userdir.getParentFile()));
        assertEquals(3, ((String[]) f.get(Utils.class)).length);
    }

    /**
     * First lookup of a path may read the disk; all subsequent lookups
     * (including after lookups of other paths) must be served from cache.
     */
    private void testFileOwnerCache(boolean isVersioned, boolean isFolder) throws IOException {
        File folder = isVersioned ? getVersionedFolder() : getUnversionedFolder();
        File child = new File(folder, "file");
        File child2 = new File(folder, "file2");
        if(isFolder) {
            child.mkdirs();
            child2.mkdirs();
        } else {
            child.createNewFile();
            child2.createNewFile();
        }

        assertFileAccess(child, isVersioned, true /* access */);
        // try again - shouldn't be accessed anymore
        assertFileAccess(child, isVersioned, false /* no access */);

        // try few more times some other file no file access expected
        assertFileAccess(child2, isVersioned, true /* access */);
        for (int i = 0; i < 100; i++) {
            // try some other file
            assertFileAccess(child2, isVersioned, false /* no access */);
        }
        // try the first file again
        assertFileAccess(child, isVersioned, false /* no access */);
    }

    /** Asserts both the lookup result and whether the disk was touched. */
    private void assertFileAccess(File f, boolean versioned, boolean access) {
        accessMonitor.files.clear();
        VersioningSystem vs = VersioningSupport.getOwner(f);
        if(versioned && vs == null) {
            fail("no VersioningSystem returned for versioned file " + f);
        } else if(!versioned && vs != null) {
            fail("VersioningSystem returned for unversioned file " + f);
        }
        // file was accessed
        boolean accessed = accessMonitor.files.contains(f.getAbsolutePath());
        if(access && !accessed) {
            fail(f + " was not but should be accessed");
        } else if (!access && accessed) {
            fail(f + " was accessed but shouldn't");
        }
    }

    /** SecurityManager that records every path passed to checkRead(). */
    private class StatFiles extends SecurityManager {
        private List<String> files = new LinkedList<String>();

        @Override
        public void checkRead(String file) {
            files.add(file);
        }

        @Override
        public void checkPermission(Permission perm) {
        }
    }
}
594
#ifndef MAINWINDOW_H #define MAINWINDOW_H #include <QMainWindow> #include <QDebug> #include <QtAutoUpdaterWidgets/UpdateController> #include <QtAutoUpdaterWidgets/UpdateButton> namespace Ui { class MainWindow; } class MainWindow : public QMainWindow { Q_OBJECT public: explicit MainWindow(QWidget *parent = nullptr); ~MainWindow() override; private: Ui::MainWindow *ui; QtAutoUpdater::UpdateController *controller = nullptr; private slots: void initializeUpdater(); }; #endif // MAINWINDOW_H
186
789
<reponame>tdeboer-ilmn/hail import os.path import sys from . import gcloud from .deploy_metadata import get_deploy_metadata def init_parser(parser): parser.add_argument('name', type=str, help='Cluster name.') parser.add_argument('--num-workers', '--n-workers', '-w', type=int, help='New number of worker machines (min. 2).') parser.add_argument('--num-secondary-workers', '--num-preemptible-workers', '--n-pre-workers', '-p', type=int, help='New number of secondary (preemptible) worker machines.') parser.add_argument('--graceful-decommission-timeout', '--graceful', type=str, help='If set, cluster size downgrade will use graceful decommissioning with the given timeout (e.g. "60m").') max_idle_group = parser.add_mutually_exclusive_group() max_idle_group.add_argument('--max-idle', type=str, help='New maximum idle time before shutdown (e.g. "60m").') max_idle_group.add_argument('--no-max-idle', action='store_true', help='Disable auto deletion after idle time.') max_age_group = parser.add_mutually_exclusive_group() max_age_group.add_argument( '--expiration-time', type=str, help=('The time when cluster will be auto-deleted. (e.g. "2020-01-01T20:00:00Z"). ' 'Execute gcloud topic datatimes for more information.')) max_age_group.add_argument( '--max-age', type=str, help=('If the cluster is older than this, it will be auto-deleted. (e.g. 
"2h")' 'Execute gcloud topic datatimes for more information.')) max_age_group.add_argument( '--no-max-age', action='store_true', help='Disable auto-deletion due to max age or expiration time.') parser.add_argument('--dry-run', action='store_true', help="Print gcloud dataproc command, but don't run it.") parser.add_argument('--zone', '-z', type=str, help='Compute zone for Dataproc cluster.') wheel_group = parser.add_mutually_exclusive_group() wheel_group.add_argument('--update-hail-version', action='store_true', help="Update the version of hail running on cluster to match " "the currently installed version.") wheel_group.add_argument('--wheel', type=str, help='New Hail installation.') def main(args, pass_through_args): modify_args = [] if args.num_workers is not None: modify_args.append('--num-workers={}'.format(args.num_workers)) if args.num_secondary_workers is not None: modify_args.append('--num-secondary-workers={}'.format(args.num_secondary_workers)) if args.graceful_decommission_timeout: if not modify_args: sys.exit("Error: Cannot use --graceful-decommission-timeout without resizing the cluster.") modify_args.append('--graceful-decommission-timeout={}'.format(args.graceful_decommission_timeout)) if args.max_idle: modify_args.append('--max-idle={}'.format(args.max_idle)) if args.no_max_idle: modify_args.append('--no-max-idle') if args.expiration_time: modify_args.append('--expiration_time={}'.format(args.expiration_time)) if args.max_age: modify_args.append('--max-age={}'.format(args.max_age)) if args.no_max_age: modify_args.append('--no-max-age') if modify_args: cmd = ['dataproc', 'clusters', 'update', args.name] + modify_args if args.beta: cmd.insert(0, 'beta') cmd.extend(pass_through_args) # print underlying gcloud command print('gcloud ' + ' '.join(cmd[:4]) + ' \\\n ' + ' \\\n '.join(cmd[4:])) # Update cluster if not args.dry_run: print("Updating cluster '{}'...".format(args.name)) gcloud.run(cmd) wheel = None if args.update_hail_version: deploy_metadata = 
get_deploy_metadata() wheel = deploy_metadata["wheel"] else: wheel = args.wheel if wheel is not None: zone = args.zone if args.zone else gcloud.get_config("compute/zone") if not zone: raise RuntimeError("Could not determine compute zone. Use --zone argument to hailctl, or use `gcloud config set compute/zone <my-zone>` to set a default.") wheelfile = os.path.basename(wheel) cmds = [] if wheel.startswith("gs://"): cmds.append([ 'compute', 'ssh', '{}-m'.format(args.name), '--zone={}'.format(zone), '--', f'sudo gsutil cp {wheel} /tmp/ && ' 'sudo /opt/conda/default/bin/pip uninstall -y hail && ' f'sudo /opt/conda/default/bin/pip install --no-dependencies /tmp/{wheelfile} && ' f"unzip /tmp/{wheelfile} && " "grep 'Requires-Dist: ' hail*dist-info/METADATA | sed 's/Requires-Dist: //' | sed 's/ (//' | sed 's/)//' | grep -v 'pyspark' | xargs /opt/conda/default/bin/pip install" ]) else: cmds.extend([ [ 'compute', 'scp', '--zone={}'.format(zone), wheel, '{}-m:/tmp/'.format(args.name) ], [ 'compute', 'ssh', f'{args.name}-m', f'--zone={zone}', '--', 'sudo /opt/conda/default/bin/pip uninstall -y hail && ' f'sudo /opt/conda/default/bin/pip install --no-dependencies /tmp/{wheelfile} && ' f"unzip /tmp/{wheelfile} && " "grep 'Requires-Dist: ' hail*dist-info/METADATA | sed 's/Requires-Dist: //' | sed 's/ (//' | sed 's/)//' | grep -v 'pyspark' | xargs /opt/conda/default/bin/pip install" ] ]) for cmd in cmds: print('gcloud ' + ' '.join(cmd)) if not args.dry_run: gcloud.run(cmd) if not wheel and not modify_args and pass_through_args: sys.stderr.write('ERROR: found pass-through arguments but not known modification args.') sys.exit(1)
3,015
319
<reponame>dqnykamp/sympy<filename>doc/src/modules/mpmath/plots/ai_c.py # Airy function Ai(z) in the complex plane cplot(airyai, [-8,8], [-8,8], points=50000)
67
1,371
<reponame>moogacs/aws-sdk-go-v2<filename>codegen/smithy-aws-go-codegen/src/main/java/software/amazon/smithy/aws/go/codegen/AwsRestXml.java
package software.amazon.smithy.aws.go.codegen;

import software.amazon.smithy.aws.traits.protocols.RestXmlTrait;
import software.amazon.smithy.model.shapes.ShapeId;

/**
 * Handles generating the aws.rest-xml protocol for services.
 *
 * @see RestXmlProtocolGenerator
 */
public final class AwsRestXml extends RestXmlProtocolGenerator {

    /** Returns the media type used for rest-xml document payloads. */
    @Override
    protected String getDocumentContentType() {
        return "application/xml";
    }

    /** Returns the shape ID of the restXml protocol trait this generator serves. */
    @Override
    public ShapeId getProtocol() {
        return RestXmlTrait.ID;
    }
}
648
{"resourceType":"DataElement","id":"List.entry.flag","meta":{"lastUpdated":"2017-04-19T07:44:43.294+10:00"},"url":"http://hl7.org/fhir/DataElement/List.entry.flag","status":"draft","experimental":true,"stringency":"fully-specified","element":[{"id":"List.entry.flag","path":"List.entry.flag","short":"Status/Workflow information about this item","definition":"The flag allows the system constructing the list to indicate the role and significance of the item in the list.","comment":"The flag can only be understood in the context of the List.code. If the flag means that the entry has actually been deleted from the list, the deleted element SHALL be true. Deleted can only be used if the List.mode is \"changes\".","requirements":"This field is present to support various clinical uses of lists, such as a discharge summary medication list, where flags specify whether the medication was added, modified, or deleted from the list.","min":0,"max":"1","type":[{"code":"CodeableConcept"}],"binding":{"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/elementdefinition-bindingName","valueString":"ListItemFlag"}],"strength":"example","description":"Codes that provide further information about the reason and meaning of the item in the list","valueSetReference":{"reference":"http://hl7.org/fhir/ValueSet/list-item-flag"}},"mapping":[{"identity":"rim","map":".outBoundRelationship[typeCode=COMP].target[classCode=OBS\"].value"}]}]}
361
1,380
<reponame>rift-labs-developer/colour
# -*- coding: utf-8 -*-
"""
Apple RGB Colourspace
=====================

Defines the *Apple RGB* colourspace:

-   :attr:`colour.models.RGB_COLOURSPACE_APPLE_RGB`.

References
----------
-   :cite:`Susstrunk1999a` : <NAME>., <NAME>., & <NAME>. (1999). Standard
    RGB Color Spaces.
"""

import numpy as np
from functools import partial

from colour.colorimetry import CCS_ILLUMINANTS
from colour.models.rgb import (RGB_Colourspace, gamma_function,
                               normalised_primary_matrix)

__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = '<EMAIL>'
__status__ = 'Production'

__all__ = [
    'PRIMARIES_APPLE_RGB', 'WHITEPOINT_NAME_APPLE_RGB',
    'CCS_WHITEPOINT_APPLE_RGB', 'MATRIX_APPLE_RGB_TO_XYZ',
    'MATRIX_XYZ_TO_APPLE_RGB', 'RGB_COLOURSPACE_APPLE_RGB'
]

# Chromaticity coordinates (x, y) of the R, G, B primaries.
PRIMARIES_APPLE_RGB = np.array([
    [0.6250, 0.3400],
    [0.2800, 0.5950],
    [0.1550, 0.0700],
])
"""
*Apple RGB* colourspace primaries.

PRIMARIES_APPLE_RGB : ndarray, (3, 2)
"""

WHITEPOINT_NAME_APPLE_RGB = 'D65'
"""
*Apple RGB* colourspace whitepoint name.

WHITEPOINT_NAME_APPLE_RGB : unicode
"""

CCS_WHITEPOINT_APPLE_RGB = (CCS_ILLUMINANTS[
    'CIE 1931 2 Degree Standard Observer'][WHITEPOINT_NAME_APPLE_RGB])
"""
*Apple RGB* colourspace whitepoint chromaticity coordinates.

CCS_WHITEPOINT_APPLE_RGB : ndarray
"""

# Derived at import time from the primaries and whitepoint above.
MATRIX_APPLE_RGB_TO_XYZ = normalised_primary_matrix(PRIMARIES_APPLE_RGB,
                                                    CCS_WHITEPOINT_APPLE_RGB)
"""
*Apple RGB* colourspace to *CIE XYZ* tristimulus values matrix.

MATRIX_APPLE_RGB_TO_XYZ : array_like, (3, 3)
"""

MATRIX_XYZ_TO_APPLE_RGB = np.linalg.inv(MATRIX_APPLE_RGB_TO_XYZ)
"""
*CIE XYZ* tristimulus values to *Apple RGB* colourspace matrix.

MATRIX_XYZ_TO_APPLE_RGB : array_like, (3, 3)
"""

# Transfer functions are simple gamma 1.8 encode/decode.
RGB_COLOURSPACE_APPLE_RGB = RGB_Colourspace(
    'Apple RGB',
    PRIMARIES_APPLE_RGB,
    CCS_WHITEPOINT_APPLE_RGB,
    WHITEPOINT_NAME_APPLE_RGB,
    MATRIX_APPLE_RGB_TO_XYZ,
    MATRIX_XYZ_TO_APPLE_RGB,
    partial(gamma_function, exponent=1 / 1.8),
    partial(gamma_function, exponent=1.8),
)
RGB_COLOURSPACE_APPLE_RGB.__doc__ = """
*Apple RGB* colourspace.

References
----------
:cite:`Susstrunk1999a`

RGB_COLOURSPACE_APPLE_RGB : RGB_Colourspace
"""
1,100
669
<gh_stars>100-1000
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

#include "lib/Api.Ort/pch.h"

#include "OnnxruntimeEnvironment.h"
#include "OnnxruntimeErrors.h"
#include "core/platform/windows/TraceLoggingConfig.h"
#include <evntrace.h>
#include <windows.h>

#include <winrt/Windows.ApplicationModel.h>
#include <winrt/Windows.ApplicationModel.Core.h>

using namespace _winml;

// Global toggle consumed by the logging callback; set via EnableDebugOutput().
static bool debug_output_ = false;

EXTERN_C IMAGE_DOS_HEADER __ImageBase;

// Returns the directory (with trailing separator) containing this DLL,
// derived from the module base address.
static std::wstring CurrentModulePath() {
  WCHAR path[MAX_PATH];
  FAIL_FAST_IF(0 == GetModuleFileNameW((HINSTANCE)&__ImageBase, path, _countof(path)));

  WCHAR absolute_path[MAX_PATH];
  WCHAR* name;
  FAIL_FAST_IF(0 == GetFullPathNameW(path, _countof(path), absolute_path, &name));

  auto idx = std::distance(absolute_path, name);
  auto out_path = std::wstring(absolute_path);
  out_path.resize(idx);

  return out_path;
}

// Loads onnxruntime.dll from the app package (UWP) or from this module's
// directory (desktop). The module handle is intentionally never freed.
static HRESULT GetOnnxruntimeLibrary(HMODULE& module) {
#if WINAPI_FAMILY == WINAPI_FAMILY_PC_APP
  // Store + Redist (note that this is never built into the inbox dll)
  auto out_module = LoadPackagedLibrary(L"onnxruntime.dll", 0);
#else
  auto onnxruntime_dll = CurrentModulePath() + L"\\onnxruntime.dll";
  auto out_module = LoadLibraryExW(onnxruntime_dll.c_str(), nullptr, 0);
#endif
  if (out_module == nullptr) {
    return HRESULT_FROM_WIN32(GetLastError());
  }
  module = out_module;
  return S_OK;
}

// Resolves OrtGetApiBase from onnxruntime.dll and returns the OrtApi table
// matching the ORT_API_VERSION this code was compiled against.
const OrtApi* _winml::GetVersionedOrtApi() {
  HMODULE onnxruntime_dll;
  FAIL_FAST_IF_FAILED(GetOnnxruntimeLibrary(onnxruntime_dll));

  using OrtGetApiBaseSignature = decltype(OrtGetApiBase);
  auto ort_get_api_base_fn = reinterpret_cast<OrtGetApiBaseSignature*>(GetProcAddress(onnxruntime_dll, "OrtGetApiBase"));
  if (ort_get_api_base_fn == nullptr) {
    FAIL_FAST_HR(HRESULT_FROM_WIN32(GetLastError()));
  }

  const auto ort_api_base = ort_get_api_base_fn();
  return ort_api_base->GetApi(ORT_API_VERSION);
}

// Resolves the WinML adapter API table from onnxruntime.dll.
static const WinmlAdapterApi* GetVersionedWinmlAdapterApi(const OrtApi* ort_api) {
  HMODULE onnxruntime_dll;
  FAIL_FAST_IF_FAILED(GetOnnxruntimeLibrary(onnxruntime_dll));

  using OrtGetWinMLAdapterSignature = decltype(OrtGetWinMLAdapter);
  auto ort_get_winml_adapter_fn = reinterpret_cast<OrtGetWinMLAdapterSignature*>(GetProcAddress(onnxruntime_dll, "OrtGetWinMLAdapter"));
  if (ort_get_winml_adapter_fn == nullptr) {
    FAIL_FAST_HR(HRESULT_FROM_WIN32(GetLastError()));
  }

  return ort_get_winml_adapter_fn(ORT_API_VERSION);
}

const WinmlAdapterApi* _winml::GetVersionedWinmlAdapterApi() {
  return GetVersionedWinmlAdapterApi(GetVersionedOrtApi());
}

// ORT logging sink: maps ORT severities to TraceLogging events. Fatal/Error
// are emitted as telemetry (MICROSOFT_KEYWORD_MEASURES); the rest are
// diagnostics only. Optionally mirrors messages to the debugger.
static void __stdcall WinmlOrtLoggingCallback(void* param, OrtLoggingLevel severity, const char* category,
                                              const char* logger_id, const char* code_location, const char* message) noexcept {
  UNREFERENCED_PARAMETER(param);
  UNREFERENCED_PARAMETER(logger_id);
  // ORT Fatal and Error Messages are logged as Telemetry, rest are non-telemetry.
  switch (severity) {
    case OrtLoggingLevel::ORT_LOGGING_LEVEL_FATAL:  //Telemetry
      TraceLoggingWrite(
          winml_trace_logging_provider,
          "WinMLLogSink",
          TelemetryPrivacyDataTag(PDT_ProductAndServicePerformance),
          TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_DEFAULT),
          TraceLoggingLevel(WINEVENT_LEVEL_CRITICAL),
          TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
          TraceLoggingString(category),
          TraceLoggingUInt32((UINT32)severity),
          TraceLoggingString(message),
          TraceLoggingString(code_location),
          TraceLoggingKeyword(MICROSOFT_KEYWORD_MEASURES));
      break;
    case OrtLoggingLevel::ORT_LOGGING_LEVEL_ERROR:  //Telemetry
      TraceLoggingWrite(
          winml_trace_logging_provider,
          "WinMLLogSink",
          TelemetryPrivacyDataTag(PDT_ProductAndServicePerformance),
          TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_DEFAULT),
          TraceLoggingLevel(WINEVENT_LEVEL_ERROR),
          TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
          TraceLoggingString(category),
          TraceLoggingUInt32((UINT32)severity),
          TraceLoggingString(message),
          TraceLoggingString(code_location),
          TraceLoggingKeyword(MICROSOFT_KEYWORD_MEASURES));
      break;
    case OrtLoggingLevel::ORT_LOGGING_LEVEL_WARNING:
      TraceLoggingWrite(
          winml_trace_logging_provider,
          "WinMLLogSink",
          TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_DEFAULT),
          TraceLoggingLevel(WINEVENT_LEVEL_WARNING),
          TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
          TraceLoggingString(category),
          TraceLoggingUInt32((UINT32)severity),
          TraceLoggingString(message),
          TraceLoggingString(code_location));
      break;
    case OrtLoggingLevel::ORT_LOGGING_LEVEL_INFO:
      TraceLoggingWrite(
          winml_trace_logging_provider,
          "WinMLLogSink",
          TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_DEFAULT),
          TraceLoggingLevel(WINEVENT_LEVEL_INFO),
          TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
          TraceLoggingString(category),
          TraceLoggingUInt32((UINT32)severity),
          TraceLoggingString(message),
          TraceLoggingString(code_location));
      break;
    case OrtLoggingLevel::ORT_LOGGING_LEVEL_VERBOSE:
      __fallthrough;  //Default is Verbose too.
    default:
      TraceLoggingWrite(
          winml_trace_logging_provider,
          "WinMLLogSink",
          TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_DEFAULT),
          TraceLoggingLevel(WINEVENT_LEVEL_VERBOSE),
          TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
          TraceLoggingString(category),
          TraceLoggingUInt32((UINT32)severity),
          TraceLoggingString(message),
          TraceLoggingString(code_location));
  }

  if (debug_output_) {
    OutputDebugStringA((std::string(message) + "\r\n").c_str());
  }
}

// ORT profiler sink: emits per-node or per-session profiling records as
// verbose TraceLogging events under the Lotus profiling keyword.
static void __stdcall WinmlOrtProfileEventCallback(const OrtProfilerEventRecord* profiler_record) noexcept {
  if (profiler_record->category_ == OrtProfilerEventCategory::NODE_EVENT) {
    TraceLoggingWrite(
        winml_trace_logging_provider,
        "OnnxRuntimeProfiling",
        TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_LOTUS_PROFILING),
        TraceLoggingLevel(WINEVENT_LEVEL_VERBOSE),
        TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
        TraceLoggingString(profiler_record->category_name_, "Category"),
        TraceLoggingInt64(profiler_record->duration_, "Duration (us)"),
        TraceLoggingInt64(profiler_record->time_span_, "Time Stamp (us)"),
        TraceLoggingString(profiler_record->event_name_, "Event Name"),
        TraceLoggingInt32(profiler_record->process_id_, "Process ID"),
        TraceLoggingInt32(profiler_record->thread_id_, "Thread ID"),
        TraceLoggingString(profiler_record->op_name_, "Operator Name"),
        TraceLoggingString(profiler_record->execution_provider_, "Execution Provider"));
  } else {
    TraceLoggingWrite(
        winml_trace_logging_provider,
        "OnnxRuntimeProfiling",
        TraceLoggingKeyword(WINML_PROVIDER_KEYWORD_LOTUS_PROFILING),
        TraceLoggingLevel(WINEVENT_LEVEL_VERBOSE),
        TraceLoggingOpcode(EVENT_TRACE_TYPE_INFO),
        TraceLoggingString(profiler_record->category_name_, "Category"),
        TraceLoggingInt64(profiler_record->duration_, "Duration (us)"),
        TraceLoggingInt64(profiler_record->time_span_, "Time Stamp (us)"),
        TraceLoggingString(profiler_record->event_name_, "Event Name"),
        TraceLoggingInt32(profiler_record->process_id_, "Process ID"),
        TraceLoggingInt32(profiler_record->thread_id_, "Thread ID"));
  }
}

// CoreApplication suspend handler: records a telemetry event on app suspend.
static void OnSuspending(winrt::Windows::Foundation::IInspectable const& sender, winrt::Windows::ApplicationModel::SuspendingEventArgs const& args) {
  telemetry_helper.LogWinMLSuspended();
}

void OnnxruntimeEnvironment::RegisterSuspendHandler() {
  try {
    auto suspend_event_handler = winrt::Windows::Foundation::EventHandler<winrt::Windows::ApplicationModel::SuspendingEventArgs>(&OnSuspending);
    suspend_token_ = winrt::Windows::ApplicationModel::Core::CoreApplication::Suspending(suspend_event_handler);
  } catch (...) {
  }  //Catch in case CoreApplication cannot be found for non-UWP executions
}

// Creates the ORT environment, tags it with the WinML language projection,
// installs the logging/profiling sinks above, and registers the suspend
// handler. Environment creation order matters: logger/profiler configuration
// requires the env to exist first.
OnnxruntimeEnvironment::OnnxruntimeEnvironment(const OrtApi* ort_api) : ort_env_(nullptr, nullptr) {
  OrtEnv* ort_env = nullptr;
  THROW_IF_NOT_OK_MSG(ort_api->CreateEnv(OrtLoggingLevel::ORT_LOGGING_LEVEL_VERBOSE, "Default", &ort_env),
                      ort_api);
  THROW_IF_NOT_OK_MSG(ort_api->SetLanguageProjection(ort_env, OrtLanguageProjection::ORT_PROJECTION_WINML),
                      ort_api);
  ort_env_ = UniqueOrtEnv(ort_env, ort_api->ReleaseEnv);

  // Configure the environment with the winml logger
  auto winml_adapter_api = GetVersionedWinmlAdapterApi(ort_api);
  THROW_IF_NOT_OK_MSG(winml_adapter_api->EnvConfigureCustomLoggerAndProfiler(ort_env_.get(),
                                                                             &WinmlOrtLoggingCallback,
                                                                             &WinmlOrtProfileEventCallback,
                                                                             nullptr,
                                                                             OrtLoggingLevel::ORT_LOGGING_LEVEL_VERBOSE,
                                                                             "Default",
                                                                             &ort_env),
                      ort_api);

  THROW_IF_NOT_OK_MSG(winml_adapter_api->OverrideSchema(), ort_api);

  // Register suspend handler for UWP applications
  RegisterSuspendHandler();
}

OnnxruntimeEnvironment::~OnnxruntimeEnvironment() {
  // Revoke the suspend handler registered in the constructor (token overload
  // of CoreApplication::Suspending unsubscribes).
  if (suspend_token_) {
    winrt::Windows::ApplicationModel::Core::CoreApplication::Suspending(suspend_token_);
  }
}

// Exposes the owned OrtEnv; caller must not release it (ownership retained).
HRESULT OnnxruntimeEnvironment::GetOrtEnvironment(_Out_ OrtEnv** ort_env) {
  *ort_env = ort_env_.get();
  return S_OK;
}

// Toggles mirroring of ORT log messages to OutputDebugString.
HRESULT OnnxruntimeEnvironment::EnableDebugOutput(bool is_enabled) {
  debug_output_ = is_enabled;
  return S_OK;
}
1,039
package com.android.gpstest.chart; import com.github.mikephil.charting.components.AxisBase; import com.github.mikephil.charting.components.XAxis; import com.github.mikephil.charting.data.Entry; import com.github.mikephil.charting.formatter.IAxisValueFormatter; import com.github.mikephil.charting.formatter.IValueFormatter; import com.github.mikephil.charting.utils.ViewPortHandler; import java.text.DecimalFormat; public class DistanceValueFormatter implements IValueFormatter, IAxisValueFormatter { private final DecimalFormat mFormat; private String mSuffix; public DistanceValueFormatter(String suffix) { mFormat = new DecimalFormat(); mFormat.setMaximumFractionDigits(0); mSuffix = suffix; } @Override public String getFormattedValue(float value, Entry entry, int dataSetIndex, ViewPortHandler viewPortHandler) { return mFormat.format(value) + " " + mSuffix; } @Override public String getFormattedValue(float value, AxisBase axis) { if (axis instanceof XAxis) { return mFormat.format(value); } else if (value > 0) { return mFormat.format(value) + " " + mSuffix; } else { return mFormat.format(value); } } }
482
852
<filename>CondFormats/DataRecord/interface/BTagCalibrationRcd.h<gh_stars>100-1000
#ifndef DataRecord_BTagCalibrationRcd_h
#define DataRecord_BTagCalibrationRcd_h

#include "FWCore/Framework/interface/EventSetupRecordImplementation.h"

// EventSetup record class for b-tagging calibration data.
// The class body is intentionally empty: deriving from
// EventSetupRecordImplementation<BTagCalibrationRcd> (CRTP) is all the
// framework needs to key payloads to this record type.
class BTagCalibrationRcd : public edm::eventsetup::EventSetupRecordImplementation<BTagCalibrationRcd> {};

#endif
122
2,288
<gh_stars>1000+ /* MIT (BSD) license - see LICENSE file for details */ #include "graphql.h" #include "ccan/tal/str/str.h" #include "ccan/utf8/utf8.h" /* GraphQL character classes * * These definitions are meant to reflect the GraphQL specification as * literally as possible. */ #define SOURCE_CHAR(c) ((c) == '\t' || (c) == '\n' || (c) == '\r' || ((c) >= 32 && (c) <= 65535)) #define WHITE_SPACE(c) ((c) == '\t' || (c) == ' ') #define LINE_TERMINATOR(c) ((c) == '\n' || (c) == '\r') #define COMMENT(c) ((c) == '#') #define COMMENT_CHAR(c) (SOURCE_CHAR(c) && !LINE_TERMINATOR(c)) #define STRING_CHAR(c) (SOURCE_CHAR(c) && !LINE_TERMINATOR(c) && (c)!='"' && (c)!='\\') #define BLOCK_STRING_CHAR(c) (SOURCE_CHAR(c)) #define COMMA(c) ((c) == ',') #define EOF_CHAR(c) ((c) == 0 || (c) == 4) #define PUNCTUATOR(c) (strchr("!$&().:=@[]{|}", c)) #define HEX_DIGIT(c) (DIGIT(c) || ((c) >= 'a' && (c) <= 'f') || ((c) >= 'A' && (c) <= 'F')) #define DIGIT(c) ((c) >= '0' && (c) <= '9') #define NAME_START(c) (((c) >= 'a' && (c) <= 'z') || ((c) >= 'A' && (c) <= 'Z') || (c) == '_') #define NAME_CONTINUE(c) (NAME_START(c) || DIGIT(c)) // Helper for copying an overlapping string, since strcpy() is not safe for that #define cpystr(d,s) { char *cpystr_p; char *cpystr_q; for(cpystr_p = (s), cpystr_q = (d); *cpystr_p;) *cpystr_q++ = *cpystr_p++; *cpystr_q++ = *cpystr_p++; } /* Parser shorthands * * These shorthands are motivated by the parser functions, so they can be * written in a format that corresponds closely to the specification. 
*/ #define RET static void * #define PARAMS struct list_head *tokens, struct list_head *used, const char **err #define ARGS tokens, used, err #define INIT(type) \ struct graphql_token *rollback_top = list_top(tokens, struct graphql_token, node); \ struct graphql_##type *obj = talz(tokens, struct graphql_##type); \ (void)rollback_top; /* avoids unused variable warning */ \ #define EXIT \ goto exit_label; /* avoids unused label warning */ \ exit_label: \ if (*err) obj = tal_free(obj); \ return obj; \ #define CONSUME_ONE list_add(used, &list_pop(tokens, struct graphql_token, node)->node); #define RESTORE_ONE list_add(tokens, &list_pop(used, struct graphql_token, node)->node); #define ROLLBACK(args) while (list_top(tokens, struct graphql_token, node) != rollback_top) { RESTORE_ONE; } #define OR if (!*err) goto exit_label; *err = NULL; #define REQ if (*err) { ROLLBACK(args); goto exit_label; } #define OPT *err = NULL; #define WHILE_OPT while(!*err); *err = NULL; #define LOOKAHEAD(args, tok) struct graphql_token *tok = list_top(tokens, struct graphql_token, node); #define MSG(msg) if (*err) *err = msg; /* The following parser functions are written in a way that corresponds to the * grammar defined in the GraphQL specification. The code is not intended to * look like normal C code; it's designed for parsing clarity rather than C * style. Think of it as something generated rather than something to read. * For that reason, the functions follow special rules: * * - The declaration is standardized with RET and PARAMS * - The "err" argument is assumed to be NULL upon entrance * - The "err" argument is set on failure * - If the function fails to parse, then "tokens" shall be as it was upon entrance * - INIT and EXIT macros are used * - Macros such as REQ and OPT facilitate readability and conciseness */ /* The following functions construct the "leaves" of the abstract syntax tree. 
*/ RET parse_keyword(PARAMS, const char *keyword, const char *errmsg) { struct graphql_token *tok = list_top(tokens, struct graphql_token, node); if (!tok || tok->token_type != 'a') { *err = errmsg; return NULL; } if (!streq(tok->token_string, keyword)) { *err = errmsg; return NULL; } CONSUME_ONE; return tok; } // Note: a static buffer is used here. RET parse_punct(PARAMS, int punct) { static char punctbuf[16]; struct graphql_token *tok = list_top(tokens, struct graphql_token, node); if (!tok || tok->token_type != punct) { if (punct == PUNCT_SPREAD) sprintf(punctbuf, "expected: '...'"); else sprintf(punctbuf, "expected: '%c'", punct); *err = punctbuf; return NULL; } CONSUME_ONE; return tok; } RET parse_name(PARAMS) { struct graphql_token *tok = list_top(tokens, struct graphql_token, node); if (!tok || tok->token_type != 'a') { *err = "name expected"; return NULL; } CONSUME_ONE; return tok; } RET parse_int(PARAMS) { struct graphql_token *tok = list_top(tokens, struct graphql_token, node); if (!tok || tok->token_type != 'i') { *err = "integer expected"; return NULL; } CONSUME_ONE; return tok; } RET parse_float(PARAMS) { struct graphql_token *tok = list_top(tokens, struct graphql_token, node); if (!tok || tok->token_type != 'f') { *err = "float expected"; return NULL; } CONSUME_ONE; return tok; } RET parse_string(PARAMS) { struct graphql_token *tok = list_top(tokens, struct graphql_token, node); if (!tok || tok->token_type != 's') { *err = "string expected"; return NULL; } CONSUME_ONE; return tok; } // The following functions create the branches of the AST. 
/* RET parse_non_null_type_2(PARAMS) { INIT(non_null_type); parse_list_type(ARGS); REQ; parse_punct(ARGS, '!'); REQ; EXIT; } RET parse_non_null_type_1(PARAMS) { INIT(non_null_type); parse_named_type(ARGS); REQ; parse_punct(ARGS, '!'); REQ; EXIT; } RET parse_non_null_type(PARAMS) { INIT(non_null_type); parse_non_null_type_1(ARGS); OR parse_non_null_type_2(ARGS); EXIT; } RET parse_list_type(PARAMS) { INIT(list_type); parse_punct(ARGS, '['); REQ parse_type(ARGS); REQ parse_punct(ARGS, ']'); REQ EXIT; } */ RET parse_named_type(PARAMS) { INIT(named_type); obj->name = parse_name(ARGS); EXIT; } RET parse_type(PARAMS) { INIT(type); obj->named = parse_named_type(ARGS); /* OR obj->list = parse_list_type(ARGS); OR obj->non_null = parse_non_null_type(ARGS); */ EXIT; } RET parse_variable(PARAMS) { INIT(variable); parse_punct(ARGS, '$'); REQ obj->name = parse_name(ARGS); REQ EXIT; } RET parse_value(PARAMS); RET parse_list_value(PARAMS) { INIT(list_value); parse_punct(ARGS, '['); REQ parse_punct(ARGS, ']'); while (*err) { *err = NULL; parse_value(ARGS); MSG("expected: value or ']'"); REQ parse_punct(ARGS, ']'); } EXIT; } RET parse_enum_value(PARAMS) { INIT(enum_value); obj->val = parse_name(ARGS); REQ struct graphql_token *tok = list_top(used, struct graphql_token, node); if (streq(tok->token_string, "true") || streq(tok->token_string, "false") || streq(tok->token_string, "null")) { *err = "enum value cannot be true, false, or null"; ROLLBACK(ARGS); } EXIT; } RET parse_null_value(PARAMS) { INIT(null_value); obj->val = parse_keyword(ARGS, "null", "null expected"); EXIT; } RET parse_string_value(PARAMS) { INIT(string_value); obj->val = parse_string(ARGS); EXIT; } RET parse_boolean_value(PARAMS) { INIT(boolean_value); obj->val = parse_keyword(ARGS, "true", "invalid boolean value"); OR obj->val = parse_keyword(ARGS, "false", "invalid boolean value"); EXIT; } RET parse_float_value(PARAMS) { INIT(float_value); obj->val = parse_float(ARGS); EXIT; } RET parse_int_value(PARAMS) { 
INIT(int_value); obj->val = parse_int(ARGS); EXIT; } RET parse_object_field(PARAMS) { INIT(object_field); obj->name = parse_name(ARGS); REQ parse_punct(ARGS, ':'); REQ obj->val = parse_value(ARGS); REQ EXIT; } RET parse_object_value(PARAMS) { INIT(object_value); parse_punct(ARGS, '{'); REQ parse_punct(ARGS, '}'); struct graphql_object_field *p = NULL; while (*err) { *err = NULL; if (!p) { obj->first = p = parse_object_field(ARGS); MSG("expected: object field or '}'"); REQ } else { p->next = parse_object_field(ARGS); MSG("expected: object field or '}'"); REQ p = p->next; } parse_punct(ARGS, '}'); } EXIT; } RET parse_default_value(PARAMS) { INIT(default_value); parse_punct(ARGS, '='); REQ obj->val = parse_value(ARGS); REQ EXIT; } RET parse_value(PARAMS) { INIT(value); obj->var = parse_variable(ARGS); // FIXME: if not const OR obj->int_val = parse_int_value(ARGS); OR obj->float_val = parse_float_value(ARGS); OR obj->str_val = parse_string_value(ARGS); OR obj->bool_val = parse_boolean_value(ARGS); OR obj->null_val = parse_null_value(ARGS); OR obj->enum_val = parse_enum_value(ARGS); OR obj->list_val = parse_list_value(ARGS); OR obj->obj_val = parse_object_value(ARGS); EXIT; } RET parse_type_condition(PARAMS) { INIT(type_condition); parse_keyword(ARGS, "on", "expected: 'on'"); REQ obj->named_type = parse_named_type(ARGS); REQ EXIT; } RET parse_fragment_name(PARAMS) { INIT(fragment_name); obj->name = parse_name(ARGS); REQ struct graphql_token *tok = list_top(used, struct graphql_token, node); if (streq(tok->token_string, "on")) { *err = "invalid fragment name"; ROLLBACK(ARGS); } EXIT; } RET parse_alias(PARAMS) { INIT(alias); obj->name = parse_name(ARGS); REQ parse_punct(ARGS, ':'); REQ EXIT; } RET parse_argument(PARAMS) { INIT(argument); obj->name = parse_name(ARGS); REQ parse_punct(ARGS, ':'); REQ obj->val = parse_value(ARGS); REQ EXIT; } RET parse_arguments(PARAMS) { INIT(arguments); parse_punct(ARGS, '('); REQ obj->first = parse_argument(ARGS); REQ struct 
graphql_argument *p = obj->first; parse_punct(ARGS, ')'); while (*err) { *err = NULL; p->next = parse_argument(ARGS); MSG("expected: argument or ')'"); REQ; p = p->next; parse_punct(ARGS, ')'); } EXIT; } RET parse_directive(PARAMS) { INIT(directive); parse_punct(ARGS, '@'); REQ obj->name = parse_name(ARGS); REQ obj->args = parse_arguments(ARGS); OPT EXIT; } RET parse_directives(PARAMS) { INIT(directives); obj->first = parse_directive(ARGS); REQ struct graphql_directive *p = obj->first; do { p->next = parse_directive(ARGS); p = p->next; } WHILE_OPT; EXIT; } RET parse_fragment_spread(PARAMS) { INIT(fragment_spread); parse_punct(ARGS, PUNCT_SPREAD); REQ obj->name = parse_fragment_name(ARGS); REQ obj->directives = parse_directives(ARGS); OPT EXIT; } RET parse_variable_definition(PARAMS) { INIT(variable_definition); obj->var = parse_variable(ARGS); REQ parse_punct(ARGS, ':'); REQ obj->type = parse_type(ARGS); REQ obj->default_val = parse_default_value(ARGS); OPT obj->directives = parse_directives(ARGS); OPT EXIT; } RET parse_variable_definitions(PARAMS) { INIT(variable_definitions); parse_punct(ARGS, '('); REQ obj->first = parse_variable_definition(ARGS); REQ struct graphql_variable_definition *p = obj->first; parse_punct(ARGS, ')'); while (*err) { *err = NULL; p->next = parse_variable_definition(ARGS); MSG("expected: variable definition or ')'"); REQ p = p->next; parse_punct(ARGS, ')'); } EXIT; } RET parse_selection_set(PARAMS); RET parse_fragment_definition(PARAMS) { INIT(fragment_definition); parse_keyword(ARGS, "fragment", "fragment expected"); REQ obj->name = parse_fragment_name(ARGS); REQ obj->type_cond = parse_type_condition(ARGS); REQ obj->directives = parse_directives(ARGS); OPT obj->sel_set = parse_selection_set(ARGS); REQ EXIT; } RET parse_inline_fragment(PARAMS) { INIT(inline_fragment); parse_punct(ARGS, PUNCT_SPREAD); REQ obj->type_cond = parse_type_condition(ARGS); OPT obj->directives = parse_directives(ARGS); OPT obj->sel_set = parse_selection_set(ARGS); 
REQ EXIT; } RET parse_field(PARAMS) { INIT(field); obj->alias = parse_alias(ARGS); OPT obj->name = parse_name(ARGS); REQ obj->args = parse_arguments(ARGS); OPT obj->directives = parse_directives(ARGS); OPT obj->sel_set = parse_selection_set(ARGS); OPT EXIT; } RET parse_selection(PARAMS) { INIT(selection); obj->field = parse_field(ARGS); OR obj->frag_spread = parse_fragment_spread(ARGS); OR obj->inline_frag = parse_inline_fragment(ARGS); MSG("expected: field, fragment spread, or inline fragment"); EXIT; } RET parse_selection_set(PARAMS) { INIT(selection_set); parse_punct(ARGS, '{'); REQ; obj->first = parse_selection(ARGS); REQ; struct graphql_selection *p = obj->first; parse_punct(ARGS, '}'); while (*err) { *err = NULL; p->next = parse_selection(ARGS); MSG("expected: selection or '}'"); REQ; p = p->next; parse_punct(ARGS, '}'); } EXIT; } RET parse_operation_type(PARAMS) { INIT(operation_type); const char *errmsg = "expected: query, mutation, or subscription"; obj->op_type = parse_keyword(ARGS, "query", errmsg); OR obj->op_type = parse_keyword(ARGS, "mutation", errmsg); OR obj->op_type = parse_keyword(ARGS, "subscription", errmsg); EXIT; } RET parse_operation_definition(PARAMS) { INIT(operation_definition); obj->op_type = parse_operation_type(ARGS); if (!*err) { obj->op_name = parse_name(ARGS); OPT obj->vars = parse_variable_definitions(ARGS); OPT obj->directives = parse_directives(ARGS); OPT } else *err = NULL; obj->sel_set = parse_selection_set(ARGS); if (*err) ROLLBACK(ARGS); EXIT; } RET parse_executable_definition(PARAMS) { INIT(executable_definition); obj->op_def = parse_operation_definition(ARGS); MSG("invalid operation or fragment definition"); OR obj->frag_def = parse_fragment_definition(ARGS); MSG("invalid operation or fragment definition"); EXIT; } RET parse_executable_document(PARAMS) { INIT(executable_document); obj->first_def = parse_executable_definition(ARGS); REQ struct graphql_executable_definition *p = obj->first_def; do { p->next_def = 
parse_executable_definition(ARGS); p = p->next_def; } WHILE_OPT; EXIT; } RET parse_definition(PARAMS) { INIT(definition); obj->executable_def = parse_executable_definition(ARGS); /* OR obj->type_system_def = parse_type_system_definition_or_extension(ARGS); // NOTE: Optional type system is not (yet) implemented. */ EXIT; } RET parse_document(PARAMS) { INIT(document); obj->first_def = parse_definition(ARGS); REQ struct graphql_definition *p = obj->first_def; do { p->next_def = parse_definition(ARGS); p = p->next_def; } WHILE_OPT; EXIT; } void *currently_unused = parse_document; // to hide the warning till this is used /* Convert input string into tokens. * * All data (i.e. the list and the tokens it contains) are allocated to the * specified tal context. */ const char *graphql_lex(const tal_t *ctx, const char *input, struct list_head **tokens) { unsigned int c; const char *p, *line_beginning; unsigned int line_num = 1; struct list_head *tok_list; struct graphql_token *tok; // Initialize token output list. tok_list = tal(ctx, struct list_head); if (tokens) *tokens = tok_list; list_head_init(tok_list); // Note: label and goto are used here like a continue statement except that // it skips iteration, for when characters are fetched in the loop body. p = input; line_beginning = p; do { c = *p++; newchar: // Consume line terminators and increment line counter. if (LINE_TERMINATOR(c)) { unsigned int c0 = c; c = *p++; if (c0 == 10 || c0 == 13) line_num++; if (c0 == 13 && c == 10) c = *p++; line_beginning = p - 1; goto newchar; } // Consume other ignored tokens. if (COMMA(c) || WHITE_SPACE(c)) { c = *p++; goto newchar; } if (COMMENT(c)) { while (!EOF_CHAR(c) && COMMENT_CHAR(c)) c = *p++; goto newchar; } // Return success when end is reached. if (EOF_CHAR(c)) return GRAPHQL_SUCCESS; // Punctuator tokens. if (PUNCTUATOR(c)) { // Note beginning of token in input. const char *start = p - 1; // Handle the ... multi-character case. 
if (c == '.') { c = *p++; if (c != '.') return "unrecognized punctuator"; c = *p++; if (c != '.') return "unrecognized punctuator"; c = PUNCT_SPREAD; } tok = talz(tok_list, struct graphql_token); list_add_tail(tok_list, &tok->node); tok->token_type = c; tok->token_string = NULL; tok->source_line = line_num; tok->source_column = start - line_beginning + 1; tok->source_offset = start - input; tok->source_len = p - start; } else if (NAME_START(c)) { // Name/identifier tokens. tok = talz(tok_list, struct graphql_token); list_add_tail(tok_list, &tok->node); tok->token_type = 'a'; // tok->token_string updated below. tok->source_line = line_num; tok->source_column = p - line_beginning; // tok->source_len updated below. // Note the beginning of the name. const char *name_begin = p - 1; const char *name_end; int name_len; // Consume the rest of the token. do { c = *p++; } while (NAME_CONTINUE(c)); // Note the end of the name and calculate the length. name_end = p - 1; name_len = name_end - name_begin; tok->source_offset = name_begin - input; tok->source_len = name_len; // Copy the token string. tok->token_string = tal_strndup(tok, name_begin, name_len); goto newchar; } else if (DIGIT(c) || c == '-') { // Number tokens. const char *num_start = p - 1; char type = 'i'; if (c == '-') { c = *p++; if (!DIGIT(c)) return "negative sign must precede a number"; } if (c == '0') { c = *p++; if (DIGIT(c)) return "leading zeros are not allowed"; } else { do { c = *p++; } while(DIGIT(c)); } if (c == '.') { type = 'f'; if (!DIGIT(*p)) return "invalid float value fractional part"; do { c = *p++; } while(DIGIT(c)); } if (c == 'e' || c == 'E') { type = 'f'; c = *p++; if (c == '+' || c == '-') c = *p++; if (!DIGIT(*p)) return "invalid float value exponent part"; do { c = *p++; } while(DIGIT(c)); } if (c == '.' 
|| NAME_START(c)) return "invalid numeric value"; const char *num_end = p - 1; int num_len = num_end - num_start; tok = talz(tok_list, struct graphql_token); list_add_tail(tok_list, &tok->node); tok->token_type = type; tok->token_string = tal_strndup(tok, num_start, num_len); tok->source_line = line_num; tok->source_column = num_start - line_beginning + 1; tok->source_offset = num_start - input; tok->source_len = num_len; goto newchar; } else if (c == '"') { // String tokens. c = *p++; const char *str_begin = p - 1; const char *str_end; bool str_block = false; if (c == '"') { c = *p++; if (c == '"') { // block string str_block = true; str_begin += 2; int quotes = 0; do { c = *p++; if (c == '\"') quotes++; else quotes = 0; if (quotes == 3 && *(p-4) == '\\') quotes = 0; } while (BLOCK_STRING_CHAR(c) && quotes < 3); if (quotes == 3) { c = *--p; c = *--p; } str_end = p - 1; if (c != '"') return "unterminated string or invalid character"; c = *p++; if (c != '"') return "invalid string termination"; c = *p++; if (c != '"') return "invalid string termination"; } else { // empty string str_end = str_begin; --p; } } else { // normal string --p; do { c = *p++; if (c == '\\') { c = *p++; if (strchr("\"\\/bfnrtu", c)) { if (c == 'u') { c = *p++; if (!HEX_DIGIT(c)) return "invalid unicode escape sequence"; c = *p++; if (!HEX_DIGIT(c)) return "invalid unicode escape sequence"; c = *p++; if (!HEX_DIGIT(c)) return "invalid unicode escape sequence"; c = *p++; if (!HEX_DIGIT(c)) return "invalid unicode escape sequence"; } else { c = 'a'; // anything besides a quote to let the loop continue } } else { return "invalid string escape sequence"; } } } while (STRING_CHAR(c)); if (c != '"') return "unterminated string or invalid character"; str_end = p - 1; } int str_len = str_end - str_begin; tok = talz(tok_list, struct graphql_token); list_add_tail(tok_list, &tok->node); tok->token_type = 's'; tok->token_string = tal_strndup(tok, str_begin, str_len); tok->source_line = line_num; 
tok->source_column = str_begin - line_beginning + 1; tok->source_offset = str_begin - input; tok->source_len = str_len; // Process escape sequences. These always shorten the string (so the memory allocation is always enough). char d; char *q = tok->token_string; char *rewrite_dest; int quotes = 0; while ((d = *q++)) { if (str_block) { if (d == '\"') quotes++; else quotes = 0; if (quotes == 3 && *(q-4) == '\\') { quotes = 0; rewrite_dest = q - 4; cpystr(rewrite_dest, q - 3); } } else { if (d == '\\') { rewrite_dest = q - 1; d = *q++; switch (d) { case '\"': *rewrite_dest++ = '\"'; cpystr(rewrite_dest, q--); break; case 'b': *rewrite_dest++ = '\b'; cpystr(rewrite_dest, q--); break; case 'f': *rewrite_dest++ = '\f'; cpystr(rewrite_dest, q--); break; case 'n': *rewrite_dest++ = '\n'; cpystr(rewrite_dest, q--); break; case 'r': *rewrite_dest++ = '\r'; cpystr(rewrite_dest, q--); break; case 't': *rewrite_dest++ = '\t'; cpystr(rewrite_dest, q--); break; case 'u': { // Insert escaped character using UTF-8 multi-byte encoding. char buf[5], *b = buf; for (int i = 0; i < 4; i++) *b++ = *q++; *b = 0; int code_point = strtol(buf, 0, 16); int bytes = utf8_encode(code_point, rewrite_dest); // note: if bytes == 0 // due to encoding failure, // the following will safely // eliminate the invalid char. rewrite_dest += bytes; cpystr(rewrite_dest, q--); } break; default: cpystr(rewrite_dest, --q); } } } } if (str_block) { // Strip leading lines. q = tok->token_string; for (;;) { d = *q++; while (WHITE_SPACE(d)) d = *q++; if (LINE_TERMINATOR(d)) { while (LINE_TERMINATOR(d)) d = *q++; cpystr(tok->token_string, q - 1); q = tok->token_string; } else break; } // Strip trailing lines. q = tok->token_string + strlen(tok->token_string); for (;;) { d = *--q; while (WHITE_SPACE(d)) d = *--q; if (LINE_TERMINATOR(d)) { while (LINE_TERMINATOR(d)) d = *--q; *++q = 0; } else break; } // Look for common indentation. 
char *this_indent_start; const char *this_indent_end; const char *common_indent_start = NULL; const char *common_indent_end = common_indent_start; const char *r; q = tok->token_string; do { d = *q++; this_indent_start = q - 1; while (WHITE_SPACE(d)) d = *q++; this_indent_end = q - 1; if (LINE_TERMINATOR(d)) { while (LINE_TERMINATOR(d)) d = *q++; continue; } if (EOF_CHAR(d)) continue; if (common_indent_start == NULL) { common_indent_start = this_indent_start; common_indent_end = this_indent_end; } for (r = this_indent_start; r < this_indent_end && (r - this_indent_start + common_indent_start < common_indent_end); r++) { if (*r != *(r - this_indent_start + common_indent_start)) break; } common_indent_end = r - this_indent_start + common_indent_start; while (!LINE_TERMINATOR(d) && !EOF_CHAR(d)) d = *q++; while (LINE_TERMINATOR(d)) d = *q++; --q; } while (d); // Remove common indentation. int common_indent_len = common_indent_end - common_indent_start; if (common_indent_len > 0) { q = tok->token_string; do { d = *q++; this_indent_start = q - 1; while (WHITE_SPACE(d)) d = *q++; this_indent_end = q - 1; if (LINE_TERMINATOR(d)) { while (LINE_TERMINATOR(d)) d = *q++; continue; } if (EOF_CHAR(d)) continue; while (!LINE_TERMINATOR(d) && !EOF_CHAR(d)) d = *q++; --q; cpystr(this_indent_start, this_indent_start + common_indent_len); q -= common_indent_len; d = *q++; while (LINE_TERMINATOR(d)) d = *q++; --q; } while (d); } } c = *p++; goto newchar; } else { return "invalid source character encountered"; } } while (!EOF_CHAR(c)); return "unexpected end-of-input encountered"; } // Convert lexed tokens into AST. const char *graphql_parse(struct list_head *tokens, struct graphql_executable_document **doc) { struct list_head used = LIST_HEAD_INIT(used); const char *err = NULL; *doc = parse_executable_document(tokens, &used, &err); return err; } // Convert input string into AST. 
const char *graphql_lexparse(const tal_t *ctx, const char *input, struct list_head **tokens, struct graphql_executable_document **doc) { const char *err = graphql_lex(ctx, input, tokens); if (!err) err = graphql_parse(*tokens, doc); return err; }
11,126
32,544
<reponame>zeesh49/tutorials
package com.baeldung.petstore.app;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Import;

/**
 * Spring Boot entry point for the pet-store application.
 *
 * <p>{@code @Import} pulls in {@link PetStoreIntegrationConfig} so its bean
 * definitions are registered alongside the component scan triggered by
 * {@code @SpringBootApplication}.
 */
@SpringBootApplication
@Import(PetStoreIntegrationConfig.class)
public class PetStoreApplication {

	/** Boots the Spring application context. */
	public static void main(String[] args) throws Exception {
		SpringApplication.run(PetStoreApplication.class, args);
	}
}
146
1,738
<filename>dev/Code/Deprecated/Sandbox/Editor/FacialEditor/Vicon/Vicon_ClientCodes.cpp /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // Original file Copyright Crytek GMBH or its affiliates, used under license. // Description : Integration of Vicon into cryengine. // Client codes for Vicon SDK #include "StdAfx.h" //----------------------------------------------------------------------------- // ClientCodes //----------------------------------------------------------------------------- #include "Vicon_ClientCodes.h" #ifndef DISABLE_VICON const std::vector< string > ClientCodes::MarkerTokens = MakeMarkerTokens(); const std::vector< string > ClientCodes::BodyTokens = MakeBodyTokens(); #include <WinSock2.h> #define NRESULT uint32 #define NET_OK 0x00000000 #define NET_FAIL 0x80000000 #define NET_FAILED(a)(((a)&NET_FAIL)?1:0) #define NET_SUCCEDED(a)(((a)&NET_FAIL)?0:1) #define MAKE_NRESULT(severity, facility, code)(severity | facility | code) #define NET_FACILITY_SOCKET 0x01000000 //! 
regular BSD/UNIX error (errno) //@{ #define NET_EINTR MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEINTR) #define NET_EBADF MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEBADF) #define NET_EACCES MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEACCES) #define NET_EFAULT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEFAULT) #define NET_EINVAL MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEINVAL) #define NET_EMFILE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEMFILE) #define NET_WSAEINTR MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEINTR) #define NET_WSAEBADF MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEBADF) #define NET_WSAEACCES MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEACCES) #define NET_WSAEFAULT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEFAULT) #define NET_WSAEINVAL MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEINVAL) #define NET_WSAEMFILE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEMFILE) #define NET_EWOULDBLOCK MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEWOULDBLOCK) #define NET_EINPROGRESS MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEINPROGRESS) #define NET_EALREADY MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEALREADY) #define NET_ENOTSOCK MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENOTSOCK) #define NET_EDESTADDRREQ MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEDESTADDRREQ) #define NET_EMSGSIZE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEMSGSIZE) #define NET_EPROTOTYPE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEPROTOTYPE) #define NET_ENOPROTOOPT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENOPROTOOPT) #define NET_EPROTONOSUPPORT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEPROTONOSUPPORT) #define NET_ESOCKTNOSUPPORT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAESOCKTNOSUPPORT) #define NET_EOPNOTSUPP MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEOPNOTSUPP) #define NET_EPFNOSUPPORT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEPFNOSUPPORT) #define NET_EAFNOSUPPORT MAKE_NRESULT(NET_FAIL, 
NET_FACILITY_SOCKET, WSAEAFNOSUPPORT) #define NET_EADDRINUSE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEADDRINUSE) #define NET_EADDRNOTAVAIL MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEADDRNOTAVAIL) #define NET_ENETDOWN MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENETDOWN) #define NET_ENETUNREACH MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENETUNREACH) #define NET_ENETRESET MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENETRESET) #define NET_ECONNABORTED MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAECONNABORTED) #define NET_ECONNRESET MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAECONNRESET) #define NET_ENOBUFS MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENOBUFS) #define NET_EISCONN MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEISCONN) #define NET_ENOTCONN MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENOTCONN) #define NET_ESHUTDOWN MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAESHUTDOWN) #define NET_ETOOMANYREFS MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAETOOMANYREFS) #define NET_ETIMEDOUT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAETIMEDOUT) #define NET_ECONNREFUSED MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAECONNREFUSED) #define NET_ELOOP MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAELOOP) #define NET_ENAMETOOLONG MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENAMETOOLONG) #define NET_EHOSTDOWN MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEHOSTDOWN) #define NET_EHOSTUNREACH MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEHOSTUNREACH) #define NET_ENOTEMPTY MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAENOTEMPTY) #define NET_EPROCLIM MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEPROCLIM) #define NET_EUSERS MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEUSERS) #define NET_EDQUOT MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEDQUOT) #define NET_ESTALE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAESTALE) #define NET_EREMOTE MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEREMOTE) //@} //! 
regular BSD/UNIX netdb error (h_errno) // the error code is stored with a bias NET_H_ERRNO_BIAS to avoid a conflict // with the errno codes. //@{ #define NET_H_ERRNO_BIAS (1024) #define NET_HOST_NOT_FOUND MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAHOST_NOT_FOUND) #define NET_TRY_AGAIN MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSATRY_AGAIN) #define NET_NO_RECOVERY MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSANO_RECOVERY) #define NET_NO_DATA MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSANO_DATA) #define NET_NO_ADDRESS MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSANO_ADDRESS) //@} #ifdef _WIN32 //! extended winsock errors //@{ #define NET_SYSNOTREADY MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSASYSNOTREADY) #define NET_VERNOTSUPPORTED MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAVERNOTSUPPORTED) #define NET_NOTINITIALISED MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSANOTINITIALISED) #define NET_EDISCON MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, WSAEDISCON) //@} #endif // CryNet specific errors and messages #define NET_FACILITY_CRYNETWORK 0x02000000 #define NET_NOIMPL MAKE_NRESULT(NET_FAIL, NET_FACILITY_CRYNETWORK, 0x01) #define NET_SOCKET_NOT_CREATED MAKE_NRESULT(NET_FAIL, NET_FACILITY_CRYNETWORK, 0x02) struct tNetError tNetErrors[]= { {NET_OK, "No Error"}, {NET_FAIL, "Generic Error"}, // SOCKET {NET_EINTR, "WSAEINTR - interrupted function call"}, {NET_EBADF, "WSAEBADF - Bad file number"}, {NET_EACCES, "WSAEACCES - error in accessing socket"}, {NET_EFAULT, "WSAEFAULT - bad address"}, {NET_EINVAL, "WSAEINVAL - invalid argument"}, {NET_EMFILE, "WSAEMFILE - too many open files"}, {NET_EWOULDBLOCK, "WSAEWOULDBLOCK - resource temporarily unavailable"}, {NET_EINPROGRESS, "WSAEINPROGRESS - operation now in progress"}, {NET_EALREADY, "WSAEALREADY - operation already in progress"}, {NET_ENOTSOCK, "WSAENOTSOCK - socket operation on non-socket"}, {NET_EDESTADDRREQ, "WSAEDESTADDRREQ - destination address required"}, {NET_EMSGSIZE, "WSAEMSGSIZE - message to long"}, 
{NET_EPROTOTYPE, "WSAEPROTOTYPE - protocol wrong type for socket"}, {NET_ENOPROTOOPT, "WSAENOPROTOOPT - bad protocol option"}, {NET_EPROTONOSUPPORT, "WSAEPROTONOSUPPORT - protocol not supported"}, {NET_ESOCKTNOSUPPORT, "WSAESOCKTNOSUPPORT - socket type not supported"}, {NET_EOPNOTSUPP, "WSAEOPNOTSUPP - operation not supported"}, {NET_EPFNOSUPPORT, "WSAEPFNOSUPPORT - protocol family not supported"}, {NET_EAFNOSUPPORT, "WSAEAFNOSUPPORT - address family not supported by protocol"}, {NET_EADDRINUSE, "WSAEADDRINUSE - address is in use"}, {NET_EADDRNOTAVAIL, "WSAEADDRNOTAVAIL - address is not valid in context"}, {NET_ENETDOWN, "WSAENETDOWN - network is down"}, {NET_ENETUNREACH, "WSAENETUNREACH - network is unreachable"}, {NET_ENETRESET, "WSAENETRESET - network dropped connection on reset"}, {NET_ECONNABORTED, "WSACONNABORTED - software caused connection aborted"}, {NET_ECONNRESET, "WSAECONNRESET - connection reset by peer"}, {NET_ENOBUFS, "WSAENOBUFS - no buffer space available"}, {NET_EISCONN, "WSAEISCONN - socket is already connected"}, {NET_ENOTCONN, "WSAENOTCONN - socket is not connected"}, {NET_ESHUTDOWN, "WSAESHUTDOWN - cannot send after socket shutdown"}, {NET_ETOOMANYREFS, "WSAETOOMANYREFS - Too many references: cannot splice"}, {NET_ETIMEDOUT, "WSAETIMEDOUT - connection timed out"}, {NET_ECONNREFUSED, "WSAECONNREFUSED - connection refused"}, {NET_ELOOP, "WSAELOOP - Too many levels of symbolic links"}, {NET_ENAMETOOLONG, "WSAENAMETOOLONG - File name too long"}, {NET_EHOSTDOWN, "WSAEHOSTDOWN - host is down"}, {NET_EHOSTUNREACH, "WSAEHOSTUNREACH - no route to host"}, {NET_ENOTEMPTY, "WSAENOTEMPTY - Cannot remove a directory that is not empty"}, {NET_EUSERS, "WSAEUSERS - Ran out of quota"}, {NET_EDQUOT, "WSAEDQUOT - Ran out of disk quota"}, {NET_ESTALE, "WSAESTALE - File handle reference is no longer available"}, {NET_EREMOTE, "WSAEREMOTE - Item is not available locally"}, // extended winsock errors(not BSD compliant) #ifdef _WIN32 {NET_EPROCLIM, "WSAEPROCLIM - too 
many processes"}, {NET_SYSNOTREADY, "WSASYSNOTREADY - network subsystem is unavailable"}, {NET_VERNOTSUPPORTED, "WSAVERNOTSUPPORTED - winsock.dll verison out of range"}, {NET_NOTINITIALISED, "WSANOTINITIALISED - WSAStartup not yet performed"}, {NET_NO_DATA, "WSANO_DATA - valid name, no data record of requested type"}, {NET_EDISCON, "WSAEDISCON - graceful shutdown in progress"}, #endif // extended winsock errors (corresponding to BSD h_errno) {NET_HOST_NOT_FOUND, "WSAHOST_NOT_FOUND - host not found"}, {NET_TRY_AGAIN, "WSATRY_AGAIN - non-authoritative host not found"}, {NET_NO_RECOVERY, "WSANO_RECOVERY - non-recoverable error"}, {NET_NO_DATA, "WSANO_DATA - valid name, no data record of requested type"}, {NET_NO_ADDRESS, "WSANO_ADDRESS - no address, look for MX record"}, // XNetwork specific {NET_NOIMPL, "XNetwork - Function not implemented"}, {NET_SOCKET_NOT_CREATED, "XNetwork - socket not yet created"}, {0, 0} // sentinel }; ////////////////////////////////////////////////////////////////////////// const char *CViconClient::GetErrorDescription(uint32 nError) { uint32 nRes=MAKE_NRESULT(NET_FAIL, NET_FACILITY_SOCKET, nError); int n=0; while (tNetErrors[n].sErrorDescription!='\0') { if (tNetErrors[n].nrErrorCode==nRes) return(tNetErrors[n].sErrorDescription); n++; } return (NULL); } ////////////////////////////////////////////////////////////////////////// void CViconClient::PrintErrorDescription(uint32 nError) { /* uint32 numJoints=sizeof(DefSkel)/sizeof(SDefaultJoint); const char *szRes=GetErrorDescription(nError); if (szRes) gEnv->pLog->LogError(szRes); */ } #endif
4,878
1,825
package com.github.unidbg.linux.android.dvm.apk;

/**
 * Callback for supplying the raw bytes of an APK asset to the emulated
 * Android runtime.
 */
public interface AssetResolver {

    /**
     * Resolves the named asset to its content.
     *
     * @param fileName asset path as requested by the emulated code
     * @return the asset's bytes; implementations presumably return
     *         {@code null} when the asset is unknown — confirm against callers
     */
    byte[] resolveAsset(String fileName);
}
44
403
//
//  NonMainThreadUncaughtNSException.h
//  ImpactTestMac
//
//  Created by <NAME> on 2019-10-01.
//  Copyright © 2019 Chime Systems Inc. All rights reserved.
//

#import "CrashInvocation.h"

NS_ASSUME_NONNULL_BEGIN

/// Crash-invocation declaration for the Impact test target. From its name,
/// this case appears to raise an uncaught NSException from a thread other
/// than the main thread — confirm against the implementation file.
@interface NonMainThreadUncaughtNSException : CrashInvocation
@end

NS_ASSUME_NONNULL_END
111
6,451
from great_expectations.execution_engine import (
    PandasExecutionEngine,
    SparkDFExecutionEngine,
    SqlAlchemyExecutionEngine,
)
from great_expectations.expectations.metrics.import_manager import sa
from great_expectations.expectations.metrics.map_metric_provider import (
    ColumnPairMapMetricProvider,
    column_pair_condition_partial,
)


class ColumnPairValuesEqual(ColumnPairMapMetricProvider):
    """Map metric producing a row-wise condition: ``column_A == column_B``.

    Each engine-specific partial below is registered via
    ``column_pair_condition_partial`` and returns a boolean condition
    evaluated per row by the corresponding execution engine.
    """

    # Name under which this metric is registered and looked up.
    condition_metric_name = "column_pair_values.equal"
    # Keys identifying the data domain the condition applies to.
    condition_domain_keys = (
        "batch_id",
        "table",
        "column_A",
        "column_B",
        "row_condition",
        "condition_parser",
        "ignore_row_if",
    )
    # This metric takes no extra value parameters.
    condition_value_keys = ()

    # noinspection PyPep8Naming
    @column_pair_condition_partial(engine=PandasExecutionEngine)
    def _pandas(cls, column_A, column_B, **kwargs):
        # Element-wise pandas comparison; note NaN == NaN is False here.
        return column_A == column_B

    # noinspection PyPep8Naming
    @column_pair_condition_partial(engine=SqlAlchemyExecutionEngine)
    def _sqlalchemy(cls, column_A, column_B, **kwargs):
        # "== None" is the intentional SQLAlchemy idiom rendering as
        # "IS NULL" in SQL (not a Python identity check); rows where either
        # column is NULL are explicitly excluded from the "equal" set.
        row_wise_cond = sa.and_(
            column_A == column_B, sa.not_(sa.or_(column_A == None, column_B == None))
        )
        return row_wise_cond

    # noinspection PyPep8Naming
    @column_pair_condition_partial(engine=SparkDFExecutionEngine)
    def _spark(cls, column_A, column_B, **kwargs):
        # eqNullSafe: NULL <=> NULL yields True, NULL <=> value yields False,
        # so the condition never evaluates to NULL.
        row_wise_cond = column_A.eqNullSafe(column_B)
        return row_wise_cond
593
428
/** * Copyright 2008 - 2015 The Loon Game Engine Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * * @project loon * @author cping * @email:<EMAIL> * @version 0.5 */ package loon.android; import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import loon.Assets; import loon.LRelease; import loon.Sound; import loon.canvas.Image; import loon.canvas.ImageImpl; import loon.utils.Scale; import loon.utils.StringUtils; import android.content.res.AssetFileDescriptor; import android.content.res.AssetManager; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Typeface; public class AndroidAssets extends Assets { private final static String DEF_RES = "assets/"; public static interface Resource extends LRelease { InputStream getInputStream(); String getResourceName(); URI getURI(); } public static abstract class DataRes { String path; String name; InputStream in; URI uri; @Override public int hashCode() { return (name == null) ? 
super.hashCode() : name.hashCode(); } public void close() { if (in != null) { try { in.close(); in = null; } catch (IOException e) { } } if (uri != null) { uri = null; } } } public static class ClassRes extends DataRes implements Resource { private ClassLoader classLoader; public ClassRes(String path) { this(path, null); } public ClassRes(String path, ClassLoader classLoader) { this.path = path; this.name = "classpath://" + path; this.classLoader = classLoader; } @Override public InputStream getInputStream() { try { if (classLoader == null) { return (in = AndroidAssets.classLoader.getResourceAsStream(path)); } else { return (in = classLoader.getResourceAsStream(path)); } } catch (Exception e) { e.printStackTrace(); } return null; } @Override public String getResourceName() { return name; } @Override public URI getURI() { try { if (uri != null) { return uri; } return (uri = classLoader.getResource(path).toURI()); } catch (URISyntaxException ex) { throw new RuntimeException(ex); } } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } ClassRes other = (ClassRes) obj; if (name == null) { if (other.name != null) { return false; } } else if (!name.equals(other.name)) { return false; } return true; } @Override public int hashCode() { return super.hashCode(); } } public static class FileRes extends DataRes implements Resource { public FileRes(String path) { this.path = path; this.name = "file://" + path; } @Override public InputStream getInputStream() { try { if (in != null) { return in; } File file = new File(path); return (in = new FileInputStream(file)); } catch (FileNotFoundException e) { throw new RuntimeException("file " + name + " not found !", e); } } @Override public String getResourceName() { return name; } @Override public URI getURI() { try { if (uri != null) { return uri; } return (uri = new URL(path).toURI()); } catch (Exception e) { throw new 
RuntimeException(e); } } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } FileRes other = (FileRes) obj; if (name == null) { if (other.name != null) { return false; } } else if (!name.equals(other.name)) { return false; } return true; } @Override public int hashCode() { return super.hashCode(); } } public static class RemoteRes extends DataRes implements Resource { public RemoteRes(String url) { this.path = url; this.name = url; } @Override public InputStream getInputStream() { try { if (in != null) { return in; } return in = new URL(path).openStream(); } catch (Exception e) { throw new RuntimeException(e); } } @Override public String getResourceName() { return name; } @Override public URI getURI() { try { return new URL(path).toURI(); } catch (Exception e) { throw new RuntimeException(e); } } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } RemoteRes other = (RemoteRes) obj; if (name == null) { if (other.name != null) { return false; } } else if (!name.equals(other.name)) { return false; } return true; } @Override public int hashCode() { return super.hashCode(); } } public static class SDRes extends DataRes implements Resource { public SDRes(String path) { if (isMoutedSD()) { File f = android.os.Environment.getExternalStorageDirectory(); String tmp = f.getPath(); if (StringUtils.startsWith(path, '/')) { path = path.substring(1); } if (!StringUtils.endsWith(tmp, '/')) { path = tmp + "/" + path; } else { path = tmp + path; } } else { path = Loon.self.getCacheDir().getAbsolutePath(); path = StringUtils.replaceIgnoreCase(path, "\\", "/"); if (StringUtils.startsWith(path, '/') || StringUtils.startsWith(path, '\\')) { path = path.substring(1, path.length()); } } this.path = path; this.name = "sdcard://" + path; } public final static 
boolean isMoutedSD() { String sdState = android.os.Environment.getExternalStorageState(); return sdState.equals(android.os.Environment.MEDIA_MOUNTED); } @Override public InputStream getInputStream() { try { if (in != null) { return in; } return (in = new FileInputStream(new File(path))); } catch (FileNotFoundException e) { throw new RuntimeException("file " + name + " not found !", e); } } @Override public String getResourceName() { return name; } @Override public URI getURI() { try { if (uri != null) { return uri; } return (uri = new URL(path).toURI()); } catch (Exception e) { throw new RuntimeException(e); } } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } SDRes other = (SDRes) obj; if (name == null) { if (other.name != null) { return false; } } else if (!name.equals(other.name)) { return false; } return true; } @Override public int hashCode() { return super.hashCode(); } } public static Resource classRes(String path) { return new ClassRes(path); } public static Resource fileRes(String path) { return new FileRes(path); } public static Resource remoteRes(String path) { return new RemoteRes(path); } public static Resource sdRes(String path) { return new SDRes(path); } private InputStream filestream(String path) { try { File file = new File(path); if (file.exists()) { return new FileInputStream(file); } else { file = new File(StringUtils.replaceIgnoreCase(getPath(path), DEF_RES, "")); if (file.exists()) { return new FileInputStream(file); } else { return classLoader.getResourceAsStream(path); } } } catch (Throwable t) { return null; } } public InputStream strRes(final String path) { if (path == null) { return null; } InputStream in = filestream(path); if (in != null) { return in; } if (path.indexOf("->") == -1) { if (path.startsWith("sd:")) { in = sdRes(path.substring(3, path.length())).getInputStream(); } else if (path.startsWith("class:")) { in = 
classRes(path.substring(6, path.length())).getInputStream(); } else if (path.startsWith("path:")) { in = fileRes(path.substring(5, path.length())).getInputStream(); } else if (path.startsWith("url:")) { in = remoteRes(path.substring(4, path.length())).getInputStream(); } } return in; } private static ClassLoader classLoader; static { try { classLoader = AndroidAssets.class.getClassLoader(); } catch (Throwable ex) { classLoader = null; } } public class BitmapOptions extends BitmapFactory.Options { public Scale scale; } public interface BitmapOptionsAdjuster { void adjustOptions(String path, BitmapOptions options); } private final AndroidGame game; private final AssetManager assetMgr; private Scale assetScale = null; private BitmapOptionsAdjuster optionsAdjuster = new BitmapOptionsAdjuster() { public void adjustOptions(String path, BitmapOptions options) { } }; public AndroidAssets(AndroidGame game) { super(game.asyn()); Assets.pathPrefix = ""; this.game = game; this.assetMgr = game.activity.getResources().getAssets(); this.setPathPrefix(""); } public void setAssetScale(float scaleFactor) { this.assetScale = new Scale(scaleFactor); } public void setBitmapOptionsAdjuster(BitmapOptionsAdjuster optionsAdjuster) { this.optionsAdjuster = optionsAdjuster; } @Override public Image getRemoteImage(final String url, int width, int height) { final ImageImpl image = createImage(true, width, height, url); asyn.invokeAsync(new Runnable() { public void run() { try { BitmapOptions options = createOptions(url, false, Scale.ONE); Bitmap bmp = downloadBitmap(url, options); image.succeed(new ImageImpl.Data(options.scale, bmp, bmp.getWidth(), bmp.getHeight())); } catch (Exception error) { image.fail(error); } } }); return image; } protected AndroidAudio _audio; protected AndroidAudio getNativeAudio() { if (_audio == null) { _audio = new AndroidAudio(); } return _audio; } @Override public Sound getSound(String path) { if (_audio == null) { _audio = new AndroidAudio(); } return 
_audio.createSound(path); } @Override public Sound getMusic(String path) { if (_audio == null) { _audio = new AndroidAudio(); } return _audio.createMusic(path); } @Override public String getTextSync(String path) throws Exception { InputStream is = openAsset(path); try { StringBuilder fileData = new StringBuilder(1000); BufferedReader reader = new BufferedReader(new InputStreamReader(is)); char[] buf = new char[1024]; int numRead = 0; while ((numRead = reader.read(buf)) != -1) { String readData = String.valueOf(buf, 0, numRead); fileData.append(readData); } reader.close(); return fileData.toString(); } finally { is.close(); } } @Override public byte[] getBytesSync(String path) throws Exception { InputStream is = openAsset(path); try { ByteArrayOutputStream out = new ByteArrayOutputStream(); byte[] buf = new byte[1024]; while (true) { int r = is.read(buf); if (r == -1) { break; } out.write(buf, 0, r); } return out.toByteArray(); } finally { is.close(); } } @Override protected ImageImpl createImage(boolean async, int rwid, int rhei, String source) { return new AndroidImage(game, async, rwid, rhei, source); } @Override protected ImageImpl.Data load(String path) throws Exception { if (path == null || "<canvas>".equals(path)) { return null; } Exception error = null; for (Scale.ScaledResource rsrc : assetScale().getScaledResources(path)) { try { InputStream is = openAsset(rsrc.path); try { BitmapOptions options = createOptions(path, true, rsrc.scale); Bitmap bitmap = BitmapFactory.decodeStream(is, null, options); return new ImageImpl.Data(options.scale, bitmap, bitmap.getWidth(), bitmap.getHeight()); } finally { is.close(); } } catch (FileNotFoundException ex) { error = ex; } catch (Exception e) { error = e; break; } } game.log().warn("Could not load image: " + pathPrefix + path, error); throw error != null ? 
error : new FileNotFoundException(path); } Typeface getTypeface(String path) { return Typeface.createFromAsset(assetMgr, getPath(path)); } protected AssetFileDescriptor openAssetFd(String path) throws IOException { String fullPath = getPath(path); return assetMgr.openFd(fullPath); } protected Scale assetScale() { return (assetScale != null) ? assetScale : game.graphics().scale(); } protected InputStream openAsset(String path) throws IOException { String newPath = getPath(path); InputStream is = openResource(newPath); if (is == null) { is = assetMgr.open(newPath, AssetManager.ACCESS_STREAMING); } if (is == null) { throw new FileNotFoundException("not found resource: " + newPath); } return is; } public InputStream openResource(String resName) throws IOException { InputStream resource = strRes(resName); if (resource != null) { return resource; } if (resName.indexOf('\\') != -1) { resName = resName.replace('\\', '/'); } String fileName = resName.toLowerCase(); if (fileName.startsWith(DEF_RES) || fileName.startsWith('/' + DEF_RES)) { boolean flag = resName.startsWith("/"); String file; if (flag) { file = resName.substring(1); } else { file = resName; } int index = file.indexOf('/') + 1; if (index != -1) { file = resName.substring(index); } else { int length = file.length(); int size = file.lastIndexOf('/', 0) + 1; if (size < length) { file = file.substring(size, length); } } return this.assetMgr.open(file); } if (classLoader != null) { InputStream in = null; try { in = classLoader.getResourceAsStream(resName); } catch (Exception e) { } return in; } else { return this.assetMgr.open(resName); } } protected BitmapOptions createOptions(String path, boolean purgeable, Scale scale) { BitmapOptions options = new BitmapOptions(); options.inScaled = false; options.inMutable = true; options.inPreferredConfig = game.graphics().preferredBitmapConfig; //options.inDither = true; //options.inPurgeable = purgeable; //options.inInputShareable = true; options.scale = scale; 
optionsAdjuster.adjustOptions(path, options); return options; } protected Bitmap downloadBitmap(String url, BitmapOptions options) throws Exception { try { URL imageurl = new URL(url); HttpURLConnection connection = (HttpURLConnection) imageurl.openConnection(); connection.setDoInput(true); connection.setDoOutput(false); connection.setRequestMethod("GET"); connection.setConnectTimeout(5000); connection.setUseCaches(true); connection.connect(); int responseCode = connection.getResponseCode(); if (responseCode == 200) { InputStream in = connection.getInputStream(); try { return BitmapFactory.decodeStream(in, null, options); } finally { if (in != null) { in.close(); } if (connection != null) { connection.disconnect(); } } } } catch (Exception e) { game.reportError("bitmap from " + url, e); throw e; } return null; } }
6,217
892
{ "schema_version": "1.2.0", "id": "GHSA-f4wj-3824-vj6g", "modified": "2022-05-13T01:46:01Z", "published": "2022-05-13T01:46:01Z", "aliases": [ "CVE-2017-5136" ], "details": "An issue was discovered on SendQuick Entera and Avera devices before 2HF16. The application failed to check the access control of the request which could result in an attacker being able to shutdown the system.", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" } ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-5136" }, { "type": "WEB", "url": "https://niantech.io/blog/2017/02/05/vulns-multiple-vulns-in-sendquick-entera-avera-sms-gateway-appliances/" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/96031" } ], "database_specific": { "cwe_ids": [ "CWE-862" ], "severity": "HIGH", "github_reviewed": false } }
501
483
/*
 * Copyright (c) 2017-2020. Nitrite author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.dizitart.no2.common.util;

import com.fasterxml.jackson.databind.introspect.AnnotatedMethodMap;
import junit.framework.JUnit4TestAdapterCache;
import lombok.Data;
import org.apache.commons.lang3.mutable.MutableByte;
import org.apache.commons.lang3.mutable.MutableDouble;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.StopFilter;
import org.dizitart.no2.collection.Document;
import org.dizitart.no2.collection.NitriteId;
import org.dizitart.no2.exceptions.ValidationException;
import org.dizitart.no2.repository.annotations.Entity;
import org.dizitart.no2.repository.annotations.Index;
import org.dizitart.no2.integration.repository.data.ChildClass;
import org.dizitart.no2.integration.repository.data.Employee;
import org.junit.Test;

import java.io.Serializable;
import java.time.LocalDateTime;

import static org.dizitart.no2.common.util.ObjectUtils.newInstance;
import static org.junit.Assert.*;

/**
 * Unit tests for {@code ObjectUtils}: entity-name resolution, repository-name
 * composition, deep equality, deep copy, and reflective instantiation.
 *
 * @author <NAME>.
 */
public class ObjectUtilsTest implements Serializable {

    @Test
    public void testGetEntityName() {
        // Un-annotated classes fall back to their fully qualified class name.
        assertEquals("java.lang.Object", ObjectUtils.getEntityName(Object.class));
    }

    @Test
    public void testFindRepositoryName() {
        // An empty key yields just the entity name; a non-empty key is
        // appended with a '+' separator.
        assertEquals("entityName", ObjectUtils.findRepositoryName("entityName", ""));
        assertEquals("entityName+key", ObjectUtils.findRepositoryName("entityName", "key"));
        assertEquals("java.lang.Object+key", ObjectUtils.findRepositoryName(Object.class, "key"));
        assertEquals("java.lang.Object", ObjectUtils.findRepositoryName(Object.class, ""));
    }

    @Test
    public void testDeepEquals() {
        // Mismatched values, nulls, and mismatched types are all unequal;
        // two nulls are equal.
        assertFalse(ObjectUtils.deepEquals("o1", "o2"));
        assertFalse(ObjectUtils.deepEquals(null, "o2"));
        assertFalse(ObjectUtils.deepEquals(new AnnotatedMethodMap(), "o2"));
        assertTrue(ObjectUtils.deepEquals(null, null));
        assertFalse(ObjectUtils.deepEquals(new MutableByte(), "o2"));
        assertFalse(ObjectUtils.deepEquals(new JUnit4TestAdapterCache(), "o2"));
        assertFalse(ObjectUtils.deepEquals("o1", null));
    }

    @Test
    public void testDeepEquals2() {
        // A populated set is not deep-equal to an unrelated map type.
        CharArraySet makeStopSetResult = StopFilter.makeStopSet(new String[]{"foo", "foo", "foo"}, true);
        makeStopSetResult.add((Object) "foo");
        assertFalse(ObjectUtils.deepEquals(makeStopSetResult, new AnnotatedMethodMap()));
    }

    @Test
    public void testDeepEquals3() {
        // Same numeric value but different wrapper types are unequal.
        MutableByte o1 = new MutableByte();
        assertFalse(ObjectUtils.deepEquals(o1, new MutableDouble()));
    }

    @Test
    public void testDeepEquals4() {
        // Two empty caches of the same type are deep-equal.
        JUnit4TestAdapterCache o1 = new JUnit4TestAdapterCache();
        assertTrue(ObjectUtils.deepEquals(o1, new JUnit4TestAdapterCache()));
    }

    @Test
    public void testDeepEquals5() {
        // Two default (zero-valued) MutableBytes are deep-equal.
        MutableByte o1 = new MutableByte();
        assertTrue(ObjectUtils.deepEquals(o1, new MutableByte()));
    }

    @Test
    public void testDeepEquals6() {
        // Two identically constructed stop-sets are deep-equal.
        CharArraySet makeStopSetResult = StopFilter.makeStopSet(new String[]{"foo", "foo", "foo"}, true);
        makeStopSetResult.add((Object) "foo");
        CharArraySet makeStopSetResult1 = StopFilter.makeStopSet(new String[]{"foo", "foo", "foo"}, true);
        makeStopSetResult1.add((Object) "foo");
        assertTrue(ObjectUtils.deepEquals(makeStopSetResult, makeStopSetResult1));
    }

    @Test
    public void testDeepEquals7() {
        // Two empty AnnotatedMethodMaps are deep-equal.
        AnnotatedMethodMap o1 = new AnnotatedMethodMap();
        assertTrue(ObjectUtils.deepEquals(o1, new AnnotatedMethodMap()));
    }

    @Test
    public void testDeepEquals8() {
        // Same type but different contained values are unequal.
        MutableByte o2 = new MutableByte((byte) 65);
        assertFalse(ObjectUtils.deepEquals(new MutableByte(), o2));
    }

    @Test
    public void testNewInstance() {
        // Reflective construction of a nested type graph; only checks it
        // does not throw (no assertion on the result).
        EnclosingType type = newInstance(EnclosingType.class, true);
        System.out.println(type);
    }

    @Test
    public void testIsValueType() {
        assertFalse(ObjectUtils.isValueType(Object.class));
    }

    @Test
    public void testIsCompatibleTypes() {
        Class<?> type1 = Object.class;
        assertTrue(ObjectUtils.isCompatibleTypes(type1, Object.class));
    }

    @Test
    public void testDeepCopy() {
        assertNull(ObjectUtils.deepCopy(null));
        assertEquals(NitriteId.createId("42"), ObjectUtils.deepCopy(NitriteId.createId("42")));
        assertNotEquals(NitriteId.createId("41"), ObjectUtils.deepCopy(NitriteId.createId("42")));
        assertEquals(Document.createDocument("foo", "foo"),
            ObjectUtils.deepCopy(Document.createDocument("foo", "foo")));

        // equals() not implemented so reference check should not be equal
        assertNotEquals(this, ObjectUtils.deepCopy(this));
    }

    @Test(expected = ValidationException.class)
    public void testInvalidEntity1() {
        // '+' is reserved as the repository key separator, so it is not
        // allowed inside an entity name.
        ObjectUtils.getEntityName(InvalidEntity1.class);
    }

    @Test(expected = ValidationException.class)
    public void testInvalidEntity2() {
        ObjectUtils.getEntityName(InvalidEntity2.class);
    }

    @Test
    public void testValidEntity() {
        // Empty or absent @Entity value falls back to the class name; any
        // other separator-free value (e.g. "a-b") is used verbatim.
        assertEquals("org.dizitart.no2.common.util.ObjectUtilsTest$ValidEntity3",
            ObjectUtils.getEntityName(ValidEntity3.class));
        assertEquals("org.dizitart.no2.common.util.ObjectUtilsTest$ValidEntity4",
            ObjectUtils.getEntityName(ValidEntity4.class));
        assertEquals("org.dizitart.no2.common.util.ObjectUtilsTest$ValidEntity5",
            ObjectUtils.getEntityName(ValidEntity5.class));
        assertEquals("a-b", ObjectUtils.getEntityName(ValidEntity6.class));
    }

    // Fixture: nested type graph exercised by testNewInstance.
    @Data
    private static class EnclosingType {
        private ChildClass childClass;
        private FieldType fieldType;
    }

    @Data
    private static class FieldType {
        private Employee employee;
        private LocalDateTime currentDate;
    }

    // Fixture: entity name containing the reserved '+' separator.
    @Data
    @Entity(value = "a+b")
    private static class InvalidEntity1 {
        private String value;
    }

    // Fixture: entity name that is only the reserved separator.
    @Data
    @Entity(value = "+")
    private static class InvalidEntity2 {
        private String value;
    }

    // Fixture: explicit empty entity name (falls back to class name).
    @Data
    @Entity(value = "")
    private static class ValidEntity3 {
        private String value;
    }

    // Fixture: @Entity with no value (falls back to class name).
    @Data
    @Entity
    private static class ValidEntity4 {
        private String value;
    }

    // Fixture: @Entity that only declares indices (falls back to class name).
    @Data
    @Entity(indices = {
        @Index(value = "value")
    })
    private static class ValidEntity5 {
        private String value;
    }

    // Fixture: explicit, separator-free entity name used verbatim.
    @Data
    @Entity(value = "a-b")
    private static class ValidEntity6 {
        private String value;
    }
}
2,740
1,208
<reponame>allendeng/XcodeBenchmark
//
//  TLChatViewController+Conversation.h
//  TLChat
//
//  Created by 李伯坤 on 2017/12/26.
//  Copyright © 2017年 李伯坤. All rights reserved.
//

#import "TLChatViewController.h"
#import "TLConversation.h"

/// Category adding conversation-based construction to TLChatViewController.
@interface TLChatViewController (Conversation)

/// Creates a chat view controller for the given conversation.
/// @param conversation the conversation to display — presumably retained by
///        the controller; confirm against the implementation file.
- (instancetype)initWithConversation:(TLConversation *)conversation;

@end
143
4,358
"""Train a TPOT AutoML classifier on the scikit-learn digits dataset.

Splits the data 75/25, evolves pipelines for 5 generations, prints the
held-out score, and exports the best pipeline as a Python script.
"""

from tpot import TPOTClassifier
from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split

# Load the 8x8 handwritten-digit images as flat feature vectors.
dataset = load_digits()

# 75% train / 25% test split (unseeded, so the split varies run to run).
features_train, features_test, labels_train, labels_test = train_test_split(
    dataset.data, dataset.target, train_size=0.75, test_size=0.25
)

# Evolve candidate pipelines: 5 generations, population of 20, verbose output.
classifier = TPOTClassifier(generations=5, population_size=20, verbosity=2)
classifier.fit(features_train, labels_train)

# Score of the best discovered pipeline on the held-out set.
print(classifier.score(features_test, labels_test))

# Persist the winning pipeline as runnable Python code.
classifier.export("tpot_mnist_pipeline.py")
174
598
""" Tests for L{eliot._output}. """ from sys import stdout from unittest import TestCase, skipUnless # Make sure to use StringIO that only accepts unicode: from io import BytesIO, StringIO import json as pyjson from tempfile import mktemp from time import time from uuid import UUID from threading import Thread try: import numpy as np except ImportError: np = None from zope.interface.verify import verifyClass from .._output import ( MemoryLogger, ILogger, Destinations, Logger, bytesjson as json, to_file, FileDestination, _safe_unicode_dictionary, ) from .._action import start_action from .._validation import ValidationError, Field, _MessageSerializer from .._traceback import write_traceback from ..testing import assertContainsFields from .common import CustomObject, CustomJSONEncoder class MemoryLoggerTests(TestCase): """ Tests for L{MemoryLogger}. """ def test_interface(self): """ L{MemoryLogger} implements L{ILogger}. """ verifyClass(ILogger, MemoryLogger) def test_write(self): """ Dictionaries written with L{MemoryLogger.write} are stored on a list. """ logger = MemoryLogger() logger.write({"a": "b"}) logger.write({"c": 1}) self.assertEqual(logger.messages, [{"a": "b"}, {"c": 1}]) logger.validate() def test_notStringFieldKeys(self): """ Field keys must be unicode or bytes; if not L{MemoryLogger.validate} raises a C{TypeError}. """ logger = MemoryLogger() logger.write({123: "b"}) self.assertRaises(TypeError, logger.validate) def test_bytesMustBeUTF8(self): """ Field keys can be bytes, but only if they're UTF-8 encoded Unicode. """ logger = MemoryLogger() logger.write({"\u1234".encode("utf-16"): "b"}) self.assertRaises(UnicodeDecodeError, logger.validate) def test_serializer(self): """ L{MemoryLogger.validate} calls the given serializer's C{validate()} method with the message, as does L{MemoryLogger.write}. 
""" class FakeValidator(list): def validate(self, message): self.append(message) def serialize(self, obj): return obj validator = FakeValidator() logger = MemoryLogger() message = {"message_type": "mymessage", "X": 1} logger.write(message, validator) self.assertEqual(validator, [message]) logger.validate() self.assertEqual(validator, [message, message]) def test_failedValidation(self): """ L{MemoryLogger.validate} will allow exceptions raised by the serializer to pass through. """ serializer = _MessageSerializer( [Field.forValue("message_type", "mymessage", "The type")] ) logger = MemoryLogger() logger.write({"message_type": "wrongtype"}, serializer) self.assertRaises(ValidationError, logger.validate) def test_JSON(self): """ L{MemoryLogger.validate} will encode the output of serialization to JSON. """ serializer = _MessageSerializer( [ Field.forValue("message_type", "type", "The type"), Field("foo", lambda value: object(), "The type"), ] ) logger = MemoryLogger() logger.write( {"message_type": "type", "foo": "will become object()"}, serializer ) self.assertRaises(TypeError, logger.validate) @skipUnless(np, "NumPy is not installed.") def test_EliotJSONEncoder(self): """ L{MemoryLogger.validate} uses the EliotJSONEncoder by default to do encoding testing. """ logger = MemoryLogger() logger.write({"message_type": "type", "foo": np.uint64(12)}, None) logger.validate() def test_JSON_custom_encoder(self): """ L{MemoryLogger.validate} will use a custom JSON encoder if one was given. """ logger = MemoryLogger(encoder=CustomJSONEncoder) logger.write( {"message_type": "type", "custom": CustomObject()}, None, ) logger.validate() def test_serialize(self): """ L{MemoryLogger.serialize} returns a list of serialized versions of the logged messages. 
""" serializer = _MessageSerializer( [ Field.forValue("message_type", "mymessage", "The type"), Field("length", len, "The length"), ] ) messages = [ {"message_type": "mymessage", "length": "abc"}, {"message_type": "mymessage", "length": "abcd"}, ] logger = MemoryLogger() for message in messages: logger.write(message, serializer) self.assertEqual( logger.serialize(), [ {"message_type": "mymessage", "length": 3}, {"message_type": "mymessage", "length": 4}, ], ) def test_serializeCopies(self): """ L{MemoryLogger.serialize} does not mutate the original logged messages. """ serializer = _MessageSerializer( [ Field.forValue("message_type", "mymessage", "The type"), Field("length", len, "The length"), ] ) message = {"message_type": "mymessage", "length": "abc"} logger = MemoryLogger() logger.write(message, serializer) logger.serialize() self.assertEqual(logger.messages[0]["length"], "abc") def write_traceback(self, logger, exception): """ Write an exception as a traceback to the logger. """ try: raise exception except: write_traceback(logger) def test_tracebacksCauseTestFailure(self): """ Logging a traceback to L{MemoryLogger} will add its exception to L{MemoryLogger.tracebackMessages}. """ logger = MemoryLogger() exception = Exception() self.write_traceback(logger, exception) self.assertEqual(logger.tracebackMessages[0]["reason"], exception) def test_flushTracebacksNoTestFailure(self): """ Any tracebacks cleared by L{MemoryLogger.flushTracebacks} (as specified by exception type) are removed from L{MemoryLogger.tracebackMessages}. """ logger = MemoryLogger() exception = RuntimeError() self.write_traceback(logger, exception) logger.flushTracebacks(RuntimeError) self.assertEqual(logger.tracebackMessages, []) def test_flushTracebacksReturnsExceptions(self): """ L{MemoryLogger.flushTracebacks} returns the traceback messages. 
""" exceptions = [ZeroDivisionError(), ZeroDivisionError()] logger = MemoryLogger() logger.write({"x": 1}) for exc in exceptions: self.write_traceback(logger, exc) logger.write({"x": 1}) flushed = logger.flushTracebacks(ZeroDivisionError) self.assertEqual(flushed, logger.messages[1:3]) def test_flushTracebacksUnflushedTestFailure(self): """ Any tracebacks uncleared by L{MemoryLogger.flushTracebacks} (because they are of a different type) are still listed in L{MemoryLogger.tracebackMessages}. """ logger = MemoryLogger() exception = RuntimeError() self.write_traceback(logger, exception) logger.flushTracebacks(KeyError) self.assertEqual(logger.tracebackMessages[0]["reason"], exception) def test_flushTracebacksUnflushedUnreturned(self): """ Any tracebacks uncleared by L{MemoryLogger.flushTracebacks} (because they are of a different type) are not returned. """ logger = MemoryLogger() exception = RuntimeError() self.write_traceback(logger, exception) self.assertEqual(logger.flushTracebacks(KeyError), []) def test_reset(self): """ L{MemoryLogger.reset} clears all logged messages and tracebacks. """ logger = MemoryLogger() logger.write({"key": "value"}, None) logger.reset() self.assertEqual( (logger.messages, logger.serializers, logger.tracebackMessages), ([], [], []), ) def test_threadSafeWrite(self): """ L{MemoryLogger.write} can be called from multiple threads concurrently. """ # Some threads will log some messages thread_count = 10 # A lot of messages. This will keep the threads running long enough # to give them a chance to (try to) interfere with each other. write_count = 10000 # They'll all use the same MemoryLogger instance. logger = MemoryLogger() # Each thread will have its own message and serializer that it writes # to the log over and over again. def write(msg, serializer): for i in range(write_count): logger.write(msg, serializer) # Generate a single distinct message for each thread to log. 
msgs = list({"i": i} for i in range(thread_count)) # Generate a single distinct serializer for each thread to log. serializers = list(object() for i in range(thread_count)) # Pair them all up. This gives us a simple invariant we can check # later on. write_args = zip(msgs, serializers) # Create the threads. threads = list(Thread(target=write, args=args) for args in write_args) # Run them all. Note threads early in this list will start writing to # the log before later threads in the list even get a chance to start. # That's part of why we have each thread write so many messages. for t in threads: t.start() # Wait for them all to finish. for t in threads: t.join() # Check that we got the correct number of messages in the log. expected_count = thread_count * write_count self.assertEqual(len(logger.messages), expected_count) self.assertEqual(len(logger.serializers), expected_count) # Check the simple invariant we created above. Every logged message # must be paired with the correct serializer, where "correct" is # defined by ``write_args`` above. for position, (msg, serializer) in enumerate( zip(logger.messages, logger.serializers) ): # The indexes must match because the objects are paired using # zip() above. msg_index = msgs.index(msg) serializer_index = serializers.index(serializer) self.assertEqual( msg_index, serializer_index, "Found message #{} with serializer #{} at position {}".format( msg_index, serializer_index, position ), ) class MyException(Exception): """ Custom exception. """ class BadDestination(list): """ A destination that throws an exception the first time it is called. """ called = 0 def __call__(self, msg): if not self.called: self.called = True raise MyException("ono") self.append(msg) class DestinationsTests(TestCase): """ Tests for L{Destinations}. """ def test_send(self): """ L{Destinations.send} calls all destinations added with L{Destinations.add} with the given dictionary. 
""" destinations = Destinations() message = {"hoorj": "blargh"} dest = [] dest2 = [] dest3 = [] destinations.add(dest.append, dest2.append) destinations.add(dest3.append) destinations.send(message) self.assertEqual(dest, [message]) self.assertEqual(dest2, [message]) self.assertEqual(dest3, [message]) def test_destination_exception_multiple_destinations(self): """ If one destination throws an exception, other destinations still get the message. """ destinations = Destinations() dest = [] dest2 = BadDestination() dest3 = [] destinations.add(dest.append) destinations.add(dest2) destinations.add(dest3.append) message = {"hello": 123} destinations.send(message) self.assertIn(message, dest) self.assertIn(message, dest3) def test_destination_exception_continue(self): """ If a destination throws an exception, future messages are still sent to it. """ destinations = Destinations() dest = BadDestination() destinations.add(dest) msg1 = {"hello": 123} msg2 = {"world": 456} destinations.send(msg1) self.assertNotIn(msg1, dest) destinations.send(msg2) self.assertIn(msg2, dest) def test_remove(self): """ A destination removed with L{Destinations.remove} will no longer receive messages from L{Destionations.add} calls. """ destinations = Destinations() message = {"hello": 123} dest = [] destinations.add(dest.append) destinations.remove(dest.append) destinations.send(message) self.assertEqual(dest, []) def test_removeNonExistent(self): """ Removing a destination that has not previously been added with result in a C{ValueError} being thrown. """ destinations = Destinations() self.assertRaises(ValueError, destinations.remove, [].append) def test_addGlobalFields(self): """ L{Destinations.addGlobalFields} adds the given fields and values to the messages being passed in. 
""" destinations = Destinations() dest = [] destinations.add(dest.append) destinations.addGlobalFields(x=123, y="hello") destinations.send({"z": 456}) self.assertEqual(dest, [{"x": 123, "y": "hello", "z": 456}]) def test_addGlobalFieldsCumulative(self): """ L{Destinations.addGlobalFields} adds the given fields to those set by previous calls. """ destinations = Destinations() dest = [] destinations.add(dest.append) destinations.addGlobalFields(x=123, y="hello") destinations.addGlobalFields(x=456, z=456) destinations.send({"msg": "X"}) self.assertEqual(dest, [{"x": 456, "y": "hello", "z": 456, "msg": "X"}]) def test_buffering(self): """ Before any destinations are set up to 1000 messages are buffered, and then delivered to the first registered destinations. """ destinations = Destinations() messages = [{"k": i} for i in range(1050)] for m in messages: destinations.send(m) dest, dest2 = [], [] destinations.add(dest.append, dest2.append) self.assertEqual((dest, dest2), (messages[-1000:], messages[-1000:])) def test_buffering_second_batch(self): """ The second batch of added destination don't get the buffered messages. """ destinations = Destinations() message = {"m": 1} message2 = {"m": 2} destinations.send(message) dest = [] dest2 = [] destinations.add(dest.append) destinations.add(dest2.append) destinations.send(message2) self.assertEqual((dest, dest2), ([message, message2], [message2])) def test_global_fields_buffering(self): """ Global fields are added to buffered messages, when possible. """ destinations = Destinations() message = {"m": 1} destinations.send(message) destinations.addGlobalFields(k=123) dest = [] destinations.add(dest.append) self.assertEqual(dest, [{"m": 1, "k": 123}]) def makeLogger(): """ Return a tuple (L{Logger} instance, C{list} of written messages). """ logger = Logger() logger._destinations = Destinations() written = [] logger._destinations.add(written.append) return logger, written class LoggerTests(TestCase): """ Tests for L{Logger}. 
""" def test_interface(self): """ L{Logger} implements L{ILogger}. """ verifyClass(ILogger, Logger) def test_global(self): """ A global L{Destinations} is used by the L{Logger} class. """ self.assertIsInstance(Logger._destinations, Destinations) def test_write(self): """ L{Logger.write} sends the given dictionary L{Destinations} object. """ logger, written = makeLogger() d = {"hello": 1} logger.write(d) self.assertEqual(written, [d]) def test_serializer(self): """ If a L{_MessageSerializer} is passed to L{Logger.write}, it is used to serialize the message before it is passed to the destination. """ logger, written = makeLogger() serializer = _MessageSerializer( [ Field.forValue("message_type", "mymessage", "The type"), Field("length", len, "The length of a thing"), ] ) logger.write({"message_type": "mymessage", "length": "thething"}, serializer) self.assertEqual(written, [{"message_type": "mymessage", "length": 8}]) def test_passedInDictionaryUnmodified(self): """ The dictionary passed in to L{Logger.write} is not modified. """ logger, written = makeLogger() serializer = _MessageSerializer( [ Field.forValue("message_type", "mymessage", "The type"), Field("length", len, "The length of a thing"), ] ) d = {"message_type": "mymessage", "length": "thething"} original = d.copy() logger.write(d, serializer) self.assertEqual(d, original) def test_safe_unicode_dictionary(self): """ L{_safe_unicode_dictionary} converts the given dictionary's values and keys to unicode using C{safeunicode}. """ class badobject(object): def __repr__(self): raise TypeError() dictionary = {badobject(): 123, 123: badobject()} badMessage = "eliot: unknown, unicode() raised exception" self.assertEqual( eval(_safe_unicode_dictionary(dictionary)), {badMessage: "123", "123": badMessage}, ) def test_safe_unicode_dictionary_fallback(self): """ If converting the dictionary failed for some reason, L{_safe_unicode_dictionary} runs C{repr} on the object. 
""" self.assertEqual(_safe_unicode_dictionary(None), "None") def test_safe_unicode_dictionary_fallback_failure(self): """ If all else fails, L{_safe_unicode_dictionary} just gives up. """ class badobject(object): def __repr__(self): raise TypeError() self.assertEqual( _safe_unicode_dictionary(badobject()), "eliot: unknown, unicode() raised exception", ) def test_serializationErrorTraceback(self): """ If serialization fails in L{Logger.write}, a traceback is logged, along with a C{eliot:serialization_failure} message for debugging purposes. """ logger, written = makeLogger() def raiser(i): raise RuntimeError("oops") serializer = _MessageSerializer( [ Field.forValue("message_type", "mymessage", "The type"), Field("fail", raiser, "Serialization fail"), ] ) message = {"message_type": "mymessage", "fail": "will"} logger.write(message, serializer) self.assertEqual(len(written), 2) tracebackMessage = written[0] assertContainsFields( self, tracebackMessage, { "exception": "%s.RuntimeError" % (RuntimeError.__module__,), "message_type": "eliot:traceback", }, ) self.assertIn("RuntimeError: oops", tracebackMessage["traceback"]) # Calling _safe_unicode_dictionary multiple times leads to # inconsistent results due to hash ordering, so compare contents: assertContainsFields( self, written[1], {"message_type": "eliot:serialization_failure"} ) self.assertEqual( eval(written[1]["message"]), dict((repr(key), repr(value)) for (key, value) in message.items()), ) def test_destination_exception_caught(self): """ If a destination throws an exception, an appropriate error is logged. 
""" logger = Logger() logger._destinations = Destinations() dest = BadDestination() logger._destinations.add(dest) message = {"hello": 123} logger.write({"hello": 123}) assertContainsFields( self, dest[0], { "message_type": "eliot:destination_failure", "message": _safe_unicode_dictionary(message), "reason": "ono", "exception": "eliot.tests.test_output.MyException", }, ) def test_destination_multiple_exceptions_caught(self): """ If multiple destinations throw an exception, an appropriate error is logged for each. """ logger = Logger() logger._destinations = Destinations() logger._destinations.add(BadDestination()) logger._destinations.add(lambda msg: 1 / 0) messages = [] logger._destinations.add(messages.append) try: 1 / 0 except ZeroDivisionError as e: zero_divide = str(e) zero_type = ZeroDivisionError.__module__ + ".ZeroDivisionError" message = {"hello": 123} logger.write({"hello": 123}) def remove(key): return [message.pop(key) for message in messages[1:]] # Make sure we have task_level & task_uuid in exception messages. task_levels = remove("task_level") task_uuids = remove("task_uuid") timestamps = remove("timestamp") self.assertEqual( ( abs(timestamps[0] + timestamps[1] - 2 * time()) < 1, task_levels == [[1], [1]], len([UUID(uuid) for uuid in task_uuids]) == 2, messages, ), ( True, True, True, [ message, { "message_type": "eliot:destination_failure", "message": _safe_unicode_dictionary(message), "reason": "ono", "exception": "eliot.tests.test_output.MyException", }, { "message_type": "eliot:destination_failure", "message": _safe_unicode_dictionary(message), "reason": zero_divide, "exception": zero_type, }, ], ), ) def test_destination_exception_caught_twice(self): """ If a destination throws an exception, and the logged error about it also causes an exception, then just drop that exception on the floor, since there's nothing we can do with it. 
""" logger = Logger() logger._destinations = Destinations() def always_raise(message): raise ZeroDivisionError() logger._destinations.add(always_raise) # Just a message. No exception raised; since everything is dropped no # other assertions to be made. logger.write({"hello": 123}) # With an action. No exception raised; since everything is dropped no # other assertions to be made. with start_action(logger, "sys:do"): logger.write({"hello": 123}) class PEP8Tests(TestCase): """ Tests for PEP 8 method compatibility. """ def test_flush_tracebacks(self): """ L{MemoryLogger.flush_tracebacks} is the same as L{MemoryLogger.flushTracebacks} """ self.assertEqual(MemoryLogger.flush_tracebacks, MemoryLogger.flushTracebacks) class ToFileTests(TestCase): """ Tests for L{to_file}. """ def test_to_file_adds_destination(self): """ L{to_file} adds a L{FileDestination} destination with the given file. """ f = stdout to_file(f) expected = FileDestination(file=f) self.addCleanup(Logger._destinations.remove, expected) self.assertIn(expected, Logger._destinations._destinations) def test_to_file_custom_encoder(self): """ L{to_file} accepts a custom encoder, and sets it on the resulting L{FileDestination}. """ f = stdout encoder = object() to_file(f, encoder=encoder) expected = FileDestination(file=f, encoder=encoder) self.addCleanup(Logger._destinations.remove, expected) self.assertIn(expected, Logger._destinations._destinations) def test_bytes_values(self): """ DEPRECATED: On Python 3L{FileDestination} will encode bytes as if they were UTF-8 encoded strings when writing to BytesIO only. 
""" message = {"x": b"abc"} bytes_f = BytesIO() destination = FileDestination(file=bytes_f) destination(message) self.assertEqual( [json.loads(line) for line in bytes_f.getvalue().splitlines()], [{"x": "abc"}], ) @skipUnless(np, "NumPy is not installed.") def test_default_encoder_is_EliotJSONEncoder(self): """The default encoder if none are specified is EliotJSONEncoder.""" message = {"x": np.int64(3)} f = StringIO() destination = FileDestination(file=f) destination(message) self.assertEqual( [json.loads(line) for line in f.getvalue().splitlines()], [{"x": 3}] ) def test_filedestination_writes_json_bytes(self): """ L{FileDestination} writes JSON-encoded messages to a file that accepts bytes. """ message1 = {"x": 123} message2 = {"y": None, "x": "abc"} bytes_f = BytesIO() destination = FileDestination(file=bytes_f) destination(message1) destination(message2) self.assertEqual( [json.loads(line) for line in bytes_f.getvalue().splitlines()], [message1, message2], ) def test_filedestination_custom_encoder(self): """ L{FileDestionation} can use a custom encoder. """ custom = object() class CustomEncoder(pyjson.JSONEncoder): def default(self, o): if o is custom: return "CUSTOM!" else: return pyjson.JSONEncoder.default(self, o) message = {"x": 123, "z": custom} f = BytesIO() destination = FileDestination(file=f, encoder=CustomEncoder) destination(message) self.assertEqual( json.loads(f.getvalue().splitlines()[0]), {"x": 123, "z": "CUSTOM!"} ) def test_filedestination_flushes(self): """ L{FileDestination} flushes after every write, to ensure logs get written out even if the local buffer hasn't filled up. 
""" path = mktemp() # File with large buffer: f = open(path, "wb", 1024 * 1024 * 10) # and a small message that won't fill the buffer: message1 = {"x": 123} destination = FileDestination(file=f) destination(message1) # Message got written even though buffer wasn't filled: self.assertEqual( [json.loads(line) for line in open(path, "rb").read().splitlines()], [message1], ) def test_filedestination_writes_json_unicode(self): """ L{FileDestination} writes JSON-encoded messages to file that only accepts Unicode. """ message = {"x": "\u1234"} unicode_f = StringIO() destination = FileDestination(file=unicode_f) destination(message) self.assertEqual(pyjson.loads(unicode_f.getvalue()), message) def test_filedestination_unwriteable_file(self): """ L{FileDestination} raises a runtime error if the given file isn't writeable. """ path = mktemp() open(path, "w").close() f = open(path, "r") with self.assertRaises(RuntimeError): FileDestination(f)
12,972
310
#include <catch.hpp>

#include <chi/Context.hpp>
#include <chi/DataType.hpp>
#include <chi/LangModule.hpp>
#include <chi/NodeType.hpp>
#include <chi/Support/Result.hpp>

using namespace chi;

// Tests for the built-in "lang" module: type lookup (valid and invalid
// names) and creation/cloning of the "if" and "entry" node types.
TEST_CASE("LangModule", "[module]") {
	GIVEN("A context with LangModule in it") {
		Context c;
		c.loadModule("lang");
		ChiModule* mod = c.moduleByFullName("lang");

		THEN("We try to get associated types with correct parameters, it works") {
			DataType test;
			Result   res;

			res = c.typeFromModule("lang", "i32", &test);
			REQUIRE(!!res);
			REQUIRE(test.llvmType() == LLVMInt32TypeInContext(c.llvmContext()));
			REQUIRE(&test.module() == mod);
			REQUIRE(test.unqualifiedName() == "i32");
			REQUIRE(test.qualifiedName() == "lang:i32");

			res = c.typeFromModule("lang", "i8*", &test);
			REQUIRE(!!res);
			REQUIRE(test.llvmType() == LLVMPointerType(LLVMInt8TypeInContext(c.llvmContext()), 0));
			REQUIRE(&test.module() == mod);
			REQUIRE(test.unqualifiedName() == "i8*");
			REQUIRE(test.qualifiedName() == "lang:i8*");

			res = c.typeFromModule("lang", "float", &test);
			REQUIRE(!!res);
			REQUIRE(test.llvmType() == LLVMDoubleTypeInContext(c.llvmContext()));
			REQUIRE(&test.module() == mod);
			REQUIRE(test.unqualifiedName() == "float");
			REQUIRE(test.qualifiedName() == "lang:float");
		}

		THEN(
		    "We try to get associated types with incorrect parameters, it returns the correct "
		    "errors") {
			DataType test;
			Result   res;

			// E37 is the "invalid type name" error code.
			res = c.typeFromModule("lang", "i32a", &test);
			REQUIRE(!res);
			REQUIRE(res.result_json[0]["errorcode"] == "E37");

			res = c.typeFromModule("lang", "i32*a", &test);
			REQUIRE(!res);
			REQUIRE(res.result_json[0]["errorcode"] == "E37");

			res = c.typeFromModule("lang", "*i32**", &test);
			REQUIRE(!res);
			REQUIRE(res.result_json[0]["errorcode"] == "E37");

			res = c.typeFromModule("lang", "&i8", &test);
			REQUIRE(!res);
			REQUIRE(res.result_json[0]["errorcode"] == "E37");

			res = c.typeFromModule("lang", "pq", &test);
			REQUIRE(!res);
			REQUIRE(res.result_json[0]["errorcode"] == "E37");
		}

		WHEN("We try to get if node") {
			Result res;

			std::unique_ptr<NodeType> ifNode = nullptr;
			res = c.nodeTypeFromModule("lang", "if", {}, &ifNode);
			REQUIRE(!!res);

			THEN("It should be totally valid") {
				REQUIRE(ifNode != nullptr);
				REQUIRE(ifNode->execInputs().size() == 1);
				REQUIRE(ifNode->execOutputs().size() == 2);
				REQUIRE(ifNode->dataInputs().size() == 1);
				REQUIRE(ifNode->dataOutputs().size() == 0);

				// make sure it is actually a if
				REQUIRE(ifNode->name() == "if");
			}

			WHEN("We clone it") {
				std::unique_ptr<NodeType> clone = ifNode->clone();

				// BUGFIX: this block previously re-checked `ifNode` instead of
				// `clone`, so the clone itself was never validated (compare the
				// entry-node clone test below, which checks `clone`).
				THEN("The clone will be valid") {
					REQUIRE(clone != nullptr);
					REQUIRE(clone->execInputs().size() == 1);
					REQUIRE(clone->execOutputs().size() == 2);
					REQUIRE(clone->dataInputs().size() == 1);
					REQUIRE(clone->dataOutputs().size() == 0);

					// make sure it is actually a if
					REQUIRE(clone->name() == "if");
				}
			}
		}

		WHEN("We try to get entry node") {
			Result res;

			std::unique_ptr<NodeType> entryNode = nullptr;
			res = c.nodeTypeFromModule(
			    "lang", "entry",
			    nlohmann::json::parse(
			        R"end(
                         {
                         "data": [{"hello": "lang:i32"}, {"hello2": "lang:i8*"}],
                         "exec": [""]
                         }
                     )end"),
			    &entryNode);
			REQUIRE(!!res);

			THEN("It should be totally valid") {
				REQUIRE(entryNode != nullptr);
				REQUIRE(entryNode->execInputs().size() == 0);
				REQUIRE(entryNode->execOutputs().size() == 1);
				REQUIRE(entryNode->dataInputs().size() == 0);
				REQUIRE(entryNode->dataOutputs().size() == 2);

				// make sure it is actually an entry
				REQUIRE(entryNode->name() == "entry");
			}

			WHEN("We clone it") {
				std::unique_ptr<NodeType> clone = entryNode->clone();

				THEN("The clone will be valid") {
					REQUIRE(clone != nullptr);
					REQUIRE(clone->execInputs().size() == 0);
					REQUIRE(clone->execOutputs().size() == 1);
					REQUIRE(clone->dataInputs().size() == 0);
					REQUIRE(clone->dataOutputs().size() == 2);

					// make sure it is actually an entry
					REQUIRE(clone->name() == "entry");
				}
			}
		}
	}
}
1,815
985
/*
 * (C) 2007-2012 Alibaba Group Holding Limited.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * Authors:
 * wuhua <<EMAIL>> , boyan <<EMAIL>>
 */
package com.taobao.metamorphosis.server.network;

import org.easymock.classextension.EasyMock;
import org.easymock.classextension.IMocksControl;

import com.taobao.gecko.service.Connection;
import com.taobao.metamorphosis.server.BrokerZooKeeper;
import com.taobao.metamorphosis.server.assembly.BrokerCommandProcessor;
import com.taobao.metamorphosis.server.assembly.ExecutorsManager;
import com.taobao.metamorphosis.server.filter.ConsumerFilterManager;
import com.taobao.metamorphosis.server.stats.StatsManager;
import com.taobao.metamorphosis.server.store.MessageStoreManager;
import com.taobao.metamorphosis.server.utils.MetaConfig;
import com.taobao.metamorphosis.utils.IdWorker;


/**
 * Base class for broker processor unit tests. Subclasses call {@link #mock()}
 * to get a {@link BrokerCommandProcessor} wired with a mix of EasyMock mocks
 * (store manager, connection, id worker, ZooKeeper, executors) and real
 * collaborators (config, stats, filter manager, session context).
 */
public abstract class BaseProcessorUnitTest {
    // Mocked collaborators (created via mocksControl).
    protected MessageStoreManager storeManager;
    protected MetaConfig metaConfig;
    protected Connection conn;
    protected IMocksControl mocksControl;
    protected BrokerCommandProcessor commandProcessor;
    protected StatsManager statsManager;
    protected IdWorker idWorker;
    protected BrokerZooKeeper brokerZooKeeper;
    protected ExecutorsManager executorsManager;
    protected SessionContext sessionContext;
    protected ConsumerFilterManager consumerFilterManager;


    /**
     * Build all fixtures: mocks, real helper objects, and a fully wired
     * {@link BrokerCommandProcessor}. Call from each test's set-up.
     */
    protected void mock() {
        this.metaConfig = new MetaConfig();
        this.mocksControl = EasyMock.createControl();
        this.storeManager = this.mocksControl.createMock(MessageStoreManager.class);
        this.conn = this.mocksControl.createMock(Connection.class);
        try {
            this.consumerFilterManager = new ConsumerFilterManager(this.metaConfig);
        }
        catch (Exception e) {
            // Construction failure is fatal for the whole fixture; surface it.
            throw new RuntimeException(e);
        }
        this.sessionContext = new SessionContextImpl(null, this.conn);
        // Every lookup of the session attribute on the mocked connection
        // returns our real SessionContext, any number of times.
        EasyMock.expect(this.conn.getAttribute(SessionContextHolder.GLOBAL_SESSION_KEY)).andReturn(this.sessionContext)
        .anyTimes();
        this.statsManager = new StatsManager(new MetaConfig(), null, null);
        this.idWorker = this.mocksControl.createMock(IdWorker.class);
        this.brokerZooKeeper = this.mocksControl.createMock(BrokerZooKeeper.class);
        this.executorsManager = this.mocksControl.createMock(ExecutorsManager.class);
        this.commandProcessor = new BrokerCommandProcessor();
        this.commandProcessor.setMetaConfig(this.metaConfig);
        this.commandProcessor.setStoreManager(this.storeManager);
        this.commandProcessor.setStatsManager(this.statsManager);
        this.commandProcessor.setBrokerZooKeeper(this.brokerZooKeeper);
        this.commandProcessor.setIdWorker(this.idWorker);
        this.commandProcessor.setExecutorsManager(this.executorsManager);
        this.commandProcessor.setConsumerFilterManager(this.consumerFilterManager);
    }
}
1,152
2,294
""" Changing the initial conditions for this A always produces oscillatory dynamics. The only difference is the radii of the resulting elliptical trajectories. """;
39
519
<gh_stars>100-1000 """ Tests for Hyperparameters Distribution Spaces ============================================= .. Copyright 2019, Neuraxio Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import pytest from neuraxle.hyperparams.distributions import * from neuraxle.hyperparams.space import HyperparameterSpace, HyperparameterSamples, RecursiveDict hyperparams_flat_and_dict_pairs = [ # Pair 1: ({ "a__learning_rate": 7 }, { "a": { "learning_rate": 7 } }), # Pair 2: ({ "b__a__learning_rate": 7, "b__learning_rate": 9 }, { "b": { "a": { "learning_rate": 7 }, "learning_rate": 9 } }), ] @pytest.mark.parametrize("class_to_test", [RecursiveDict, HyperparameterSamples, HyperparameterSpace]) @pytest.mark.parametrize("flat,expected_dic", hyperparams_flat_and_dict_pairs) def test_flat_to_dict_hyperparams(flat: dict, expected_dic: dict, class_to_test): from_flat_dic = class_to_test(flat) from_nested_dic = class_to_test(expected_dic) assert from_flat_dic == from_nested_dic assert from_flat_dic.to_flat_dict() == flat assert from_nested_dic.to_flat_dict() == flat assert from_nested_dic.to_nested_dict() == expected_dic assert from_flat_dic.to_nested_dict() == expected_dic HYPE_SPACE = HyperparameterSpace({ "a__test": Boolean(), "a__lr": Choice([0, 1, False, "Test"]), "a__b__c": PriorityChoice([0, 1, False, "Test"]), "a__b__q": Quantized(Uniform(-10, 10)), "d__param": RandInt(-10, 10), "d__u": Uniform(-10, 10), "e__other": LogUniform(0.001, 10), "e__alpha": Normal(0.0, 1.0), "e__f__g": LogNormal(0.0, 2.0), 
"p__other_nondistribution_params": "hey", "p__could_also_be_as_fixed": FixedHyperparameter("also hey"), "p__its_over_9k": 9001 }) def test_hyperparams_space_rvs_outputs_samples(): space = copy.deepcopy(HYPE_SPACE) samples = space.rvs() assert isinstance(samples, HyperparameterSamples) assert len(samples) == len(space) for k, v in samples.iter_flat(): assert k in space assert not isinstance(v, HyperparameterDistribution)
1,134
938
#!/usr/bin/env python3
# iam_user_enum.py by <NAME> of Rhino Security Labs
# https://github.com/RhinoSecurityLabs/
# https://github.com/RhinoSecurityLabs/Security-Research/tree/master/tools/aws-pentest-tools/iam_user_enum

import argparse
import boto3
import botocore
import sys


def main(args):
    """Enumerate IAM users in a target AWS account.

    Guesses user names from a word list by attempting a harmless (explicit
    deny) update of the AssumeRole policy on ``args.role_name``; the API
    error type reveals whether the guessed user ARN exists.  With
    ``--self-check`` it instead lists the current account's users and their
    MFA status.  Returns True on completion; exits via sys.exit(1) on
    unrecoverable errors.
    """
    attempts = 0
    valid_users = []

    # Use identity comparison for the None sentinel (PEP 8), not `== None`.
    if args.profile is None:
        session = boto3.session.Session()
        # Typo fix: "re=run" -> "re-run" in the user-facing prompt.
        print('No AWS CLI profile passed in, choose one below or re-run the script using the -p/--profile argument:')
        profiles = session.available_profiles
        for i in range(0, len(profiles)):
            print('[{}] {}'.format(i, profiles[i]))
        profile_number = int(input('Choose a profile (Ctrl+C to exit): ').strip())
        session = boto3.session.Session(profile_name=profiles[profile_number])
    else:
        try:
            session = boto3.session.Session(profile_name=args.profile)
        except botocore.exceptions.ProfileNotFound:
            print('Did not find the specified AWS CLI profile: {}\n'.format(args.profile))

            session = boto3.session.Session()
            print('Profiles that are available: {}\n'.format(session.available_profiles))
            print('Quitting...\n')
            sys.exit(1)

    client = session.client('iam')

    if args.self_check:
        users = []
        with_mfa = 0
        without_mfa = 0

        print('\nSkipping cross-account enumeration. Checking the current account...\n')

        # Page through list_users; IsTruncated/Marker is the IAM pagination
        # protocol.
        response = client.list_users()
        users.extend(response['Users'])
        while 'IsTruncated' in response and response['IsTruncated'] is True:
            response = client.list_users(
                Marker=response['Marker']
            )
            users.extend(response['Users'])

        print('Found {} users.\n'.format(len(users)))
        print('Has MFA?\n')

        for user in users:
            response = client.list_mfa_devices(
                UserName=user['UserName']
            )
            if 'MFADevices' in response and response['MFADevices']:
                if response['MFADevices'][0]['UserName'] == user['UserName']:
                    with_mfa += 1
                    print('  {}: Yes'.format(user['UserName']))
                    continue
            without_mfa += 1
            print('  {}: NO!'.format(user['UserName']))

        print('\nNumber of users with MFA: {}'.format(with_mfa))
        print('Number of users without MFA: {}\n'.format(without_mfa))
        return True

    if args.word_list is None:
        word_list_path = './default-word-list.txt'
    else:
        word_list_path = args.word_list.strip()

    with open(word_list_path, 'r') as f:
        word_list = f.read().splitlines()

    print('Targeting account ID: {}\n'.format(args.account_id))
    print('Starting user enumeration...\n')

    for word in word_list:
        user_arn = 'arn:aws:iam::{}:user/{}'.format(args.account_id, word)
        attempts += 1

        try:
            # An explicit-deny policy naming the guessed principal: harmless
            # to the role, but only accepted by IAM if the principal exists.
            client.update_assume_role_policy(
                RoleName=args.role_name,
                PolicyDocument='{{"Version":"2012-10-17","Statement":[{{"Effect":"Deny","Principal":{{"AWS":"{}"}},"Action":"sts:AssumeRole"}}]}}'.format(user_arn)
            )
            print('  Found user: {}'.format(user_arn))
            valid_users.append(user_arn)
        except botocore.exceptions.ClientError as error:
            if 'MalformedPolicyDocument' in str(error):
                # User doesn't exist, continue on
                pass
            elif 'NoSuchEntity' in str(error):
                print('  Error: You did not pass in a valid role name. An existing role is required for this script.')
                sys.exit(1)
            else:
                print('  Unhandled error: {}'.format(str(error)))
                sys.exit(1)

    if len(valid_users) == 0:
        print('No users were found.\n')
    else:
        print('\nFound {} user(s):\n'.format(len(valid_users)))
        for user in valid_users:
            print('  {}'.format(user))

    print('\n{} completed after {} guess(es).\n'.format(sys.argv[0], attempts))
    return True


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='This script takes in a valid AWS account ID and tries to enumerate existing IAM users within that account. It does so by trying to update the AssumeRole policy document of the role that you pass into --role-name. For your safety, it updates the policy with an explicit deny against the AWS account/IAM user, so that no security holes are opened in your account during enumeration. NOTE: It is recommended to use personal AWS access keys for this script, as it will spam CloudTrail with "iam:UpdateAssumeRolePolicy" logs. The target account will not see anything in their logs though! The keys used must have the iam:UpdateAssumeRolePolicy permission on the role that you pass into --role-name to be able to identify a valid IAM user.')

    parser.add_argument('-s', '--self-check', required=False, default=False, action='store_true', help='Perform a self check against your own AWS account. This flag will skip the bruteforcing and instead list out all IAM users in your account and whether or not they have MFA enabled. This will give you an idea of potential targets in your account and how vulnerable they are to an attack of this kind.')
    parser.add_argument('-p', '--profile', required=False, default=None, help='The AWS CLI profile to use for making API calls. This is usually stored under ~/.aws/credentials. You will be prompted by default.')
    parser.add_argument('-w', '--word-list', required=False, default=None, help='File path to a different word list to use. There is a default word list with 1063 words. The word list should contain words, one on each line, to use to try and guess IAM role names. Role names ARE case-sensitive.')
    parser.add_argument('-r', '--role-name', required=False, default=None, help='The name of a valid role in the current users account to try and update the AssumeRole policy document for.')
    parser.add_argument('-i', '--account-id', required=False, default=None, help='The AWS account ID of the target account (12 numeric characters).')

    args = parser.parse_args()

    if not args.self_check and not args.role_name and not args.account_id:
        print('Error: --role-name and --account-id are required if you are not using the --self-check option.\n')
    elif not args.self_check and (not len(args.account_id) == 12 or not args.account_id.isdigit()):
        print('Error: An AWS account ID is a number of length 12. You supplied: {}\n'.format(args.account_id))
    else:
        if not args.self_check:
            print('\nWarning: This script does not check if the keys you supplied have the correct permissions. Make sure they are allowed to use iam:UpdateAssumeRolePolicy on the role that you pass into --role-name!\n')
        main(args)
2,671
577
package org.python.core;

/**
 * This interface provides a base for the key interface of the buffer API, {@link PyBuffer},
 * including symbolic constants used by the consumer of a {@code PyBuffer} to specify its
 * requirements and assumptions. The Jython buffer API emulates the CPython buffer API. There are
 * two reasons for separating parts of {@code PyBuffer} into this interface:
 * <ul>
 * <li>The constants defined in CPython have the names {@code PyBUF_SIMPLE}, {@code PyBUF_WRITABLE},
 * etc., and the trick of defining ours here means we can write {@code PyBUF.SIMPLE},
 * {@code PyBUF.WRITABLE}, etc. so source code looks similar.</li>
 * <li>It is not so easy in Java as it is in C to treat a {@code byte} array as storing anything
 * other than {@code byte}, and we prepare for the possibility of buffers with a series of different
 * primitive types by defining here those methods that would be in common between
 * (Byte){@code Buffer} and an assumed future {@code FloatBuffer} or {@code TypedBuffer<T>}.
 * (Compare {@code java.nio.Buffer}.)</li>
 * </ul>
 * It is unlikely any classes would implement {@code PyBUF}, except indirectly through other
 * interfaces. Users of the Jython buffer API can mostly overlook the distinction and just use
 * {@code PyBuffer}.
 */
public interface PyBUF {

    /**
     * Determine whether the consumer is entitled to write to the exported storage.
     *
     * @return true if writing is not allowed, false if it is.
     */
    boolean isReadonly();

    /**
     * The number of dimensions to the buffer. This number is the length of the {@code shape}
     * array. The actual storage may be a linear array, but this is the number of dimensions in
     * the interpretation that the exporting object gives the data.
     *
     * @return number of dimensions
     */
    int getNdim();

    /**
     * An array reporting the size of the buffer, considered as a multidimensional array, in each
     * dimension and (by its length) giving the number of dimensions. The size of the buffer is
     * its size in "items". An item is the amount of buffer content addressed by one index or set
     * of indices. In the simplest case an item is a single unit (byte), and there is one
     * dimension. In complex cases, the array is multi-dimensional, and the item at each location
     * is multi-unit (multi-byte). The consumer must not modify this array. A valid {@code shape}
     * array is always returned (difference from CPython).
     *
     * @return the dimensions of the buffer as an array
     */
    int[] getShape();

    /**
     * The number of bytes stored in each indexable item.
     *
     * @return the number of bytes comprising each item.
     */
    int getItemsize();

    /**
     * The total number of bytes represented by the view, which will be the product of the
     * elements of the {@code shape} array, and the item size in bytes.
     *
     * @return the total number of bytes represented.
     */
    int getLen();

    /**
     * The {@code strides} array gives the distance in the storage array between adjacent items
     * (in each dimension). In the rawest parts of the buffer API, the consumer of the buffer is
     * able to navigate the exported storage. The "strides" array is part of the support for
     * interpreting the buffer as an n-dimensional array of items. It provides the coefficients
     * of the "addressing polynomial". (More on this in the CPython documentation.) The consumer
     * must not modify this array. A valid {@code strides} array is always returned (difference
     * from CPython).
     *
     * @return the distance in the storage array between adjacent items (in each dimension)
     */
    int[] getStrides();

    /**
     * The {@code suboffsets} array is a further part of the support for interpreting the buffer
     * as an n-dimensional array of items, where the array potentially uses indirect addressing
     * (like a real Java array of arrays, in fact). This is only applicable when there is more
     * than 1 dimension, and it works in conjunction with the {@code strides} array. (More on
     * this in the CPython documentation.) When used, {@code suboffsets[k]} is an integer index,
     * not a byte offset as in CPython. The consumer must not modify this array. When not needed
     * for navigation {@code null} is returned (as in CPython).
     *
     * @return suboffsets array or {@code null} if not necessary for navigation
     */
    int[] getSuboffsets();

    /**
     * Enquire whether the array is represented contiguously in the backing storage, according to
     * C or Fortran ordering. A one-dimensional contiguous array is both.
     *
     * @param order 'C', 'F' or 'A', as the storage order is C, Fortran or either.
     * @return true iff the array is stored contiguously in the order specified
     */
    boolean isContiguous(char order);

    /* Constants taken from CPython object.h in v3.3 */

    /**
     * The maximum allowed number of dimensions (CPython restriction).
     */
    static final int MAX_NDIM = 64;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it expects to write to the buffer contents. {@code getBuffer} will raise an
     * exception if the exporter's buffer cannot meet this requirement.
     */
    static final int WRITABLE = 0x0001;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it assumes a simple one-dimensional organisation of the exported storage with
     * item size of one. {@code getBuffer} will raise an exception if the consumer sets this flag
     * and the exporter cannot represent itself as byte array data.
     */
    static final int SIMPLE = 0;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it requires {@link PyBuffer#getFormat()} to return a {@code String}
     * indicating the type of the unit. This exists for compatibility with CPython, as in Jython
     * the format is always provided by {@code getFormat()}.
     */
    static final int FORMAT = 0x0004;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it is prepared to navigate the buffer as multi-dimensional using the
     * {@code shape} array. {@code getBuffer} will raise an exception if consumer does not
     * specify the flag but the exporter's buffer cannot be navigated without taking into account
     * its multiple dimensions.
     */
    static final int ND = 0x0008;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it expects to use the {@code strides} array. {@code getBuffer} will raise an
     * exception if consumer does not specify the flag but the exporter's buffer cannot be
     * navigated without using the {@code strides} array.
     */
    static final int STRIDES = 0x0010 | ND;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it will assume C-order organisation of the items. {@code getBuffer} will
     * raise an exception if the exporter's buffer is not C-ordered. {@code C_CONTIGUOUS} implies
     * {@code STRIDES}.
     */
    // It is possible this should have been (0x20|ND) expressing the idea that C-order addressing
    // will be assumed *instead of* using a strides array.
    static final int C_CONTIGUOUS = 0x0020 | STRIDES;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it will assume Fortran-order organisation of the items. {@code getBuffer}
     * will raise an exception if the exporter's buffer is not Fortran-ordered.
     * {@code F_CONTIGUOUS} implies {@code STRIDES}.
     */
    static final int F_CONTIGUOUS = 0x0040 | STRIDES;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it will assume a contiguous organisation of the items, but will enquire which
     * organisation it actually is.
     *
     * {@code getBuffer} will raise an exception if the exporter's buffer is not contiguous.
     * {@code ANY_CONTIGUOUS} implies {@code STRIDES}.
     */
    // Further CPython strangeness since it uses the strides array to answer the enquiry.
    static final int ANY_CONTIGUOUS = 0x0080 | STRIDES;

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it understands the {@code suboffsets} array. {@code getBuffer} will raise an
     * exception if consumer does not specify the flag but the exporter's buffer cannot be
     * navigated without understanding the {@code suboffsets} array. {@code INDIRECT} implies
     * {@code STRIDES}.
     */
    static final int INDIRECT = 0x0100 | STRIDES;

    /**
     * Equivalent to {@code (ND | WRITABLE)}
     */
    static final int CONTIG = ND | WRITABLE;

    /**
     * Equivalent to {@code ND}
     */
    static final int CONTIG_RO = ND;

    /**
     * Equivalent to {@code (STRIDES | WRITABLE)}
     */
    static final int STRIDED = STRIDES | WRITABLE;

    /**
     * Equivalent to {@code STRIDES}
     */
    static final int STRIDED_RO = STRIDES;

    /**
     * Equivalent to {@code (STRIDES | WRITABLE | FORMAT)}
     */
    static final int RECORDS = STRIDES | WRITABLE | FORMAT;

    /**
     * Equivalent to {@code (STRIDES | FORMAT)}
     */
    static final int RECORDS_RO = STRIDES | FORMAT;

    /**
     * Equivalent to {@code (INDIRECT | WRITABLE | FORMAT)}. Also use this in the request if you
     * plan only to use the fully-encapsulated API ({@code byteAt}, {@code storeAt},
     * {@code copyTo}, {@code copyFrom}, etc.), without ever calling
     * {@link PyBuffer#getNIOByteBuffer()} or using {@link PyBuffer.Pointer}.
     */
    static final int FULL = INDIRECT | WRITABLE | FORMAT;

    /**
     * Equivalent to {@code (INDIRECT | FORMAT)}. Also use this in the request if you plan only
     * to use the fully-encapsulated API ({@code byteAt}, {@code copyTo}, etc.), read only,
     * without ever calling {@link PyBuffer#getNIOByteBuffer()} or using
     * {@link PyBuffer.Pointer}.
     */
    static final int FULL_RO = INDIRECT | FORMAT;

    /* Constants for additional feature(s), not standard for CPython */

    /**
     * A constant used by the consumer in its call to {@link BufferProtocol#getBuffer(int)} to
     * specify that it expects to access the buffer contents directly as an array (rather than
     * through the purely abstract part of the API). {@code getBuffer} will raise an exception if
     * the exporter cannot expose its storage as Java array.
     */
    // XXX Pending: @Deprecated
    static final int AS_ARRAY = 0x10000000;

    /* Constants for readability, not standard for CPython */

    /**
     * Field mask, used as in {@code if ((flags&NAVIGATION) == STRIDES) ...}. The importance of
     * the subset of flags defined by this mask is not so much in their "navigational" character
     * as in the way they are treated in a buffer request.
     * <p>
     * The {@code NAVIGATION} set are used to specify which navigation arrays the consumer will
     * use, and therefore the consumer must ask for all those necessary to use the buffer
     * successfully (which is a function of the buffer's actual type). Asking for extra ones is
     * not an error, since all are supplied (in Jython): asking for too few is an error.
     * <p>
     * Flags outside the {@code NAVIGATION} set, work the other way round. Asking for one the
     * buffer cannot match is an error: not asking for a feature the buffer does not have is an
     * error.
     */
    static final int NAVIGATION = SIMPLE | ND | STRIDES | INDIRECT;

    /**
     * A constant used by the exporter in processing {@link BufferProtocol#getBuffer(int)} to
     * check for assumed C-order organisation of the items.
     * {@code C_CONTIGUOUS = IS_C_CONTIGUOUS | STRIDES}.
     */
    static final int IS_C_CONTIGUOUS = C_CONTIGUOUS & ~STRIDES;

    /**
     * A constant used by the exporter in processing {@link BufferProtocol#getBuffer(int)} to
     * check for assumed Fortran-order organisation of the items.
     * {@code F_CONTIGUOUS = IS_F_CONTIGUOUS | STRIDES}.
     */
    static final int IS_F_CONTIGUOUS = F_CONTIGUOUS & ~STRIDES;

    /**
     * Field mask, used as in if {@code ((flags&CONTIGUITY)== ... ) ...}.
     */
    static final int CONTIGUITY = (C_CONTIGUOUS | F_CONTIGUOUS | ANY_CONTIGUOUS) & ~STRIDES;
}
4,054
1,079
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */

package com.facebook.react.animation;

import android.view.View;

/**
 * Subclass of {@link AnimationPropertyUpdater} for animating view's rotation
 */
public class RotationAnimationPropertyUpdater extends AbstractSingleFloatProperyUpdater {

  public RotationAnimationPropertyUpdater(float toValue) {
    super(toValue);
  }

  @Override
  protected float getProperty(View view) {
    // NOTE(review): this returns the Android view's rotation, which View reports in
    // degrees, while setProperty below interprets the animated value as radians and
    // converts with Math.toDegrees. The getter/setter units look asymmetric — confirm
    // against AbstractSingleFloatProperyUpdater whether the getter should convert too.
    return view.getRotation();
  }

  @Override
  protected void setProperty(View view, float propertyValue) {
    // Animated value is treated as radians; View.setRotation expects degrees.
    view.setRotation((float) Math.toDegrees(propertyValue));
  }
}
248
335
{ "word": "Little", "definitions": [ "Small in size, amount, or degree (often used to convey an appealing diminutiveness or express an affectionate or condescending attitude)", "(of a person) young or younger.", "Denoting something, especially a place, that is the smaller or smallest of those so named or is named after a similar larger one.", "Used in names of animals and plants that are smaller than related kinds, e.g. little grebe.", "Of short distance or duration.", "Relatively unimportant or trivial (often used ironically)" ], "parts-of-speech": "Adjective" }
199
14,668
// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/public/web/web_disallow_transition_scope.h" #include "build/build_config.h" #include "testing/gtest/include/gtest/gtest.h" #include "third_party/blink/renderer/core/frame/frame_test_helpers.h" #include "third_party/blink/renderer/core/frame/web_local_frame_impl.h" namespace blink { using blink::frame_test_helpers::WebViewHelper; class WebDisallowTransitionScopeTest : public testing::Test { protected: Document* TopDocument() const; WebDocument TopWebDocument() const; WebViewHelper web_view_helper_; }; Document* WebDisallowTransitionScopeTest::TopDocument() const { return To<LocalFrame>(web_view_helper_.GetWebView()->GetPage()->MainFrame()) ->GetDocument(); } WebDocument WebDisallowTransitionScopeTest::TopWebDocument() const { return web_view_helper_.LocalMainFrame()->GetDocument(); } #if !defined(OS_ANDROID) // TODO(crbug.com/1067036): the death test fails on Android. TEST_F(WebDisallowTransitionScopeTest, TestDisallowTransition) { // Make the death test thread-safe. For more info, see: // https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#death-tests-and-threads ::testing::FLAGS_gtest_death_test_style = "threadsafe"; web_view_helper_.InitializeAndLoad("about:blank"); WebDocument web_doc = TopWebDocument(); Document* core_doc = TopDocument(); // Legal transition. core_doc->Lifecycle().AdvanceTo(DocumentLifecycle::kLayoutClean); { // Illegal transition. WebDisallowTransitionScope disallow(&web_doc); EXPECT_DEATH(core_doc->Lifecycle().EnsureStateAtMost( DocumentLifecycle::kVisualUpdatePending), "Cannot rewind document lifecycle"); } // Legal transition. core_doc->Lifecycle().EnsureStateAtMost( DocumentLifecycle::kVisualUpdatePending); } #endif } // namespace blink
693
14,668
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_POLICY_TEST_SUPPORT_SIGNATURE_PROVIDER_H_
#define COMPONENTS_POLICY_TEST_SUPPORT_SIGNATURE_PROVIDER_H_

#include <map>
#include <memory>
#include <string>
#include <vector>

namespace crypto {
class RSAPrivateKey;
}  // namespace crypto

namespace policy {

// Provides access to predefined test signing keys and allows for data signing
// using those keys. Keys are indexed and retrieved by 1-based key versions.
class SignatureProvider {
 public:
  // Provides access to a predefined test signing key.
  class SigningKey {
   public:
    SigningKey(std::unique_ptr<crypto::RSAPrivateKey> private_key,
               const std::map<std::string, std::string>& signatures);
    SigningKey(SigningKey&& signing_key);
    SigningKey& operator=(SigningKey&& signing_key);
    ~SigningKey();

    // Looks up the precomputed signature for |domain| in |signatures_| and
    // copies it into |signature|. Returns true if |domain| has an entry,
    // false otherwise.
    bool GetSignatureForDomain(const std::string& domain,
                               std::string* signature) const;

    // Signs |str| using the private key, writing the result to |signature|.
    bool Sign(const std::string& str, std::string* signature) const;

    const std::string& public_key() const { return public_key_; }

   private:
    // The key used for signing.
    std::unique_ptr<crypto::RSAPrivateKey> private_key_;

    // The public key corresponding to |private_key_|.
    std::string public_key_;

    // Maps domains to the corresponding signatures.
    std::map<std::string, std::string> signatures_;
  };

  // Domains with pre-computed signatures.
  static constexpr char kTestDomain1[] = "example.com";
  static constexpr char kTestDomain2[] = "chromepolicytest.com";
  static constexpr char kTestDomain3[] = "managedchrome.com";

  SignatureProvider();
  SignatureProvider(SignatureProvider&& signature_provider);
  SignatureProvider& operator=(SignatureProvider&& signature_provider);
  virtual ~SignatureProvider();

  // Returns the key corresponding to |key_version| (1-based) or nullptr if
  // |key_version| is out-of-bounds. Used when a key version is specified by the
  // client.
  const SigningKey* GetKeyByVersion(int key_version) const;

  // Shortcut for |GetKeyByVersion(current_key_version_)|, used when the client
  // doesn't specify the key version to be used.
  const SigningKey* GetCurrentKey() const;

  const std::vector<SigningKey>& signing_keys() const { return signing_keys_; }
  void set_signing_keys(std::vector<SigningKey> signing_keys) {
    signing_keys_ = std::move(signing_keys);
  }

  int current_key_version() const { return current_key_version_; }
  void set_current_key_version(int current_key_version) {
    current_key_version_ = current_key_version;
  }

 private:
  std::vector<SigningKey> signing_keys_;

  // The key version to be used if no key version is defined by the client.
  int current_key_version_ = 1;
};

}  // namespace policy

#endif  // COMPONENTS_POLICY_TEST_SUPPORT_SIGNATURE_PROVIDER_H_
1,012
4,071
/* Copyright (C) 2016-2018 Alibaba Group Holding Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "xdl/data_io/packer/pack_feature.h"

#include <cstdlib>
#include <ctime>
#include <iostream>
#include <map>
#include <string>
#include <vector>

#include "gtest/gtest.h"

#include "xdl/data_io/pool.h"
#include "xdl/core/framework/cpu_device.h"

namespace xdl {
namespace io {

// Test fixture for PackFeature: builds kSGCount sample groups with kTableCount
// nested feature tables each, then exercises the Stat -> Setup -> Run pipeline
// against a single shared PackFeature instance.
class PackFeatureTest: public ::testing::Test {
  static const size_t kBatchSize;
  static const size_t kSGCount;
  static const size_t kTableCount;

 public:
  static void SetUpTestCase();
  static void TearDownTestCase();

  // Expected number of feature lines per sample group at table level |ktable|.
  static size_t sample_count(size_t ktable);
  // Expected total rows across all sample groups at table level |ktable|.
  static size_t batch_size(size_t ktable);

  static void TestStat();
  static void TestSetup();
  static void TestRun();

  static PackFeature *pack_;
  static Batch *batch_;

 private:
  static void CheckIndicator();
  static void CheckFeature();

  static Device *dev_;
  static Schema schema_;
  static std::vector<SampleGroup> sgs_;
};

const size_t PackFeatureTest::kBatchSize = 8192;
//const size_t PackFeatureTest::kBatchSize = 4;
const size_t PackFeatureTest::kSGCount = 128;
//const size_t PackFeatureTest::kSGCount = 2;
const size_t PackFeatureTest::kTableCount = 2;

Device *PackFeatureTest::dev_ = nullptr;
Schema PackFeatureTest::schema_;
PackFeature *PackFeatureTest::pack_ = nullptr;
Batch *PackFeatureTest::batch_ = nullptr;
std::vector<SampleGroup> PackFeatureTest::sgs_;

// Mirrors the halving done when building the nested feature tables in
// SetUpTestCase: each table level holds count/2 + 1 lines of the level above.
size_t PackFeatureTest::sample_count(size_t ktable) {
  size_t c = kBatchSize/kSGCount - 1;
  for (int k = 0; k < ktable+1; ++k) {
    c = c / 2 + 1;
  }
  return c;
}

size_t PackFeatureTest::batch_size(size_t ktable) {
  size_t c = sample_count(ktable);
  return c * kSGCount;
}

void PackFeatureTest::SetUpTestCase() {
  dev_ = new CpuDevice();
  schema_.batch_size_ = kBatchSize;
  schema_.padding_ = false;

  // Register one sparse ("<k>s") and one dense ("<k>a", nvec=2) feature
  // per table level.
  // NOTE(review): the dense schema entry is named "<k>a" but the sample data
  // below creates dense features named "<k>d" — confirm whether this name
  // mismatch is intentional (it means the generated dense features do not
  // match the schema entry by name).
  for (int ktable = 0; ktable < kTableCount; ++ktable) {
    FeatureOption *s = new FeatureOption();
    s->set_name(std::to_string(ktable)+"s");
    s->set_type(kSparse);
    s->set_table(ktable);
    schema_.Add(s);

    FeatureOption *d = new FeatureOption();
    d->set_name(std::to_string(ktable)+"a");
    d->set_type(kDense);
    d->set_nvec(2);
    d->set_table(ktable);
    schema_.Add(d);
  }

  // Build the sample groups: nested tables with a dense and a sparse feature
  // on every feature line, and refer indices linking each level to the next.
  sgs_.resize(kSGCount);
  for (int i = 0; i < kSGCount; ++i) {
    auto &sg = sgs_[i];
    int count = kBatchSize/kSGCount - 1;
    for (int ktable = 0; ktable < kTableCount; ++ktable) {
      count = count / 2 + 1;
      auto ft = sg.add_feature_tables();
      for (int n = 0; n < count; ++n) {
        auto fl = ft->add_feature_lines();
        auto f = fl->add_features();
        f->set_name(std::to_string(ktable)+"d");
        f->set_type(kDense);
        auto v = f->add_values();
        for (int m = 0; m < 2; ++m) {
          v->add_vector(0.1*m);
        }

        f = fl->add_features();
        f->set_name(std::to_string(ktable)+"s");
        f->set_type(kSparse);
        v = f->add_values();
        v->set_key(1);
        v->set_value(0.6);

        /// refer
        if (ktable < kTableCount - 1) {
          fl->set_refer((n+1)/2);
        }
      }
    }
  }

  pack_ = new PackFeature(dev_, &schema_);
}

void PackFeatureTest::TearDownTestCase() {
  BatchPool::Get()->Release(batch_);
  batch_ = nullptr;
  delete pack_;
  pack_ = nullptr;
}

// Feeds every (sample group, table) pair to PackFeature::Stat, threading the
// returned [begin, end) range from each table level into the next.
void PackFeatureTest::TestStat() {
  PParam pparam;
  for (int i = 0; i < kSGCount; ++i) {
    int count = kBatchSize/kSGCount - 1;
    pparam.begin_ = 0;
    pparam.end_ = sgs_[i].feature_tables(0).feature_lines_size();
    for (int ktable = 0; ktable < kTableCount; ++ktable) {
      count = count / 2 + 1;
      pparam.ftable_ = &sgs_[i].feature_tables(ktable);
      pparam.ktable_ = ktable;
      pparam.isgroup_ = i;
      EXPECT_GE(pparam.begin_, 0);
      EXPECT_LE(pparam.end_, count);
      //std::cout << "stat[" << pparam.isgroup_ << ", " << ktable << "] (0)" << pparam.begin_
      //    << " -> " << pparam.end_ << "(" << pparam.ftable_->feature_lines_size() << ")" << std::endl;
      auto range = pack_->Stat(pparam);
      pparam.begin_ = range.first;
      pparam.end_ = range.second;
    }
  }
}

// After Stat, Setup must allocate one value tensor per dense feature and
// value/key/segment tensors per sparse feature, each sized for the level's
// batch size.
void PackFeatureTest::TestSetup() {
  ASSERT_TRUE(pack_->Setup());
  for (auto &it: schema_.feature_opts()) {
    auto opt = it.second;
    auto blk = batch_->GetMutable(opt->name());
    ASSERT_NE(nullptr, blk);
    ASSERT_NE(nullptr, blk->ts_[Block::kValue]);
    auto vdims = blk->ts_[Block::kValue]->Shape().Dims();
    auto ktable = opt->table();
    size_t bs = batch_size(ktable);
    if (opt->type() == kSparse) {
      ASSERT_NE(nullptr, blk->ts_[Block::kKey]);
      auto kdims = blk->ts_[Block::kKey]->Shape().Dims();
      ASSERT_NE(nullptr, blk->ts_[Block::kSegment]);
      auto sdims = blk->ts_[Block::kSegment]->Shape().Dims();
      ASSERT_EQ(bs, sdims[0]);
      ASSERT_EQ(3, blk->ts_count_);
    } else {
      ASSERT_EQ(2, vdims.size());
      ASSERT_EQ(bs, vdims[0]);
      ASSERT_EQ(1, blk->ts_count_);
    }
  }
  ASSERT_EQ(kTableCount*(3+1) + kTableCount-1, batch_->ts_count_);
}

// Same traversal as TestStat, but through PackFeature::Run to fill the batch.
void PackFeatureTest::TestRun() {
  PParam pparam;
  for (int i = 0; i < kSGCount; ++i) {
    pparam.begin_ = 0;
    pparam.end_ = sgs_[i].feature_tables(0).feature_lines_size();
    for (int ktable = 0; ktable < kTableCount; ++ktable) {
      pparam.ftable_ = &sgs_[i].feature_tables(ktable);
      pparam.ktable_ = ktable;
      pparam.isgroup_ = i;
      //std::cout << "run[" << pparam.isgroup_ << ", " << ktable << "] (0)" << pparam.begin_
      //    << " -> " << pparam.end_ << "(" << pparam.ftable_->feature_lines_size() << ")" << std::endl;
      auto range = pack_->Run(pparam);
      pparam.begin_ = range.first;
      pparam.end_ = range.second;
    }
  }
}

TEST_F(PackFeatureTest, Run) {
  batch_ = BatchPool::Get()->Acquire();
  EXPECT_NE(nullptr, batch_);
  EXPECT_TRUE(pack_->Init(batch_));
  TestStat();
  TestSetup();
  TestRun();
  batch_->Reuse();
  batch_ = nullptr;
  //std::cout << "cycles: " << pack_->cycles_ << std::endl;
}

}  // io
}  // xdl

int main(int argc, char **argv) {
  ::testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}
2,832
488
/*--------------------------------------------------------------------------*/
/* File name : err3.java                                                    */
/*           :                                                              */
/* Cause     : Evaluation sequence of the formula which used                */
/*           : the substitution operator is not performed correctly.        */
/*           :                                                              */
/* Message   : NG1:[27]-->[9]                                               */
/*           : NG2:[27]-->[9]                                               */
/*           :                                                              */
/* Note      : JLS 15.6 Evaluation Order (p305)                             */
/*             S15.6.1 Evaluate Left-Hand Operand First                     */
/*           : A formula should be evaluated to 9*3 instead of 3*3.         */
/*--------------------------------------------------------------------------*/

// Conformance check: per JLS "Evaluate Left-Hand Operand First", the value of
// the left operand must be fetched BEFORE the right-hand side (which reassigns
// the same variable) is evaluated. Both expressions below must therefore
// compute 9*3 = 27; a broken compiler that reads the left operand late would
// compute 3*3 = 9 and print the NG messages.
public class err3 {
  public static void main(String[] args) {
    int x = 9;
    // Compound assignment: old x (9) times the assignment's value (3).
    x *= (x = 3);
    if ( x == 27 ) {
      System.out.println("OK1");
    } else {
      System.out.println("NG1:[27]-->["+x+"]");
    }

    int y = 9;
    // Plain multiplication with an embedded assignment: same ordering rule.
    y = y * (y = 3);
    if ( y == 27 ) {
      System.out.println("OK2");
    } else {
      System.out.println("NG2:[27]-->["+y+"]");
    }
  }
}
832
4,140
<reponame>FANsZL/hive /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RetryUtilities { public static class RetryException extends Exception { private static final long serialVersionUID = 1L; public RetryException(Exception ex) { super(ex); } public RetryException(String msg) { super(msg); } } /** * Interface used to create a ExponentialBackOffRetry policy */ public static interface ExponentialBackOffRetry<T> { /** * This method should be called by implementations of this ExponentialBackOffRetry policy * It represents the actual work which needs to be done based on a given batch size * @param batchSize The batch size for the work which needs to be executed * @return * @throws Exception */ public T execute(int batchSize) throws Exception; } /** * This class is a base implementation of a simple exponential back retry policy. The batch size * and decaying factor are provided with the constructor. It reduces the batch size by dividing * it by the decaying factor every time there is an exception in the execute method. 
*/ public static abstract class ExponentiallyDecayingBatchWork<T> implements ExponentialBackOffRetry<T> { private int batchSize; private final int decayingFactor; private int maxRetries; private static final Logger LOG = LoggerFactory.getLogger(ExponentiallyDecayingBatchWork.class); public ExponentiallyDecayingBatchWork(int batchSize, int reducingFactor, int maxRetries) { if (batchSize <= 0) { throw new IllegalArgumentException(String.format( "Invalid batch size %d provided. Batch size must be greater than 0", batchSize)); } this.batchSize = batchSize; if (reducingFactor <= 1) { throw new IllegalArgumentException(String.format( "Invalid decaying factor %d provided. Decaying factor must be greater than 1", batchSize)); } if (maxRetries < 0) { throw new IllegalArgumentException(String.format( "Invalid number of maximum retries %d provided. It must be a non-negative integer value", maxRetries)); } //if maxRetries is 0 code retries until batch decays to zero this.maxRetries = maxRetries; this.decayingFactor = reducingFactor; } public T run() throws Exception { int attempt = 0; while (true) { int size = getNextBatchSize(); if (size == 0) { throw new RetryException("Batch size reduced to zero"); } try { return execute(size); } catch (Exception ex) { LOG.warn(String.format("Exception thrown while processing using a batch size %d", size), ex); } finally { attempt++; if (attempt == maxRetries) { throw new RetryException(String.format("Maximum number of retry attempts %d exhausted", maxRetries)); } } } } private int getNextBatchSize() { int ret = batchSize; batchSize /= decayingFactor; return ret; } } }
1,345