/* * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.android.accessibility.utils; import androidx.annotation.IntDef; import androidx.core.view.accessibility.AccessibilityEventCompat; import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; import androidx.core.view.accessibility.AccessibilityNodeInfoCompat.AccessibilityActionCompat; import androidx.core.view.accessibility.AccessibilityNodeInfoCompat.CollectionInfoCompat; import androidx.core.view.accessibility.AccessibilityRecordCompat; import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityNodeInfo; import android.widget.ProgressBar; import android.widget.SeekBar; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import org.checkerframework.checker.nullness.qual.Nullable; /** Utility methods for managing AccessibilityNodeInfo Roles. */ public class Role { /** * Ids of user-interface element roles, which are flexibly mapped from specific UI classes. This * mapping allows us to abstract similar UI elements to the same role, and to isolate UI element * interpretation logic. */ @IntDef({ ROLE_NONE, ROLE_BUTTON, ROLE_CHECK_BOX, ROLE_CHECKED_TEXT_VIEW, ROLE_DROP_DOWN_LIST, ROLE_EDIT_TEXT, ROLE_GRID, ROLE_IMAGE, ROLE_IMAGE_BUTTON, ROLE_LIST, ROLE_PAGER, ROLE_RADIO_BUTTON, ROLE_SEEK_CONTROL, ROLE_SWITCH, ROLE_TAB_BAR, ROLE_TOGGLE_BUTTON, ROLE_VIEW_GROUP, ROLE_WEB_VIEW, ROLE_PROGRESS_BAR, ROLE_ACTION_BAR_TAB, ROLE_DRAWER_LAYOUT, ROLE_SLIDING_DRAWER, ROLE_ICON_MENU, ROLE_TOAST, ROLE_ALERT_DIALOG, ROLE_DATE_PICKER_DIALOG, ROLE_TIME_PICKER_DIALOG, ROLE_DATE_PICKER, ROLE_TIME_PICKER, ROLE_NUMBER_PICKER, ROLE_SCROLL_VIEW, ROLE_HORIZONTAL_SCROLL_VIEW, ROLE_KEYBOARD_KEY, ROLE_TALKBACK_EDIT_TEXT_OVERLAY, ROLE_TEXT_ENTRY_KEY, }) @Retention(RetentionPolicy.SOURCE) public @interface RoleName {} // Please keep the constants in this list sorted by constant index order, and not by // alphabetical order. If you add a new constant, it must also be added to the RoleName // annotation interface. 
public static final int ROLE_NONE = 0; public static final int ROLE_BUTTON = 1; public static final int ROLE_CHECK_BOX = 2; public static final int ROLE_DROP_DOWN_LIST = 3; public static final int ROLE_EDIT_TEXT = 4; public static final int ROLE_GRID = 5; public static final int ROLE_IMAGE = 6; public static final int ROLE_IMAGE_BUTTON = 7; public static final int ROLE_LIST = 8; public static final int ROLE_RADIO_BUTTON = 9; public static final int ROLE_SEEK_CONTROL = 10; public static final int ROLE_SWITCH = 11; public static final int ROLE_TAB_BAR = 12; public static final int ROLE_TOGGLE_BUTTON = 13; public static final int ROLE_VIEW_GROUP = 14; public static final int ROLE_WEB_VIEW = 15; public static final int ROLE_PAGER = 16; public static final int ROLE_CHECKED_TEXT_VIEW = 17; public static final int ROLE_PROGRESS_BAR = 18; public static final int ROLE_ACTION_BAR_TAB = 19; public static final int ROLE_DRAWER_LAYOUT = 20; public static final int ROLE_SLIDING_DRAWER = 21; public static final int ROLE_ICON_MENU = 22; public static final int ROLE_TOAST = 23; public static final int ROLE_ALERT_DIALOG = 24; public static final int ROLE_DATE_PICKER_DIALOG = 25; public static final int ROLE_TIME_PICKER_DIALOG = 26; public static final int ROLE_DATE_PICKER = 27; public static final int ROLE_TIME_PICKER = 28; public static final int ROLE_NUMBER_PICKER = 29; public static final int ROLE_SCROLL_VIEW = 30; public static final int ROLE_HORIZONTAL_SCROLL_VIEW = 31; public static final int ROLE_KEYBOARD_KEY = 32; public static final int ROLE_TALKBACK_EDIT_TEXT_OVERLAY = 33; public static final int ROLE_TEXT_ENTRY_KEY = 34; // Number of roles: 34 /** Used to identify and ignore a11y overlay windows created by Talkback. */ public static final String TALKBACK_EDIT_TEXT_OVERLAY_CLASSNAME = "TalkbackEditTextOverlay"; /** * Gets the source {@link Role} from the {@link AccessibilityEvent}. * * <p>It checks the role with {@link AccessibilityEvent#getClassName()}. If it returns {@link * #ROLE_NONE}, fallback to check {@link AccessibilityNodeInfoCompat#getClassName()} of the source * node. */ public static @RoleName int getSourceRole(AccessibilityEvent event) { if (event == null) { return ROLE_NONE; } // Try to get role from event's class name. @RoleName int role = sourceClassNameToRole(event); if (role != ROLE_NONE) { return role; } // Extract event's source node, and map source node class to role. AccessibilityRecordCompat eventRecord = AccessibilityEventCompat.asRecord(event); AccessibilityNodeInfoCompat source = eventRecord.getSource(); try { return getRole(source); } finally { AccessibilityNodeInfoUtils.recycleNodes(source); } } /** Find role from source event's class name string. */ private static @RoleName int sourceClassNameToRole(AccessibilityEvent event) { if (event == null) { return ROLE_NONE; } // Event TYPE_NOTIFICATION_STATE_CHANGED always has null source node. CharSequence eventClassName = event.getClassName(); // When comparing event.getClassName() to class name of standard widgets, we should take care of // the order of the "if" statements: check subclasses before checking superclasses. // Toast.TN is a private class, thus we have to hard code the class name. // "$TN" is only in the class-name before android-R. 
if (ClassLoadingCache.checkInstanceOf(eventClassName, "android.widget.Toast$TN") || ClassLoadingCache.checkInstanceOf(eventClassName, "android.widget.Toast")) { return ROLE_TOAST; } // Some events have different value for getClassName() and getSource().getClass() if (ClassLoadingCache.checkInstanceOf(eventClassName, android.app.ActionBar.Tab.class)) { return ROLE_ACTION_BAR_TAB; } // ////////////////////////////////////////////////////////////////////////////////////////// // Subclasses of ViewGroup. // Inheritance: View->ViewGroup->DrawerLayout if (ClassLoadingCache.checkInstanceOf( eventClassName, androidx.drawerlayout.widget.DrawerLayout.class) || ClassLoadingCache.checkInstanceOf( eventClassName, "android.support.v4.widget.DrawerLayout")) { return ROLE_DRAWER_LAYOUT; } // Inheritance: View->ViewGroup->SlidingDrawer if (ClassLoadingCache.checkInstanceOf(eventClassName, android.widget.SlidingDrawer.class)) { return ROLE_SLIDING_DRAWER; } // Inheritance: View->ViewGroup->IconMenuView // IconMenuView is a hidden class, thus we have to hard code the class name. if (ClassLoadingCache.checkInstanceOf( eventClassName, "com.android.internal.view.menu.IconMenuView")) { return ROLE_ICON_MENU; } // Inheritance: View->ViewGroup->FrameLayout->DatePicker if (ClassLoadingCache.checkInstanceOf(eventClassName, android.widget.DatePicker.class)) { return ROLE_DATE_PICKER; } // Inheritance: View->ViewGroup->FrameLayout->TimePicker if (ClassLoadingCache.checkInstanceOf(eventClassName, android.widget.TimePicker.class)) { return ROLE_TIME_PICKER; } // Inheritance: View->ViewGroup->LinearLayout->NumberPicker if (ClassLoadingCache.checkInstanceOf(eventClassName, android.widget.NumberPicker.class)) { return ROLE_NUMBER_PICKER; } // ////////////////////////////////////////////////////////////////////////////////////////// // Subclasses of Dialog. // Inheritance: Dialog->AlertDialog->DatePickerDialog if (ClassLoadingCache.checkInstanceOf(eventClassName, android.app.DatePickerDialog.class)) { return ROLE_DATE_PICKER_DIALOG; } // Inheritance: Dialog->AlertDialog->TimePickerDialog if (ClassLoadingCache.checkInstanceOf(eventClassName, android.app.TimePickerDialog.class)) { return ROLE_TIME_PICKER_DIALOG; } // Inheritance: Dialog->AlertDialog if (ClassLoadingCache.checkInstanceOf(eventClassName, android.app.AlertDialog.class) || ClassLoadingCache.checkInstanceOf( eventClassName, "androidx.appcompat.app.AlertDialog")) { return ROLE_ALERT_DIALOG; } return ROLE_NONE; } /** Gets {@link Role} for {@link AccessibilityNodeInfoCompat}. */ public static @RoleName int getRole(@Nullable AccessibilityNodeInfoCompat node) { if (node == null) { return ROLE_NONE; } // We check Text entry key from property instead of class, so it needs to be in the beginning. if (AccessibilityNodeInfoUtils.isTextEntryKey(node)) { return ROLE_TEXT_ENTRY_KEY; } CharSequence className = node.getClassName(); // When comparing node.getClassName() to class name of standard widgets, we should take care of // the order of the "if" statements: check subclasses before checking superclasses. // e.g. RadioButton is a subclass of Button, we should check Role RadioButton first and fall // down to check Role Button. // Identifies a11y overlay added by Talkback on edit texts. if (ClassLoadingCache.checkInstanceOf(className, TALKBACK_EDIT_TEXT_OVERLAY_CLASSNAME)) { return ROLE_TALKBACK_EDIT_TEXT_OVERLAY; } // Inheritance: View->ImageView if (ClassLoadingCache.checkInstanceOf(className, android.widget.ImageView.class)) { return node.isClickable() ? 
ROLE_IMAGE_BUTTON : ROLE_IMAGE; } // ////////////////////////////////////////////////////////////////////////////////////////// // Subclasses of TextView. // Inheritance: View->TextView->Button->CompoundButton->Switch if (ClassLoadingCache.checkInstanceOf(className, android.widget.Switch.class)) { return ROLE_SWITCH; } // Inheritance: View->TextView->Button->CompoundButton->ToggleButton if (ClassLoadingCache.checkInstanceOf(className, android.widget.ToggleButton.class)) { return ROLE_TOGGLE_BUTTON; } // Inheritance: View->TextView->Button->CompoundButton->RadioButton if (ClassLoadingCache.checkInstanceOf(className, android.widget.RadioButton.class)) { return ROLE_RADIO_BUTTON; } // Inheritance: View->TextView->Button->CompoundButton if (ClassLoadingCache.checkInstanceOf(className, android.widget.CompoundButton.class)) { return ROLE_CHECK_BOX; } // Inheritance: View->TextView->Button if (ClassLoadingCache.checkInstanceOf(className, android.widget.Button.class)) { return ROLE_BUTTON; } // Inheritance: View->TextView->CheckedTextView if (ClassLoadingCache.checkInstanceOf(className, android.widget.CheckedTextView.class)) { return ROLE_CHECKED_TEXT_VIEW; } // Inheritance: View->TextView->EditText if (ClassLoadingCache.checkInstanceOf(className, android.widget.EditText.class)) { return ROLE_EDIT_TEXT; } // ////////////////////////////////////////////////////////////////////////////////////////// // Subclasses of ProgressBar. // Inheritance: View->ProgressBar->AbsSeekBar->SeekBar if (ClassLoadingCache.checkInstanceOf(className, SeekBar.class) || (AccessibilityNodeInfoUtils.hasValidRangeInfo(node) && AccessibilityNodeInfoUtils.supportsAction( node, android.R.id.accessibilityActionSetProgress))) { return ROLE_SEEK_CONTROL; } // Inheritance: View->ProgressBar if (ClassLoadingCache.checkInstanceOf(className, ProgressBar.class) || (AccessibilityNodeInfoUtils.hasValidRangeInfo(node) && !AccessibilityNodeInfoUtils.supportsAction( node, android.R.id.accessibilityActionSetProgress))) { // ProgressBar check must come after SeekBar, because SeekBar specializes ProgressBar. return ROLE_PROGRESS_BAR; } if (ClassLoadingCache.checkInstanceOf( className, android.inputmethodservice.Keyboard.Key.class)) { return ROLE_KEYBOARD_KEY; } // ////////////////////////////////////////////////////////////////////////////////////////// // Subclasses of ViewGroup. // Inheritance: View->ViewGroup->AbsoluteLayout->WebView if (ClassLoadingCache.checkInstanceOf(className, android.webkit.WebView.class)) { return ROLE_WEB_VIEW; } // Inheritance: View->ViewGroup->LinearLayout->TabWidget if (ClassLoadingCache.checkInstanceOf(className, android.widget.TabWidget.class)) { return ROLE_TAB_BAR; } // Inheritance: View->ViewGroup->FrameLayout->HorizontalScrollView // If there is a CollectionInfo, fall into a ROLE_LIST/ROLE_GRID if (ClassLoadingCache.checkInstanceOf(className, android.widget.HorizontalScrollView.class) && node.getCollectionInfo() == null) { return ROLE_HORIZONTAL_SCROLL_VIEW; } // Inheritance: View->ViewGroup->FrameLayout->ScrollView if (ClassLoadingCache.checkInstanceOf(className, android.widget.ScrollView.class)) { return ROLE_SCROLL_VIEW; } // Inheritance: View->ViewGroup->ViewPager if (ClassLoadingCache.checkInstanceOf(className, androidx.viewpager.widget.ViewPager.class) || ClassLoadingCache.checkInstanceOf(className, "android.support.v4.view.ViewPager")) { return ROLE_PAGER; } // TODO: Check if we should add Role RecyclerView. 
// By default, RecyclerView node has CollectionInfo, so that it will be classified as a List or // Grid. // View->ViewGroup->RecyclerView /* TODO: if (ClassLoadingCache.checkInstanceOf(className, "androidx.recyclerview.widget.RecyclerView") || ClassLoadingCache.checkInstanceOf( className, "androidx.recyclerview.widget.RecyclerView")) { return ROLE_RECYCLER_VIEW; } */ // Inheritance: View->ViewGroup->AdapterView->AbsSpinner->Spinner if (ClassLoadingCache.checkInstanceOf(className, android.widget.Spinner.class)) { return ROLE_DROP_DOWN_LIST; } // Inheritance: View->ViewGroup->AdapterView->AbsListView->GridView if (ClassLoadingCache.checkInstanceOf(className, android.widget.GridView.class)) { return ROLE_GRID; } // Inheritance: View->ViewGroup->AdapterView->AbsListView if (ClassLoadingCache.checkInstanceOf(className, android.widget.AbsListView.class)) { return ROLE_LIST; } // Inheritance: View->ViewGroup->ViewPager2 if (AccessibilityNodeInfoUtils.supportsAction( node, AccessibilityActionCompat.ACTION_PAGE_UP.getId()) || AccessibilityNodeInfoUtils.supportsAction( node, AccessibilityActionCompat.ACTION_PAGE_DOWN.getId()) || AccessibilityNodeInfoUtils.supportsAction( node, AccessibilityActionCompat.ACTION_PAGE_LEFT.getId()) || AccessibilityNodeInfoUtils.supportsAction( node, AccessibilityActionCompat.ACTION_PAGE_RIGHT.getId())) { return ROLE_PAGER; } CollectionInfoCompat collection = node.getCollectionInfo(); if (collection != null) { // RecyclerView will be classified as a list or grid. if (collection.getRowCount() > 1 && collection.getColumnCount() > 1) { return ROLE_GRID; } else { return ROLE_LIST; } } // Inheritance: View->ViewGroup if (ClassLoadingCache.checkInstanceOf(className, android.view.ViewGroup.class)) { return ROLE_VIEW_GROUP; } return ROLE_NONE; } /** * Gets {@link Role} for {@link AccessibilityNodeInfo}. @See {@link * #getRole(AccessibilityNodeInfoCompat)} */ public static @RoleName int getRole(AccessibilityNodeInfo node) { if (node == null) { return Role.ROLE_NONE; } AccessibilityNodeInfoCompat nodeCompat = AccessibilityNodeInfoUtils.toCompat(node); return getRole(nodeCompat); } /** For use in logging. 
*/ public static String roleToString(@RoleName int role) { switch (role) { case ROLE_NONE: return "ROLE_NONE"; case ROLE_BUTTON: return "ROLE_BUTTON"; case ROLE_CHECK_BOX: return "ROLE_CHECK_BOX"; case ROLE_DROP_DOWN_LIST: return "ROLE_DROP_DOWN_LIST"; case ROLE_EDIT_TEXT: return "ROLE_EDIT_TEXT"; case ROLE_GRID: return "ROLE_GRID"; case ROLE_IMAGE: return "ROLE_IMAGE"; case ROLE_IMAGE_BUTTON: return "ROLE_IMAGE_BUTTON"; case ROLE_LIST: return "ROLE_LIST"; case ROLE_RADIO_BUTTON: return "ROLE_RADIO_BUTTON"; case ROLE_SEEK_CONTROL: return "ROLE_SEEK_CONTROL"; case ROLE_SWITCH: return "ROLE_SWITCH"; case ROLE_TAB_BAR: return "ROLE_TAB_BAR"; case ROLE_TOGGLE_BUTTON: return "ROLE_TOGGLE_BUTTON"; case ROLE_VIEW_GROUP: return "ROLE_VIEW_GROUP"; case ROLE_WEB_VIEW: return "ROLE_WEB_VIEW"; case ROLE_PAGER: return "ROLE_PAGER"; case ROLE_CHECKED_TEXT_VIEW: return "ROLE_CHECKED_TEXT_VIEW"; case ROLE_PROGRESS_BAR: return "ROLE_PROGRESS_BAR"; case ROLE_ACTION_BAR_TAB: return "ROLE_ACTION_BAR_TAB"; case ROLE_DRAWER_LAYOUT: return "ROLE_DRAWER_LAYOUT"; case ROLE_SLIDING_DRAWER: return "ROLE_SLIDING_DRAWER"; case ROLE_ICON_MENU: return "ROLE_ICON_MENU"; case ROLE_TOAST: return "ROLE_TOAST"; case ROLE_ALERT_DIALOG: return "ROLE_ALERT_DIALOG"; case ROLE_DATE_PICKER_DIALOG: return "ROLE_DATE_PICKER_DIALOG"; case ROLE_TIME_PICKER_DIALOG: return "ROLE_TIME_PICKER_DIALOG"; case ROLE_DATE_PICKER: return "ROLE_DATE_PICKER"; case ROLE_TIME_PICKER: return "ROLE_TIME_PICKER"; case ROLE_NUMBER_PICKER: return "ROLE_NUMBER_PICKER"; case ROLE_SCROLL_VIEW: return "ROLE_SCROLL_VIEW"; case ROLE_HORIZONTAL_SCROLL_VIEW: return "ROLE_HORIZONTAL_SCROLL_VIEW"; case ROLE_KEYBOARD_KEY: return "ROLE_KEYBOARD_KEY"; case ROLE_TALKBACK_EDIT_TEXT_OVERLAY: return "ROLE_TALKBACK_EDIT_TEXT_OVERLAY"; case ROLE_TEXT_ENTRY_KEY: return "ROLE_TEXT_ENTRY_KEY"; default: return "(unknown role " + role + ")"; } } }
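A minimal usage sketch (not part of the original file) showing how an AccessibilityService might consume the Role mapping above; the service class name and log tag are hypothetical, and only Role.getSourceRole and Role.roleToString from this file are assumed:

import android.accessibilityservice.AccessibilityService;
import android.util.Log;
import android.view.accessibility.AccessibilityEvent;
import com.google.android.accessibility.utils.Role;

/** Hypothetical service that logs the role of every event source. */
public class RoleLoggingService extends AccessibilityService {
  @Override
  public void onAccessibilityEvent(AccessibilityEvent event) {
    // Map the event (or its source node) to one of the ROLE_* constants.
    @Role.RoleName int role = Role.getSourceRole(event);
    Log.d("RoleLoggingService", "source role: " + Role.roleToString(role));
  }

  @Override
  public void onInterrupt() {
    // No-op for this sketch.
  }
}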
<filename>integration-tests/artemis-jms/src/test/java/io/quarkus/it/artemis/ArtemisHealthCheckITCase.java<gh_stars>1000+
package io.quarkus.it.artemis;

import io.quarkus.test.junit.NativeImageTest;

@NativeImageTest
public class ArtemisHealthCheckITCase extends ArtemisHealthCheckTest {
}
<reponame>liranr23/ovirt-engine package org.ovirt.engine.core.bll.storage.disk; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.locks.Lock; import javax.inject.Inject; import org.ovirt.engine.core.bll.LockMessagesMatchUtil; import org.ovirt.engine.core.bll.NonTransactiveCommandAttribute; import org.ovirt.engine.core.bll.context.CommandContext; import org.ovirt.engine.core.bll.validator.VmValidator; import org.ovirt.engine.core.bll.validator.storage.DiskVmElementValidator; import org.ovirt.engine.core.bll.validator.storage.StorageDomainValidator; import org.ovirt.engine.core.common.AuditLogType; import org.ovirt.engine.core.common.action.LockProperties; import org.ovirt.engine.core.common.action.LockProperties.Scope; import org.ovirt.engine.core.common.action.VmDiskOperationParameterBase; import org.ovirt.engine.core.common.businessentities.StorageDomain; import org.ovirt.engine.core.common.businessentities.VMStatus; import org.ovirt.engine.core.common.businessentities.VmDevice; import org.ovirt.engine.core.common.businessentities.VmDeviceId; import org.ovirt.engine.core.common.businessentities.storage.Disk; import org.ovirt.engine.core.common.businessentities.storage.DiskImage; import org.ovirt.engine.core.common.businessentities.storage.DiskStorageType; import org.ovirt.engine.core.common.businessentities.storage.DiskVmElement; import org.ovirt.engine.core.common.errors.EngineMessage; import org.ovirt.engine.core.common.locks.LockingGroup; import org.ovirt.engine.core.common.utils.Pair; import org.ovirt.engine.core.common.vdscommands.VDSCommandType; import org.ovirt.engine.core.compat.Guid; import org.ovirt.engine.core.dao.DiskDao; import org.ovirt.engine.core.dao.DiskImageDao; import org.ovirt.engine.core.dao.DiskVmElementDao; import org.ovirt.engine.core.dao.StorageDomainDao; import org.ovirt.engine.core.dao.VmDeviceDao; import org.ovirt.engine.core.dao.VmStaticDao; @NonTransactiveCommandAttribute public class HotPlugDiskToVmCommand<T extends VmDiskOperationParameterBase> extends AbstractDiskVmCommand<T> { private Disk disk; private DiskVmElement diskVmElement; protected VmDevice oldVmDevice; @Inject private StorageDomainDao storageDomainDao; @Inject private VmDeviceDao vmDeviceDao; @Inject private DiskImageDao diskImageDao; @Inject private DiskDao diskDao; @Inject private VmStaticDao vmStaticDao; @Inject private DiskVmElementDao diskVmElementDao; public HotPlugDiskToVmCommand(T parameters, CommandContext commandContext) { super(parameters, commandContext); } @Override protected LockProperties applyLockProperties(LockProperties lockProperties) { return lockProperties.withScope(Scope.Execution); } @Override protected void setActionMessageParameters() { addValidationMessage(EngineMessage.VAR__ACTION__HOT_PLUG); addValidationMessage(EngineMessage.VAR__TYPE__DISK); } @Override protected boolean validate() { performDbLoads(); VmValidator vmValidator = new VmValidator(getVm()); return validate(vmValidator.isVmExists()) && validate(vmValidator.isVmStatusIn(VMStatus.Up, VMStatus.Paused, VMStatus.Down)) && canRunActionOnNonManagedVm() && isDiskExistAndAttachedToVm(getDisk()) && interfaceDiskValidation() && checkCanPerformPlugUnPlugDisk() && isVmNotInPreviewSnapshot() && imageStorageValidation() && virtIoScsiDiskValidation() && isPassDiscardSupported(); } private boolean virtIoScsiDiskValidation() { DiskVmElementValidator diskVmElementValidator = getDiskVmElementValidator(disk, getDiskVmElement()); return 
validate(diskVmElementValidator.isVirtIoScsiValid(getVm())); } private boolean isPassDiscardSupported() { Guid storageDomainId = getDisk().getDiskStorageType() == DiskStorageType.IMAGE ? ((DiskImage) getDisk()).getStorageIds().get(0) : null; return validate(getDiskVmElementValidator(getDisk(), getDiskVmElement()).isPassDiscardSupported(storageDomainId)); } private boolean interfaceDiskValidation() { DiskVmElementValidator diskVmElementValidator = getDiskVmElementValidator(disk, getDiskVmElement()); return validate(diskVmElementValidator.isDiskInterfaceSupported(getVm())); } private boolean imageStorageValidation() { // If the VM is not an image then it does not use the storage domain. // If the VM is not in UP or PAUSED status, then we know that there is no running qemu process, // so we don't need to check the storage domain activity. if (getDisk().getDiskStorageType() != DiskStorageType.IMAGE || !getVm().getStatus().isRunningOrPaused()) { return true; } DiskImage diskImage = (DiskImage) getDisk(); StorageDomain storageDomain = storageDomainDao.getForStoragePool( diskImage.getStorageIds().get(0), diskImage.getStoragePoolId()); StorageDomainValidator storageDomainValidator = getStorageDomainValidator(storageDomain); if (!isSupportedByManagedBlockStorageDomain(storageDomain)) { return false; } return validate(storageDomainValidator.isDomainExistAndActive()) && validate(storageDomainValidator.isNotBackupDomain()); } protected StorageDomainValidator getStorageDomainValidator(StorageDomain storageDomain) { return new StorageDomainValidator(storageDomain); } private void performDbLoads() { if (getDiskVmElement() == null) { return; } oldVmDevice = vmDeviceDao.get(new VmDeviceId(getDiskVmElement().getDiskId(), getVmId())); if (oldVmDevice != null) { if (oldVmDevice.getSnapshotId() != null) { disk = diskImageDao.getDiskSnapshotForVmSnapshot(getDiskVmElement().getDiskId(), oldVmDevice.getSnapshotId()); } else { disk = diskDao.get(getDiskVmElement().getDiskId()); } } } private boolean checkCanPerformPlugUnPlugDisk() { if (getVm().getStatus().isUpOrPaused() && !isDiskSupportedForPlugUnPlug(getDiskVmElement(), disk.getDiskAlias())) { return false; } if (getPlugAction() == VDSCommandType.HotPlugDisk && oldVmDevice.isPlugged()) { return failValidation(EngineMessage.HOT_PLUG_DISK_IS_NOT_UNPLUGGED); } if (getPlugAction() == VDSCommandType.HotUnPlugDisk && !oldVmDevice.isPlugged()) { return failValidation(EngineMessage.HOT_UNPLUG_DISK_IS_NOT_PLUGGED); } return true; } protected VDSCommandType getPlugAction() { return VDSCommandType.HotPlugDisk; } @Override protected void executeVmCommand() { boolean hotPlug = getVm().getStatus().isUpOrPaused(); Lock vmDevicesLock = getVmDevicesLock(hotPlug); vmDevicesLock.lock(); try { if (hotPlug) { updateDisksFromDb(); performPlugCommand(getPlugAction(), getDisk(), oldVmDevice); } // At this point the disk is already plugged to or unplugged from VM // (depends on the command), so we can update the needed device properties updateDeviceProperties(); vmStaticDao.incrementDbGeneration(getVm().getId()); setSucceeded(true); } finally { vmDevicesLock.unlock(); } } protected void updateDeviceProperties() { VmDevice device = vmDeviceDao.get(oldVmDevice.getId()); device.setPlugged(true); device.setAlias(getDeviceAliasForDisk(disk)); device.setAddress(oldVmDevice.getAddress()); vmDeviceDao.update(device); } @Override protected Map<String, Pair<String, String>> getSharedLocks() { return Collections.singletonMap(getVmId().toString(), 
LockMessagesMatchUtil.makeLockingPair(LockingGroup.VM, EngineMessage.ACTION_TYPE_FAILED_VM_IS_LOCKED)); } @Override protected Map<String, Pair<String, String>> getExclusiveLocks() { Map<String, Pair<String, String>> exclusiveLock = null; if (getDisk() != null) { exclusiveLock = new HashMap<>(); exclusiveLock.put(getDisk().getId().toString(), LockMessagesMatchUtil.makeLockingPair(LockingGroup.DISK, EngineMessage.ACTION_TYPE_FAILED_DISKS_LOCKED.name() + String.format("$diskAliases %1$s", getDiskAlias()))); if (getDiskVmElement() != null && getDiskVmElement().isBoot()) { exclusiveLock.put(getVmId().toString(), LockMessagesMatchUtil.makeLockingPair(LockingGroup.VM_DISK_BOOT, EngineMessage.ACTION_TYPE_FAILED_OBJECT_LOCKED)); } } return exclusiveLock; } @Override public AuditLogType getAuditLogTypeValue() { return getSucceeded() ? AuditLogType.USER_HOTPLUG_DISK : AuditLogType.USER_FAILED_HOTPLUG_DISK; } @Override public String getDiskAlias() { return getDisk().getDiskAlias(); } protected Disk getDisk() { if (disk == null) { disk = diskDao.get(super.getDiskVmElement().getDiskId()); } return disk; } // As all the validation should be done against the DiskVmElement loaded from the DB since the parameters may // not contain all relevant data @Override protected DiskVmElement getDiskVmElement() { if (diskVmElement == null && getDisk() != null) { diskVmElement = diskVmElementDao.get(new VmDeviceId(getDisk().getId(), getVmId())); } return diskVmElement; } }
<filename>M/Mustard_noun.json
{
    "word": "Mustard",
    "definitions": [
        "A hot-tasting yellow or brown paste made from the crushed seeds of certain plants, typically eaten with meat or used as a cooking ingredient.",
        "The yellow-flowered Eurasian plant of the cabbage family whose seeds are used to make mustard.",
        "Used in names of related plants, only some of which are used to produce mustard for the table, e.g. hedge mustard.",
        "A brownish yellow colour."
    ],
    "parts-of-speech": "Noun"
}
<reponame>paralax/apollo<gh_stars>100-1000
package io.logz.apollo.excpetions;

/**
 * Created by roiravhon on 1/30/17.
 */
public class ApolloParseException extends Exception {

    public ApolloParseException() {
    }

    public ApolloParseException(String message) {
        super(message);
    }

    public ApolloParseException(String message, Throwable cause) {
        super(message, cause);
    }

    public ApolloParseException(Throwable cause) {
        super(cause);
    }

    public ApolloParseException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
        super(message, cause, enableSuppression, writableStackTrace);
    }
}
<reponame>FluffyQuack/ja2-stracciatella #include "Debug.h" #include "FileMan.h" #include "LoadSaveData.h" #include "LoadSaveEMail.h" static void LoadEMailFromFile(HWFILE const File) { UINT32 uiSizeOfSubject; File->read(&uiSizeOfSubject, sizeof(UINT32)); // XXX HACK000B File->seek(uiSizeOfSubject, FILE_SEEK_FROM_CURRENT); // XXX HACK000B UINT16 usOffset; UINT16 usLength; UINT8 ubSender; UINT32 iDate; INT32 iFirstData; UINT32 uiSecondData; BOOLEAN fRead; BYTE Data[44]; File->read(Data, sizeof(Data)); DataReader S{Data}; EXTR_U16(S, usOffset) EXTR_U16(S, usLength) EXTR_U8(S, ubSender) EXTR_SKIP(S, 3) EXTR_U32(S, iDate) EXTR_SKIP(S, 4) EXTR_I32(S, iFirstData) EXTR_U32(S, uiSecondData) EXTR_SKIP(S, 16) EXTR_BOOL(S, fRead) EXTR_SKIP(S, 3) Assert(S.getConsumed() == lengthof(Data)); AddEmailMessage(usOffset, usLength, iDate, ubSender, fRead, iFirstData, uiSecondData); } void LoadEmailFromSavedGame(HWFILE const File) { ShutDownEmailList(); UINT32 uiNumOfEmails; File->read(&uiNumOfEmails, sizeof(UINT32)); for (UINT32 cnt = 0; cnt < uiNumOfEmails; cnt++) { LoadEMailFromFile(File); } } static void SaveEMailIntoFile(HWFILE const File, Email const* const Mail) { BYTE Data[48]; DataWriter D{Data}; INJ_U32(D, 0) // was size of subject INJ_U16(D, Mail->usOffset) INJ_U16(D, Mail->usLength) INJ_U8(D, Mail->ubSender) INJ_SKIP(D, 3) INJ_U32(D, Mail->iDate) INJ_SKIP(D, 4) INJ_I32(D, Mail->iFirstData) INJ_U32(D, Mail->uiSecondData) INJ_SKIP(D, 16) INJ_BOOL(D, Mail->fRead) INJ_SKIP(D, 3) Assert(D.getConsumed() == lengthof(Data)); File->write(Data, sizeof(Data)); } void SaveEmailToSavedGame(HWFILE const File) { const Email* pEmail; // Count the emails UINT32 uiNumOfEmails = 0; for (pEmail = pEmailList; pEmail != NULL; pEmail = pEmail->Next) { uiNumOfEmails++; } File->write(&uiNumOfEmails, sizeof(UINT32)); for (pEmail = pEmailList; pEmail != NULL; pEmail = pEmail->Next) { SaveEMailIntoFile(File, pEmail); } }
<reponame>therockstorm/openapi-generator
/*
 * test
 * test
 *
 * The version of the OpenAPI document: 1.0.0
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */

package org.openapitools.client.model;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonValue;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.openapitools.client.model.MySchemaNameCharacters;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;

/**
 * Model tests for Parent
 */
public class ParentTest {
    private final Parent model = new Parent();

    /**
     * Model tests for Parent
     */
    @Test
    public void testParent() {
        // TODO: test Parent
    }

    /**
     * Test the property 'prop1'
     */
    @Test
    public void prop1Test() {
    }
}
/* * Copyright 2001-2010, Haiku Inc. All rights reserved. * This file may be used under the terms of the MIT License. * * Authors: * <NAME> */ #include "DataStream.h" #include "CachedBlock.h" #include "Volume.h" //#define TRACE_EXT2 #ifdef TRACE_EXT2 # define TRACE(x...) dprintf("\33[34mext2:\33[0m " x) #else # define TRACE(x...) ; #endif #define ERROR(x...) dprintf("\33[34mext2:\33[0m " x) DataStream::DataStream(Volume* volume, ext2_data_stream* stream, off_t size) : kBlockSize(volume->BlockSize()), kIndirectsPerBlock(kBlockSize / sizeof(uint32)), kIndirectsPerBlock2(kIndirectsPerBlock * kIndirectsPerBlock), kIndirectsPerBlock3(kIndirectsPerBlock2 * kIndirectsPerBlock), kMaxDirect(EXT2_DIRECT_BLOCKS), kMaxIndirect(kMaxDirect + kIndirectsPerBlock), kMaxDoubleIndirect(kMaxIndirect + kIndirectsPerBlock2), fVolume(volume), fStream(stream), fFirstBlock(volume->FirstDataBlock()), fAllocated(0), fAllocatedPos(fFirstBlock), fWaiting(0), fFreeStart(0), fFreeCount(0), fRemovedBlocks(0), fSize(size) { fNumBlocks = size == 0 ? 0 : ((size - 1) >> fVolume->BlockShift()) + 1; } DataStream::~DataStream() { } status_t DataStream::FindBlock(off_t offset, fsblock_t& block, uint32 *_count) { uint32 index = offset >> fVolume->BlockShift(); if (offset >= fSize) { TRACE("FindBlock: offset larger than inode size\n"); return B_ENTRY_NOT_FOUND; } // TODO: we could return the size of the sparse range, as this might be more // than just a block if (index < EXT2_DIRECT_BLOCKS) { // direct blocks block = B_LENDIAN_TO_HOST_INT32(fStream->direct[index]); ASSERT(block != 0); if (_count) { *_count = 1; uint32 nextBlock = block; while (++index < EXT2_DIRECT_BLOCKS && fStream->direct[index] == ++nextBlock) (*_count)++; } } else if ((index -= EXT2_DIRECT_BLOCKS) < kIndirectsPerBlock) { // indirect blocks CachedBlock cached(fVolume); uint32* indirectBlocks = (uint32*)cached.SetTo(B_LENDIAN_TO_HOST_INT32( fStream->indirect)); if (indirectBlocks == NULL) return B_IO_ERROR; block = B_LENDIAN_TO_HOST_INT32(indirectBlocks[index]); ASSERT(block != 0); if (_count) { *_count = 1; uint32 nextBlock = block; while (++index < kIndirectsPerBlock && indirectBlocks[index] == ++nextBlock) (*_count)++; } } else if ((index -= kIndirectsPerBlock) < kIndirectsPerBlock2) { // double indirect blocks CachedBlock cached(fVolume); uint32* indirectBlocks = (uint32*)cached.SetTo(B_LENDIAN_TO_HOST_INT32( fStream->double_indirect)); if (indirectBlocks == NULL) return B_IO_ERROR; uint32 indirectIndex = B_LENDIAN_TO_HOST_INT32(indirectBlocks[index / kIndirectsPerBlock]); if (indirectIndex == 0) { // a sparse indirect block block = 0; } else { indirectBlocks = (uint32*)cached.SetTo(indirectIndex); if (indirectBlocks == NULL) return B_IO_ERROR; block = B_LENDIAN_TO_HOST_INT32( indirectBlocks[index & (kIndirectsPerBlock - 1)]); if (_count) { *_count = 1; uint32 nextBlock = block; while (((++index & (kIndirectsPerBlock - 1)) != 0) && indirectBlocks[index & (kIndirectsPerBlock - 1)] == ++nextBlock) (*_count)++; } } ASSERT(block != 0); } else if ((index -= kIndirectsPerBlock2) < kIndirectsPerBlock3) { // triple indirect blocks CachedBlock cached(fVolume); uint32* indirectBlocks = (uint32*)cached.SetTo(B_LENDIAN_TO_HOST_INT32( fStream->triple_indirect)); if (indirectBlocks == NULL) return B_IO_ERROR; uint32 indirectIndex = B_LENDIAN_TO_HOST_INT32(indirectBlocks[index / kIndirectsPerBlock2]); if (indirectIndex == 0) { // a sparse indirect block block = 0; } else { indirectBlocks = (uint32*)cached.SetTo(indirectIndex); if (indirectBlocks == NULL) return B_IO_ERROR; 
indirectIndex = B_LENDIAN_TO_HOST_INT32( indirectBlocks[(index / kIndirectsPerBlock) & (kIndirectsPerBlock - 1)]); if (indirectIndex == 0) { // a sparse indirect block block = 0; } else { indirectBlocks = (uint32*)cached.SetTo(indirectIndex); if (indirectBlocks == NULL) return B_IO_ERROR; block = B_LENDIAN_TO_HOST_INT32( indirectBlocks[index & (kIndirectsPerBlock - 1)]); if (_count) { *_count = 1; uint32 nextBlock = block; while (((++index & (kIndirectsPerBlock - 1)) != 0) && indirectBlocks[index & (kIndirectsPerBlock - 1)] == ++nextBlock) (*_count)++; } } } ASSERT(block != 0); } else { // Outside of the possible data stream ERROR("ext2: block outside datastream!\n"); return B_ERROR; } TRACE("FindBlock(offset %" B_PRIdOFF "): %" B_PRIu64" %" B_PRIu32 "\n", offset, block, _count != NULL ? *_count : 1); return B_OK; } status_t DataStream::Enlarge(Transaction& transaction, off_t& numBlocks) { TRACE("DataStream::Enlarge(): current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, numBlocks); off_t targetBlocks = numBlocks; fWaiting = _BlocksNeeded(numBlocks); numBlocks = fWaiting; status_t status; if (fNumBlocks <= kMaxDirect) { status = _AddForDirectBlocks(transaction, targetBlocks); if (status != B_OK) { ERROR("DataStream::Enlarge(): _AddForDirectBlocks() failed\n"); return status; } TRACE("DataStream::Enlarge(): current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, targetBlocks); if (fNumBlocks == targetBlocks) return B_OK; } TRACE("DataStream::Enlarge(): indirect current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, targetBlocks); if (fNumBlocks <= kMaxIndirect) { status = _AddForIndirectBlock(transaction, targetBlocks); if (status != B_OK) { ERROR("DataStream::Enlarge(): _AddForIndirectBlock() failed\n"); return status; } TRACE("DataStream::Enlarge(): current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, targetBlocks); if (fNumBlocks == targetBlocks) return B_OK; } TRACE("DataStream::Enlarge(): indirect2 current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, targetBlocks); if (fNumBlocks <= kMaxDoubleIndirect) { status = _AddForDoubleIndirectBlock(transaction, targetBlocks); if (status != B_OK) { ERROR("DataStream::Enlarge(): _AddForDoubleIndirectBlock() failed\n"); return status; } TRACE("DataStream::Enlarge(): current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, targetBlocks); if (fNumBlocks == targetBlocks) return B_OK; } TRACE("DataStream::Enlarge(): indirect3 current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, targetBlocks); TRACE("DataStream::Enlarge(): allocated: %" B_PRIu32 ", waiting: %" B_PRIu32 "\n", fAllocated, fWaiting); return _AddForTripleIndirectBlock(transaction, targetBlocks); } status_t DataStream::Shrink(Transaction& transaction, off_t& numBlocks) { TRACE("DataStream::Shrink(): current size: %" B_PRIdOFF ", target size: %" B_PRIdOFF "\n", fNumBlocks, numBlocks); fFreeStart = 0; fFreeCount = 0; fRemovedBlocks = 0; off_t oldNumBlocks = fNumBlocks; off_t blocksToRemove = fNumBlocks - numBlocks; status_t status; if (numBlocks < kMaxDirect) { status = _RemoveFromDirectBlocks(transaction, numBlocks); if (status != B_OK) { ERROR("DataStream::Shrink(): _RemoveFromDirectBlocks() failed\n"); return status; } if (fRemovedBlocks == blocksToRemove) { fNumBlocks -= fRemovedBlocks; numBlocks = _BlocksNeeded(oldNumBlocks); return _PerformFree(transaction); } } if (numBlocks < kMaxIndirect) { status = _RemoveFromIndirectBlock(transaction, numBlocks); 
if (status != B_OK) { ERROR("DataStream::Shrink(): _RemoveFromIndirectBlock() failed\n"); return status; } if (fRemovedBlocks == blocksToRemove) { fNumBlocks -= fRemovedBlocks; numBlocks = _BlocksNeeded(oldNumBlocks); return _PerformFree(transaction); } } if (numBlocks < kMaxDoubleIndirect) { status = _RemoveFromDoubleIndirectBlock(transaction, numBlocks); if (status != B_OK) { ERROR("DataStream::Shrink(): _RemoveFromDoubleIndirectBlock() failed\n"); return status; } if (fRemovedBlocks == blocksToRemove) { fNumBlocks -= fRemovedBlocks; numBlocks = _BlocksNeeded(oldNumBlocks); return _PerformFree(transaction); } } status = _RemoveFromTripleIndirectBlock(transaction, numBlocks); if (status != B_OK) { ERROR("DataStream::Shrink(): _RemoveFromTripleIndirectBlock() failed\n"); return status; } fNumBlocks -= fRemovedBlocks; numBlocks = _BlocksNeeded(oldNumBlocks); return _PerformFree(transaction); } uint32 DataStream::_BlocksNeeded(off_t numBlocks) { TRACE("DataStream::BlocksNeeded(): num blocks %" B_PRIdOFF "\n", numBlocks); off_t blocksNeeded = 0; if (numBlocks > fNumBlocks) { blocksNeeded += numBlocks - fNumBlocks; if (numBlocks > kMaxDirect) { if (fNumBlocks <= kMaxDirect) blocksNeeded += 1; if (numBlocks > kMaxIndirect) { if (fNumBlocks <= kMaxIndirect) { blocksNeeded += 2 + (numBlocks - kMaxIndirect - 1) / kIndirectsPerBlock; } else { blocksNeeded += (numBlocks - kMaxIndirect - 1) / kIndirectsPerBlock - (fNumBlocks - kMaxIndirect - 1) / kIndirectsPerBlock; } if (numBlocks > kMaxDoubleIndirect) { if (fNumBlocks <= kMaxDoubleIndirect) { blocksNeeded += 2 + (numBlocks - kMaxDoubleIndirect - 1) / kIndirectsPerBlock2; } else { blocksNeeded += (numBlocks - kMaxDoubleIndirect - 1) / kIndirectsPerBlock - (fNumBlocks - kMaxDoubleIndirect - 1) / kIndirectsPerBlock; } } } } } TRACE("DataStream::BlocksNeeded(): %" B_PRIdOFF "\n", blocksNeeded); return blocksNeeded; } status_t DataStream::_GetBlock(Transaction& transaction, uint32& blockNum) { TRACE("DataStream::_GetBlock(): allocated: %" B_PRIu32 ", pos: %" B_PRIu64 ", waiting: %" B_PRIu32 "\n", fAllocated, fAllocatedPos, fWaiting); if (fAllocated == 0) { uint32 blockGroup = (fAllocatedPos - fFirstBlock) / fVolume->BlocksPerGroup(); status_t status = fVolume->AllocateBlocks(transaction, 1, fWaiting, blockGroup, fAllocatedPos, fAllocated); if (status != B_OK) { ERROR("DataStream::_GetBlock(): AllocateBlocks() failed()\n"); return status; } if (fAllocatedPos > UINT_MAX) return B_FILE_TOO_LARGE; fWaiting -= fAllocated; TRACE("DataStream::_GetBlock(): newAllocated: %" B_PRIu32 ", newpos: %" B_PRIu64 ", newwaiting: %" B_PRIu32 "\n", fAllocated, fAllocatedPos, fWaiting); } fAllocated--; blockNum = (uint32)fAllocatedPos++; return B_OK; } status_t DataStream::_PrepareBlock(Transaction& transaction, uint32* pos, uint32& blockNum, bool& clear) { blockNum = B_LENDIAN_TO_HOST_INT32(*pos); clear = false; if (blockNum == 0) { status_t status = _GetBlock(transaction, blockNum); if (status != B_OK) { ERROR("DataStream::_PrepareBlock() _GetBlock() failed blockNum %" B_PRIu32 "\n", blockNum); return status; } *pos = B_HOST_TO_LENDIAN_INT32(blockNum); clear = true; } return B_OK; } status_t DataStream::_AddBlocks(Transaction& transaction, uint32* block, off_t _count) { off_t count = _count; TRACE("DataStream::_AddBlocks(): count: %" B_PRIdOFF "\n", count); while (count > 0) { uint32 blockNum; status_t status = _GetBlock(transaction, blockNum); if (status != B_OK) return status; *(block++) = B_HOST_TO_LENDIAN_INT32(blockNum); --count; } fNumBlocks += _count; return B_OK; 
} status_t DataStream::_AddBlocks(Transaction& transaction, uint32* block, off_t start, off_t end, int recursion) { TRACE("DataStream::_AddBlocks(): start: %" B_PRIdOFF ", end %" B_PRIdOFF ", recursion: %d\n", start, end, recursion); bool clear; uint32 blockNum; status_t status = _PrepareBlock(transaction, block, blockNum, clear); if (status != B_OK) return status; CachedBlock cached(fVolume); uint32* childBlock = (uint32*)cached.SetToWritable(transaction, blockNum, clear); if (childBlock == NULL) return B_IO_ERROR; if (recursion == 0) return _AddBlocks(transaction, &childBlock[start], end - start); uint32 elementWidth; if (recursion == 1) elementWidth = kIndirectsPerBlock; else if (recursion == 2) elementWidth = kIndirectsPerBlock2; else { panic("Undefined recursion level\n"); elementWidth = 0; } uint32 elementPos = start / elementWidth; uint32 endPos = end / elementWidth; TRACE("DataStream::_AddBlocks(): element pos: %" B_PRIu32 ", end pos: %" B_PRIu32 "\n", elementPos, endPos); recursion--; if (elementPos == endPos) { return _AddBlocks(transaction, &childBlock[elementPos], start % elementWidth, end % elementWidth, recursion); } if (start % elementWidth != 0) { status = _AddBlocks(transaction, &childBlock[elementPos], start % elementWidth, elementWidth, recursion); if (status != B_OK) { ERROR("DataStream::_AddBlocks() _AddBlocks() start failed\n"); return status; } elementPos++; } while (elementPos < endPos) { status = _AddBlocks(transaction, &childBlock[elementPos], 0, elementWidth, recursion); if (status != B_OK) { ERROR("DataStream::_AddBlocks() _AddBlocks() mid failed\n"); return status; } elementPos++; } if (end % elementWidth != 0) { status = _AddBlocks(transaction, &childBlock[elementPos], 0, end % elementWidth, recursion); if (status != B_OK) { ERROR("DataStream::_AddBlocks() _AddBlocks() end failed\n"); return status; } } return B_OK; } status_t DataStream::_AddForDirectBlocks(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_AddForDirectBlocks(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32* direct = &fStream->direct[fNumBlocks]; uint32 end = numBlocks > kMaxDirect ? 
kMaxDirect : numBlocks; return _AddBlocks(transaction, direct, end - fNumBlocks); } status_t DataStream::_AddForIndirectBlock(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_AddForIndirectBlocks(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32 *indirect = &fStream->indirect; uint32 start = fNumBlocks - kMaxDirect; uint32 end = numBlocks - kMaxDirect; if (end > kIndirectsPerBlock) end = kIndirectsPerBlock; return _AddBlocks(transaction, indirect, start, end, 0); } status_t DataStream::_AddForDoubleIndirectBlock(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_AddForDoubleIndirectBlock(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32 *doubleIndirect = &fStream->double_indirect; uint32 start = fNumBlocks - kMaxIndirect; uint32 end = numBlocks - kMaxIndirect; if (end > kIndirectsPerBlock2) end = kIndirectsPerBlock2; return _AddBlocks(transaction, doubleIndirect, start, end, 1); } status_t DataStream::_AddForTripleIndirectBlock(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_AddForTripleIndirectBlock(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32 *tripleIndirect = &fStream->triple_indirect; uint32 start = fNumBlocks - kMaxDoubleIndirect; uint32 end = numBlocks - kMaxDoubleIndirect; return _AddBlocks(transaction, tripleIndirect, start, end, 2); } status_t DataStream::_PerformFree(Transaction& transaction) { TRACE("DataStream::_PerformFree(): start: %" B_PRIu32 ", count: %" B_PRIu32 "\n", fFreeStart, fFreeCount); status_t status; if (fFreeCount == 0) status = B_OK; else status = fVolume->FreeBlocks(transaction, fFreeStart, fFreeCount); fFreeStart = 0; fFreeCount = 0; return status; } status_t DataStream::_MarkBlockForRemoval(Transaction& transaction, uint32* block) { TRACE("DataStream::_MarkBlockForRemoval(*(%p) = %" B_PRIu32 "): free start: %" B_PRIu32 ", free count: %" B_PRIu32 "\n", block, B_LENDIAN_TO_HOST_INT32(*block), fFreeStart, fFreeCount); uint32 blockNum = B_LENDIAN_TO_HOST_INT32(*block); *block = 0; if (blockNum != fFreeStart + fFreeCount) { if (fFreeCount != 0) { status_t status = fVolume->FreeBlocks(transaction, fFreeStart, fFreeCount); if (status != B_OK) return status; } fFreeStart = blockNum; fFreeCount = 0; } fFreeCount++; return B_OK; } status_t DataStream::_FreeBlocks(Transaction& transaction, uint32* block, uint32 _count) { uint32 count = _count; TRACE("DataStream::_FreeBlocks(%p, %" B_PRIu32 ")\n", block, count); while (count > 0) { status_t status = _MarkBlockForRemoval(transaction, block); if (status != B_OK) return status; block++; count--; } fRemovedBlocks += _count; return B_OK; } status_t DataStream::_FreeBlocks(Transaction& transaction, uint32* block, off_t start, off_t end, bool freeParent, int recursion) { // TODO: Designed specifically for shrinking. Perhaps make it more general? TRACE("DataStream::_FreeBlocks(%p, %" B_PRIdOFF ", %" B_PRIdOFF ", %c, %d)\n", block, start, end, freeParent ? 
't' : 'f', recursion); uint32 blockNum = B_LENDIAN_TO_HOST_INT32(*block); if (freeParent) { status_t status = _MarkBlockForRemoval(transaction, block); if (status != B_OK) return status; } CachedBlock cached(fVolume); uint32* childBlock = (uint32*)cached.SetToWritable(transaction, blockNum); if (childBlock == NULL) return B_IO_ERROR; if (recursion == 0) return _FreeBlocks(transaction, &childBlock[start], end - start); uint32 elementWidth; if (recursion == 1) elementWidth = kIndirectsPerBlock; else if (recursion == 2) elementWidth = kIndirectsPerBlock2; else { panic("Undefinied recursion level\n"); elementWidth = 0; } uint32 elementPos = start / elementWidth; uint32 endPos = end / elementWidth; recursion--; if (elementPos == endPos) { bool free = freeParent || start % elementWidth == 0; return _FreeBlocks(transaction, &childBlock[elementPos], start % elementWidth, end % elementWidth, free, recursion); } status_t status = B_OK; if (start % elementWidth != 0) { status = _FreeBlocks(transaction, &childBlock[elementPos], start % elementWidth, elementWidth, false, recursion); if (status != B_OK) return status; elementPos++; } while (elementPos < endPos) { status = _FreeBlocks(transaction, &childBlock[elementPos], 0, elementWidth, true, recursion); if (status != B_OK) return status; elementPos++; } if (end % elementWidth != 0) { status = _FreeBlocks(transaction, &childBlock[elementPos], 0, end % elementWidth, true, recursion); } return status; } status_t DataStream::_RemoveFromDirectBlocks(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_RemoveFromDirectBlocks(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32* direct = &fStream->direct[numBlocks]; off_t end = fNumBlocks > kMaxDirect ? kMaxDirect : fNumBlocks; return _FreeBlocks(transaction, direct, end - numBlocks); } status_t DataStream::_RemoveFromIndirectBlock(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_RemoveFromIndirectBlock(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32* indirect = &fStream->indirect; off_t start = numBlocks <= kMaxDirect ? 0 : numBlocks - kMaxDirect; off_t end = fNumBlocks - kMaxDirect; if (end > kIndirectsPerBlock) end = kIndirectsPerBlock; bool freeAll = start == 0; return _FreeBlocks(transaction, indirect, start, end, freeAll, 0); } status_t DataStream::_RemoveFromDoubleIndirectBlock(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_RemoveFromDoubleIndirectBlock(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32* doubleIndirect = &fStream->double_indirect; off_t start = numBlocks <= kMaxIndirect ? 0 : numBlocks - kMaxIndirect; off_t end = fNumBlocks - kMaxIndirect; if (end > kIndirectsPerBlock2) end = kIndirectsPerBlock2; bool freeAll = start == 0; return _FreeBlocks(transaction, doubleIndirect, start, end, freeAll, 1); } status_t DataStream::_RemoveFromTripleIndirectBlock(Transaction& transaction, uint32 numBlocks) { TRACE("DataStream::_RemoveFromTripleIndirectBlock(): current size: %" B_PRIdOFF ", target size: %" B_PRIu32 "\n", fNumBlocks, numBlocks); uint32* tripleIndirect = &fStream->triple_indirect; off_t start = numBlocks <= kMaxDoubleIndirect ? 0 : numBlocks - kMaxDoubleIndirect; off_t end = fNumBlocks - kMaxDoubleIndirect; bool freeAll = start == 0; return _FreeBlocks(transaction, tripleIndirect, start, end, freeAll, 2); }
<filename>java-sample/release/output.json<gh_stars>1000+
[{"outputType":{"type":"APK"},"apkInfo":{"type":"MAIN","splits":[],"versionCode":58},"path":"BLE-v3.1.0.apk","properties":{"packageId":"com.example.admin.mybledemo","split":"","minSdkVersion":"19"}}]
<reponame>ricbit/manifold<gh_stars>1000+ /* * Copyright (c) 2019 - Manifold Systems LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package manifold.science.measures; import manifold.science.api.AbstractProductUnit; import manifold.science.api.UnitCache; import manifold.science.util.Rational; import static manifold.science.measures.LengthUnit.Centi; import static manifold.science.measures.LengthUnit.Meter; import static manifold.science.util.CoercionConstants.r; import static manifold.science.util.MetricFactorConstants.KILO; public final class VolumeUnit extends AbstractProductUnit<LengthUnit, AreaUnit, Volume, VolumeUnit> { private static final UnitCache<VolumeUnit> CACHE = new UnitCache<>(); public static final VolumeUnit BASE = get( Meter, AreaUnit.get( Meter ) ); public static final VolumeUnit LITER = get( Centi, AreaUnit.get( Centi ), KILO, "Litre", "L" ); public static final VolumeUnit MILLI_LITER = get( Centi, AreaUnit.get( Centi ), 1 r, "Millilitre", "mL" ); public static final VolumeUnit FLUID_OZ = get( Centi, AreaUnit.get( Centi ), 29.5735295625r, "Fluid Ounce", "fl oz." ); public static final VolumeUnit GALLON = get( Centi, AreaUnit.get( Centi ), 3785.411784r, "Gallon", "gal." ); public static final VolumeUnit QUART = get( Centi, AreaUnit.get( Centi ), 946.352946r, "Quart", "qt." ); public static final VolumeUnit PINT = get( Centi, AreaUnit.get( Centi ), 473.176473r, "Pint", "pt." ); public static final VolumeUnit CUP = get( Centi, AreaUnit.get( Centi ), 236.5882365r, "Cup", "c." ); public static final VolumeUnit TABLE_SPOON = get( Centi, AreaUnit.get( Centi ), 14.78676478125r, "Tablespoon", "tbsp" ); public static final VolumeUnit TEA_SPOON = get( Centi, AreaUnit.get( Centi ), 4.92892159375r, "Teaspoon", "tsp" ); private final String _symbolProvided; public static VolumeUnit get( LengthUnit lengthUnit, AreaUnit areaUnit ) { return get( lengthUnit, areaUnit, null, null, null ); } public static VolumeUnit get( LengthUnit lengthUnit, AreaUnit areaUnit, Rational factor, String name, String symbol ) { VolumeUnit unit = new VolumeUnit( lengthUnit, areaUnit, factor, name, symbol ); return CACHE.get( unit ); } private VolumeUnit( LengthUnit lengthUnit, AreaUnit areaUnit, Rational factor, String name, String symbol ) { super( lengthUnit, areaUnit == null ? AreaUnit.get( lengthUnit ) : areaUnit, factor, name, symbol ); _symbolProvided = symbol; } @Override public Volume makeDimension( Number amount ) { return new Volume( Rational.get( amount ), this ); } public AreaUnit getAreaUnit() { return getRightUnit(); } public LengthUnit getLengthUnit() { return getLeftUnit(); } public String getFullName() { return getAreaUnit().isSquare() && getAreaUnit().getWidthUnit() == getLengthUnit() ? getLengthUnit().getFullName() + "\u00B3" : getAreaUnit().getFullName() + "\u00D7" + getLengthUnit().getFullName(); } public String getFullSymbol() { return getAreaUnit().isSquare() && getAreaUnit().getWidthUnit() == getLengthUnit() ? 
getLengthUnit().getFullSymbol() + "\u00B3" : getAreaUnit().getFullSymbol() + "\u00D7" + getLengthUnit().getFullSymbol(); } @Override public String getSymbol() { if( _symbolProvided != null ) { return _symbolProvided; } return getAreaUnit().isSquare() && getAreaUnit().getWidthUnit() == getLengthUnit() ? getLengthUnit().getSymbol() + "\u00B3" : super.getSymbol(); } public boolean getIsCubic() { return getAreaUnit().isSquare() && getAreaUnit().getWidthUnit() == getLengthUnit(); } public LengthUnit div( AreaUnit len ) { return getLengthUnit(); } public MassUnit times( DensityUnit density ) { return density.getMassUnit(); } }
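A short usage sketch (assumed example, not part of the repository) exercising the unit defined above; it relies only on VolumeUnit.LITER, makeDimension, getFullName, and getFullSymbol from this file, and the demo class name is hypothetical:

import manifold.science.measures.Volume;
import manifold.science.measures.VolumeUnit;

public class VolumeUnitDemo {
  public static void main(String[] args) {
    // makeDimension accepts any Number, so an int literal is enough here.
    Volume oneLitre = VolumeUnit.LITER.makeDimension(1);
    System.out.println("unit: " + VolumeUnit.LITER.getFullName()
        + " (" + VolumeUnit.LITER.getFullSymbol() + ")");
    System.out.println(oneLitre);
  }
}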
<filename>docs/data/leg-t1/038/03807034.json
{"nom":"Beaurepaire","circ":"7ème circonscription","dpt":"Isère","inscrits":3080,"abs":1612,"votants":1468,"blancs":27,"nuls":17,"exp":1424,"res":[{"nuance":"REM","nom":"<NAME>","voix":472},{"nuance":"FN","nom":"<NAME>","voix":285},{"nuance":"LR","nom":"<NAME>","voix":283},{"nuance":"FI","nom":"Mme <NAME>","voix":169},{"nuance":"DVG","nom":"Mme <NAME>","voix":84},{"nuance":"ECO","nom":"Mme <NAME>","voix":56},{"nuance":"DLF","nom":"Mme <NAME>","voix":45},{"nuance":"EXG","nom":"M. <NAME>","voix":19},{"nuance":"DIV","nom":"Mme <NAME>","voix":11}]}
#include "ocvrs_common.hpp" #include <opencv2/dnn.hpp> #if CV_VERSION_MAJOR == 4 #include <opencv2/dnn/version.hpp> #endif #include <opencv2/dnn/all_layers.hpp> #include <opencv2/dnn/shape_utils.hpp>
<gh_stars>1000+
#include <../private/shared/Keymap.h>
<gh_stars>100-1000
"""Fonduer's parser module."""
from fonduer.parser.parser import Parser

__all__ = ["Parser"]
{ "documentVersion": "1.1.1", "jsonSchemaSemanticVersion": "1.0.0", "manifestName": "Group", "entities": [ { "type": "LocalEntity", "entityName": "PayrollTaxGroup", "entityPath": "PayrollTaxGroup.cdm.json/PayrollTaxGroup" }, { "type": "LocalEntity", "entityName": "PayrollJournalizingDefMatchPrimaryEntry", "entityPath": "PayrollJournalizingDefMatchPrimaryEntry.cdm.json/PayrollJournalizingDefMatchPrimaryEntry" }, { "type": "LocalEntity", "entityName": "PayrollWorkCalendarLine", "entityPath": "PayrollWorkCalendarLine.cdm.json/PayrollWorkCalendarLine" }, { "type": "LocalEntity", "entityName": "PayrollWorkCycle", "entityPath": "PayrollWorkCycle.cdm.json/PayrollWorkCycle" } ], "relationships": [ { "fromEntity": "/core/operationsCommon/Tables/HumanResources/HumanResource/Group/HcmTotalCompStatementSectionTax.cdm.json/HcmTotalCompStatementSectionTax", "fromEntityAttribute": "Relationship_PayrollTaxGroupRelationshipId", "toEntity": "PayrollTaxGroup.cdm.json/PayrollTaxGroup", "toEntityAttribute": "RecId" }, { "fromEntity": "/core/operationsCommon/Tables/HumanResources/Payroll/Reference/PayrollBenefitTaxRule_US.cdm.json/PayrollBenefitTaxRule_US", "fromEntityAttribute": "Relationship_PayrollTaxGroupRelationshipId", "toEntity": "PayrollTaxGroup.cdm.json/PayrollTaxGroup", "toEntityAttribute": "RecId" }, { "fromEntity": "/core/operationsCommon/Tables/HumanResources/Payroll/Reference/PayrollTaxGroupCode.cdm.json/PayrollTaxGroupCode", "fromEntityAttribute": "Relationship_PayrollTaxGroupRelationshipId", "toEntity": "PayrollTaxGroup.cdm.json/PayrollTaxGroup", "toEntityAttribute": "RecId" }, { "fromEntity": "/core/operationsCommon/Entities/HumanResources/Payroll/PayrollTaxGroupEntity.cdm.json/PayrollTaxGroupEntity", "fromEntityAttribute": "BackingTable_PayrollTaxGroupRelationshipId", "toEntity": "PayrollTaxGroup.cdm.json/PayrollTaxGroup", "toEntityAttribute": "RecId" }, { "fromEntity": "PayrollJournalizingDefMatchPrimaryEntry.cdm.json/PayrollJournalizingDefMatchPrimaryEntry", "fromEntityAttribute": "Relationship_FKRelationshipId", "toEntity": "/core/operationsCommon/Tables/Finance/AccountingFoundation/Main/JournalizingDefinitionMatch.cdm.json/JournalizingDefinitionMatch", "toEntityAttribute": "RecId" }, { "fromEntity": "PayrollJournalizingDefMatchPrimaryEntry.cdm.json/PayrollJournalizingDefMatchPrimaryEntry", "fromEntityAttribute": "Relationship_FKRelationshipId", "toEntity": "/core/operationsCommon/Tables/Finance/AccountingFoundation/Main/JournalizingDefinitionMatchDetail.cdm.json/JournalizingDefinitionMatchDetail", "toEntityAttribute": "RecId" }, { "fromEntity": "PayrollJournalizingDefMatchPrimaryEntry.cdm.json/PayrollJournalizingDefMatchPrimaryEntry", "fromEntityAttribute": "Relationship_CompanyRelationshipId", "toEntity": "/core/operationsCommon/Tables/Finance/Ledger/Main/CompanyInfo.cdm.json/CompanyInfo", "toEntityAttribute": "RecId" }, { "fromEntity": "PayrollWorkCalendarLine.cdm.json/PayrollWorkCalendarLine", "fromEntityAttribute": "Relationship_WorkCalendarDateLineRelationshipId", "toEntity": "/core/operationsCommon/Tables/SupplyChain/ProductionControl/Group/WorkCalendarDateLine.cdm.json/WorkCalendarDateLine", "toEntityAttribute": "RecId" }, { "fromEntity": "PayrollWorkCalendarLine.cdm.json/PayrollWorkCalendarLine", "fromEntityAttribute": "Relationship_CompanyRelationshipId", "toEntity": "/core/operationsCommon/Tables/Finance/Ledger/Main/CompanyInfo.cdm.json/CompanyInfo", "toEntityAttribute": "RecId" }, { "fromEntity": 
"/core/operationsCommon/Tables/HumanResources/Payroll/Miscellaneous/PayrollWorkPeriod.cdm.json/PayrollWorkPeriod", "fromEntityAttribute": "Relationship_PayrollWorkCycleRelationshipId", "toEntity": "PayrollWorkCycle.cdm.json/PayrollWorkCycle", "toEntityAttribute": "RecId" }, { "fromEntity": "/core/operationsCommon/Entities/HumanResources/Payroll/PayrollWorkCycleEntity.cdm.json/PayrollWorkCycleEntity", "fromEntityAttribute": "BackingTable_PayrollWorkCycleRelationshipId", "toEntity": "PayrollWorkCycle.cdm.json/PayrollWorkCycle", "toEntityAttribute": "RecId" } ] }
// // Created by lakinduakash on 14/04/19. // /* Copyright (c) 2019, lakinduaksh All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include <iostream> #include <fstream> #include <algorithm> #include <cstring> #include <pwd.h> #include <unistd.h> #include "read_config.h" #define CONFIG_KEY_COUNT 30 #define STRING_MAX_LENGTH 100 #define BUFSIZE 150 extern "C" { ConfigValues configValues; char configs[CONFIG_KEY_COUNT][STRING_MAX_LENGTH]; int read_config_file() { // std::ifstream is RAII, i.e. no need to call close std::ifstream cFile(get_config_file(CONFIG_FILE_NAME)); if (cFile.is_open()) { std::string line; int i=0; while (getline(cFile, line)) { auto delimiterPos = line.find('='); if (!(line.find("SSID") < delimiterPos) && !(line.find("PASSPHRASE") < delimiterPos)) { line.erase(std::remove_if(line.begin(), line.end(), isspace), line.end()); } if (line[0] == '#' || line.empty()) continue; delimiterPos = line.find('='); //check again in case it changed auto name = line.substr(0, delimiterPos); auto value = line.substr(delimiterPos + 1); strcpy(configs[i],value.c_str()); setConfigValues(name.c_str(),configs[i]); //std::cout << name << " " << value << '\n'; ++i; } } else { std::cerr << "Couldn't open config file for reading.\n"; return READ_CONFIG_FILE_FAIL; } return READ_CONFIG_FILE_SUCCESS; } ConfigValues* getConfigValues(){ return &configValues; } static void setConfigValues(const char * key, char *value){ if( !strcmp ( SSID, key )) configValues.ssid = value; if( !strcmp ( PASSPHRASE, key )) configValues.pass = value; if( !strcmp ( WIFI_IFACE, key )) configValues.iface_wifi = value; if( !strcmp ( INTERNET_IFACE, key )) configValues.iface_inet = value; if( !strcmp ( HIDDEN, key )) configValues.hidden = value; if( !strcmp ( NO_HAVEGED, key )) configValues.no_haveged = value; if( !strcmp ( NO_VIRT, key )) configValues.no_virt = value; if( !strcmp ( NEW_MACADDR, key )) configValues.mac = value; if( !strcmp ( CHANNEL, key )) configValues.channel = value; if( !strcmp ( FREQ_BAND, key )) configValues.freq = value; if( !strcmp ( USE_PSK, key )) configValues.use_psk = value; if( !strcmp ( MAC_FILTER, key )) configValues.mac_filter = value; if( !strcmp ( MAC_FILTER_ACCEP, key )) configValues.accepted_mac_file = value; if( !strcmp ( IEEE80211N, key )) configValues.ieee80211n = value; if( !strcmp ( IEEE80211AC, key )) 
configValues.ieee80211ac = value; if( !strcmp ( GATEWAY, key )) configValues.gateway = value; } const char* get_config_file(const char* file){ static char *homedir; static char a[BUFSIZE]; // if ((homedir = getenv("HOME")) == nullptr) { // homedir = getpwuid(getuid())->pw_dir; // } // snprintf(a,BUFSIZE,"%s%s%s",homedir,"/",file); snprintf(a,BUFSIZE,"%s",file); return (const char*)a; } }
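# The C++ config reader above strips whitespace from every line except those whose
# key is SSID or PASSPHRASE (whose values may contain spaces), skips '#' comments and
# blank lines, and splits each remaining line on the first '='. The standalone Python
# sketch below only illustrates those parsing rules; it is not part of the project,
# and the file name 'create_ap.conf' is a hypothetical placeholder.
def read_config(path):
    values = {}
    with open(path) as f:
        for raw in f:
            line = raw.rstrip("\n")
            if not line or line.lstrip().startswith("#"):
                continue  # skip blank lines and comments
            key, sep, value = line.partition("=")
            if not sep:
                continue  # no '=' on this line
            key = key.strip()
            if key not in ("SSID", "PASSPHRASE"):
                value = "".join(value.split())  # drop whitespace, as the C++ code does
            values[key] = value
    return values


if __name__ == "__main__":
    import pprint
    pprint.pprint(read_config("create_ap.conf"))  # hypothetical config file name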
///////////////////////////////////////////////////////////////////////////////
//
/// \file       main.h
/// \brief      Miscellaneous declarations
//
//  Author:     <NAME>
//
//  This file has been put into the public domain.
//  You can do whatever you want with this file.
//
///////////////////////////////////////////////////////////////////////////////

/// Possible exit status values. These are the same as used by gzip and bzip2.
enum exit_status_type {
	E_SUCCESS = 0,
	E_ERROR = 1,
	E_WARNING = 2,
};

/// Sets the exit status after a warning or error has occurred. If new_status
/// is E_WARNING and the old exit status was already E_ERROR, the exit
/// status is not changed.
extern void set_exit_status(enum exit_status_type new_status);

/// Use E_SUCCESS instead of E_WARNING if something worth a warning occurs
/// but nothing worth an error has occurred. This is called when --no-warn
/// is specified.
extern void set_exit_no_warn(void);
from .functions.roi_align_rotated import roi_align_rotated
from .modules.roi_align_rotated import RoIAlignRotated

__all__ = ['roi_align_rotated', 'RoIAlignRotated']
// ***************************************************************************** // ***************************************************************************** // Copyright 2013 - 2015, Cadence Design Systems // // This file is part of the Cadence LEF/DEF Open Source // Distribution, Product Version 5.8. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. // // For updates, support, or to become part of the LEF/DEF Community, // check www.openeda.org for details. // // $Author: dell $ // $Revision: #1 $ // $Date: 2017/06/06 $ // $State: $ // ***************************************************************************** // ***************************************************************************** #include <string.h> #include <stdlib.h> #include "lex.h" #include "defiPartition.hpp" #include "defiDebug.hpp" BEGIN_LEFDEF_PARSER_NAMESPACE ////////////////////////////////////////////// ////////////////////////////////////////////// // // defiPartition // ////////////////////////////////////////////// ////////////////////////////////////////////// defiPartition::defiPartition(defrData *data) : defData(data) { Init(); } void defiPartition::Init() { name_ = 0; nameLength_ = 0; pin_ = 0; pinLength_ = 0; inst_ = 0; instLength_ = 0; pinsAllocated_ = 0; numPins_ = 0; pins_ = 0; clear(); } defiPartition::~defiPartition() { Destroy(); } void defiPartition::Destroy() { if (name_) free(name_); name_ = 0; nameLength_ = 0; if (pin_) free(pin_); pin_ = 0; pinLength_ = 0; if (inst_) free(inst_); inst_ = 0; instLength_ = 0; clear(); if (pins_) free((char*)(pins_)); pins_ = 0; pinsAllocated_ = 0; } void defiPartition::clear() { int i; setup_ = ' '; hold_ = ' '; direction_ = ' '; type_ = ' '; if (name_) *(name_) = '\0'; if (pin_) *(pin_) = '\0'; if (inst_) *(inst_) = '\0'; hasMin_ = 0; hasMax_ = 0; if (numPins_) { for (i = 0; i < numPins_; i++) { free(pins_[i]); pins_[i] = 0; } numPins_ = 0; } hasRiseMin_ = 0; hasFallMin_ = 0; hasRiseMax_ = 0; hasFallMax_ = 0; hasRiseMinRange_ = 0; hasFallMinRange_ = 0; hasRiseMaxRange_ = 0; hasFallMaxRange_ = 0; } void defiPartition::setName(const char* name) { int len = strlen(name) + 1; clear(); if (len > nameLength_) { if (name_) free(name_); nameLength_ = len; name_ = (char*)malloc(len); } strcpy(name_, defData->DEFCASE(name)); } void defiPartition::print(FILE* f) const { int i; fprintf(f, "Partition '%s' %c\n", name(), direction()); fprintf(f, " inst %s pin %s type %s\n", instName(), pinName(), itemType()); for (i = 0; i < numPins(); i++) fprintf(f, " %s\n", pin(i)); if (isSetupRise()) fprintf(f, " SETUP RISE\n"); if (isSetupFall()) fprintf(f, " SETUP FALL\n"); if (isHoldRise()) fprintf(f, " HOLD RISE\n"); if (isHoldFall()) fprintf(f, " HOLD FALL\n"); if (hasMin()) fprintf(f, " MIN %g\n", partitionMin()); if (hasMax()) fprintf(f, " MAX %g\n", partitionMax()); if (hasRiseMin()) fprintf(f, " RISE MIN %g\n", riseMin()); if (hasFallMin()) fprintf(f, " FALL MIN %g\n", fallMin()); if (hasRiseMax()) fprintf(f, " RISE MAX %g\n", riseMax()); if (hasFallMax()) fprintf(f, " FALL MAX 
%g\n", fallMax()); if (hasFallMinRange()) fprintf(f, " FALL MIN %g %g\n", fallMinLeft(), fallMinRight()); if (hasRiseMinRange()) fprintf(f, " RISE MIN %g %g\n", riseMinLeft(), riseMinRight()); if (hasFallMaxRange()) fprintf(f, " FALL MAX %g %g\n", fallMaxLeft(), fallMaxRight()); if (hasRiseMaxRange()) fprintf(f, " RISE MAX %g %g\n", riseMaxLeft(), riseMaxRight()); } const char* defiPartition::name() const { return name_; } void defiPartition::setFromIOPin(const char* pin) { set('F', 'I', "", pin); } char defiPartition::direction() const { return direction_; } const char* defiPartition::instName() const { return inst_; } const char* defiPartition::pinName() const { return pin_; } static char* ad(const char* in) { return (char*)in; } const char* defiPartition::itemType() const { char* c; if (type_ == 'L') c = ad("CLOCK"); else if (type_ == 'I') c = ad("IO"); else if (type_ == 'C') c = ad("COMP"); else c = ad("BOGUS"); return c; } const char* defiPartition::pin(int index) const { return pins_[index]; } int defiPartition::numPins() const { return numPins_; } int defiPartition::isSetupRise() const { return setup_ == 'R' ? 1 : 0 ; } int defiPartition::isSetupFall() const { return setup_ == 'F' ? 1 : 0 ; } int defiPartition::isHoldRise() const { return hold_ == 'R' ? 1 : 0 ; } int defiPartition::isHoldFall() const { return hold_ == 'F' ? 1 : 0 ; } void defiPartition::addTurnOff(const char* setup, const char* hold) { if (*setup == ' ') { setup_ = *setup; } else if (*setup == 'R') { setup_ = *setup; } else if (*setup == 'F') { setup_ = *setup; } else { defiError(0, 6100, "ERROR (DEFPARS-6100): The value spefified for PARTITION SETUP is invalid. The valid value for SETUP is 'R' or 'F'. Specify a valid value for SETUP and then try again.", defData); } if (*hold == ' ') { hold_ = *hold; } else if (*hold == 'R') { hold_ = *hold; } else if (*hold == 'F') { hold_ = *hold; } else { defiError(0, 6101, "ERROR (DEFPARS-6101): The value spefified for PARTITION HOLD is invalid. The valid value for HOLD is 'R' or 'F'. Specify a valid value for HOLD and then try again.", defData); } } void defiPartition::setFromClockPin(const char* inst, const char* pin) { set('F', 'L', inst, pin); } void defiPartition::setToClockPin(const char* inst, const char* pin) { set('T', 'L', inst, pin); } void defiPartition::set(char dir, char typ, const char* inst, const char* pin) { int len = strlen(pin) + 1; direction_ = dir; type_ = typ; if (pinLength_ <= len) { if (pin_) free(pin_); pin_ = (char*)malloc(len); pinLength_ = len; } strcpy(pin_, defData->DEFCASE(pin)); len = strlen(inst) + 1; if (instLength_ <= len) { if (inst_) free(inst_); inst_ = (char*)malloc(len); instLength_ = len; } strcpy(inst_, defData->DEFCASE(inst)); } void defiPartition::setMin(double min, double max) { min_ = min; max_ = max; hasMin_ = 1; } void defiPartition::setFromCompPin(const char* inst, const char* pin) { set('F', 'C', inst, pin); } void defiPartition::setMax(double min, double max) { min_ = min; max_ = max; hasMax_ = 1; } void defiPartition::setToIOPin(const char* pin) { set('T', 'I', "", pin); } void defiPartition::setToCompPin(const char* inst, const char* pin) { set('T', 'C', inst, pin); } void defiPartition::addPin(const char* name) { int len; int i; char** newp; if (numPins_ >= pinsAllocated_) { pinsAllocated_ = pinsAllocated_ ? 
2 * pinsAllocated_ : 8; newp = (char**) malloc(sizeof(char*) * pinsAllocated_); for (i = 0; i < numPins_; i++) newp[i] = pins_[i]; if (pins_) free((char*)(pins_)); pins_ = newp; } len = strlen(name) + 1; pins_[numPins_] = (char*)malloc(len); strcpy(pins_[numPins_], defData->DEFCASE(name)); numPins_ += 1; } int defiPartition::hasMin() const { return(int)(hasMin_); } int defiPartition::hasMax() const { return(int)(hasMax_); } double defiPartition::partitionMin() const { return(min_); } double defiPartition::partitionMax() const { return(max_); } int defiPartition::hasRiseMin() const { return (int)(hasRiseMin_); } int defiPartition::hasFallMin() const { return (int)(hasFallMin_); } int defiPartition::hasRiseMax() const { return (int)(hasRiseMax_); } int defiPartition::hasFallMax() const { return (int)(hasFallMax_); } int defiPartition::hasRiseMinRange() const { return (int)(hasRiseMinRange_); } int defiPartition::hasFallMinRange() const { return (int)(hasFallMinRange_); } int defiPartition::hasRiseMaxRange() const { return (int)(hasRiseMaxRange_); } int defiPartition::hasFallMaxRange() const { return (int)(hasFallMaxRange_); } double defiPartition::riseMin() const { return riseMin_; } double defiPartition::fallMin() const { return fallMin_; } double defiPartition::riseMax() const { return riseMax_; } double defiPartition::fallMax() const { return fallMax_; } double defiPartition::riseMinLeft() const { return riseMinLeft_; } double defiPartition::fallMinLeft() const { return fallMinLeft_; } double defiPartition::riseMaxLeft() const { return riseMaxLeft_; } double defiPartition::fallMaxLeft() const { return fallMaxLeft_; } double defiPartition::riseMinRight() const { return riseMinRight_; } double defiPartition::fallMinRight() const { return fallMinRight_; } double defiPartition::riseMaxRight() const { return riseMaxRight_; } double defiPartition::fallMaxRight() const { return fallMaxRight_; } void defiPartition::addRiseMin(double d) { hasRiseMin_ = 1; riseMin_ = d; } void defiPartition::addRiseMax(double d) { hasRiseMax_ = 1; riseMax_ = d; } void defiPartition::addFallMin(double d) { hasFallMin_ = 1; fallMin_ = d; } void defiPartition::addFallMax(double d) { hasFallMax_ = 1; fallMax_ = d; } void defiPartition::addRiseMinRange(double l, double h) { hasRiseMinRange_ = 1; riseMinLeft_ = l; riseMinRight_ = h; } void defiPartition::addRiseMaxRange(double l, double h) { hasRiseMaxRange_ = 1; riseMaxLeft_ = l; riseMaxRight_ = h; } void defiPartition::addFallMinRange(double l, double h) { hasFallMinRange_ = 1; fallMinLeft_ = l; fallMinRight_ = h; } void defiPartition::addFallMaxRange(double l, double h) { hasFallMaxRange_ = 1; fallMaxLeft_ = l; fallMaxRight_ = h; } END_LEFDEF_PARSER_NAMESPACE
{ "typeName": "AWS::CUR::ReportDefinition", "description": "The AWS::CUR::ReportDefinition resource creates a Cost & Usage Report with user-defined settings. You can use this resource to define settings like time granularity (hourly, daily, monthly), file format (Parquet, CSV), and S3 bucket for delivery of these reports.", "sourceUrl": "https://github.com/aws-cloudformation/aws-cloudformation-resource-providers-cur.git", "properties": { "ReportName": { "description": "The name of the report that you want to create. The name must be unique, is case sensitive, and can't include spaces.", "type": "string", "minLength": 1, "maxLength": 256, "pattern": "[0-9A-Za-z!\\-_.*\\'()]+" }, "TimeUnit": { "description": "The granularity of the line items in the report.", "type": "string", "enum": [ "HOURLY", "DAILY", "MONTHLY" ] }, "Format": { "description": "The format that AWS saves the report in.", "type": "string", "enum": [ "textORcsv", "Parquet" ] }, "Compression": { "description": "The compression format that AWS uses for the report.", "type": "string", "enum": [ "ZIP", "GZIP", "Parquet" ] }, "AdditionalSchemaElements": { "description": "A list of strings that indicate additional content that Amazon Web Services includes in the report, such as individual resource IDs.", "type": "array", "default": [], "items": { "description": "Whether or not AWS includes resource IDs in the report.", "type": "string", "enum": [ "RESOURCES" ] } }, "S3Bucket": { "description": "The S3 bucket where AWS delivers the report.", "type": "string", "minLength": 1, "maxLength": 256, "pattern": "[A-Za-z0-9_\\.\\-]+" }, "S3Prefix": { "description": "The prefix that AWS adds to the report name when AWS delivers the report. Your prefix can't include spaces.", "type": "string", "minLength": 1, "maxLength": 256, "pattern": "[0-9A-Za-z!\\-_.*\\'()/]*" }, "S3Region": { "description": "The region of the S3 bucket that AWS delivers the report into.", "type": "string" }, "AdditionalArtifacts": { "description": "A list of manifests that you want Amazon Web Services to create for this report.", "type": "array", "default": [], "items": { "description": "The types of manifest that you want AWS to create for this report.", "type": "string", "enum": [ "REDSHIFT", "QUICKSIGHT", "ATHENA" ] } }, "RefreshClosedReports": { "description": "Whether you want Amazon Web Services to update your reports after they have been finalized if Amazon Web Services detects charges related to previous months. These charges can include refunds, credits, or support fees.", "type": "boolean" }, "ReportVersioning": { "description": "Whether you want Amazon Web Services to overwrite the previous version of each report or to deliver the report in addition to the previous versions.", "type": "string", "enum": [ "CREATE_NEW_REPORT", "OVERWRITE_REPORT" ] }, "BillingViewArn": { "description": "The Amazon resource name of the billing view. 
You can get this value by using the billing view service public APIs.", "type": "string", "default": null, "pattern": "(arn:aws(-cn)?:billing::[0-9]{12}:billingview/)?[a-zA-Z0-9_\\+=\\.\\-@].{1,30}", "minLength": 1, "maxLength": 128 } }, "additionalProperties": false, "required": [ "ReportName", "TimeUnit", "Format", "Compression", "S3Bucket", "S3Prefix", "S3Region", "RefreshClosedReports", "ReportVersioning" ], "createOnlyProperties": [ "/properties/ReportName", "/properties/AdditionalSchemaElements", "/properties/TimeUnit", "/properties/ReportVersioning", "/properties/BillingViewArn" ], "primaryIdentifier": [ "/properties/ReportName" ], "handlers": { "create": { "permissions": [ "cur:PutReportDefinition" ] }, "read": { "permissions": [ "cur:DescribeReportDefinitions" ] }, "update": { "permissions": [ "cur:DescribeReportDefinitions", "cur:ModifyReportDefinition" ] }, "delete": { "permissions": [ "cur:DescribeReportDefinitions", "cur:DeleteReportDefinition" ] }, "list": { "permissions": [ "cur:DescribeReportDefinitions" ] } } }
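# The schema above lists nine required properties and constrains several of them to
# fixed enum values. As a quick illustration (not an official example), the dictionary
# below is a minimal Properties payload that satisfies the "required" list and the
# documented enums; the bucket name, prefix and region are placeholders.
report_definition_properties = {
    "ReportName": "example-cur-report",      # must match the documented name pattern
    "TimeUnit": "DAILY",                     # HOURLY | DAILY | MONTHLY
    "Format": "textORcsv",                   # textORcsv | Parquet
    "Compression": "GZIP",                   # ZIP | GZIP | Parquet
    "S3Bucket": "example-cur-bucket",        # placeholder bucket
    "S3Prefix": "cur/",
    "S3Region": "us-east-1",
    "RefreshClosedReports": True,
    "ReportVersioning": "OVERWRITE_REPORT",  # or CREATE_NEW_REPORT
}
# Embedded in a CloudFormation template, this would be the Properties block of a
# resource of type AWS::CUR::ReportDefinition.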
<filename>vowpalwabbit/core/src/parse_primitives.cc // Copyright (c) by respective owners including Yahoo!, Microsoft, and // individual contributors. All rights reserved. Released under a BSD (revised) // license as described in the file LICENSE. #include "vw/core/parse_primitives.h" #include "vw/common/hash.h" #include "vw/common/string_view.h" #include "vw/common/vw_exception.h" #include <algorithm> #include <cctype> #include <iostream> #include <sstream> #include <stdexcept> #include <string> std::vector<std::string> escaped_tokenize(char delim, VW::string_view s, bool allow_empty) { std::vector<std::string> tokens; std::string current; size_t end_pos = 0; const char delims[3] = {'\\', delim, '\0'}; bool last_space = false; while (!s.empty() && ((end_pos = s.find_first_of(delims)) != VW::string_view::npos)) { if (s[end_pos] == '\\') { current.append(s.data(), end_pos); s.remove_prefix(end_pos + 1); // always insert the next character after an escape if it exists if (!s.empty()) { current.append(s.data(), 1); s.remove_prefix(1); } } else { last_space = end_pos == 0; current.append(s.data(), end_pos); s.remove_prefix(end_pos + 1); if (!current.empty() || allow_empty) { tokens.push_back(current); } current.clear(); } } // write whatever's left into the vector if (!s.empty() || !current.empty() || (last_space && allow_empty)) { current.append(s.data(), s.length()); tokens.push_back(current); } return tokens; } bool is_delim(char c) { return c == ' '; } bool is_quote(char c) { return c == '"' || c == '\''; } bool is_escape_char(char c) { return c == '\\'; } template <typename It> char unescape_char(It char_to_unescape_it, It end) { if (char_to_unescape_it == end) { THROW("unescape_char: unexpected end of string while unescaping"); } char c = *char_to_unescape_it; if (c == 'n') { return '\n'; } if (c == 't') { return '\t'; } return c; } template <typename It> std::vector<std::string> split_impl(It begin, It end) { std::vector<std::string> ret; if (begin == end) { return ret; } bool inside_quote = false; char quote_char = '\0'; std::string current; for (; begin != end; ++begin) { if (is_escape_char(*begin)) { begin++; current.append(1, unescape_char(begin, end)); } else if (is_delim(*begin)) { // If we're not inside a token, this token is done. Otherwise just add the space to the token. 
if (!inside_quote) { if (!current.empty()) { ret.push_back(current); } current.clear(); } else { current.append(1, *begin); } } else if (is_quote(*begin)) { if (inside_quote && quote_char == *begin) { inside_quote = false; } else if (!inside_quote) { inside_quote = true; quote_char = *begin; } else { current.append(1, *begin); } } else { current.append(1, *begin); } } if (inside_quote) { THROW("unbalanced quotes in string"); } if (!current.empty()) { ret.push_back(current); } return ret; } namespace VW { std::string trim_whitespace(const std::string& str) { return std::string(VW::trim_whitespace(VW::string_view(str))); } VW::string_view trim_whitespace(VW::string_view str) { // Determine start auto start = std::find_if_not(str.begin(), str.end(), [](char c) { return std::isspace(c); }); if (start == str.end()) { return ""; } auto start_pos = std::distance(str.begin(), start); // Determine end auto end = std::find_if_not(str.rbegin(), str.rend(), [](char c) { return std::isspace(c); }); if (end == str.rend()) { return ""; } // -1 is required as position 0 of the string is (rend - 1) auto end_pos = std::distance(end, str.rend()) - 1; return str.substr(start_pos, (end_pos - start_pos) + 1); } std::vector<std::string> split_command_line(VW::string_view cmd_line) { return split_impl(cmd_line.begin(), cmd_line.end()); } std::vector<std::string> split_command_line(const std::string& cmd_line) { return split_impl(cmd_line.begin(), cmd_line.end()); } std::vector<VW::string_view> split_by_limit(const VW::string_view& s, size_t limit) { std::vector<VW::string_view> result; size_t start = 0; while (start < s.size()) { size_t end = start + limit; if (end > s.size()) { end = s.size(); } result.push_back(s.substr(start, end - start)); start = end; } return result; } } // namespace VW
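# The split_impl/split_command_line logic above tokenizes on spaces while honoring
# single or double quotes and backslash escapes ('\n' and '\t' are translated, any
# other escaped character is kept literally). The Python function below is only an
# illustrative re-implementation of those rules, not the actual VW code path.
def split_command_line(s):
    tokens, current = [], []
    inside_quote, quote_char = False, ""
    chars = iter(s)
    for c in chars:
        if c == "\\":
            nxt = next(chars, None)
            if nxt is None:
                raise ValueError("unexpected end of string while unescaping")
            current.append({"n": "\n", "t": "\t"}.get(nxt, nxt))
        elif c == " " and not inside_quote:
            if current:                      # a space ends the current token
                tokens.append("".join(current))
                current = []
        elif c in "\"'":
            if inside_quote and c == quote_char:
                inside_quote = False         # closing quote
            elif not inside_quote:
                inside_quote, quote_char = True, c
            else:
                current.append(c)            # the other quote character, kept as-is
        else:
            current.append(c)
    if inside_quote:
        raise ValueError("unbalanced quotes in string")
    if current:
        tokens.append("".join(current))
    return tokens


assert split_command_line("--data 'my file.txt' -q ab") == ["--data", "my file.txt", "-q", "ab"]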
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. #ifndef ThreadLocal_hpp #define ThreadLocal_hpp #include <IOKit/IOService.h> #include <IOKit/IOLib.h> #include "Trie.hpp" #define ThreadLocal BXL_CLASS(ThreadLocal) /*! * A straightforward wrapper around a concurrent dictionary that uses current thread's id as the implicit key. */ class ThreadLocal : public OSObject { OSDeclareDefaultStructors(ThreadLocal); private: /* backing dictionary */ Trie *dict_; static uint64_t self_tid() { return thread_tid(current_thread()); } protected: /*! * Initializes this object, following the OSObject pattern. * * @result True if successful, False otherwise. */ bool init() override; /*! * Releases held resources, following the OSObject pattern. */ void free() override; public: /*! * @return Number of entries in this collection */ uint getCount() { return dict_->getCount(); } /*! * @return Number of nodes in the underlying dictionary. */ uint getNodeCount() { return dict_->getNodeCount(); } /*! * @return Size in bytes of each node in the underlying dictionary. */ uint getNodeSize() { return dict_->getNodeSize(); } /*! * Associates 'value' with current thread. * * @param value Value to associate with the current thread. * @result is True when no previous value was associated with the current thread * and False when an existing value is updated to point to the new value. */ bool insert(const OSObject *value); /*! * Removes the value currently associated with the current thread (if any). * * @result is True when there was a previous value was associated with the current * thread and False when there wasn't. */ bool remove(); /*! * @result the value currently associated with the current thread (if any). */ OSObject* get() const; #pragma mark Static Methods /*! * Factory method, following the OSObject pattern. * * First creates an object (by calling 'new'), then invokes 'init' on the newly create object. * * If either of the steps fails, nullptr is returned. * * When object creation succeeds but initialization fails, 'release' is called on the created * object and nullptr is returned. */ static ThreadLocal* create(); }; #endif /* ThreadLocal_hpp */
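# The header above describes ThreadLocal as a wrapper around a concurrent dictionary
# that uses the current thread's id as the implicit key. The Python class below is a
# sketch of that same contract (insert/remove/get keyed by the calling thread); it is
# illustrative only and unrelated to the IOKit/Trie implementation.
import threading


class ThreadLocalDict:
    def __init__(self):
        self._lock = threading.Lock()
        self._values = {}

    def insert(self, value):
        """Associate value with the calling thread; True if no previous value existed."""
        tid = threading.get_ident()
        with self._lock:
            had_previous = tid in self._values
            self._values[tid] = value
            return not had_previous

    def remove(self):
        """Drop the calling thread's value; True if one was present."""
        tid = threading.get_ident()
        with self._lock:
            return self._values.pop(tid, None) is not None

    def get(self):
        """Return the value associated with the calling thread, or None."""
        with self._lock:
            return self._values.get(threading.get_ident())


# Note: ordinary Python code would use threading.local() for per-thread storage; the
# explicit dictionary above only mirrors the insert/remove/get interface of the header.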
<filename>homeassistant/components/yamaha_musiccast/__init__.py """The MusicCast integration.""" from __future__ import annotations from datetime import timedelta import logging from aiomusiccast import MusicCastConnectionException from aiomusiccast.capabilities import Capability from aiomusiccast.musiccast_device import MusicCastData, MusicCastDevice from homeassistant.components import ssdp from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac from homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from .const import ( BRAND, CONF_SERIAL, CONF_UPNP_DESC, DEFAULT_ZONE, DOMAIN, ENTITY_CATEGORY_MAPPING, ) PLATFORMS = [Platform.MEDIA_PLAYER, Platform.NUMBER, Platform.SELECT] _LOGGER = logging.getLogger(__name__) SCAN_INTERVAL = timedelta(seconds=60) async def get_upnp_desc(hass: HomeAssistant, host: str): """Get the upnp description URL for a given host, using the SSPD scanner.""" ssdp_entries = await ssdp.async_get_discovery_info_by_st(hass, "upnp:rootdevice") matches = [w for w in ssdp_entries if w.ssdp_headers.get("_host", "") == host] upnp_desc = None for match in matches: if upnp_desc := match.ssdp_location: break if not upnp_desc: _LOGGER.warning( "The upnp_description was not found automatically, setting a default one" ) upnp_desc = f"http://{host}:49154/MediaRenderer/desc.xml" return upnp_desc async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up MusicCast from a config entry.""" if entry.data.get(CONF_UPNP_DESC) is None: hass.config_entries.async_update_entry( entry, data={ CONF_HOST: entry.data[CONF_HOST], CONF_SERIAL: entry.data["serial"], CONF_UPNP_DESC: await get_upnp_desc(hass, entry.data[CONF_HOST]), }, ) client = MusicCastDevice( entry.data[CONF_HOST], async_get_clientsession(hass), entry.data[CONF_UPNP_DESC], ) coordinator = MusicCastDataUpdateCoordinator(hass, client=client) await coordinator.async_config_entry_first_refresh() coordinator.musiccast.build_capabilities() hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][entry.entry_id] = coordinator await coordinator.musiccast.device.enable_polling() hass.config_entries.async_setup_platforms(entry, PLATFORMS) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: hass.data[DOMAIN][entry.entry_id].musiccast.device.disable_polling() hass.data[DOMAIN].pop(entry.entry_id) return unload_ok async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Reload config entry.""" await hass.config_entries.async_reload(entry.entry_id) class MusicCastDataUpdateCoordinator(DataUpdateCoordinator[MusicCastData]): """Class to manage fetching data from the API.""" def __init__(self, hass: HomeAssistant, client: MusicCastDevice) -> None: """Initialize.""" self.musiccast = client super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) self.entities: list[MusicCastDeviceEntity] = [] async def _async_update_data(self) -> MusicCastData: """Update data via library.""" try: await 
self.musiccast.fetch() except MusicCastConnectionException as exception: raise UpdateFailed() from exception return self.musiccast.data class MusicCastEntity(CoordinatorEntity): """Defines a base MusicCast entity.""" coordinator: MusicCastDataUpdateCoordinator def __init__( self, *, name: str, icon: str, coordinator: MusicCastDataUpdateCoordinator, enabled_default: bool = True, ) -> None: """Initialize the MusicCast entity.""" super().__init__(coordinator) self._enabled_default = enabled_default self._icon = icon self._name = name @property def name(self) -> str: """Return the name of the entity.""" return self._name @property def icon(self) -> str: """Return the mdi icon of the entity.""" return self._icon @property def entity_registry_enabled_default(self) -> bool: """Return if the entity should be enabled when first added to the entity registry.""" return self._enabled_default class MusicCastDeviceEntity(MusicCastEntity): """Defines a MusicCast device entity.""" _zone_id: str = DEFAULT_ZONE @property def device_id(self): """Return the ID of the current device.""" if self._zone_id == DEFAULT_ZONE: return self.coordinator.data.device_id return f"{self.coordinator.data.device_id}_{self._zone_id}" @property def device_name(self): """Return the name of the current device.""" return self.coordinator.data.zones[self._zone_id].name @property def device_info(self) -> DeviceInfo: """Return device information about this MusicCast device.""" device_info = DeviceInfo( name=self.device_name, identifiers={ ( DOMAIN, self.device_id, ) }, manufacturer=BRAND, model=self.coordinator.data.model_name, sw_version=self.coordinator.data.system_version, ) if self._zone_id == DEFAULT_ZONE: device_info["connections"] = { (CONNECTION_NETWORK_MAC, format_mac(mac)) for mac in self.coordinator.data.mac_addresses.values() } else: device_info["via_device"] = (DOMAIN, self.coordinator.data.device_id) return device_info class MusicCastCapabilityEntity(MusicCastDeviceEntity): """Base Entity type for all capabilities.""" def __init__( self, coordinator: MusicCastDataUpdateCoordinator, capability: Capability, zone_id: str = None, ) -> None: """Initialize a capability based entity.""" if zone_id is not None: self._zone_id = zone_id self.capability = capability super().__init__(name=capability.name, icon="", coordinator=coordinator) self._attr_entity_category = ENTITY_CATEGORY_MAPPING.get(capability.entity_type) async def async_added_to_hass(self): """Run when this Entity has been added to HA.""" await super().async_added_to_hass() # All capability based entities should register callbacks to update HA when their state changes self.coordinator.musiccast.register_callback(self.async_write_ha_state) async def async_will_remove_from_hass(self): """Entity being removed from hass.""" await super().async_added_to_hass() self.coordinator.musiccast.remove_callback(self.async_write_ha_state) @property def unique_id(self) -> str: """Return the unique ID for this entity.""" return f"{self.device_id}_{self.capability.id}"
package com.alibaba.datax.plugin.reader.otsstreamreader.internal.model; import com.alibaba.datax.plugin.reader.otsstreamreader.internal.config.StatusTableConstants; import com.alicloud.openservices.tablestore.model.ColumnValue; import com.alicloud.openservices.tablestore.model.Row; import com.alicloud.openservices.tablestore.model.RowPutChange; public class ShardCheckpoint { private String shardId; private String version; private String checkpoint; private long skipCount; public ShardCheckpoint(String shardId, String version, String shardIterator, long skipCount) { this.shardId = shardId; this.version = version; this.checkpoint = shardIterator; this.skipCount = skipCount; } public String getShardId() { return shardId; } public void setShardId(String shardId) { this.shardId = shardId; } public String getVersion() { return version; } public void setVersion(String version) { this.version = version; } public String getCheckpoint() { return checkpoint; } public void setCheckpoint(String checkpoint) { this.checkpoint = checkpoint; } public long getSkipCount() { return skipCount; } public void setSkipCount(long skipCount) { this.skipCount = skipCount; } public static ShardCheckpoint fromRow(String shardId, Row row) { String shardIterator = row.getLatestColumn(StatusTableConstants.CHECKPOINT_COLUMN_NAME).getValue().asString(); long skipCount = 0; // compatible with old stream reader if (row.contains(StatusTableConstants.SKIP_COUNT_COLUMN_NAME)) { skipCount = row.getLatestColumn(StatusTableConstants.SKIP_COUNT_COLUMN_NAME).getValue().asLong(); } // compatible with old stream reader String version = ""; if (row.contains(StatusTableConstants.VERSION_COLUMN_NAME)) { version = row.getLatestColumn(StatusTableConstants.VERSION_COLUMN_NAME).getValue().asString(); } return new ShardCheckpoint(shardId, version, shardIterator, skipCount); } public void serializeColumn(RowPutChange rowChange) { rowChange.addColumn(StatusTableConstants.VERSION_COLUMN_NAME, ColumnValue.fromString(version)); rowChange.addColumn(StatusTableConstants.CHECKPOINT_COLUMN_NAME, ColumnValue.fromString(checkpoint)); rowChange.addColumn(StatusTableConstants.SKIP_COUNT_COLUMN_NAME, ColumnValue.fromLong(skipCount)); } @Override public int hashCode() { int result = 31; result = result ^ this.shardId.hashCode(); result = result ^ this.version.hashCode(); result = result ^ this.checkpoint.hashCode(); result = result ^ (int)this.skipCount; return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof ShardCheckpoint)) { return false; } ShardCheckpoint other = (ShardCheckpoint)obj; return this.shardId.equals(other.shardId) && this.version.equals(other.version) && this.checkpoint.equals(other.checkpoint) && this.skipCount == other.skipCount; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("ShardId: ").append(shardId) .append(", Version: ").append(version) .append(", Checkpoint: ").append(checkpoint) .append(", SkipCount: ").append(skipCount); return sb.toString(); } }
package li.cil.oc.api.internal; import li.cil.oc.api.Persistable; import li.cil.oc.api.network.Environment; import net.minecraft.entity.player.EntityPlayer; /** * This interface is implemented by the keyboard component, to allow more * flexible use of it. * <p/> * You can obtain an instance of the keyboard component via the item driver * of the keyboard block, for example: * <pre> * final ItemStack stack = li.cil.oc.api.Items.get("keyboard").createItemStack(1); * final Keyboard keyboard = (Keyboard) li.cil.oc.api.Driver.driverFor(stack).createEnvironment(stack, this); * </pre> */ public interface Keyboard extends Environment, Persistable { /** * Sets a custom usability override. * <p/> * Instead of the default check, which is based on the component's owner's * position, the specified callback will be queried for usability checks * instead. * <p/> * Pass <tt>null</tt> here to unset a previously set override. * * @param callback the usability checker to use. */ void setUsableOverride(UsabilityChecker callback); /** * Contract interface that has to implemented for usability check overrides. * * @see #setUsableOverride(Keyboard.UsabilityChecker) */ interface UsabilityChecker { /** * Whether the specified keyboard is usable by the specified player. * * @param keyboard the keyboard to check for. * @param player the player to check for. * @return whether the keyboard is usable by the player. */ boolean isUsableByPlayer(Keyboard keyboard, EntityPlayer player); } }
<gh_stars>1000+ // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_NEW_TAB_PAGE_PROMOS_PROMO_SERVICE_H_ #define CHROME_BROWSER_NEW_TAB_PAGE_PROMOS_PROMO_SERVICE_H_ #include <memory> #include "base/memory/raw_ptr.h" #include "base/observer_list.h" #include "chrome/browser/new_tab_page/promos/promo_data.h" #include "chrome/browser/new_tab_page/promos/promo_service_observer.h" #include "components/keyed_service/core/keyed_service.h" #include "components/prefs/pref_registry_simple.h" #include "services/data_decoder/public/cpp/data_decoder.h" #include "third_party/abseil-cpp/absl/types/optional.h" class GURL; class Profile; namespace network { class SimpleURLLoader; class SharedURLLoaderFactory; } // namespace network // A service that downloads, caches, and hands out PromoData for middle-slot // promos. It never initiates a download automatically, only when Refresh is // called. class PromoService : public KeyedService { public: enum class Status { // Received a valid response and there is a promo running. OK_WITH_PROMO, // Received a valid response but there is no promo running. OK_WITHOUT_PROMO, // Some transient error occurred, e.g. the network request failed because // there is no network connectivity. A previously cached response may still // be used. TRANSIENT_ERROR, // A fatal error occurred, such as the server responding with an error code // or with invalid data. Any previously cached response should be cleared. FATAL_ERROR, // There's a valid promo coming back from the promo server, but it's been // locally blocked by the user client-side. TODO(crbug.com/1003508): send // blocked promo IDs to the server so this doesn't happen / they can do a // better job ranking? OK_BUT_BLOCKED, }; PromoService( scoped_refptr<network::SharedURLLoaderFactory> url_loader_factory, Profile* profile); ~PromoService() override; // KeyedService implementation. void Shutdown() override; static void RegisterProfilePrefs(PrefRegistrySimple* registry); // Returns the currently cached middle-slot PromoData, if any. Virtual for // testing. virtual const absl::optional<PromoData>& promo_data() const; Status promo_status() const { return promo_status_; } // Requests an asynchronous refresh from the network. After the update // completes, OnPromoDataUpdated will be called on the observers. virtual void Refresh(); // Add/remove observers. All observers must unregister themselves before the // PromoService is destroyed. Virtual for testing. virtual void AddObserver(PromoServiceObserver* observer); void RemoveObserver(PromoServiceObserver* observer); // Marks |promo_id| as blocked from being shown again. void BlocklistPromo(const std::string& promo_id); GURL GetLoadURLForTesting() const; protected: void PromoDataLoaded(Status status, const absl::optional<PromoData>& data); private: void OnLoadDone(std::unique_ptr<std::string> response_body); void OnJsonParsed(data_decoder::DataDecoder::ValueOrError result); void NotifyObservers(); // Clears any expired blocklist entries and determines whether |promo_id| has // been blocked by the user. 
bool IsBlockedAfterClearingExpired(const std::string& promo_id) const; scoped_refptr<network::SharedURLLoaderFactory> url_loader_factory_; std::unique_ptr<network::SimpleURLLoader> simple_loader_; base::ObserverList<PromoServiceObserver, true>::Unchecked observers_; absl::optional<PromoData> promo_data_; Status promo_status_; raw_ptr<Profile> profile_; base::WeakPtrFactory<PromoService> weak_ptr_factory_{this}; }; #endif // CHROME_BROWSER_NEW_TAB_PAGE_PROMOS_PROMO_SERVICE_H_
# -*- coding: utf-8 -*-
# @Time    : 2017/7/13 10:59 AM
# @Author  : play4fun
# @File    : OpenCV中的傅里叶变换-DFT.py
# @Software: PyCharm

"""
Fourier transform (DFT) in OpenCV.

The corresponding OpenCV functions are cv2.dft() and cv2.idft(). They give the
same result as the NumPy version shown earlier, but as a two-channel array:
the first channel holds the real part and the second channel the imaginary part.
The input image has to be converted to np.float32 first.
cv2.cartToPolar() returns both magnitude and phase at the same time.
"""

import numpy as np
import cv2
from matplotlib import pyplot as plt

img = cv2.imread('../data/messi5.jpg', 0)

dft = cv2.dft(np.float32(img), flags=cv2.DFT_COMPLEX_OUTPUT)
dft_shift = np.fft.fftshift(dft)

magnitude_spectrum = 20 * np.log(cv2.magnitude(dft_shift[:, :, 0], dft_shift[:, :, 1]))

plt.subplot(121), plt.imshow(img, cmap='gray')
plt.title('Input Image'), plt.xticks([]), plt.yticks([])
plt.subplot(122), plt.imshow(magnitude_spectrum, cmap='gray')
plt.title('Magnitude Spectrum'), plt.xticks([]), plt.yticks([])
plt.show()
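# Follow-up sketch (not part of the original script): the docstring above mentions
# cv2.idft(), so here is a minimal round trip through the inverse DFT that recovers
# the spatial-domain image from the two-channel result. The same input path is assumed.
import cv2
import numpy as np
from matplotlib import pyplot as plt

img = cv2.imread('../data/messi5.jpg', 0)
dft = cv2.dft(np.float32(img), flags=cv2.DFT_COMPLEX_OUTPUT)
dft_shift = np.fft.fftshift(dft)

f_ishift = np.fft.ifftshift(dft_shift)   # undo the quadrant shift
img_back = cv2.idft(f_ishift)            # inverse DFT, still two channels (real, imaginary)
img_back = cv2.magnitude(img_back[:, :, 0], img_back[:, :, 1])

plt.imshow(img_back, cmap='gray')
plt.title('Reconstructed after idft'), plt.xticks([]), plt.yticks([])
plt.show()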
package io.quarkus.devtools.codestarts;

import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public interface CodestartProjectDefinition {

    void generate(Path targetDirectory) throws IOException;

    List<Codestart> getCodestarts();

    CodestartProjectInput getProjectInput();

    Optional<Codestart> getCodestart(CodestartType type);

    Codestart getRequiredCodestart(CodestartType type);

    String getLanguageName();

    Map<String, Object> getSharedData();

    Map<String, Object> getDepsData();

    Map<String, Object> getCodestartProjectData();

    List<Codestart> getBaseCodestarts();

    List<Codestart> getExtraCodestarts();
}
// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/aura/cursor/cursor_loader.h" #include "base/memory/scoped_refptr.h" #include "testing/gtest/include/gtest/gtest.h" #include "ui/aura/test/aura_test_base.h" #include "ui/base/cursor/cursor.h" #include "ui/base/cursor/cursor_factory.h" #include "ui/base/cursor/mojom/cursor_type.mojom-shared.h" #include "ui/base/cursor/platform_cursor.h" namespace aura { namespace { using CursorLoaderTest = ::aura::test::AuraTestBase; using ::ui::mojom::CursorType; } // namespace TEST_F(CursorLoaderTest, InvisibleCursor) { CursorLoader cursor_loader; ui::Cursor invisible_cursor(CursorType::kNone); cursor_loader.SetPlatformCursor(&invisible_cursor); ASSERT_EQ( invisible_cursor.platform(), ui::CursorFactory::GetInstance()->GetDefaultCursor(CursorType::kNone)); } } // namespace aura
<reponame>zhouguangping/pentaho-kettle /*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2019 by <NAME> : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.www.service.zip; import org.pentaho.di.core.Result; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entries.unzip.JobEntryUnZip; /** * Simple class to handle zip file operations. This class is a wrapper to call Kettle transformation/job entries. */ public class ZipServiceKettle implements ZipService { /** * {@inheritDoc} */ @Override public void extract( String zipFile, String destinationDirectory ) throws KettleException { JobEntryUnZip jobEntryUnZip = instantiateJobEntryUnZip(); execute( jobEntryUnZip, zipFile, destinationDirectory ); } /** * * @param jobEntryUnZip instance of object. * @param zipFile zip file path. * @param destinationDirectory destination directory to extract contents of <code>zipFile</code>. */ protected void execute( JobEntryUnZip jobEntryUnZip, String zipFile, String destinationDirectory ) { setValues( jobEntryUnZip, zipFile, destinationDirectory ); // NOTE not checking result, it will always return negative in this scenario // JobEntryUnZip expects other components to be present, does not effect unzipping proces jobEntryUnZip.execute( new Result(), 1 ); } /** * Populate <code>jobEntryUnZip</code> with the bare minimum values to run. * @param jobEntryUnZip instance of object. * @param zipFile zip file path. * @param destinationDirectory destination directory to extract contents of <code>zipFile</code>. */ protected void setValues( JobEntryUnZip jobEntryUnZip, String zipFile, String destinationDirectory ) { jobEntryUnZip.setZipFilename( zipFile ); jobEntryUnZip.setWildcardSource( "" ); jobEntryUnZip.setWildcardExclude( "" ); jobEntryUnZip.setSourceDirectory( destinationDirectory ); jobEntryUnZip.setMoveToDirectory( "" ); } /** * Create object {@link JobEntryUnZip}. * @return instance of object. */ protected JobEntryUnZip instantiateJobEntryUnZip() { JobEntryUnZip jobEntryUnZip = new JobEntryUnZip( "ZipServiceKettle" ); // Generic name JobMeta jobMeta = instantiateJobMeta(); jobEntryUnZip.setParentJobMeta( jobMeta ); return jobEntryUnZip; } /** * Create object {@link JobMeta}. * @return instance of object. */ protected JobMeta instantiateJobMeta() { return new JobMeta(); } }
<gh_stars>1000+ /** ****************************************************************************** * @file lib_conf.c * @author Application Team * @version V4.4.0 * @date 2018-09-27 * @brief Dirver configuration. ****************************************************************************** * @attention * ****************************************************************************** */ #ifndef __LIB_CONF_H #define __LIB_CONF_H /* ########################## Assert Selection ############################## */ //#define ASSERT_NDEBUG 1 /* Includes ------------------------------------------------------------------*/ /** * @brief Include module's header file */ #include "lib_ana.h" #include "lib_adc.h" #include "lib_adc_tiny.h" #include "lib_clk.h" #include "lib_comp.h" #include "lib_crypt.h" #include "lib_dma.h" #include "lib_flash.h" #include "lib_gpio.h" #include "lib_i2c.h" #include "lib_iso7816.h" #include "lib_lcd.h" #include "lib_misc.h" #include "lib_pmu.h" #include "lib_pwm.h" #include "lib_rtc.h" #include "lib_spi.h" #include "lib_tmr.h" #include "lib_u32k.h" #include "lib_uart.h" #include "lib_version.h" #include "lib_wdt.h" #include "lib_LoadNVR.h" #include "lib_CodeRAM.h" #include "lib_cortex.h" /* Exported macro ------------------------------------------------------------*/ #ifndef ASSERT_NDEBUG #define assert_parameters(expr) ((expr) ? (void)0U : assert_errhandler((uint8_t *)__FILE__, __LINE__)) /* Exported functions ------------------------------------------------------- */ void assert_errhandler(uint8_t* file, uint32_t line); #else #define assert_parameters(expr) ((void)0U) #endif /* ASSERT_NDEBUG */ #endif /*********************************** END OF FILE ******************************/
/* * Copyright MapStruct Authors. * * Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package org.mapstruct.ap.test.bugs._849; import java.util.Arrays; import org.mapstruct.ap.testutil.IssueKey; import org.mapstruct.ap.testutil.ProcessorTest; import org.mapstruct.ap.testutil.WithClasses; import static org.assertj.core.api.Assertions.assertThat; /** * @author <NAME> * */ @WithClasses({ Source.class, Target.class, Issue849Mapper.class }) public class Issue849Test { @ProcessorTest @IssueKey("849") public void shouldCompileWithAllImportsDeclared() { Source source = new Source(); source.setSourceList( Arrays.asList( "test" ) ); Target target = Issue849Mapper.INSTANCE.mapSourceToTarget( source ); assertThat( target.getTargetList() ).containsExactly( "test" ); } }
/* Copyright (c) 2012-2021 The ANTLR Project. All rights reserved. * Use of this file is governed by the BSD 3-clause license that * can be found in the LICENSE.txt file in the project root. */ #pragma once #include <cassert> #include <type_traits> namespace antlrcpp { template <typename To, typename From> To downCast(From* from) { static_assert(std::is_pointer<To>::value, "Target type not a pointer."); static_assert((std::is_base_of<From, typename std::remove_pointer<To>::type>::value), "Target type not derived from source type."); #if !defined(__GNUC__) || defined(__GXX_RTTI) assert(from == nullptr || dynamic_cast<To>(from) != nullptr); #endif return static_cast<To>(from); } template <typename To, typename From> To downCast(From& from) { static_assert(std::is_lvalue_reference<To>::value, "Target type not a lvalue reference."); static_assert((std::is_base_of<From, typename std::remove_reference<To>::type>::value), "Target type not derived from source type."); #if !defined(__GNUC__) || defined(__GXX_RTTI) assert(dynamic_cast<typename std::add_pointer<typename std::remove_reference<To>::type>::type>(&from) != nullptr); #endif return static_cast<To>(from); } }
#!/usr/bin/env python # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import hashlib import os import re import subprocess import sys import traceback from contextlib import contextmanager # config g_testdir = 'testdir' g_drive_bin = 'drive' g_fail_stop = False # global variables count_ok = 0 count_bad = 0 g_driveignore_fn = os.path.join(g_testdir, '.driveignore') def init(): if not os.path.exists(g_drive_bin): print 'Drive executable (path=%s) not found' % repr(g_drive_bin) sys.exit(1) if not os.path.exists(os.path.join(g_testdir, '.gd')): print 'Please init drive folder %s/ first' % g_testdir sys.exit(1) if Drive.list('') != []: print 'Warning! This test tool has destructive side-effect and will erase you drive' sys.exit(1) def expect_eq(expected, actual): global count_ok, count_bad if expected != actual: print '[expected]', repr(expected) print '[actual]', repr(actual) print 'failed' count_bad += 1 raise Exception else: count_ok += 1 def expect_ne(not_expected, actual): global count_ok, count_bad if not_expected == actual: print '[not expected equal]', repr(actual) print 'failed' count_bad += 1 raise Exception else: count_ok += 1 def expect_true(actual): expect_eq(True, bool(actual)) class Drive: @classmethod def run(cls, cmd, *args, **argd): extraflags = argd.get('extraflags', []) if type(extraflags) not in (list, tuple): extraflags = [extraflags] cmd = [g_drive_bin] + [cmd] + list(extraflags) + list(args) #print '$', if argd.get('input') is not None: if re.match(r'^[\x32-\x79\n]+$', argd.get('input')): print 'echo "%s" |' % argd.get('input'), else: print 'echo ... 
|', print subprocess.list2cmdline(cmd) try: cwd = os.getcwd() os.chdir(g_testdir) if argd.get('input') is None: p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) else: p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) finally: os.chdir(cwd) out, err = p.communicate(argd.get('input')) return p.returncode, out, err @classmethod def run_ok(cls, *args, **argd): returncode, out, err = cls.run(*args, **argd) if returncode != 0: if out != '': print '[stdout]' sys.stdout.write(out) if err != '': print '[stderr]' sys.stdout.write(err) expect_eq(0, returncode) return returncode, out, err @classmethod def run_fail(cls, *args, **argd): returncode, out, err = cls.run(*args, **argd) if returncode == 0: if out != '': print '[stdout]' sys.stdout.write(out) if err != '': print '[stderr]' sys.stdout.write(err) expect_ne(0, returncode) return returncode, out, err @classmethod def push_piped(cls, filename, content, **argd): return cls.run_ok('push', '-piped', filename, input=content, **argd) @classmethod def pull_piped(cls, filename, **argd): _, out, _ = cls.run_ok('pull', '-piped', filename, **argd) return out @classmethod def trash(cls, *filename, **argd): return cls.run_ok('trash', *filename, **argd) @classmethod def list(cls, path='', recursive=False, **argd): extraflags = ['-no-prompt'] if recursive: extraflags += ['-r', '-m=-1'] _, out, _ = cls.run_ok('list', path, extraflags=extraflags, **argd) return sorted(out.splitlines()) @classmethod def erase_all(cls): to_trash = [] for path in cls.list(''): assert path[0] == '/' and path[1:] to_trash.append(path[1:]) if to_trash: cls.trash(*to_trash, input='y') cls.run_ok('emptytrash', '-no-prompt') @contextmanager def setup_files(name, *files): print '#', name try: os.unlink(g_driveignore_fn) except OSError: pass for path, content in files: Drive.push_piped(path, content) try: yield except Exception: if g_fail_stop: raise traceback.print_exc() print '# clean up' Drive.erase_all() print def verify_files(*files): for path, content in files: expect_eq(content, Drive.pull_piped(path)) def test_basic(): # Most tests depend on these functionality fn = 'foo.txt' data = 'foobar' print '# basic tests' Drive.push_piped(fn, data) expect_eq(data, Drive.pull_piped(fn)) Drive.trash(fn, input='y') print def test_list(): with setup_files('list empty drive'): expect_eq([], Drive.list('')) with setup_files('list folder', ['a/b/c.txt', 'foobar']): expect_eq(['/a'], Drive.list('')) expect_eq(['/a/b'], Drive.list('a')) expect_eq(['/a/b/c.txt'], Drive.list('a/b')) with setup_files('list file, issue #97', ['a/b/c.txt', 'foobar']): expect_eq(['/a/b/c.txt'], Drive.list('a/b/c.txt')) with setup_files('list not-found, issue #95'): _, out, err = Drive.run_fail('list', 'not-found') expect_eq('', out) expect_ne('', err) def test_rename(): with setup_files('rename file in root', ['a.txt', 'a']): Drive.run_ok('rename', 'a.txt', 'abc.txt') expect_eq(['/abc.txt'], Drive.list()) with setup_files('rename file in folder', ['b/b.txt', 'b']): Drive.run_ok('rename', 'b/b.txt', 'c.txt') expect_eq(['/b', '/b/c.txt'], Drive.list(recursive=True)) # special cases with setup_files('rename file to self in root', ['b.txt', 'b']): Drive.run_ok('rename', 'b.txt', 'b.txt') expect_eq(['/b.txt'], Drive.list(recursive=True)) with setup_files('rename file to self in folder', ['b/b.txt', 'b']): Drive.run_ok('rename', 'b/b.txt', 'b.txt') expect_eq(['/b', '/b/b.txt'], Drive.list(recursive=True)) with setup_files('rename to existing file', ['a.txt', 
'a'], ['b.txt', 'b']): _, out, err = Drive.run_fail('rename', 'a.txt', 'b.txt') expect_true('already exists' in err) expect_eq(['/a.txt', '/b.txt'], Drive.list(recursive=True)) verify_files(['a.txt', 'a'], ['b.txt', 'b']) with setup_files('rename special path handling', ['a/b/c.txt', 'c'], ['a/a.txt', 'a']): Drive.run_ok('rename', 'a/a.txt', 'b/c.txt') expect_eq(['/a', '/a/b', '/a/b/c.txt', '/a/b/c.txt'], Drive.list(recursive=True)) def test_move(): # basic case with setup_files('move folder to another', ['a/a.txt', 'a'], ['b/b.txt', 'b']): Drive.run_ok('move', 'a', 'b') expect_eq(['/b', '/b/a', '/b/a/a.txt', '/b/b.txt'], Drive.list(recursive=True)) with setup_files('move multiple files', ['a/a.txt', 'a'], ['b/b.txt', 'b'], ['c/c.txt', 'c']): Drive.run_ok('move', 'a/a.txt', 'b/b.txt', 'c') expect_eq(['/a', '/b', '/c', '/c/a.txt', '/c/b.txt', '/c/c.txt'], Drive.list(recursive=True)) Drive.run_ok('move', 'c/a.txt', 'c/b.txt', 'c/c.txt', '') expect_eq(['/a', '/a.txt', '/b', '/b.txt', '/c', '/c.txt'], Drive.list(recursive=True)) with setup_files('move file to file', ['a.txt', 'a'], ['b.txt', 'b']): Drive.run_fail('move', 'a.txt', 'b.txt') expect_eq(['/a.txt', '/b.txt'], Drive.list(recursive=True)) verify_files(['a.txt', 'a'], ['b.txt', 'b']) # special cases with setup_files('move file to the same folder', ['a/b.txt', 'b']): Drive.run_ok('move', 'a/b.txt', 'a') expect_eq(['/a', '/a/b.txt'], Drive.list(recursive=True)) with setup_files('move folder to its parent', ['a/b/c.txt', 'c']): Drive.run_ok('move', 'a/b', 'a') expect_eq(['/a', '/a/b', '/a/b/c.txt'], Drive.list(recursive=True)) with setup_files('move folder to its child', ['a/b/c.txt', 'c']): Drive.run_fail('move', 'a', 'a/b') expect_eq(['/a', '/a/b', '/a/b/c.txt'], Drive.list(recursive=True)) with setup_files('move multiple files and duplicated', ['a/foo.txt', 'a'], ['b/foo.txt', 'b'], ['c/c.txt', 'c']): _, _, err = Drive.run_ok('move', 'a/foo.txt', 'b/foo.txt', 'c') expect_ne('', err) expect_eq(['/a', '/b', '/b/foo.txt', '/c', '/c/c.txt', '/c/foo.txt'], Drive.list(recursive=True)) def test_stat(): cases = [ '', 'foobar', ''.join(map(chr, range(256))), ] for data in cases: print 'try', repr(data) with setup_files('stat file with size=%d' % len(data), ['foo.txt', data]): _, out, _ = Drive.run_ok('stat', 'foo.txt') expect_true(re.search(r'Bytes\s+%s' % len(data), out)) expect_true(re.search(r'DirType\s+file', out)) expect_true(re.search(r'MimeType\s+text/plain', out)) expect_true(re.search(r'Md5Checksum\s+%s' % hashlib.md5(data).hexdigest(), out)) def test_pull(): with setup_files('pull -piped not-found file, issue #95'): _, out, err = Drive.run_fail('pull', '-piped', 'not-found') expect_eq('', out) expect_ne('', err) with setup_files('pull -piped folder', ['a/a.txt', '']): _, out, err = Drive.run_fail('pull', '-piped', 'a') expect_eq('', out) expect_ne('', err) def test_trash(): with setup_files('trash file', ['a.txt', 'a']): Drive.trash('a.txt', input='y') expect_eq([], Drive.list()) with setup_files('trash folder', ['a/b.txt', 'b']): Drive.trash('a/b.txt', input='y') expect_eq(['/a'], Drive.list(recursive=True)) Drive.trash('a', input='y') expect_eq([], Drive.list()) with setup_files('trash multiple files', ['a.txt', ''], ['b.txt', ''], ['c.txt', '']): _, _, err = Drive.run_ok('trash', 'a.txt', 'b.txt', 'c.txt', input='y') expect_eq([], Drive.list()) with setup_files('trash non-existing file'): _, _, err = Drive.run_fail('trash', 'not-found', input='y') expect_ne('', err) def main(): init() test_basic() test_list() test_rename() 
test_move() test_stat() test_pull() test_trash() print 'ok', count_ok print 'bad', count_bad if __name__ == '__main__': main()
5,673
2,053
/* * Copyright 2015 the original author or authors. * @https://github.com/scouter-project/scouter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package scouter.client.counter.views; import org.csstudio.swt.xygraph.dataprovider.CircularBufferDataProvider; import org.csstudio.swt.xygraph.dataprovider.ISample; import org.csstudio.swt.xygraph.dataprovider.Sample; import org.csstudio.swt.xygraph.figures.Trace; import org.csstudio.swt.xygraph.figures.Trace.PointStyle; import org.csstudio.swt.xygraph.figures.Trace.TraceType; import org.csstudio.swt.xygraph.figures.XYGraph; import org.eclipse.draw2d.FigureCanvas; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IToolBarManager; import org.eclipse.jface.window.DefaultToolTip; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.events.ControlListener; import org.eclipse.swt.events.KeyEvent; import org.eclipse.swt.events.KeyListener; import org.eclipse.swt.events.MouseEvent; import org.eclipse.swt.events.MouseListener; import org.eclipse.swt.graphics.Font; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Display; import org.eclipse.ui.IViewSite; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.PartInitException; import org.eclipse.ui.PlatformUI; import scouter.client.Images; import scouter.client.model.AgentColorManager; import scouter.client.model.AgentModelThread; import scouter.client.model.AgentObject; import scouter.client.model.RefreshThread; import scouter.client.model.RefreshThread.Refreshable; import scouter.client.model.TextProxy; import scouter.client.net.INetReader; import scouter.client.net.TcpProxy; import scouter.client.preferences.PManager; import scouter.client.preferences.PreferenceConstants; import scouter.client.server.Server; import scouter.client.server.ServerManager; import scouter.client.threads.ObjectSelectManager; import scouter.client.threads.ObjectSelectManager.IObjectCheckListener; import scouter.client.util.ChartUtil; import scouter.client.util.ColorUtil; import scouter.client.util.ConsoleProxy; import scouter.client.util.CounterUtil; import scouter.client.util.ExUtil; import scouter.client.util.ImageUtil; import scouter.client.util.MenuUtil; import scouter.client.util.ScouterUtil; import scouter.client.util.TimeUtil; import scouter.client.views.ScouterViewPart; import scouter.io.DataInputX; import scouter.lang.pack.MapPack; import scouter.lang.value.ListValue; import scouter.net.RequestCmd; import scouter.util.CastUtil; import scouter.util.DateUtil; import scouter.util.FormatUtil; import scouter.util.HashUtil; import scouter.util.StringUtil; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; public class CounterTodayAllView extends ScouterViewPart implements Refreshable, IObjectCheckListener { public static final String ID = 
CounterTodayAllView.class.getName(); protected String objType; protected String counter; protected int serverId; protected RefreshThread thread; IWorkbenchWindow window; IToolBarManager man; Trace nearestTrace; public void init(IViewSite site) throws PartInitException { super.init(site); String secId = site.getSecondaryId(); String[] ids = StringUtil.split(secId, "&"); this.serverId = CastUtil.cint(ids[0]); this.objType = ids[1]; this.counter = ids[2]; } boolean isActive = false; public void refresh() { TcpProxy tcp = TcpProxy.getTcpProxy(serverId); final ArrayList<MapPack> values = new ArrayList<MapPack>(); try { MapPack param = new MapPack(); param.put("objType", objType); param.put("counter", counter); isActive = false; tcp.process(RequestCmd.COUNTER_TODAY_ALL, param, new INetReader() { public void process(DataInputX in) throws IOException { values.add((MapPack) in.readPack()); isActive = true; } }); } catch (Throwable t) { ConsoleProxy.errorSafe(t.toString()); } finally { TcpProxy.putTcpProxy(tcp); } ExUtil.exec(this.canvas, new Runnable() { public void run() { if(isActive){ setActive(); }else{ setInactive(); } String date = DateUtil.yyyymmdd(TimeUtil.getCurrentTime(serverId)); try { double max = 0; long stime = DateUtil.getTime(date, "yyyyMMdd"); long etime = stime + DateUtil.MILLIS_PER_DAY; xyGraph.primaryXAxis.setRange(stime, etime); long now = TimeUtil.getCurrentTime(serverId); for (MapPack pack : values) { int objHash = pack.getInt("objHash"); //String objName = pack.getText("objName"); ListValue time = pack.getList("time"); ListValue value = pack.getList("value"); CircularBufferDataProvider data = getDataProvider(objHash); data.clearTrace(); for (int i = 0; time != null && i < time.size(); i++) { long x = CastUtil.clong(time.get(i)); double y = CastUtil.cdouble(value.get(i)); if (x > now) { break; } data.addSample(new Sample(x, y)); } max = Math.max(ChartUtil.getMax(data.iterator()), max); } if (CounterUtil.isPercentValue(objType, counter)) { xyGraph.primaryYAxis.setRange(0, 100); } else { xyGraph.primaryYAxis.setRange(0, max); } } catch (Exception e) { } canvas.redraw(); xyGraph.repaint(); } }); checkSettingChange(); } private void checkSettingChange() { ExUtil.exec(canvas, new Runnable() { public void run() { int width = PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH); synchronized (traces) { for (Trace t : traces) { if (nearestTrace == null && t.getLineWidth() != width) { t.setLineWidth(width); } int objHash = HashUtil.hash(t.getName()); AgentObject agent = AgentModelThread.getInstance().getAgentObject(objHash); if (agent == null || agent.isAlive() == false || agent.getColor() == null) { return; } if (t.getTraceColor() != agent.getColor()) { t.setTraceColor(agent.getColor()); } } } } }); } protected Map<Integer, CircularBufferDataProvider> datas = new HashMap<Integer, CircularBufferDataProvider>(); protected XYGraph xyGraph; protected Trace trace; protected FigureCanvas canvas; public void createPartControl(Composite parent) { window = PlatformUI.getWorkbench().getActiveWorkbenchWindow(); man = getViewSite().getActionBars().getToolBarManager(); man.add(new Action("Reload", ImageUtil.getImageDescriptor(Images.refresh)) { public void run() { for(int i = 0 ; i < traces.size() ; i++){ xyGraph.removeTrace(traces.get(i)); } traces.clear(); datas.clear(); thread.interrupt(); } }); GridLayout layout = new GridLayout(1, true); layout.marginHeight = 5; layout.marginWidth = 5; parent.setLayout(layout); 
parent.setBackground(ColorUtil.getInstance().getColor(SWT.COLOR_WHITE)); parent.setBackgroundMode(SWT.INHERIT_FORCE); canvas = new FigureCanvas(parent); canvas.setScrollBarVisibility(FigureCanvas.NEVER); canvas.setLayoutData(new GridData(GridData.FILL_BOTH)); canvas.addControlListener(new ControlListener() { boolean lock = false; public void controlResized(ControlEvent e) { org.eclipse.swt.graphics.Rectangle r = canvas.getClientArea(); if (!lock) { lock = true; if (ChartUtil.isShowLegendAllowSize(r.width, r.height)) { xyGraph.setShowLegend(true); } else { xyGraph.setShowLegend(false); } if (ChartUtil.isShowDescriptionAllowSize(r.height)) { CounterTodayAllView.this.setContentDescription(desc); } else { CounterTodayAllView.this.setContentDescription(""); } r = canvas.getClientArea(); lock = false; } xyGraph.setSize(r.width, r.height); } public void controlMoved(ControlEvent e) { } }); xyGraph = new XYGraph(); xyGraph.setShowLegend(true); xyGraph.setShowTitle(false); canvas.setContents(xyGraph); xyGraph.primaryXAxis.setDateEnabled(true); xyGraph.primaryXAxis.setShowMajorGrid(true); xyGraph.primaryYAxis.setAutoScale(true); xyGraph.primaryYAxis.setShowMajorGrid(true); xyGraph.primaryXAxis.setTitle(""); xyGraph.primaryYAxis.setTitle(""); ObjectSelectManager.getInstance().addObjectCheckStateListener(this); final DefaultToolTip toolTip = new DefaultToolTip(canvas, DefaultToolTip.RECREATE, true); toolTip.setFont(new Font(null, "Arial", 10, SWT.BOLD)); toolTip.setBackgroundColor(Display.getCurrent().getSystemColor(SWT.COLOR_INFO_BACKGROUND)); canvas.addMouseListener(new MouseListener() { public void mouseUp(MouseEvent e) { if (nearestTrace != null) { nearestTrace.setLineWidth(PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH)); nearestTrace = null; } toolTip.hide(); } public void mouseDown(MouseEvent e) { double x = xyGraph.primaryXAxis.getPositionValue(e.x, false); double y = xyGraph.primaryYAxis.getPositionValue(e.y, false); if (x < 0 || y < 0) { return; } double minDistance = 30.0d; long time = 0; double value = 0; for (Trace t : traces) { ISample s = ScouterUtil.getNearestPoint(t.getDataProvider(), x); if (s != null) { int x2 = xyGraph.primaryXAxis.getValuePosition(s.getXValue(), false); int y2 = xyGraph.primaryYAxis.getValuePosition(s.getYValue(), false); double distance = ScouterUtil.getPointDistance(e.x, e.y, x2, y2); if (minDistance > distance) { minDistance = distance; nearestTrace = t; time = (long) s.getXValue(); value = s.getYValue(); } } } if (nearestTrace != null) { int width = PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH); nearestTrace.setLineWidth(width + 2); toolTip.setText(nearestTrace.getName() + "\nTime : " + DateUtil.format(time, "HH:mm") + "\nValue : " + FormatUtil.print(value, "#,###.##")); toolTip.show(new Point(e.x, e.y)); } } public void mouseDoubleClick(MouseEvent e) {} }); canvas.addKeyListener(new KeyListener() { public void keyReleased(KeyEvent e) { } public void keyPressed(KeyEvent e) { switch (e.keyCode) { case SWT.F5: for(int i = 0 ; i < traces.size() ; i++){ xyGraph.removeTrace(traces.get(i)); } traces.clear(); datas.clear(); thread.interrupt(); break; } } }); String date = DateUtil.yyyymmdd(TimeUtil.getCurrentTime(serverId)); Server server = ServerManager.getInstance().getServer(serverId); String svrName = ""; String counterDisplay = ""; String counterUnit = ""; if(server != null){ svrName = server.getName(); counterDisplay = server.getCounterEngine().getCounterDisplayName(objType, counter); counterUnit = 
server.getCounterEngine().getCounterUnit(objType, counter); } desc = "ⓢ"+svrName+" | (Today All) [" + date.substring(0, 4) + "-" + date.substring(4, 6) + "-" + date.substring(6, 8) + "]" + counterDisplay+(!"".equals(counterUnit)?" ("+counterUnit+")":""); try { setViewTab(objType, counter, serverId); } catch (Exception e1) { e1.printStackTrace(); } long from = DateUtil.yyyymmdd(date); long to = from + DateUtil.MILLIS_PER_DAY; MenuUtil.createCounterContextMenu(ID, canvas, serverId, objType, counter, from, to); thread = new RefreshThread(this, 10000); thread.setName(this.toString() + " - " + "objType:"+objType + ", counter:"+counter + ", serverId:"+serverId); thread.start(); } ArrayList<Trace> traces = new ArrayList<Trace>(); private CircularBufferDataProvider getDataProvider(int objHash) { CircularBufferDataProvider data = datas.get(objHash); if (data == null) { data = new CircularBufferDataProvider(true); datas.put(objHash, data); data.setBufferSize(288); data.setCurrentXDataArray(new double[] {}); data.setCurrentYDataArray(new double[] {}); String name = StringUtil.trimToEmpty(TextProxy.object.getLoadText(DateUtil.yyyymmdd(TimeUtil.getCurrentTime(serverId)), objHash, serverId)); Trace trace = new Trace(name, xyGraph.primaryXAxis, xyGraph.primaryYAxis, data); trace.setPointStyle(PointStyle.NONE); trace.getXAxis().setFormatPattern("HH:mm:ss"); trace.getYAxis().setFormatPattern("#,##0"); trace.setLineWidth(PManager.getInstance().getInt(PreferenceConstants.P_CHART_LINE_WIDTH)); trace.setTraceType(TraceType.SOLID_LINE); trace.setTraceColor(AgentColorManager.getInstance().assignColor(objType, objHash)); xyGraph.addTrace(trace); traces.add(trace); } return data; } public void setFocus() { statusMessage = desc + " - setInput(String objType:"+objType+", String counter:"+counter+", int serverId:"+serverId+")"; super.setFocus(); } @Override public void dispose() { super.dispose(); if (this.thread != null) { this.thread.shutdown(); } ObjectSelectManager.getInstance().removeObjectCheckStateListener(this); } public void redraw() { if (canvas != null && canvas.isDisposed() == false) { canvas.redraw(); xyGraph.repaint(); } } public void notifyChangeState() { ExUtil.asyncRun(new Runnable() { public void run() { for (Trace t : traces) { String objName = t.getName(); if (ObjectSelectManager.getInstance().isUnselectedObject(HashUtil.hash(objName))) { t.setVisible(false); } else { t.setVisible(true); } } ExUtil.exec(canvas, new Runnable() { public void run() { redraw(); } }); } }); } }
5,613
726
<gh_stars>100-1000
class Solution {
    public ListNode mergeTwoLists(ListNode l1, ListNode l2) {
        if (l1 == null) {
            return l2;
        }
        if (l2 == null) {
            return l1;
        }
        ListNode head = new ListNode(0);
        ListNode result = head;
        while (l1 != null && l2 != null) {
            if (l1.val <= l2.val) {
                result.val = l1.val;
                l1 = l1.next;
            } else {
                result.val = l2.val;
                l2 = l2.next;
            }
            ListNode p = new ListNode(0);
            result.next = p;
            result = p;
        }
        if (l1 != null) {
            result.val = l1.val;
            result.next = l1.next;
        }
        if (l2 != null) {
            result.val = l2.val;
            result.next = l2.next;
        }
        return head;
    }
}
620
1,988
<reponame>clayne/botan
/*
* (C) 1999-2009,2016,2020 <NAME>
*
* Botan is released under the Simplified BSD License (see license.txt)
*/

#include <botan/internal/es_win32.h>

#define NOMINMAX 1
#define _WINSOCKAPI_ // stop windows.h including winsock.h
#include <windows.h>

namespace Botan {

size_t Win32_EntropySource::poll(RandomNumberGenerator& rng)
   {
   rng.add_entropy_T(::GetTickCount());
   rng.add_entropy_T(::GetMessagePos());
   rng.add_entropy_T(::GetMessageTime());
   rng.add_entropy_T(::GetInputState());

   rng.add_entropy_T(::GetCurrentProcessId());
   rng.add_entropy_T(::GetCurrentThreadId());

   SYSTEM_INFO sys_info;
   ::GetSystemInfo(&sys_info);
   rng.add_entropy_T(sys_info);

   MEMORYSTATUSEX mem_info;
   ::GlobalMemoryStatusEx(&mem_info);
   rng.add_entropy_T(mem_info);

   POINT point;
   ::GetCursorPos(&point);
   rng.add_entropy_T(point);

   ::GetCaretPos(&point);
   rng.add_entropy_T(point);

   /*
   Potential other sources to investigate

   GetProductInfo
   GetComputerNameExA
   GetSystemFirmwareTable
   GetVersionExA
   GetProcessorSystemCycleTime
   GetProcessHandleCount(GetCurrentProcess())
   GetThreadTimes(GetCurrentThread())

   QueryThreadCycleTime
   QueryIdleProcessorCycleTime
   QueryUnbiasedInterruptTime
   */

   // We assume all of the above is basically junk
   return 0;
   }

}
521
335
{ "word": "Urbanity", "definitions": [ "Courteousness and refinement of manner.", "Urban life." ], "parts-of-speech": "Noun" }
73
467
package com.yoyiyi.soleil.mvp.presenter.home; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import com.yoyiyi.soleil.base.BaseSubscriber; import com.yoyiyi.soleil.base.RxPresenter; import com.yoyiyi.soleil.bean.chase.ChaseBangumi; import com.yoyiyi.soleil.bean.chase.RecommendBangumi; import com.yoyiyi.soleil.mvp.contract.home.ChaseBangumiContract; import com.yoyiyi.soleil.network.helper.RetrofitHelper; import com.yoyiyi.soleil.utils.JsonUtils; import javax.inject.Inject; import io.reactivex.Flowable; /** * @author zzq 作者 E-mail: <EMAIL> * @date 创建时间:2017/5/23 22:04 * 描述:首页直播Presenter */ public class ChaseBangumiPresenter extends RxPresenter<ChaseBangumiContract.View> implements ChaseBangumiContract.Presenter<ChaseBangumiContract.View> { private RetrofitHelper mRetrofitHelper; @Inject public ChaseBangumiPresenter(RetrofitHelper retrofitHelper) { this.mRetrofitHelper = retrofitHelper; } @Override public void getChaseBangumiData() { //需要测试账号登录 /*BaseObjectSubscriber<RecommendBangumi> subscriber = mRetrofitHelper.getChaseBangumi() .compose(RxUtils.handleResult()) .flatMap(chaseBangumi -> { mView.showChaseBangumi(chaseBangumi); return mRetrofitHelper.getRecommendBangumi(); }) .compose(RxUtils.rxSchedulerHelper()) .subscribeWith(new BaseObjectSubscriber<RecommendBangumi>(mView) { @Override public void onSuccess(RecommendBangumi recommendBangumi) { mView.showRecommendBangumi(recommendBangumi); } }); addSubscribe(subscriber);*/ //本地json BaseSubscriber<RecommendBangumi> subscriber = Flowable.just(JsonUtils.readJson("user_chase.json")) .flatMap(string -> { Gson gson = new Gson(); JsonObject object = new JsonParser().parse(string).getAsJsonObject(); JsonObject result = object.getAsJsonObject("result"); ChaseBangumi chaseBangumi = gson.fromJson(result, ChaseBangumi.class); mView.showChaseBangumi(chaseBangumi); return Flowable.just(JsonUtils.readJson("recommend_chase.json")); }) .map(string -> { Gson gson = new Gson(); JsonObject object = new JsonParser().parse(string).getAsJsonObject(); JsonObject result = object.getAsJsonObject("result"); RecommendBangumi recommendBangumi = gson.fromJson(result, RecommendBangumi.class); return recommendBangumi; }) .subscribeWith(new BaseSubscriber<RecommendBangumi>(mView) { @Override public void onSuccess(RecommendBangumi recommendBangumi) { mView.showRecommendBangumi(recommendBangumi); } }); addSubscribe(subscriber); } }
1,555
14,668
<reponame>zealoussnow/chromium<gh_stars>1000+
// Copyright (c) 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROMECAST_PUBLIC_GRAPHICS_TYPES_H_
#define CHROMECAST_PUBLIC_GRAPHICS_TYPES_H_

namespace chromecast {

struct Rect {
  Rect(int w, int h) : x(0), y(0), width(w), height(h) {}
  Rect(int arg_x, int arg_y, int w, int h)
      : x(arg_x), y(arg_y), width(w), height(h) {}
  int x;
  int y;
  int width;
  int height;
};

struct RectF {
  RectF(float w, float h) : x(0), y(0), width(w), height(h) {}
  RectF(float arg_x, float arg_y, float w, float h)
      : x(arg_x), y(arg_y), width(w), height(h) {}
  float x;
  float y;
  float width;
  float height;
};

struct Size {
  Size(int w, int h) : width(w), height(h) {}
  int width;
  int height;
};

}  // namespace chromecast

#endif  // CHROMECAST_PUBLIC_GRAPHICS_TYPES_H_
394
317
<gh_stars>100-1000
#include "smack.h"
#include <assert.h>
#include <stdint.h>

// @expect verified

uint32_t cttz32(uint32_t x) {
  uint32_t n = 0; /* number of bits */
  if (!(x & 0x0000FFFF)) {
    n += 16;
    x >>= 16;
  }
  if (!(x & 0x000000FF)) {
    n += 8;
    x >>= 8;
  }
  if (!(x & 0x0000000F)) {
    n += 4;
    x >>= 4;
  }
  if (!(x & 0x00000003)) {
    n += 2;
    x >>= 2;
  }
  n += (x & 1) ^ 1;
  return n;
}

uint64_t cttz64(uint64_t x) {
  uint64_t n = 0; /* number of bits */
  if (!(x & 0x00000000FFFFFFFF)) {
    n += 32;
    x >>= 32;
  }
  if (!(x & 0x000000000000FFFF)) {
    n += 16;
    x >>= 16;
  }
  if (!(x & 0x00000000000000FF)) {
    n += 8;
    x >>= 8;
  }
  if (!(x & 0x000000000000000F)) {
    n += 4;
    x >>= 4;
  }
  if (!(x & 0x0000000000000003)) {
    n += 2;
    x >>= 2;
  }
  n += (x & 1) ^ 1;
  return n;
}

int main(void) {
  uint32_t x32 = __VERIFIER_nondet_unsigned_int();
  assume(x32 != 0);
  uint64_t x64 = __VERIFIER_nondet_unsigned_long_long();
  assume(x64 != 0);
  assert(__builtin_ctz(x32) == cttz32(x32));
  assert(__builtin_ctzll(x64) == cttz64(x64));
  return 0;
}
574
777
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.net;

import android.os.ConditionVariable;

import org.chromium.base.annotations.CalledByNative;
import org.chromium.base.annotations.JNINamespace;

/**
 * Class to watch for Sdch dictionary events. The native implementation
 * unregisters itself when an event happens. Therefore, an instance of this
 * class is only able to receive a notification of the earliest event.
 * Currently, implemented events include {@link #onDictionaryAdded}.
 */
@JNINamespace("cronet")
public class SdchObserver {
    protected boolean mDictionaryAlreadyPresent = false;

    private final ConditionVariable mAddBlock = new ConditionVariable();

    /**
     * Constructor.
     * @param targetUrl the target url on which sdch encoding will be used.
     * @param contextAdapter the native context adapter to register the observer.
     */
    public SdchObserver(String targetUrl, long contextAdapter) {
        nativeAddSdchObserver(targetUrl, contextAdapter);
        mAddBlock.block();
        mAddBlock.close();
    }

    /**
     * Called when a dictionary is added to the SdchManager for the target url.
     * Override this method if caller would like to get notified.
     */
    @CalledByNative
    public void onDictionaryAdded() {
        // Left blank;
    }

    @CalledByNative
    private void onAddSdchObserverCompleted() {
        mAddBlock.open();
    }

    @CalledByNative
    private void onDictionarySetAlreadyPresent() {
        mDictionaryAlreadyPresent = true;
        mAddBlock.open();
    }

    private native void nativeAddSdchObserver(String targetUrl, long contextAdapter);
}
569
3,968
<gh_stars>1000+
#!/usr/bin/env python
import wandb

wandb.require("service")

run = wandb.init()
print("somedata")
run.define_metric("m2", summary="max")
run.log(dict(m1=1))
run.log(dict(m2=2))
run.log(dict(m2=8))
run.log(dict(m2=4))
run.finish()
116
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Noyers-sur-Jabron","circ":"2ème circonscription","dpt":"Alpes-de-Haute-Provence","inscrits":409,"abs":217,"votants":192,"blancs":18,"nuls":3,"exp":171,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":110},{"nuance":"FI","nom":"M. <NAME>","voix":61}]}
126
3,765
/**
 * BSD-style license; for more info see http://pmd.sourceforge.net/license.html
 */

package net.sourceforge.pmd.lang.dfa;

/**
 * Represents the type (DFA-wise) of a DataFlowNode.
 *
 * @author raik
 * @deprecated See {@link DataFlowNode}
 */
@Deprecated
public enum NodeType {
    ROOT,

    /* if - else statements */
    IF_EXPR,
    IF_LAST_STATEMENT,
    IF_LAST_STATEMENT_WITHOUT_ELSE,
    ELSE_LAST_STATEMENT,

    /* while statements */
    WHILE_EXPR,
    WHILE_LAST_STATEMENT,

    /* switch statements */
    SWITCH_START,
    CASE_LAST_STATEMENT,
    SWITCH_LAST_DEFAULT_STATEMENT,
    SWITCH_END,

    /* for statements */
    FOR_INIT,
    FOR_EXPR,
    FOR_UPDATE,
    FOR_BEFORE_FIRST_STATEMENT,
    FOR_END,

    /* do - while statements */
    DO_BEFORE_FIRST_STATEMENT,
    DO_EXPR,

    RETURN_STATEMENT,
    BREAK_STATEMENT,
    CONTINUE_STATEMENT,

    LABEL_STATEMENT,
    LABEL_LAST_STATEMENT,

    ASSERT_STATEMENT,

    // TODO - throw statements?
    THROW_STATEMENT;

    @Override
    public String toString() {
        return name();
    }
}
466
416
<filename>src/main/java/com/tencentcloudapi/tke/v20180525/models/ClusterAsGroupOption.java /* * Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tencentcloudapi.tke.v20180525.models; import com.tencentcloudapi.common.AbstractModel; import com.google.gson.annotations.SerializedName; import com.google.gson.annotations.Expose; import java.util.HashMap; public class ClusterAsGroupOption extends AbstractModel{ /** * 是否开启缩容 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("IsScaleDownEnabled") @Expose private Boolean IsScaleDownEnabled; /** * 多伸缩组情况下扩容选择算法(random 随机选择,most-pods 最多类型的Pod least-waste 最少的资源浪费,默认为random) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("Expander") @Expose private String Expander; /** * 最大并发缩容数 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("MaxEmptyBulkDelete") @Expose private Long MaxEmptyBulkDelete; /** * 集群扩容后多少分钟开始判断缩容(默认为10分钟) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("ScaleDownDelay") @Expose private Long ScaleDownDelay; /** * 节点连续空闲多少分钟后被缩容(默认为 10分钟) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("ScaleDownUnneededTime") @Expose private Long ScaleDownUnneededTime; /** * 节点资源使用量低于多少(百分比)时认为空闲(默认: 50(百分比)) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("ScaleDownUtilizationThreshold") @Expose private Long ScaleDownUtilizationThreshold; /** * 含有本地存储Pod的节点是否不缩容(默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("SkipNodesWithLocalStorage") @Expose private Boolean SkipNodesWithLocalStorage; /** * 含有kube-system namespace下非DaemonSet管理的Pod的节点是否不缩容 (默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("SkipNodesWithSystemPods") @Expose private Boolean SkipNodesWithSystemPods; /** * 计算资源使用量时是否默认忽略DaemonSet的实例(默认值: False,不忽略) 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("IgnoreDaemonSetsUtilization") @Expose private Boolean IgnoreDaemonSetsUtilization; /** * CA做健康性判断的个数,默认3,即超过OkTotalUnreadyCount个数后,CA会进行健康性判断。 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("OkTotalUnreadyCount") @Expose private Long OkTotalUnreadyCount; /** * 未就绪节点的最大百分比,此后CA会停止操作 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("MaxTotalUnreadyPercentage") @Expose private Long MaxTotalUnreadyPercentage; /** * 表示未准备就绪的节点在有资格进行缩减之前应该停留多长时间 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("ScaleDownUnreadyTime") @Expose private Long ScaleDownUnreadyTime; /** * CA删除未在Kubernetes中注册的节点之前等待的时间 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("UnregisteredNodeRemovalTime") @Expose private Long UnregisteredNodeRemovalTime; /** * Get 是否开启缩容 注意:此字段可能返回 null,表示取不到有效值。 * @return IsScaleDownEnabled 是否开启缩容 注意:此字段可能返回 null,表示取不到有效值。 */ public Boolean getIsScaleDownEnabled() { return this.IsScaleDownEnabled; } /** * Set 是否开启缩容 注意:此字段可能返回 null,表示取不到有效值。 * @param IsScaleDownEnabled 是否开启缩容 注意:此字段可能返回 null,表示取不到有效值。 */ public void setIsScaleDownEnabled(Boolean IsScaleDownEnabled) { this.IsScaleDownEnabled = IsScaleDownEnabled; } /** * Get 多伸缩组情况下扩容选择算法(random 随机选择,most-pods 最多类型的Pod least-waste 最少的资源浪费,默认为random) 注意:此字段可能返回 null,表示取不到有效值。 * @return Expander 
多伸缩组情况下扩容选择算法(random 随机选择,most-pods 最多类型的Pod least-waste 最少的资源浪费,默认为random) 注意:此字段可能返回 null,表示取不到有效值。 */ public String getExpander() { return this.Expander; } /** * Set 多伸缩组情况下扩容选择算法(random 随机选择,most-pods 最多类型的Pod least-waste 最少的资源浪费,默认为random) 注意:此字段可能返回 null,表示取不到有效值。 * @param Expander 多伸缩组情况下扩容选择算法(random 随机选择,most-pods 最多类型的Pod least-waste 最少的资源浪费,默认为random) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setExpander(String Expander) { this.Expander = Expander; } /** * Get 最大并发缩容数 注意:此字段可能返回 null,表示取不到有效值。 * @return MaxEmptyBulkDelete 最大并发缩容数 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getMaxEmptyBulkDelete() { return this.MaxEmptyBulkDelete; } /** * Set 最大并发缩容数 注意:此字段可能返回 null,表示取不到有效值。 * @param MaxEmptyBulkDelete 最大并发缩容数 注意:此字段可能返回 null,表示取不到有效值。 */ public void setMaxEmptyBulkDelete(Long MaxEmptyBulkDelete) { this.MaxEmptyBulkDelete = MaxEmptyBulkDelete; } /** * Get 集群扩容后多少分钟开始判断缩容(默认为10分钟) 注意:此字段可能返回 null,表示取不到有效值。 * @return ScaleDownDelay 集群扩容后多少分钟开始判断缩容(默认为10分钟) 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getScaleDownDelay() { return this.ScaleDownDelay; } /** * Set 集群扩容后多少分钟开始判断缩容(默认为10分钟) 注意:此字段可能返回 null,表示取不到有效值。 * @param ScaleDownDelay 集群扩容后多少分钟开始判断缩容(默认为10分钟) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setScaleDownDelay(Long ScaleDownDelay) { this.ScaleDownDelay = ScaleDownDelay; } /** * Get 节点连续空闲多少分钟后被缩容(默认为 10分钟) 注意:此字段可能返回 null,表示取不到有效值。 * @return ScaleDownUnneededTime 节点连续空闲多少分钟后被缩容(默认为 10分钟) 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getScaleDownUnneededTime() { return this.ScaleDownUnneededTime; } /** * Set 节点连续空闲多少分钟后被缩容(默认为 10分钟) 注意:此字段可能返回 null,表示取不到有效值。 * @param ScaleDownUnneededTime 节点连续空闲多少分钟后被缩容(默认为 10分钟) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setScaleDownUnneededTime(Long ScaleDownUnneededTime) { this.ScaleDownUnneededTime = ScaleDownUnneededTime; } /** * Get 节点资源使用量低于多少(百分比)时认为空闲(默认: 50(百分比)) 注意:此字段可能返回 null,表示取不到有效值。 * @return ScaleDownUtilizationThreshold 节点资源使用量低于多少(百分比)时认为空闲(默认: 50(百分比)) 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getScaleDownUtilizationThreshold() { return this.ScaleDownUtilizationThreshold; } /** * Set 节点资源使用量低于多少(百分比)时认为空闲(默认: 50(百分比)) 注意:此字段可能返回 null,表示取不到有效值。 * @param ScaleDownUtilizationThreshold 节点资源使用量低于多少(百分比)时认为空闲(默认: 50(百分比)) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setScaleDownUtilizationThreshold(Long ScaleDownUtilizationThreshold) { this.ScaleDownUtilizationThreshold = ScaleDownUtilizationThreshold; } /** * Get 含有本地存储Pod的节点是否不缩容(默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 * @return SkipNodesWithLocalStorage 含有本地存储Pod的节点是否不缩容(默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 */ public Boolean getSkipNodesWithLocalStorage() { return this.SkipNodesWithLocalStorage; } /** * Set 含有本地存储Pod的节点是否不缩容(默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 * @param SkipNodesWithLocalStorage 含有本地存储Pod的节点是否不缩容(默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setSkipNodesWithLocalStorage(Boolean SkipNodesWithLocalStorage) { this.SkipNodesWithLocalStorage = SkipNodesWithLocalStorage; } /** * Get 含有kube-system namespace下非DaemonSet管理的Pod的节点是否不缩容 (默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 * @return SkipNodesWithSystemPods 含有kube-system namespace下非DaemonSet管理的Pod的节点是否不缩容 (默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 */ public Boolean getSkipNodesWithSystemPods() { return this.SkipNodesWithSystemPods; } /** * Set 含有kube-system namespace下非DaemonSet管理的Pod的节点是否不缩容 (默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 * @param SkipNodesWithSystemPods 含有kube-system namespace下非DaemonSet管理的Pod的节点是否不缩容 (默认: FALSE) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setSkipNodesWithSystemPods(Boolean SkipNodesWithSystemPods) { 
this.SkipNodesWithSystemPods = SkipNodesWithSystemPods; } /** * Get 计算资源使用量时是否默认忽略DaemonSet的实例(默认值: False,不忽略) 注意:此字段可能返回 null,表示取不到有效值。 * @return IgnoreDaemonSetsUtilization 计算资源使用量时是否默认忽略DaemonSet的实例(默认值: False,不忽略) 注意:此字段可能返回 null,表示取不到有效值。 */ public Boolean getIgnoreDaemonSetsUtilization() { return this.IgnoreDaemonSetsUtilization; } /** * Set 计算资源使用量时是否默认忽略DaemonSet的实例(默认值: False,不忽略) 注意:此字段可能返回 null,表示取不到有效值。 * @param IgnoreDaemonSetsUtilization 计算资源使用量时是否默认忽略DaemonSet的实例(默认值: False,不忽略) 注意:此字段可能返回 null,表示取不到有效值。 */ public void setIgnoreDaemonSetsUtilization(Boolean IgnoreDaemonSetsUtilization) { this.IgnoreDaemonSetsUtilization = IgnoreDaemonSetsUtilization; } /** * Get CA做健康性判断的个数,默认3,即超过OkTotalUnreadyCount个数后,CA会进行健康性判断。 注意:此字段可能返回 null,表示取不到有效值。 * @return OkTotalUnreadyCount CA做健康性判断的个数,默认3,即超过OkTotalUnreadyCount个数后,CA会进行健康性判断。 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getOkTotalUnreadyCount() { return this.OkTotalUnreadyCount; } /** * Set CA做健康性判断的个数,默认3,即超过OkTotalUnreadyCount个数后,CA会进行健康性判断。 注意:此字段可能返回 null,表示取不到有效值。 * @param OkTotalUnreadyCount CA做健康性判断的个数,默认3,即超过OkTotalUnreadyCount个数后,CA会进行健康性判断。 注意:此字段可能返回 null,表示取不到有效值。 */ public void setOkTotalUnreadyCount(Long OkTotalUnreadyCount) { this.OkTotalUnreadyCount = OkTotalUnreadyCount; } /** * Get 未就绪节点的最大百分比,此后CA会停止操作 注意:此字段可能返回 null,表示取不到有效值。 * @return MaxTotalUnreadyPercentage 未就绪节点的最大百分比,此后CA会停止操作 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getMaxTotalUnreadyPercentage() { return this.MaxTotalUnreadyPercentage; } /** * Set 未就绪节点的最大百分比,此后CA会停止操作 注意:此字段可能返回 null,表示取不到有效值。 * @param MaxTotalUnreadyPercentage 未就绪节点的最大百分比,此后CA会停止操作 注意:此字段可能返回 null,表示取不到有效值。 */ public void setMaxTotalUnreadyPercentage(Long MaxTotalUnreadyPercentage) { this.MaxTotalUnreadyPercentage = MaxTotalUnreadyPercentage; } /** * Get 表示未准备就绪的节点在有资格进行缩减之前应该停留多长时间 注意:此字段可能返回 null,表示取不到有效值。 * @return ScaleDownUnreadyTime 表示未准备就绪的节点在有资格进行缩减之前应该停留多长时间 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getScaleDownUnreadyTime() { return this.ScaleDownUnreadyTime; } /** * Set 表示未准备就绪的节点在有资格进行缩减之前应该停留多长时间 注意:此字段可能返回 null,表示取不到有效值。 * @param ScaleDownUnreadyTime 表示未准备就绪的节点在有资格进行缩减之前应该停留多长时间 注意:此字段可能返回 null,表示取不到有效值。 */ public void setScaleDownUnreadyTime(Long ScaleDownUnreadyTime) { this.ScaleDownUnreadyTime = ScaleDownUnreadyTime; } /** * Get CA删除未在Kubernetes中注册的节点之前等待的时间 注意:此字段可能返回 null,表示取不到有效值。 * @return UnregisteredNodeRemovalTime CA删除未在Kubernetes中注册的节点之前等待的时间 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getUnregisteredNodeRemovalTime() { return this.UnregisteredNodeRemovalTime; } /** * Set CA删除未在Kubernetes中注册的节点之前等待的时间 注意:此字段可能返回 null,表示取不到有效值。 * @param UnregisteredNodeRemovalTime CA删除未在Kubernetes中注册的节点之前等待的时间 注意:此字段可能返回 null,表示取不到有效值。 */ public void setUnregisteredNodeRemovalTime(Long UnregisteredNodeRemovalTime) { this.UnregisteredNodeRemovalTime = UnregisteredNodeRemovalTime; } public ClusterAsGroupOption() { } /** * NOTE: Any ambiguous key set via .set("AnyKey", "value") will be a shallow copy, * and any explicit key, i.e Foo, set via .setFoo("value") will be a deep copy. 
*/ public ClusterAsGroupOption(ClusterAsGroupOption source) { if (source.IsScaleDownEnabled != null) { this.IsScaleDownEnabled = new Boolean(source.IsScaleDownEnabled); } if (source.Expander != null) { this.Expander = new String(source.Expander); } if (source.MaxEmptyBulkDelete != null) { this.MaxEmptyBulkDelete = new Long(source.MaxEmptyBulkDelete); } if (source.ScaleDownDelay != null) { this.ScaleDownDelay = new Long(source.ScaleDownDelay); } if (source.ScaleDownUnneededTime != null) { this.ScaleDownUnneededTime = new Long(source.ScaleDownUnneededTime); } if (source.ScaleDownUtilizationThreshold != null) { this.ScaleDownUtilizationThreshold = new Long(source.ScaleDownUtilizationThreshold); } if (source.SkipNodesWithLocalStorage != null) { this.SkipNodesWithLocalStorage = new Boolean(source.SkipNodesWithLocalStorage); } if (source.SkipNodesWithSystemPods != null) { this.SkipNodesWithSystemPods = new Boolean(source.SkipNodesWithSystemPods); } if (source.IgnoreDaemonSetsUtilization != null) { this.IgnoreDaemonSetsUtilization = new Boolean(source.IgnoreDaemonSetsUtilization); } if (source.OkTotalUnreadyCount != null) { this.OkTotalUnreadyCount = new Long(source.OkTotalUnreadyCount); } if (source.MaxTotalUnreadyPercentage != null) { this.MaxTotalUnreadyPercentage = new Long(source.MaxTotalUnreadyPercentage); } if (source.ScaleDownUnreadyTime != null) { this.ScaleDownUnreadyTime = new Long(source.ScaleDownUnreadyTime); } if (source.UnregisteredNodeRemovalTime != null) { this.UnregisteredNodeRemovalTime = new Long(source.UnregisteredNodeRemovalTime); } } /** * Internal implementation, normal users should not use it. */ public void toMap(HashMap<String, String> map, String prefix) { this.setParamSimple(map, prefix + "IsScaleDownEnabled", this.IsScaleDownEnabled); this.setParamSimple(map, prefix + "Expander", this.Expander); this.setParamSimple(map, prefix + "MaxEmptyBulkDelete", this.MaxEmptyBulkDelete); this.setParamSimple(map, prefix + "ScaleDownDelay", this.ScaleDownDelay); this.setParamSimple(map, prefix + "ScaleDownUnneededTime", this.ScaleDownUnneededTime); this.setParamSimple(map, prefix + "ScaleDownUtilizationThreshold", this.ScaleDownUtilizationThreshold); this.setParamSimple(map, prefix + "SkipNodesWithLocalStorage", this.SkipNodesWithLocalStorage); this.setParamSimple(map, prefix + "SkipNodesWithSystemPods", this.SkipNodesWithSystemPods); this.setParamSimple(map, prefix + "IgnoreDaemonSetsUtilization", this.IgnoreDaemonSetsUtilization); this.setParamSimple(map, prefix + "OkTotalUnreadyCount", this.OkTotalUnreadyCount); this.setParamSimple(map, prefix + "MaxTotalUnreadyPercentage", this.MaxTotalUnreadyPercentage); this.setParamSimple(map, prefix + "ScaleDownUnreadyTime", this.ScaleDownUnreadyTime); this.setParamSimple(map, prefix + "UnregisteredNodeRemovalTime", this.UnregisteredNodeRemovalTime); } }
10,600
575
<gh_stars>100-1000
//
//  RCGimbalControlViewController.h
//  DJISdkDemo
//
//  Copyright © 2018 DJI. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface RCGimbalControlViewController : UIViewController

@end
81
634
<reponame>halotroop2288/consulo
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xdebugger.impl.actions;

import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.EditorGutterComponentEx;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.xdebugger.impl.breakpoints.XBreakpointUtil;
import com.intellij.xdebugger.impl.breakpoints.ui.BreakpointItem;
import com.intellij.xdebugger.impl.breakpoints.ui.BreakpointsDialogFactory;

import javax.annotation.Nonnull;
import javax.swing.*;
import java.awt.*;

public abstract class EditBreakpointActionHandler extends DebuggerActionHandler {

  protected abstract void doShowPopup(Project project, JComponent component, Point whereToShow, Object breakpoint);

  @Override
  public void perform(@Nonnull Project project, AnActionEvent event) {
    DataContext dataContext = event.getDataContext();
    Editor editor = dataContext.getData(CommonDataKeys.EDITOR);
    if (editor == null) return;

    final Pair<GutterIconRenderer,Object> pair = XBreakpointUtil.findSelectedBreakpoint(project, editor);

    Object breakpoint = pair.second;
    GutterIconRenderer breakpointGutterRenderer = pair.first;

    if (breakpointGutterRenderer == null) return;
    editBreakpoint(project, editor, breakpoint, breakpointGutterRenderer);
  }

  public void editBreakpoint(@Nonnull Project project, @Nonnull Editor editor, @Nonnull Object breakpoint, @Nonnull GutterIconRenderer breakpointGutterRenderer) {
    if (BreakpointsDialogFactory.getInstance(project).isBreakpointPopupShowing()) return;
    EditorGutterComponentEx gutterComponent = ((EditorEx)editor).getGutterComponentEx();
    Point point = gutterComponent.getCenterPoint(breakpointGutterRenderer);
    if (point != null) {
      doShowPopup(project, gutterComponent, point, breakpoint);
    }
  }

  public void editBreakpoint(@Nonnull Project project, @Nonnull JComponent parent, @Nonnull Point whereToShow, @Nonnull BreakpointItem breakpoint) {
    doShowPopup(project, parent, whereToShow, breakpoint.getBreakpoint());
  }
}
901
32,544
package com.baeldung.protobuf;

import org.junit.After;
import org.junit.Test;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Random;

import static org.junit.Assert.assertEquals;

public class ProtobufUnitTest {
    private final String filePath = "address_book";

    @After
    public void cleanup() throws IOException {
        Files.deleteIfExists(Paths.get(filePath));
    }

    @Test
    public void givenGeneratedProtobufClass_whenCreateClass_thenShouldCreateJavaInstance() {
        //when
        String email = "<EMAIL>";
        int id = new Random().nextInt();
        String name = "<NAME>";
        String number = "01234567890";

        AddressBookProtos.Person person = AddressBookProtos.Person.newBuilder()
                .setId(id)
                .setName(name)
                .setEmail(email)
                .addNumbers(number)
                .build();

        //then
        assertEquals(person.getEmail(), email);
        assertEquals(person.getId(), id);
        assertEquals(person.getName(), name);
        assertEquals(person.getNumbers(0), number);
    }

    @Test
    public void givenAddressBookWithOnePerson_whenSaveAsAFile_shouldLoadFromFileToJavaClass() throws IOException {
        //given
        String email = "<EMAIL>";
        int id = new Random().nextInt();
        String name = "<NAME>";
        String number = "01234567890";

        AddressBookProtos.Person person = AddressBookProtos.Person.newBuilder()
                .setId(id)
                .setName(name)
                .setEmail(email)
                .addNumbers(number)
                .build();

        //when
        AddressBookProtos.AddressBook addressBook = AddressBookProtos.AddressBook.newBuilder().addPeople(person).build();
        FileOutputStream fos = new FileOutputStream(filePath);
        addressBook.writeTo(fos);
        fos.close();

        //then
        FileInputStream fis = new FileInputStream(filePath);
        AddressBookProtos.AddressBook deserialized = AddressBookProtos.AddressBook.newBuilder().mergeFrom(fis).build();
        fis.close();

        assertEquals(deserialized.getPeople(0).getEmail(), email);
        assertEquals(deserialized.getPeople(0).getId(), id);
        assertEquals(deserialized.getPeople(0).getName(), name);
        assertEquals(deserialized.getPeople(0).getNumbers(0), number);
    }
}
1,184
2,151
<reponame>clairechingching/ScaffCC<gh_stars>1000+
//===-- SparcSelectionDAGInfo.cpp - Sparc SelectionDAG Info ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the SparcSelectionDAGInfo class.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "sparc-selectiondag-info"
#include "SparcTargetMachine.h"
using namespace llvm;

SparcSelectionDAGInfo::SparcSelectionDAGInfo(const SparcTargetMachine &TM)
  : TargetSelectionDAGInfo(TM) {
}

SparcSelectionDAGInfo::~SparcSelectionDAGInfo() {
}
241
488
<gh_stars>100-1000
/* PR fortran/37201.  Linked with bind_c_usage_17.f90.  */

char cdir(void){return '/';}
50
648
<gh_stars>100-1000 {"resourceType":"SearchParameter","id":"ClinicalImpression-action","url":"http://hl7.org/fhir/SearchParameter/ClinicalImpression-action","name":"action","status":"draft","experimental":false,"date":"2017-04-19T07:44:43+10:00","publisher":"Health Level Seven International (Patient Care)","contact":[{"telecom":[{"system":"url","value":"http://hl7.org/fhir"}]},{"telecom":[{"system":"url","value":"http://www.hl7.org/Special/committees/patientcare/index.cfm"}]}],"code":"action","base":["ClinicalImpression"],"type":"reference","description":"Action taken as part of assessment procedure","expression":"ClinicalImpression.action","xpath":"f:ClinicalImpression/f:action","xpathUsage":"normal","target":["Appointment","ReferralRequest","MedicationRequest","ProcedureRequest","Procedure"]}
226
4,538
/* * Copyright (C) 2015-2020 Alibaba Group Holding Limited */ #ifndef __BLUETOOTH_H__ #define __BLUETOOTH_H__ #include "stdint.h" #include "string.h" #include "btif_sys_config.h" #ifdef __cplusplus extern "C" { #endif #ifndef BOOL_DEFINED typedef unsigned int BOOL; /* IGNORESTYLE */ #endif typedef unsigned int U32; typedef unsigned short U16; typedef unsigned char U8; typedef int S32; typedef short S16; typedef char S8; #ifndef U32_PTR_DEFINED typedef U32 U32_PTR; #define U32_PTR_DEFINED #endif /* U32_PTR_DEFINED */ /* Variable sized integers. Used to optimize processor efficiency by * using the most efficient data size for counters, arithmatic, etc. */ typedef unsigned long I32; #ifndef XA_INTEGER_SIZE #define XA_INTEGER_SIZE 4 #endif #if XA_INTEGER_SIZE == 4 typedef unsigned long I16; typedef unsigned long I8; #elif XA_INTEGER_SIZE == 2 typedef unsigned short I16; typedef unsigned short I8; #elif XA_INTEGER_SIZE == 1 typedef unsigned short I16; typedef unsigned char I8; #else #error No XA_INTEGER_SIZE specified! #endif typedef void (*PFV) (void); /* Boolean Definitions */ #ifndef TRUE #define TRUE (1==1) #endif /* TRUE */ #ifndef FALSE #define FALSE (0==1) #endif /* FALSE */ #ifndef TimeT typedef U32 TimeT; #endif #ifndef BtPriority typedef U8 BtPriority; #endif /** Bluetooth Address */ typedef struct { uint8_t address[6]; } __attribute__ ((packed)) bt_bdaddr_t; #define BTIF_BD_ADDR_SIZE 6 #define BTIF_LINK_KEY_SIZE 16 typedef struct _list_entr { struct _list_entr *Flink; struct _list_entr *Blink; unsigned int resv; } list_entry_t; /*--------------------------------------------------------------------------- * * Doubly-linked list manipulation routines. Some are implemented as * macros but logically are procedures. */ #ifndef BTIF_LIST_MACROS //void InitializeListHead(ListEntry *head); #define initialize_list_head(ListHead) (\ (ListHead)->Flink = (ListHead)->Blink = (ListHead) ) #define initialize_list_entry(Entry) (\ (Entry)->Flink = (Entry)->Blink = 0 ) #define is_entry_available(Entry) (\ ((Entry)->Flink == 0)) #ifndef is_list_empty //BOOL is_list_empty(ListEntry *head); #define is_list_empty(ListHead) (\ ((ListHead)->Flink == (ListHead))) #endif #define get_head_list(ListHead) (ListHead)->Flink #define get_tail_list(ListHead) (ListHead)->Blink #define get_next_node(Node) (Node)->Flink #define get_prior_node(Node) (Node)->Blink #define is_node_connected(n) (((n)->Blink->Flink == (n)) && ((n)->Flink->Blink == (n))) BOOL is_list_circular(list_entry_t * list); #define list_assert(exp) (ASSERT(exp, "%s %s, %d\n", #exp, __func__, __LINE__)) //void InsertTailList(ListEntry *head, ListEntry *entry); void _insert_tail_list(list_entry_t * head, list_entry_t * entry); #define insert_tail_list(a, b) (list_assert(is_list_circular(a)), \ _insert_tail_list(a, b), \ list_assert(is_list_circular(a))) void insert_head_list(list_entry_t * head, list_entry_t * entry); void _insert_head_list(list_entry_t * head, list_entry_t * entry); #define insert_head_list(a, b) (list_assert(is_list_circular(a)), \ _insert_head_list(a, b), \ list_assert(is_list_circular(a))) list_entry_t *remove_head_list(list_entry_t * head); list_entry_t *_remove_head_list(list_entry_t * head); #define remove_head_list(a) (list_assert(is_list_circular(a)), \ _remove_head_list(a)) void remove_entry_list(list_entry_t * entry); BOOL is_node_on_list(list_entry_t * head, list_entry_t * node); U8 get_list_number(list_entry_t * head); BOOL is_list_circular(list_entry_t * list); void move_list(list_entry_t * dest, list_entry_t * src); 
#endif #define iterate_list_safe(head, cur, next, type) \ for ( (cur) = (type) get_head_list(head) ; \ (next) = (type) get_next_node(&(cur)->node), \ (cur) != (type) (head); \ (cur) = (next)) /*--------------------------------------------------------------------------- * IterateList() * * Sets up ordinary traversal of a list. The current member must NOT * be removed during iteration. Must be followed by a block of code * containing the body of the iteration. * * For example: * BtSecurityRecord *record; * IterateList(MEC(secList), record, BtSecurityRecord *) { * [...do something with "record", but do not remove it!...] * } * * Parameters: * head - Head of list (address of ListEntry structure) * cur - Variable to use for current list member * type - Structure type of cur and next. */ #define iterate_list(head, cur, type) \ for ( (cur) = (type) get_head_list(&(head)) ; \ (cur) != (type) &(head); \ (cur) = (type) get_next_node(&(cur)->node) ) enum _bt_status { BT_STS_SUCCESS = 0, BT_STS_FAILED = 1, BT_STS_PENDING = 2, BT_STS_BUSY = 11, BT_STS_NO_RESOURCES = 12, BT_STS_NOT_FOUND = 13, BT_STS_DEVICE_NOT_FOUND = 14, BT_STS_CONNECTION_FAILED = 15, BT_STS_TIMEOUT = 16, BT_STS_NO_CONNECTION = 17, BT_STS_INVALID_PARM = 18, BT_STS_IN_PROGRESS = 19, BT_STS_RESTRICTED = 20, BT_STS_INVALID_TYPE = 21, BT_STS_HCI_INIT_ERR = 22, BT_STS_NOT_SUPPORTED = 23, BT_STS_IN_USE = 5, BT_STS_SDP_CONT_STATE = 24, BT_STS_CONTINUE =24, BT_STS_CANCELLED = 25, /* The last defined status code */ BT_STS_LAST_CODE = BT_STS_CANCELLED, }; typedef uint32_t bt_status_t; typedef struct _evm_timer evm_timer_t; typedef void (*evm_timer_notify) (evm_timer_t *); struct _evm_timer { list_entry_t node; /* Used internally by the Event Manager */ void *context; /* Context area for use by callers */ evm_timer_notify func; /* Function to call when timer fires */ /* === Internal use only === */ TimeT time; /* Amount of time to wait */ TimeT startTime; /* System time when the timer started */ }; /*--------------------------------------------------------------------------- * btif_packet_flags type * * This type is used by L2CAP and protocols that use directly L2CAP * to manage the status of a particular BtPacket. */ typedef uint16_t btif_packet_flags; #define BTIF_BTP_FLAG_NONE 0x0000 /* No current flags */ #define BTIF_BTP_FLAG_INUSE 0x0001 /* Used only by packet owner */ #define BTIF_BTP_FLAG_LSYSTEM 0x0002 /* Used only by L2CAP */ #define BTIF_BTP_FLAG_TAIL 0x0004 /* Used only by L2CAP Applications */ #define BTIF_BTP_FLAG_RDEV 0x0008 /* Used only by L2CAP */ #define BTIF_BTP_FLAG_FCS 0x0010 /* FCS field is valid, set only by L2CAP */ #define BTIF_BTP_FLAG_NON_FLUSH 0x0020 /* Used by L2CAP, HCI or packet owner */ #define BTIF_BTP_FLAG_ENHANCED 0x0040 /* Used only by L2CAP */ #define BTIF_BTP_FLAG_SEGMENTED 0x0080 /* Used only by L2CAP */ #define BTIF_BTP_FLAG_TXDONE 0x0100 /* Used only by L2CAP */ #define BTIF_BTP_FLAG_USER 0x0200 /* Used only by L2CAP */ #define BTIF_BTP_FLAG_IMMEDIATE 0x0400 /* Used only by L2CAP */ /* End of btif_packet_flags */ #define BTIF_BT_PACKET_HEADER_LEN 25 typedef struct { list_entry_t node; uint8_t *data; /* Points to a buffer of user data. */ uint16_t dataLen; /* Indicates the length of "data" in bytes. */ uint16_t flags; /* Must be initialized to BTIF_BTP_FLAG_NONE by * applications running on top of L2CAP. */ #if BTIF_L2CAP_PRIORITY == BTIF_ENABLED BtPriority priority; #endif /* Group: The following fields are for internal use only by the stack. 
*/ void *ulpContext; uint8_t *tail; uint16_t tailLen; #ifdef BTIF_XA_STATISTICS U32 rfc_timer; U32 hci_timer; U32 l2cap_timer; #endif uint16_t llpContext; uint16_t remoteCid; #if BTIF_L2CAP_NUM_ENHANCED_CHANNELS > 0 uint8_t segStart; uint16_t segNum; uint16_t segCount; uint8_t fcs[2]; #endif uint8_t hciPackets; uint8_t headerLen; uint8_t header[BTIF_BT_PACKET_HEADER_LEN]; } btif_bt_packet_t; /*--------------------------------------------------------------------------- * le_to_host16() *--------------------------------------------------------------------------- * * Synopsis: Retrieve a 16-bit number from the given buffer. The number * is in Little-Endian format. * * Return: 16-bit number. */ U16 le_to_host16(const U8 * ptr); /*--------------------------------------------------------------------------- * be_to_host16() *--------------------------------------------------------------------------- * * Synopsis: Retrieve a 16-bit number from the given buffer. The number * is in Big-Endian format. * * Return: 16-bit number. */ U16 be_to_host16(const U8* ptr); /*--------------------------------------------------------------------------- * be_to_host32() *--------------------------------------------------------------------------- * * Synopsis: Retrieve a 32-bit number from the given buffer. The number * is in Big-Endian format. * * Return: 32-bit number. */ U32 be_to_host32(const U8* ptr); /*--------------------------------------------------------------------------- * store_le16() *--------------------------------------------------------------------------- * * Synopsis: Store 16 bit value into a buffer in Little Endian format. * * Return: void */ void store_le16(U8 *buff, U16 le_value); /*--------------------------------------------------------------------------- * store_le32() *--------------------------------------------------------------------------- * * Synopsis: Store 32 bit value into a buffer in Little Endian format. * * Return: void */ void store_le32(U8 *buff, U32 le_value); /*--------------------------------------------------------------------------- * store_be16() *--------------------------------------------------------------------------- * * Synopsis: Store 16 bit value into a buffer in Big Endian format. * * Return: void */ void store_be16(U8 *buff, U16 be_value); /*--------------------------------------------------------------------------- * store_be32() *--------------------------------------------------------------------------- * * Synopsis: Store 32 bit value into a buffer in Big Endian format. 
* * Return: void */ void store_be32(U8 *buff, U32 be_value); #if defined(ENHANCED_STACK) /* Copy, compare bluetooth Address */ static inline int ba_cmp(const bt_bdaddr_t *ba1, const bt_bdaddr_t *ba2) { return memcmp(ba1, ba2, sizeof(bt_bdaddr_t )); } static inline void ba_cpy( bt_bdaddr_t *dst, const bt_bdaddr_t *src) { memcpy(dst, src, sizeof(bt_bdaddr_t )); } #define BTIF_CTX_INIT(buff) \ POSSIBLY_UNUSED unsigned int __offset = 2; \ POSSIBLY_UNUSED unsigned char *__buff = buff; #define BTIF_CTX_STR_BUF(buff,len) \ memcpy(__buff+__offset, buff, len); \ __offset += len; #define BTIF_CTX_LDR_BUF(buff,len) \ memcpy(buff, __buff+__offset, len); \ __offset += len; #define BTIF_CTX_STR_VAL8(v) \ __buff[__offset] = v&0xFF; \ __offset += 1; #define BTIF_CTX_LDR_VAL8(v) \ v = __buff[__offset]; \ __offset += 1; #define BTIF_CTX_STR_VAL16(v) \ __buff[__offset] = v&0xFF; \ __buff[__offset+1] = (v>>8)&0xFF; \ __offset += 2; #define BTIF_CTX_LDR_VAL16(v) \ v = __buff[__offset]; \ v |= __buff[__offset+1]<<8; \ __offset += 2; #define BTIF_CTX_STR_VAL32(v) \ __buff[__offset] = v&0xFF; \ __buff[__offset+1] = (v>>8)&0xFF; \ __buff[__offset+2] = (v>>16)&0xFF; \ __buff[__offset+3] = (v>>24)&0xFF; \ __offset += 4; #define BTIF_CTX_LDR_VAL32(v) \ v = __buff[__offset]; \ v |= __buff[__offset+1]<<8; \ v |= __buff[__offset+2]<<16; \ v |= __buff[__offset+3]<<24; \ __offset += 4; #define BTIF_CTX_GET_BUF_CURR() __buff #define BTIF_CTX_GET_BUF_HEAD() __buff #define BTIF_CTX_GET_OFFSET() __offset #define BTIF_CTX_GET_DATA_LEN() (__buff[0] | __buff[1]<<8) #define BTIF_CTX_GET_TOTAL_LEN() (BTIF_CTX_GET_DATA_LEN()+2) #define BTIF_CTX_SAVE_UPDATE_DATA_LEN() \ __buff[0] = (__offset-2)&0xFF; \ __buff[1] = ((__offset-2)>>8)&0xFF; struct btif_ctx_content { unsigned char *buff; unsigned int buff_len; }; #endif /* ENHANCED_STACK */ #ifdef __cplusplus } #endif /* */ #endif /*__BLUETOOTH_H__*/
5,203
1,844
// Copyright (c) 2015 Baidu.com, Inc. All Rights Reserved
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Authors: Wang,Yao(<EMAIL>)
//          <NAME>(<EMAIL>)

#include "braft/configuration.h"
#include <butil/logging.h>
#include <butil/string_splitter.h>

namespace braft {

std::ostream& operator<<(std::ostream& os, const Configuration& a) {
    std::vector<PeerId> peers;
    a.list_peers(&peers);
    for (size_t i = 0; i < peers.size(); i++) {
        os << peers[i];
        if (i < peers.size() - 1) {
            os << ",";
        }
    }
    return os;
}

int Configuration::parse_from(butil::StringPiece conf) {
    reset();
    std::string peer_str;
    for (butil::StringSplitter sp(conf.begin(), conf.end(), ','); sp; ++sp) {
        braft::PeerId peer;
        peer_str.assign(sp.field(), sp.length());
        if (peer.parse(peer_str) != 0) {
            LOG(ERROR) << "Fail to parse " << peer_str;
            return -1;
        }
        add_peer(peer);
    }
    return 0;
}

}
598
7,739
<gh_stars>1000+
import pytest
import torch

from ludwig.encoders.generic_encoders import DenseEncoder, PassthroughEncoder


@pytest.mark.parametrize("input_size", [1, 2, 10])
@pytest.mark.parametrize("categorical", [True, False])
def test_generic_passthrough_encoder(input_size: int, categorical: bool):
    passthrough_encoder = PassthroughEncoder(input_size)
    # Passthrough encoder allows categorical input feature (int), dense encoder's input must be float.
    if categorical:
        inputs = torch.randint(10, (10, input_size))
    else:
        inputs = torch.rand((10, input_size))
    outputs = passthrough_encoder(inputs)
    # Ensures output shape matches encoder expected output shape.
    assert outputs["encoder_output"].shape[1:] == passthrough_encoder.output_shape


@pytest.mark.parametrize("input_size", [1, 2, 10])
@pytest.mark.parametrize("num_layers", [1, 3, 6])
@pytest.mark.parametrize("output_size", [1, 2, 10, 256])
def test_generic_dense_encoder(input_size: int, num_layers: int, output_size: int):
    dense_encoder = DenseEncoder(input_size, num_layers=num_layers, output_size=output_size)
    inputs = torch.rand((10, input_size))
    outputs = dense_encoder(inputs)
    # Ensures output shape matches encoder expected output shape.
    assert outputs["encoder_output"].shape[1:] == dense_encoder.output_shape
489
368
from nose.tools import assert_equal

from cutecharts.charts import Scatter
from cutecharts.render.engine import remove_key_with_none_value


def gen_scatter_base() -> Scatter:
    c = Scatter("Scatter")
    c.set_options(x_label="I'm xlabel", y_label="I'm ylabel")
    c.add_series("series-A", [("1", 1), ("2", 2)])
    return c


def test_scatter_opts_before():
    c = gen_scatter_base()
    expected = {
        "title": "Scatter",
        "data": {
            "datasets": [
                {"label": "series-A", "data": [{"x": "1", "y": 1}, {"x": "2", "y": 2}]}
            ]
        },
        "xLabel": "I'm xlabel",
        "yLabel": "I'm ylabel",
        "options": {
            "xTickCount": 3,
            "yTickCount": 3,
            "legendPosition": 1,
            "dataColors": None,
            "fontFamily": None,
            "showLine": False,
            "dotSize": 1,
            "timeFormat": None,
        },
    }
    assert_equal(c.opts, expected)


def test_scatter_opts_after():
    c = gen_scatter_base()
    c.opts = remove_key_with_none_value(c.opts)
    expected = {
        "title": "Scatter",
        "data": {
            "datasets": [
                {"label": "series-A", "data": [{"x": "1", "y": 1}, {"x": "2", "y": 2}]}
            ]
        },
        "xLabel": "I'm xlabel",
        "yLabel": "I'm ylabel",
        "options": {
            "xTickCount": 3,
            "yTickCount": 3,
            "legendPosition": 1,
            "showLine": False,
            "dotSize": 1,
        },
    }
    assert_equal(c.opts, expected)
from math import sqrt
from random import gauss, randint, uniform

from DaPy.core.base import Matrix

from .models import BaseEngineModel


class Layer(BaseEngineModel):
    def __init__(self, engine, function, str_activation):
        BaseEngineModel.__init__(self, engine)
        self.activation = function
        self.strfunc = str_activation

    def __repr__(self):
        return self.__name__

    @property
    def activation(self):
        return self._func.__name__

    @activation.setter
    def activation(self, other):
        assert callable(other) or other is None, 'activation should be a callable object'
        self._func = other

    def __getstate__(self):
        obj = self.__dict__.copy()
        obj['_engine'] = self.engine
        del obj['_func']
        return obj

    def __setstate__(self, pkl):
        self.engine = pkl['_engine']
        self.strfunc = pkl['strfunc']
        if '_weight' in pkl:
            self._weight = pkl['_weight']

    def propagation(self, *args, **kwrds):
        pass

    def backward(self, *args, **kwrds):
        pass


class Input(Layer):
    '''Input layer in the model'''

    __name__ = 'Input'

    def __init__(self, engine, in_cells, *args, **kwrds):
        Layer.__init__(self, engine, None, None)
        self._in_cells = in_cells
        self._weight = self._engine.zeros((in_cells, in_cells))

    @property
    def shape(self):
        return (self._in_cells, self._in_cells)

    def propagation(self, x):
        assert x.shape[1] == self._in_cells
        self._input = self._output = x
        return x


class Dense(Layer):
    '''A type of common layer for multilayer perceptron

    This kind of structure can help you quickly develop a new
    machine learning model.
    '''

    __name__ = 'Dense'

    def __init__(self, engine, n_in, n_out, activation, str_act, init_weight='Xavier'):
        Layer.__init__(self, engine, activation, str_act)
        self._init_parameters(n_in, n_out, init_weight)

    @property
    def shape(self):
        return self._weight.shape

    def _init_parameters(self, in_cells, out_cells, mode='MSRA'):
        '''initialize the weight matrix in this layer.

        Parameters
        ---------
        in_cells : int
            the number of input variables.

        out_cells : int
            the number of output variables.

        activation : function
        '''
        if mode in ('MSRA', 'He'):
            t1, t2, f = 0, sqrt(2.0 / in_cells), gauss
        elif mode == 'Xavier':
            low = in_cells + out_cells
            t1, t2, f = - sqrt(6.0 / low), sqrt(6.0 / low), uniform
        elif mode == 'Gauss':
            t1, t2, f = 0, 1, gauss
        else:
            raise TypeError('the mode for initial weight only supports MSRA, ' +
                            'Xavier, and Gauss.')
        weight = [[f(t1, t2) for j in range(out_cells)] for i in range(in_cells)]
        self._weight = self._engine.mat(weight)

    def propagation(self, input_):
        self._input = input_
        self._output = self._func(input_.dot(self._weight))
        return self._output

    def backward(self, gradient, alpha):
        gradient = self._mul(gradient, self._func(self._output, True))
        self._weight += self._input.T.dot(gradient) * alpha
        return self._dot(gradient, self._weight.T)


class Output(Dense):
    '''Output layer in the model'''

    __name__ = 'Output'

    def __init__(self, engine, n_in, n_out, activation, init_weight='Xavier'):
        Dense.__init__(self, engine, n_in, n_out, activation, init_weight)
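# --- Dependency-free sketch of the weight-initialization schemes used by
# --- Dense._init_parameters above; init_weight() is a hypothetical helper
# --- written for illustration and is not part of DaPy.
from math import sqrt
from random import gauss, uniform


def init_weight(in_cells, out_cells, mode="Xavier"):
    if mode in ("MSRA", "He"):
        # He initialization: N(0, sqrt(2 / fan_in))
        draw = lambda: gauss(0, sqrt(2.0 / in_cells))
    elif mode == "Xavier":
        # Xavier initialization: U(-b, b) with b = sqrt(6 / (fan_in + fan_out))
        bound = sqrt(6.0 / (in_cells + out_cells))
        draw = lambda: uniform(-bound, bound)
    elif mode == "Gauss":
        # Plain standard-normal initialization
        draw = lambda: gauss(0, 1)
    else:
        raise TypeError("mode must be one of MSRA/He, Xavier, Gauss")
    return [[draw() for _ in range(out_cells)] for _ in range(in_cells)]


w = init_weight(4, 3, mode="He")
print(len(w), len(w[0]))  # 4 3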
#pragma once

#ifndef JAK_V2_SSOUND_H
#define JAK_V2_SSOUND_H

void InitSound_Overlord();

#endif  // JAK_V2_SSOUND_H
@import UIKit;

@class MBEMetalView;

@interface MBEViewController : UIViewController

@property (nonatomic, readonly) MBEMetalView *metalView;

@end
package org.keycloak.scripting;

import org.keycloak.provider.ProviderFactory;

/**
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 */
public interface ScriptingProviderFactory extends ProviderFactory<ScriptingProvider> {
}
/* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. */

package com.amazonaws.services.lambda.runtime.api.client.api;

import com.amazonaws.services.lambda.runtime.CognitoIdentity;

public class LambdaCognitoIdentity implements CognitoIdentity {

    private final String cognitoIdentityId;
    private final String cognitoIdentityPoolId;

    public LambdaCognitoIdentity(String identityid, String poolid) {
        this.cognitoIdentityId = identityid;
        this.cognitoIdentityPoolId = poolid;
    }

    public String getIdentityId() {
        return this.cognitoIdentityId;
    }

    public String getIdentityPoolId() {
        return this.cognitoIdentityPoolId;
    }
}
package io.cattle.platform.iaas.api.auth.projects; import io.cattle.platform.api.auth.Identity; import io.cattle.platform.api.auth.Policy; import io.cattle.platform.api.resource.AbstractObjectResourceManager; import io.cattle.platform.core.constants.ProjectConstants; import io.cattle.platform.core.dao.GenericResourceDao; import io.cattle.platform.core.model.Account; import io.cattle.platform.core.model.ProjectMember; import io.cattle.platform.iaas.api.auth.dao.AuthDao; import io.cattle.platform.iaas.api.auth.identity.IdentityManager; import io.cattle.platform.json.JsonMapper; import io.cattle.platform.object.ObjectManager; import io.cattle.platform.object.process.ObjectProcessManager; import io.github.ibuildthecloud.gdapi.context.ApiContext; import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException; import io.github.ibuildthecloud.gdapi.factory.SchemaFactory; import io.github.ibuildthecloud.gdapi.model.ListOptions; import io.github.ibuildthecloud.gdapi.request.ApiRequest; import io.github.ibuildthecloud.gdapi.util.RequestUtils; import io.github.ibuildthecloud.gdapi.util.ResponseCodes; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.inject.Inject; import org.apache.commons.lang3.StringUtils; public class ProjectMemberResourceManager extends AbstractObjectResourceManager { @Inject JsonMapper jsonMapper; @Inject AuthDao authDao; @Inject GenericResourceDao resourceDao; @Inject ObjectManager objectManager; @Inject ObjectProcessManager objectProcessManager; @Inject IdentityManager identityManager; @Override protected Object removeFromStore(String type, String id, Object obj, ApiRequest request) { throw new UnsupportedOperationException(); } @Override protected Object listInternal(SchemaFactory schemaFactory, String type, Map<Object, Object> criteria, ListOptions options) { Policy policy = (Policy) ApiContext.getContext().getPolicy(); String id = RequestUtils.makeSingularStringIfCan(criteria.get("id")); if (StringUtils.isNotEmpty(id)) { ProjectMember projectMember; try { projectMember = authDao.getProjectMember(Long.valueOf(id)); } catch (NumberFormatException e) { throw new ClientVisibleException(ResponseCodes.NOT_FOUND); } if (projectMember == null) { throw new ClientVisibleException(ResponseCodes.NOT_FOUND); } if (!authDao.hasAccessToProject(projectMember.getProjectId(), policy.getAccountId(), policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS), policy.getIdentities())) { throw new ClientVisibleException(ResponseCodes.NOT_FOUND); } Identity identity = identityManager.projectMemberToIdentity(projectMember); policy.grantObjectAccess(identity); return Collections.singletonList(identity); } String projectId = RequestUtils.makeSingularStringIfCan(criteria.get("projectId")); List<? 
extends ProjectMember> members; if (StringUtils.isNotEmpty(projectId)) { members = authDao.getActiveProjectMembers(Long.valueOf(projectId)); } else { members = authDao.getActiveProjectMembers(policy.getAccountId()); } List<Identity> identities = new ArrayList<>(); for (ProjectMember member:members){ Identity identity = identityManager.projectMemberToIdentity(member); identities.add(identity); policy.grantObjectAccess(identity); } return identities; } @Override public String[] getTypes() { return new String[]{"projectMember"}; } @Override public Class<?>[] getTypeClasses() { return new Class<?>[0]; } @Override protected Object createInternal(String type, ApiRequest request) { throw new ClientVisibleException(ResponseCodes.METHOD_NOT_ALLOWED); } public List<ProjectMember> setMembers(Account project, List<Map<String, String>> members, boolean forProjectCreate) { List<ProjectMember> membersCreated = new ArrayList<>(); Set<Member> membersTransformed = new HashSet<>(); if ((members == null || members.isEmpty())) { Policy policy = (Policy) ApiContext.getContext().getPolicy(); Identity idToUse = null; for (Identity identity : policy.getIdentities()) { if (idToUse == null) { if (identity.getExternalIdType().equalsIgnoreCase(ProjectConstants.RANCHER_ID)) { idToUse = identity; } } } if (idToUse != null) { Member owner = new Member(idToUse, ProjectConstants.OWNER); membersTransformed.add(owner); } } else { for (Map<String, String> newMember : members) { if (newMember.get("externalId") == null || newMember.get("externalIdType") == null || newMember.get("role") == null) { throw new ClientVisibleException(ResponseCodes.BAD_REQUEST, "InvalidFormat", "Member format " + "invalid", null); } } for (Map<String, String> newMember : members) { Identity givenIdentity = new Identity(newMember.get("externalIdType"), newMember.get("externalId")); givenIdentity = identityManager.projectMemberToIdentity(givenIdentity); membersTransformed.add(new Member(givenIdentity, newMember.get("role"))); } } boolean hasOwner = false; Set<Member> newOwners = new HashSet<>(); for (Member member : membersTransformed) { if (member.getRole().equalsIgnoreCase(ProjectConstants.OWNER)) { hasOwner = true; newOwners.add(member); } } if (!hasOwner) { throw new ClientVisibleException(ResponseCodes.BAD_REQUEST, "InvalidFormat", "Members list does not have an owner", null); } Policy policy = (Policy) ApiContext.getContext().getPolicy(); boolean isOwner = authDao.isProjectOwner(project.getId(), policy.getAccountId(), policy.isOption(Policy.AUTHORIZED_FOR_ALL_ACCOUNTS), policy .getIdentities()); // Can only change project owners if you're an owner or this is part of initial project create (to assign yourself as owner) if (!isOwner && !forProjectCreate) { List<? 
extends ProjectMember>currentMembers = authDao.getActiveProjectMembers(project.getId()); Set<Member> currentOwners = new HashSet<>(); String projectId = (String) ApiContext.getContext().getIdFormatter().formatId(objectManager.getType(Account.class), project.getId()); for (ProjectMember m : currentMembers) { if (ProjectConstants.OWNER.equalsIgnoreCase(m.getRole())) { currentOwners.add(new Member(m, projectId)); } } if (!newOwners.equals(currentOwners)) { throw new ClientVisibleException(ResponseCodes.BAD_REQUEST, "InvalidFormat", "Cannot change project owners", null); } } membersCreated.addAll(authDao.setProjectMembers(project, membersTransformed, ApiContext.getContext() .getIdFormatter())); for (ProjectMember member : membersCreated) { identityManager.untransform(identityManager.projectMemberToIdentity(member), true); } return membersCreated; } }
#pragma once

#include <mbgl/util/noncopyable.hpp>
#include <mbgl/util/geo.hpp>
#include <mbgl/util/geometry.hpp>

#include <jni/jni.hpp>

namespace mbgl {
namespace android {

class LatLngBounds : private mbgl::util::noncopyable {
public:
    static constexpr auto Name() { return "com/mapbox/mapboxsdk/geometry/LatLngBounds"; };

    static jni::Local<jni::Object<LatLngBounds>> New(jni::JNIEnv&, mbgl::LatLngBounds);

    static mbgl::LatLngBounds getLatLngBounds(jni::JNIEnv&, const jni::Object<LatLngBounds>&);

    static void registerNative(jni::JNIEnv&);
};

} // namespace android
} // namespace mbgl
{
  "parent": "create:block/encased_shaft/item",
  "textures": {
    "casing": "create:block/brass_casing",
    "opening": "create:block/brass_gearbox"
  }
}
# This file provides the runtime support for running a basic program # Assumes the program has been parsed using basparse.py import sys import math import random class BasicInterpreter: # Initialize the interpreter. prog is a dictionary # containing (line,statement) mappings def __init__(self, prog): self.prog = prog self.functions = { # Built-in function table 'SIN': lambda z: math.sin(self.eval(z)), 'COS': lambda z: math.cos(self.eval(z)), 'TAN': lambda z: math.tan(self.eval(z)), 'ATN': lambda z: math.atan(self.eval(z)), 'EXP': lambda z: math.exp(self.eval(z)), 'ABS': lambda z: abs(self.eval(z)), 'LOG': lambda z: math.log(self.eval(z)), 'SQR': lambda z: math.sqrt(self.eval(z)), 'INT': lambda z: int(self.eval(z)), 'RND': lambda z: random.random() } # Collect all data statements def collect_data(self): self.data = [] for lineno in self.stat: if self.prog[lineno][0] == 'DATA': self.data = self.data + self.prog[lineno][1] self.dc = 0 # Initialize the data counter # Check for end statements def check_end(self): has_end = 0 for lineno in self.stat: if self.prog[lineno][0] == 'END' and not has_end: has_end = lineno if not has_end: print("NO END INSTRUCTION") self.error = 1 return if has_end != lineno: print("END IS NOT LAST") self.error = 1 # Check loops def check_loops(self): for pc in range(len(self.stat)): lineno = self.stat[pc] if self.prog[lineno][0] == 'FOR': forinst = self.prog[lineno] loopvar = forinst[1] for i in range(pc + 1, len(self.stat)): if self.prog[self.stat[i]][0] == 'NEXT': nextvar = self.prog[self.stat[i]][1] if nextvar != loopvar: continue self.loopend[pc] = i break else: print("FOR WITHOUT NEXT AT LINE %s" % self.stat[pc]) self.error = 1 # Evaluate an expression def eval(self, expr): etype = expr[0] if etype == 'NUM': return expr[1] elif etype == 'GROUP': return self.eval(expr[1]) elif etype == 'UNARY': if expr[1] == '-': return -self.eval(expr[2]) elif etype == 'BINOP': if expr[1] == '+': return self.eval(expr[2]) + self.eval(expr[3]) elif expr[1] == '-': return self.eval(expr[2]) - self.eval(expr[3]) elif expr[1] == '*': return self.eval(expr[2]) * self.eval(expr[3]) elif expr[1] == '/': return float(self.eval(expr[2])) / self.eval(expr[3]) elif expr[1] == '^': return abs(self.eval(expr[2]))**self.eval(expr[3]) elif etype == 'VAR': var, dim1, dim2 = expr[1] if not dim1 and not dim2: if var in self.vars: return self.vars[var] else: print("UNDEFINED VARIABLE %s AT LINE %s" % (var, self.stat[self.pc])) raise RuntimeError # May be a list lookup or a function evaluation if dim1 and not dim2: if var in self.functions: # A function return self.functions[var](dim1) else: # A list evaluation if var in self.lists: dim1val = self.eval(dim1) if dim1val < 1 or dim1val > len(self.lists[var]): print("LIST INDEX OUT OF BOUNDS AT LINE %s" % self.stat[self.pc]) raise RuntimeError return self.lists[var][dim1val - 1] if dim1 and dim2: if var in self.tables: dim1val = self.eval(dim1) dim2val = self.eval(dim2) if dim1val < 1 or dim1val > len(self.tables[var]) or dim2val < 1 or dim2val > len(self.tables[var][0]): print("TABLE INDEX OUT OUT BOUNDS AT LINE %s" % self.stat[self.pc]) raise RuntimeError return self.tables[var][dim1val - 1][dim2val - 1] print("UNDEFINED VARIABLE %s AT LINE %s" % (var, self.stat[self.pc])) raise RuntimeError # Evaluate a relational expression def releval(self, expr): etype = expr[1] lhs = self.eval(expr[2]) rhs = self.eval(expr[3]) if etype == '<': if lhs < rhs: return 1 else: return 0 elif etype == '<=': if lhs <= rhs: return 1 else: return 0 elif etype == '>': if 
lhs > rhs: return 1 else: return 0 elif etype == '>=': if lhs >= rhs: return 1 else: return 0 elif etype == '=': if lhs == rhs: return 1 else: return 0 elif etype == '<>': if lhs != rhs: return 1 else: return 0 # Assignment def assign(self, target, value): var, dim1, dim2 = target if not dim1 and not dim2: self.vars[var] = self.eval(value) elif dim1 and not dim2: # List assignment dim1val = self.eval(dim1) if not var in self.lists: self.lists[var] = [0] * 10 if dim1val > len(self.lists[var]): print ("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc]) raise RuntimeError self.lists[var][dim1val - 1] = self.eval(value) elif dim1 and dim2: dim1val = self.eval(dim1) dim2val = self.eval(dim2) if not var in self.tables: temp = [0] * 10 v = [] for i in range(10): v.append(temp[:]) self.tables[var] = v # Variable already exists if dim1val > len(self.tables[var]) or dim2val > len(self.tables[var][0]): print("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc]) raise RuntimeError self.tables[var][dim1val - 1][dim2val - 1] = self.eval(value) # Change the current line number def goto(self, linenum): if not linenum in self.prog: print("UNDEFINED LINE NUMBER %d AT LINE %d" % (linenum, self.stat[self.pc])) raise RuntimeError self.pc = self.stat.index(linenum) # Run it def run(self): self.vars = {} # All variables self.lists = {} # List variables self.tables = {} # Tables self.loops = [] # Currently active loops self.loopend = {} # Mapping saying where loops end self.gosub = None # Gosub return point (if any) self.error = 0 # Indicates program error self.stat = list(self.prog) # Ordered list of all line numbers self.stat.sort() self.pc = 0 # Current program counter # Processing prior to running self.collect_data() # Collect all of the data statements self.check_end() self.check_loops() if self.error: raise RuntimeError while 1: line = self.stat[self.pc] instr = self.prog[line] op = instr[0] # END and STOP statements if op == 'END' or op == 'STOP': break # We're done # GOTO statement elif op == 'GOTO': newline = instr[1] self.goto(newline) continue # PRINT statement elif op == 'PRINT': plist = instr[1] out = "" for label, val in plist: if out: out += ' ' * (15 - (len(out) % 15)) out += label if val: if label: out += " " eval = self.eval(val) out += str(eval) sys.stdout.write(out) end = instr[2] if not (end == ',' or end == ';'): sys.stdout.write("\n") if end == ',': sys.stdout.write(" " * (15 - (len(out) % 15))) if end == ';': sys.stdout.write(" " * (3 - (len(out) % 3))) # LET statement elif op == 'LET': target = instr[1] value = instr[2] self.assign(target, value) # READ statement elif op == 'READ': for target in instr[1]: if self.dc < len(self.data): value = ('NUM', self.data[self.dc]) self.assign(target, value) self.dc += 1 else: # No more data. Program ends return elif op == 'IF': relop = instr[1] newline = instr[2] if (self.releval(relop)): self.goto(newline) continue elif op == 'FOR': loopvar = instr[1] initval = instr[2] finval = instr[3] stepval = instr[4] # Check to see if this is a new loop if not self.loops or self.loops[-1][0] != self.pc: # Looks like a new loop. 
Make the initial assignment newvalue = initval self.assign((loopvar, None, None), initval) if not stepval: stepval = ('NUM', 1) stepval = self.eval(stepval) # Evaluate step here self.loops.append((self.pc, stepval)) else: # It's a repeat of the previous loop # Update the value of the loop variable according to the # step stepval = ('NUM', self.loops[-1][1]) newvalue = ( 'BINOP', '+', ('VAR', (loopvar, None, None)), stepval) if self.loops[-1][1] < 0: relop = '>=' else: relop = '<=' if not self.releval(('RELOP', relop, newvalue, finval)): # Loop is done. Jump to the NEXT self.pc = self.loopend[self.pc] self.loops.pop() else: self.assign((loopvar, None, None), newvalue) elif op == 'NEXT': if not self.loops: print("NEXT WITHOUT FOR AT LINE %s" % line) return nextvar = instr[1] self.pc = self.loops[-1][0] loopinst = self.prog[self.stat[self.pc]] forvar = loopinst[1] if nextvar != forvar: print("NEXT DOESN'T MATCH FOR AT LINE %s" % line) return continue elif op == 'GOSUB': newline = instr[1] if self.gosub: print("ALREADY IN A SUBROUTINE AT LINE %s" % line) return self.gosub = self.stat[self.pc] self.goto(newline) continue elif op == 'RETURN': if not self.gosub: print("RETURN WITHOUT A GOSUB AT LINE %s" % line) return self.goto(self.gosub) self.gosub = None elif op == 'FUNC': fname = instr[1] pname = instr[2] expr = instr[3] def eval_func(pvalue, name=pname, self=self, expr=expr): self.assign((pname, None, None), pvalue) return self.eval(expr) self.functions[fname] = eval_func elif op == 'DIM': for vname, x, y in instr[1]: if y == 0: # Single dimension variable self.lists[vname] = [0] * x else: # Double dimension variable temp = [0] * y v = [] for i in range(x): v.append(temp[:]) self.tables[vname] = v self.pc += 1 # Utility functions for program listing def expr_str(self, expr): etype = expr[0] if etype == 'NUM': return str(expr[1]) elif etype == 'GROUP': return "(%s)" % self.expr_str(expr[1]) elif etype == 'UNARY': if expr[1] == '-': return "-" + str(expr[2]) elif etype == 'BINOP': return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3])) elif etype == 'VAR': return self.var_str(expr[1]) def relexpr_str(self, expr): return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3])) def var_str(self, var): varname, dim1, dim2 = var if not dim1 and not dim2: return varname if dim1 and not dim2: return "%s(%s)" % (varname, self.expr_str(dim1)) return "%s(%s,%s)" % (varname, self.expr_str(dim1), self.expr_str(dim2)) # Create a program listing def list(self): stat = list(self.prog) # Ordered list of all line numbers stat.sort() for line in stat: instr = self.prog[line] op = instr[0] if op in ['END', 'STOP', 'RETURN']: print("%s %s" % (line, op)) continue elif op == 'REM': print("%s %s" % (line, instr[1])) elif op == 'PRINT': _out = "%s %s " % (line, op) first = 1 for p in instr[1]: if not first: _out += ", " if p[0] and p[1]: _out += '"%s"%s' % (p[0], self.expr_str(p[1])) elif p[1]: _out += self.expr_str(p[1]) else: _out += '"%s"' % (p[0],) first = 0 if instr[2]: _out += instr[2] print(_out) elif op == 'LET': print("%s LET %s = %s" % (line, self.var_str(instr[1]), self.expr_str(instr[2]))) elif op == 'READ': _out = "%s READ " % line first = 1 for r in instr[1]: if not first: _out += "," _out += self.var_str(r) first = 0 print(_out) elif op == 'IF': print("%s IF %s THEN %d" % (line, self.relexpr_str(instr[1]), instr[2])) elif op == 'GOTO' or op == 'GOSUB': print("%s %s %s" % (line, op, instr[1])) elif op == 'FOR': _out = "%s FOR %s = %s TO %s" % ( line, instr[1], 
self.expr_str(instr[2]), self.expr_str(instr[3])) if instr[4]: _out += " STEP %s" % (self.expr_str(instr[4])) print(_out) elif op == 'NEXT': print("%s NEXT %s" % (line, instr[1])) elif op == 'FUNC': print("%s DEF %s(%s) = %s" % (line, instr[1], instr[2], self.expr_str(instr[3]))) elif op == 'DIM': _out = "%s DIM " % line first = 1 for vname, x, y in instr[1]: if not first: _out += "," first = 0 if y == 0: _out += "%s(%d)" % (vname, x) else: _out += "%s(%d,%d)" % (vname, x, y) print(_out) elif op == 'DATA': _out = "%s DATA " % line first = 1 for v in instr[1]: if not first: _out += "," first = 0 _out += v print(_out) # Erase the current program def new(self): self.prog = {} # Insert statements def add_statements(self, prog): for line, stat in prog.items(): self.prog[line] = stat # Delete a statement def del_line(self, lineno): try: del self.prog[lineno] except KeyError: pass
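# --- Hypothetical driver for the BasicInterpreter above (illustration only). ---
# The statement tuples follow the shapes consumed by run(): PRINT takes a list
# of (label, expression) pairs plus an end character, END takes no arguments.
prog = {
    10: ('PRINT', [('HELLO WORLD', None)], None),
    20: ('END',),
}

interp = BasicInterpreter(prog)
interp.list()   # prints the program listing
interp.run()    # prints HELLO WORLD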
from __future__ import absolute_import

from .numpy_wrapper import *
from . import numpy_boxes
from . import numpy_vjps
<filename>src/4uqi/top.h /* * Copyright (C) 2005-2017 <NAME> (<EMAIL>). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * See the file COPYING for License information. */ #include "0root/root.h" #include <map> #include "1base/error.h" #include "2config/db_config.h" #include "4uqi/plugin_wrapper.h" #include "4uqi/statements.h" #include "4uqi/scanvisitor.h" #include "4uqi/scanvisitorfactoryhelper.h" // Always verify that a file of level N does not include headers > N! #ifndef UPS_ROOT_H # error "root.h was not included" #endif namespace upscaledb { typedef std::vector<uint8_t> ByteVector; // If the vector is full then delete the old minimum to make space. // Then append the new value. template<typename T> static inline T store_min_value(T new_minimum, T old_minimum, const void *value_data, size_t value_size, std::map<T, ByteVector> &storage, size_t limit) { typedef typename std::map<T, ByteVector>::value_type ValueType; const uint8_t *v = (const uint8_t *)value_data; if (unlikely(storage.size() < limit)) { storage.insert(ValueType(new_minimum, ByteVector(v, v + value_size))); return new_minimum < old_minimum ? new_minimum : old_minimum; } if (new_minimum > old_minimum) { storage.erase(storage.find(old_minimum)); storage.insert(ValueType(new_minimum, ByteVector(v, v + value_size))); return storage.begin()->first; } return old_minimum; } template<typename Key, typename Record> struct TopScanVisitorBase : public NumericalScanVisitor { typedef std::map<Key, ByteVector> KeyMap; typedef std::map<Record, ByteVector> RecordMap; TopScanVisitorBase(const DbConfig *cfg, SelectStatement *stmt) : NumericalScanVisitor(stmt), min_key(std::numeric_limits<typename Key::type>::max()), min_record(std::numeric_limits<typename Record::type>::max()), key_type(cfg->key_type), record_type(cfg->record_type) { if (statement->limit == 0) statement->limit = 1; } // Assigns the result to |result| virtual void assign_result(uqi_result_t *result) { uqi_result_initialize(result, key_type, record_type); if (ISSET(statement->function.flags, UQI_STREAM_KEY)) { for (typename KeyMap::iterator it = stored_keys.begin(); it != stored_keys.end(); it++) { const Key &key = it->first; const ByteVector &record = it->second; uqi_result_add_row(result, key.ptr(), key.size(), record.data(), record.size()); } } else { for (typename RecordMap::iterator it = stored_records.begin(); it != stored_records.end(); it++) { const Record &record = it->first; const ByteVector &key = it->second; uqi_result_add_row(result, key.data(), key.size(), record.ptr(), record.size()); } } } // The minimum value currently stored in |keys| Key min_key; // The current set of keys KeyMap stored_keys; // The minimum value currently stored in |records| Record min_record; // The current set of records RecordMap stored_records; // The types for keys and records int key_type; int record_type; }; template<typename Key, typename Record> struct TopScanVisitor : public TopScanVisitorBase<Key, Record> { typedef TopScanVisitorBase<Key, Record> P; TopScanVisitor(const DbConfig *cfg, 
SelectStatement *stmt) : TopScanVisitorBase<Key, Record>(cfg, stmt) { } // Operates on a single key virtual void operator()(const void *key_data, uint16_t key_size, const void *record_data, uint32_t record_size) { if (ISSET(P::statement->function.flags, UQI_STREAM_KEY)) { Key key(key_data, key_size); P::min_key = store_min_value(key, P::min_key, record_data, record_size, P::stored_keys, P::statement->limit); } else { Record record(record_data, record_size); P::min_record = store_min_value(record, P::min_record, key_data, key_size, P::stored_records, P::statement->limit); } } // Operates on an array of keys virtual void operator()(const void *key_data, const void *record_data, size_t length) { Sequence<Key> keys(key_data, length); Sequence<Record> records(record_data, length); typename Sequence<Key>::iterator kit = keys.begin(); typename Sequence<Record>::iterator rit = records.begin(); if (ISSET(P::statement->function.flags, UQI_STREAM_KEY)) { for (; kit != keys.end(); kit++, rit++) { P::min_key = store_min_value(*kit, P::min_key, &rit->value, rit->size(), P::stored_keys, P::statement->limit); } } else { for (; kit != keys.end(); kit++, rit++) { P::min_record = store_min_value(*rit, P::min_record, &kit->value, kit->size(), P::stored_records, P::statement->limit); } } } }; struct TopScanVisitorFactory { static ScanVisitor *create(const DbConfig *cfg, SelectStatement *stmt) { return ScanVisitorFactoryHelper::create<TopScanVisitor>(cfg, stmt); } }; template<typename Key, typename Record> struct TopIfScanVisitor : public TopScanVisitorBase<Key, Record> { typedef TopScanVisitorBase<Key, Record> P; TopIfScanVisitor(const DbConfig *cfg, SelectStatement *stmt) : TopScanVisitorBase<Key, Record>(cfg, stmt), plugin(cfg, stmt) { } // Operates on a single key // // TODO first check if the key is < old_minimum, THEN check the predicate // (otherwise the predicate is checked for every key, and I think this is // more expensive than the other way round) virtual void operator()(const void *key_data, uint16_t key_size, const void *record_data, uint32_t record_size) { if (plugin.pred(key_data, key_size, record_data, record_size)) { if (ISSET(P::statement->function.flags, UQI_STREAM_KEY)) { Key key(key_data, key_size); P::min_key = store_min_value(key, P::min_key, record_data, record_size, P::stored_keys, P::statement->limit); } else { Record record(record_data, record_size); P::min_record = store_min_value(record, P::min_record, key_data, key_size, P::stored_records, P::statement->limit); } } } // Operates on an array of keys and records (both with fixed length) // // TODO first check if the key is < old_minimum, THEN check the predicate // (otherwise the predicate is checked for every key, and I think this is // more expensive than the other way round) virtual void operator()(const void *key_data, const void *record_data, size_t length) { Sequence<Key> keys(key_data, length); Sequence<Record> records(record_data, length); typename Sequence<Key>::iterator kit = keys.begin(); typename Sequence<Record>::iterator rit = records.begin(); if (ISSET(P::statement->function.flags, UQI_STREAM_KEY)) { for (; kit != keys.end(); kit++, rit++) { if (plugin.pred(&kit->value, kit->size(), &rit->value, rit->size())) { P::min_key = store_min_value(*kit, P::min_key, &rit->value, rit->size(), P::stored_keys, P::statement->limit); } } } else { for (; kit != keys.end(); kit++, rit++) { if (plugin.pred(&kit->value, kit->size(), &rit->value, rit->size())) { P::min_record = store_min_value(*rit, P::min_record, &kit->value, 
kit->size(), P::stored_records, P::statement->limit); } } } } // The predicate plugin PredicatePluginWrapper plugin; }; struct TopIfScanVisitorFactory { static ScanVisitor *create(const DbConfig *cfg, SelectStatement *stmt) { return ScanVisitorFactoryHelper::create<TopIfScanVisitor>(cfg, stmt); } }; } // namespace upscaledb
/** @file

  @copyright
  Copyright 2012 - 2021 Intel Corporation. <BR>

  SPDX-License-Identifier: BSD-2-Clause-Patent
**/

#include <Library/PlatformSetupVariableSyncLib.h>

/*++
Description:

  This function will parse the variable hob and find three variables:
    RP variable
    PC common variable
    PC generation variable
  This is used to sync the PC variable to the RP variable value.

Arguments:

  PeiServices                - PeiServices
  Header                     - VARIABLE_STORE_HEADER
  CreateHobDataForRpDefaults - will create a hob for RP defaults; this is used in the normal POST case and cannot be used in a specific hob event

Returns:

  EFI_SUCCESS - Sync to RP variable Success
  Other       - Sync to RP variable Failure

--*/
EFI_STATUS
SyncSetupVariable (
  IN EFI_PEI_SERVICES  **PeiServices,
  IN OUT VOID          *Header,
  IN BOOLEAN           CreateHobDataForRpDefaults
  )
{
  EFI_STATUS Status = EFI_SUCCESS;
  return Status;
}

/*++
Description:

  This function finds the matched default data and creates a GUID hob only for the RP variable.
  This is used to sync the PC variable to the RP variable value.

Arguments:

  DefaultId - Specifies the type of defaults to retrieve.
  BoardId   - Specifies the platform board of defaults to retrieve.

Returns:

  EFI_SUCCESS          - The matched default data is found.
  EFI_NOT_FOUND        - The matched default data is not found.
  EFI_OUT_OF_RESOURCES - Not enough resources to create the HOB.

--*/
EFI_STATUS
CreateRPVariableHob (
  IN UINT16  DefaultId,
  IN UINT16  BoardId
  )
{
  EFI_STATUS Status = EFI_SUCCESS;
  return Status;
}
/* * Copyright 2017, <NAME>, <EMAIL>. * Copyright 2011, <NAME>, <EMAIL>. * Copyright 2001-2010, <NAME>, <EMAIL>. * This file may be used under the terms of the MIT License. */ #ifndef B_TREE_H #define B_TREE_H #include "btrfs.h" #include "Volume.h" #define BTREE_NULL -1LL #define BTREE_FREE -2LL #define BTRFS_MAX_TREE_DEPTH 8 //! Tree traversal direction status, used by functions manipulating trees enum btree_traversing { BTREE_FORWARD = 1, BTREE_EXACT = 0, BTREE_BACKWARD = -1, BTREE_BEGIN = 0, BTREE_END = -1 }; class Transaction; // #pragma mark - in-memory structures template<class T> class Stack; class TreeIterator; // needed for searching (utilizing a stack) struct node_and_key { off_t nodeOffset; uint16 keyIndex; }; class BTree { public: class Path; class Node; public: BTree(Volume* volume); BTree(Volume* volume, btrfs_stream* stream); BTree(Volume* volume, fsblock_t rootBlock); ~BTree(); status_t FindExact(Path* path, btrfs_key& key, void** _value, uint32* _size = NULL, uint32* _offset = NULL) const; status_t FindNext(Path* path, btrfs_key& key, void** _value, uint32* _size = NULL, uint32* _offset = NULL) const; status_t FindPrevious(Path* path, btrfs_key& key, void** _value, uint32* _size = NULL, uint32* _offset = NULL) const; /*! Traverse from the root to fill in the path along the way * \return Current slot at leaf if successful, error code (out of memory, * no such entry, unmapped block) otherwise */ status_t Traverse(btree_traversing type, Path* path, const btrfs_key& key) const; status_t PreviousLeaf(Path* path) const; status_t NextLeaf(Path* path) const; /*! Insert consecutive empty entries * \param num Number of entries to be inserted * \param startKey Slot to start inserting * \return Starting slot on success, error code otherwise */ status_t MakeEntries(Transaction& transaction, Path* path, const btrfs_key& startKey, int num, int length); //! MakeEntries and fill in them status_t InsertEntries(Transaction& transaction, Path* path, btrfs_entry* entries, void** data, int num); /*! Like MakeEntries, but here entries are removed. * \param _data Location to store removed data */ status_t RemoveEntries(Transaction& transaction, Path* path, const btrfs_key& startKey, void** _data, int num); Volume* SystemVolume() const { return fVolume; } status_t SetRoot(off_t logical, fsblock_t* block); void SetRoot(Node* root); fsblock_t RootBlock() const { return fRootBlock; } off_t LogicalRoot() const { return fLogicalRoot; } uint8 RootLevel() const { return fRootLevel; } private: BTree(const BTree& other); BTree& operator=(const BTree& other); // no implementation /*! 
Search for key in the tree * \param _value Location to store item if search successful * \return B_OK when key found, error code otherwise */ status_t _Find(Path* path, btrfs_key& key, void** _value, uint32* _size, uint32* _offset, btree_traversing type) const; void _AddIterator(TreeIterator* iterator); void _RemoveIterator(TreeIterator* iterator); private: friend class TreeIterator; fsblock_t fRootBlock; off_t fLogicalRoot; uint8 fRootLevel; Volume* fVolume; mutex fIteratorLock; SinglyLinkedList<TreeIterator> fIterators; public: class Node { public: Node(Volume* volume); Node(Volume* volume, off_t block); ~Node(); uint64 LogicalAddress() const { return fNode->header.LogicalAddress(); } uint64 Flags() const { return fNode->header.Flags(); } uint64 Generation() const { return fNode->header.Generation(); } uint64 Owner() const { return fNode->header.Owner(); } uint32 ItemCount() const { return fNode->header.ItemCount(); } uint8 Level() const { return fNode->header.Level(); } void SetLogicalAddress(uint64 address) { fNode->header.SetLogicalAddress(address); } void SetGeneration(uint64 generation) { fNode->header.SetGeneration(generation); } void SetItemCount(uint32 itemCount) { fNode->header.SetItemCount(itemCount); } btrfs_index* Index(uint32 i) const { return &fNode->index[i]; } btrfs_entry* Item(uint32 i) const { return &fNode->entries[i]; } uint8* ItemData(uint32 i) const { return (uint8*)Item(0) + Item(i)->Offset(); } //! Reset Node and decrements ref-count to the Node's block void Unset(); //! Load node at block offset from disk void SetTo(off_t block); void SetToWritable(off_t block, int32 transactionId, bool empty); int SpaceUsed() const; int SpaceLeft() const; off_t BlockNum() const { return fBlockNumber;} bool IsWritable() const { return fWritable; } /*! * copy node header, items and items data * length is size to insert/remove * if node is a internal node, length isnt used * length = 0: Copy a whole * length < 0: removing * length > 0: inserting */ status_t Copy(const Node* origin, uint32 start, uint32 end, int length) const; //! Shift data in items between start and end by offset length status_t MoveEntries(uint32 start, uint32 end, int length) const; //! Searches for item slot in the node status_t SearchSlot(const btrfs_key& key, int* slot, btree_traversing type) const; private: Node(const Node&); Node& operator=(const Node&); // no implementation //! Internal function used by Copy void _Copy(const Node* origin, uint32 at, uint32 from, uint32 to, int length) const; status_t _SpaceCheck(int length) const; /*! * calculate used space except the header. * type is only for leaf node * type 1: only item space * type 2: only item data space * type 3: both type 1 and 2 */ int _CalculateSpace(uint32 from, uint32 to, uint8 type = 1) const; btrfs_stream* fNode; Volume* fVolume; off_t fBlockNumber; bool fWritable; }; class Path { public: Path(BTree* tree); ~Path(); Node* GetNode(int level, int* _slot = NULL) const; Node* SetNode(off_t block, int slot); Node* SetNode(const Node* node, int slot); status_t GetCurrentEntry(btrfs_key* _key, void** _value, uint32* _size = NULL, uint32* _offset = NULL); status_t GetEntry(int slot, btrfs_key* _key, void** _value, uint32* _size = NULL, uint32* _offset = NULL); status_t SetEntry(int slot, const btrfs_entry& entry, void* value); int Move(int level, int step); /*! * Allocate and copy block and do all the changes that it can. * for now, we only copy-on-write tree block, * file data is "nocow" by default. 
* * o parent o * | ===> \ * o x o */ status_t CopyOnWrite(Transaction& transaction, int level, uint32 start, int num, int length); /*! * Copy-On-Write all internal nodes start from a specific level. * level > 0: to root * level <= 0: to leaf * * path cow-path path cow-path * ================================================= * root cow-root root level < 0 * | | | * n1 cow-n1 ...______ * | | | \ * n2 cow-n2 n1 cow-n1 * | / | | * ...____/ n2 cow-n2 * | | | * leaf level > 0 leaf cow-leaf */ status_t InternalCopy(Transaction& transaction, int level); BTree* Tree() const { return fTree; } private: Path(const Path&); Path operator=(const Path&); private: Node* fNodes[BTRFS_MAX_TREE_DEPTH]; int fSlots[BTRFS_MAX_TREE_DEPTH]; BTree* fTree; }; }; // class BTree class TreeIterator : public SinglyLinkedListLinkImpl<TreeIterator> { public: TreeIterator(BTree* tree, const btrfs_key& key); ~TreeIterator(); void Rewind(bool inverse = false); //! Set current key in the iterator status_t Find(const btrfs_key& key); status_t GetNextEntry(void** _value, uint32* _size = NULL, uint32* _offset = NULL); status_t GetPreviousEntry(void** _value, uint32* _size = NULL, uint32* _offset = NULL); BTree* Tree() const { return fTree; } btrfs_key Key() const { return fKey; } private: friend class BTree; //! Iterates through the tree in the specified direction status_t _Traverse(btree_traversing direction); status_t _Find(btree_traversing type, btrfs_key& key, void** _value); //! Like GetEntry in BTree::Path but checks type and moving status_t _GetEntry(btree_traversing type, void** _value, uint32* _size, uint32* _offset); // called by BTree void Stop(); private: BTree* fTree; BTree::Path* fPath; btrfs_key fKey; status_t fIteratorStatus; }; // #pragma mark - BTree::Path inline functions inline status_t BTree::Path::GetCurrentEntry(btrfs_key* _key, void** _value, uint32* _size, uint32* _offset) { return GetEntry(fSlots[0], _key, _value, _size, _offset); } // #pragma mark - TreeIterator inline functions inline status_t TreeIterator::GetNextEntry(void** _value, uint32* _size, uint32* _offset) { return _GetEntry(BTREE_FORWARD, _value, _size, _offset); } inline status_t TreeIterator::GetPreviousEntry(void** _value, uint32* _size, uint32* _offset) { return _GetEntry(BTREE_BACKWARD, _value, _size, _offset); } #endif // B_TREE_H
/*
   Copyright (c) 2012 LinkedIn Corp.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

package com.linkedin.data.avro;

import java.io.IOException;

/**
 * Exception thrown if there is an error translating a schema from
 * one representation to another.
 */
public class SchemaTranslationException extends IOException
{
  private static final long serialVersionUID = 1L;

  /**
   * Initialize a {@link SchemaTranslationException}.
   *
   * @param message provides a message.
   */
  public SchemaTranslationException(String message)
  {
    super(message);
  }

  /**
   * Initialize a {@link SchemaTranslationException}.
   *
   * @param message provides a message.
   * @param e provides the cause.
   */
  public SchemaTranslationException(String message, Throwable e)
  {
    super(message, e);
  }
}
<reponame>wanchong/apis<gh_stars>100-1000 /* * Copyright 2012 SURFnet bv, The Netherlands * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.surfnet.oaaas.auth; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.surfnet.oaaas.auth.principal.BasicAuthCredentials; import org.surfnet.oaaas.model.AccessTokenRequest; import org.surfnet.oaaas.model.AuthorizationRequest; import org.surfnet.oaaas.model.Client; import org.surfnet.oaaas.repository.ClientRepository; import javax.inject.Inject; import javax.inject.Named; import java.util.HashSet; import java.util.List; import java.util.Set; import static org.surfnet.oaaas.auth.OAuth2Validator.ValidationResponse.*; /** * Implementation of {@link OAuth2Validator} * */ @Named public class OAuth2ValidatorImpl implements OAuth2Validator { private static final Set<String> RESPONSE_TYPES = new HashSet<String>(); private static final Set<String> GRANT_TYPES = new HashSet<String>(); static { RESPONSE_TYPES.add(IMPLICIT_GRANT_RESPONSE_TYPE); RESPONSE_TYPES.add(AUTHORIZATION_CODE_GRANT_RESPONSE_TYPE); GRANT_TYPES.add(GRANT_TYPE_AUTHORIZATION_CODE); GRANT_TYPES.add(GRANT_TYPE_REFRESH_TOKEN); GRANT_TYPES.add(GRANT_TYPE_CLIENT_CREDENTIALS); GRANT_TYPES.add(GRANT_TYPE_PASSWORD); } @Inject private ClientRepository clientRepository; @Override public ValidationResponse validate(AuthorizationRequest authorizationRequest) { try { validateAuthorizationRequest(authorizationRequest); String responseType = validateResponseType(authorizationRequest); Client client = validateClient(authorizationRequest); authorizationRequest.setClient(client); String redirectUri = determineRedirectUri(authorizationRequest, responseType, client); authorizationRequest.setRedirectUri(redirectUri); List<String> scopes = determineScopes(authorizationRequest, client); authorizationRequest.setRequestedScopes(scopes); } catch (ValidationResponseException e) { return e.v; } return VALID; } protected List<String> determineScopes(AuthorizationRequest authorizationRequest, Client client) { if (CollectionUtils.isEmpty(authorizationRequest.getRequestedScopes())) { // TODO add default scopes. 
return null; } else { List<String> scopes = authorizationRequest.getRequestedScopes(); List<String> clientScopes = client.getScopes(); for (String scope : scopes) { if (!clientScopes.contains(scope)) { throw new ValidationResponseException(SCOPE_NOT_VALID); } } return authorizationRequest.getRequestedScopes(); } } protected String determineRedirectUri(AuthorizationRequest authorizationRequest, String responseType, Client client) { List<String> uris = client.getRedirectUris(); String redirectUri = authorizationRequest.getRedirectUri(); if (StringUtils.isBlank(redirectUri)) { if (responseType.equals(IMPLICIT_GRANT_RESPONSE_TYPE)) { throw new ValidationResponseException(IMPLICIT_GRANT_REDIRECT_URI); } else if (CollectionUtils.isEmpty(uris)) { throw new ValidationResponseException(REDIRECT_URI_REQUIRED); } else { return uris.get(0); } } else if (!AuthenticationFilter.isValidUri(redirectUri)) { throw new ValidationResponseException(REDIRECT_URI_NOT_URI); } else if (redirectUri.contains("#")) { throw new ValidationResponseException(REDIRECT_URI_FRAGMENT_COMPONENT); } else if (CollectionUtils.isNotEmpty(uris)) { boolean match = false; for (String uri : uris) { if (redirectUri.startsWith(uri)) { match = true; break; } } if (!match) { // Reset the redirect uri to first of the registered ones. Otherwise the result error response would be undesired: a (possibly on purpose) redirect to URI that is not acked. authorizationRequest.setRedirectUri(uris.get(0)); throw new ValidationResponseException(REDIRECT_URI_NOT_VALID); } } return redirectUri; } protected Client validateClient(AuthorizationRequest authorizationRequest) { String clientId = authorizationRequest.getClientId(); Client client = StringUtils.isBlank(clientId) ? null : clientRepository.findByClientId(clientId); if (client == null) { throw new ValidationResponseException(UNKNOWN_CLIENT_ID); } if (!client.isAllowedImplicitGrant() && authorizationRequest.getResponseType().equals(IMPLICIT_GRANT_RESPONSE_TYPE)) { throw new ValidationResponseException(IMPLICIT_GRANT_NOT_PERMITTED); } return client; } protected String validateResponseType(AuthorizationRequest authorizationRequest) { String responseType = authorizationRequest.getResponseType(); if (StringUtils.isBlank(responseType) || !RESPONSE_TYPES.contains(responseType)) { throw new ValidationResponseException(UNSUPPORTED_RESPONSE_TYPE); } return responseType; } protected void validateAuthorizationRequest(AuthorizationRequest authorizationRequest) { } /* (non-Javadoc) * @see org.surfnet.oaaas.auth.OAuth2Validator#validate(org.surfnet.oaaas.model.AccessTokenRequest) */ @Override public ValidationResponse validate(AccessTokenRequest request, BasicAuthCredentials clientCredentials) { try { validateGrantType(request); validateAttributes(request); validateClient(request, clientCredentials); validateAccessTokenRequest(request); } catch (ValidationResponseException e) { return e.v; } return VALID; } protected void validateGrantType(AccessTokenRequest request) { String grantType = request.getGrantType(); if (StringUtils.isBlank(grantType) || !GRANT_TYPES.contains(grantType)) { throw new ValidationResponseException(UNSUPPORTED_GRANT_TYPE); } } protected void validateAttributes(AccessTokenRequest request) { String grantType = request.getGrantType(); if (GRANT_TYPE_AUTHORIZATION_CODE.equals(grantType)) { if (StringUtils.isBlank(request.getCode())) { throw new ValidationResponseException(INVALID_GRANT_AUTHORIZATION_CODE); } } else if (GRANT_TYPE_REFRESH_TOKEN.equals(grantType)) { if 
(StringUtils.isBlank(request.getRefreshToken())) { throw new ValidationResponseException(INVALID_GRANT_REFRESH_TOKEN); } } else if (GRANT_TYPE_PASSWORD.equals(grantType)) { if (StringUtils.isBlank(request.getUsername()) || StringUtils.isBlank(request.getPassword())) { throw new ValidationResponseException(INVALID_GRANT_PASSWORD); } } } protected void validateClient(AccessTokenRequest accessTokenRequest, BasicAuthCredentials clientCredentials) { Client client = null; // Were we given client credentials via basic auth? if (!clientCredentials.isNull()) { // Confirm that the credentials are valid and use them to get the client if (!clientCredentials.isValid()) { throw new ValidationResponseException(UNAUTHORIZED_CLIENT); } client = getClient(clientCredentials.getUsername(), clientCredentials.getPassword(), UNAUTHORIZED_CLIENT); } else if (!StringUtils.isBlank(accessTokenRequest.getClientId())) { // Use the request parameters to obtain the client client = getClient(accessTokenRequest.getClientId(), accessTokenRequest.getClientSecret(), UNKNOWN_CLIENT_ID); } // Record the associated client accessTokenRequest.setClient(client); } private Client getClient(String clientId, String clientSecret, ValidationResponse error) { // Find the indicated client Client client = clientRepository.findByClientId(clientId); if (client == null) { throw new ValidationResponseException(error); } // Confirm that the credentials match those for the client if (!client.verifySecret(clientSecret)) { throw new ValidationResponseException(error); } return client; } protected void validateAccessTokenRequest(AccessTokenRequest accessTokenRequest) { if (accessTokenRequest.getGrantType().equals(GRANT_TYPE_CLIENT_CREDENTIALS)) { // We must have a client Client client = accessTokenRequest.getClient(); if (client == null) { throw new ValidationResponseException(INVALID_GRANT_CLIENT_CREDENTIALS); } // And the client must be allowed to perform this grant type if (!client.isAllowedClientCredentials()) { accessTokenRequest.setClient(null); throw new ValidationResponseException(CLIENT_CREDENTIALS_NOT_PERMITTED); } } } }
import torch
import soundfile as sf
import numpy as np
import os

from asteroid.models import ConvTasNet, save_publishable
from asteroid.data.wham_dataset import wham_noise_license, wsj0_license


def setup_register_sr():
    model = ConvTasNet(
        n_src=2,
        n_repeats=2,
        n_blocks=3,
        bn_chan=16,
        hid_chan=4,
        skip_chan=8,
        n_filters=32,
    )
    to_save = model.serialize()
    to_save["model_args"].pop("sample_rate")
    torch.save(to_save, "tmp.th")


def setup_infer():
    sf.write("tmp.wav", np.random.randn(16000), 8000)
    sf.write("tmp2.wav", np.random.randn(16000), 8000)


def setup_upload():
    train_set_infos = dict(
        dataset="WHAM", task="sep_noisy", licenses=[wsj0_license, wham_noise_license]
    )
    final_results = {"si_sdr": 8.67, "si_sdr_imp": 13.16}
    model = ConvTasNet(
        n_src=2,
        n_repeats=2,
        n_blocks=3,
        bn_chan=16,
        hid_chan=4,
        skip_chan=8,
        n_filters=32,
    )
    model_dict = model.serialize()
    model_dict.update(train_set_infos)
    os.makedirs("publish_dir", exist_ok=True)
    save_publishable(
        "publish_dir",
        model_dict,
        metrics=final_results,
        train_conf=dict(),
    )


if __name__ == "__main__":
    setup_register_sr()
    setup_infer()
    setup_upload()
<reponame>akawalsky/hapi-fhir { "resourceType": "CodeSystem", "id": "message-transport", "meta": { "lastUpdated": "2019-11-01T09:29:23.356+11:00" }, "text": { "status": "generated", "div": "\u003cdiv xmlns\u003d\"http://www.w3.org/1999/xhtml\"\u003e\u003ch2\u003eMessageTransport\u003c/h2\u003e\u003cdiv\u003e\u003cp\u003eThe protocol used for message transport.\u003c/p\u003e\n\u003c/div\u003e\u003cp\u003eThis code system http://terminology.hl7.org/CodeSystem/message-transport defines the following codes:\u003c/p\u003e\u003ctable class\u003d\"codes\"\u003e\u003ctr\u003e\u003ctd style\u003d\"white-space:nowrap\"\u003e\u003cb\u003eCode\u003c/b\u003e\u003c/td\u003e\u003ctd\u003e\u003cb\u003eDisplay\u003c/b\u003e\u003c/td\u003e\u003ctd\u003e\u003cb\u003eDefinition\u003c/b\u003e\u003c/td\u003e\u003c/tr\u003e\u003ctr\u003e\u003ctd style\u003d\"white-space:nowrap\"\u003ehttp\u003ca name\u003d\"message-transport-http\"\u003e \u003c/a\u003e\u003c/td\u003e\u003ctd\u003eHTTP\u003c/td\u003e\u003ctd\u003eThe application sends or receives messages using HTTP POST (may be over http: or https:).\u003c/td\u003e\u003c/tr\u003e\u003ctr\u003e\u003ctd style\u003d\"white-space:nowrap\"\u003eftp\u003ca name\u003d\"message-transport-ftp\"\u003e \u003c/a\u003e\u003c/td\u003e\u003ctd\u003eFTP\u003c/td\u003e\u003ctd\u003eThe application sends or receives messages using File Transfer Protocol.\u003c/td\u003e\u003c/tr\u003e\u003ctr\u003e\u003ctd style\u003d\"white-space:nowrap\"\u003emllp\u003ca name\u003d\"message-transport-mllp\"\u003e \u003c/a\u003e\u003c/td\u003e\u003ctd\u003eMLLP\u003c/td\u003e\u003ctd\u003eThe application sends or receives messages using HL7\u0027s Minimal Lower Level Protocol.\u003c/td\u003e\u003c/tr\u003e\u003c/table\u003e\u003c/div\u003e" }, "extension": [ { "url": "http://hl7.org/fhir/StructureDefinition/structuredefinition-wg", "valueCode": "fhir" }, { "url": "http://hl7.org/fhir/StructureDefinition/structuredefinition-standards-status", "valueCode": "normative" }, { "url": "http://hl7.org/fhir/StructureDefinition/structuredefinition-normative-version", "valueCode": "4.0.0" }, { "url": "http://hl7.org/fhir/StructureDefinition/structuredefinition-fmm", "valueInteger": 5 } ], "url": "http://terminology.hl7.org/CodeSystem/message-transport", "identifier": [ { "system": "urn:ietf:rfc:3986", "value": "urn:oid:2.16.840.1.113883.4.642.4.1080" } ], "version": "4.0.1", "name": "MessageTransport", "title": "MessageTransport", "status": "active", "experimental": false, "date": "2019-11-01T09:29:23+11:00", "publisher": "HL7 (FHIR Project)", "contact": [ { "telecom": [ { "system": "url", "value": "http://hl7.org/fhir" }, { "system": "email", "value": "<EMAIL>" } ] } ], "description": "The protocol used for message transport.", "caseSensitive": true, "valueSet": "http://hl7.org/fhir/ValueSet/message-transport", "content": "complete", "concept": [ { "code": "http", "display": "HTTP", "definition": "The application sends or receives messages using HTTP POST (may be over http: or https:)." }, { "code": "ftp", "display": "FTP", "definition": "The application sends or receives messages using File Transfer Protocol." }, { "code": "mllp", "display": "MLLP", "definition": "The application sends or receives messages using HL7\u0027s Minimal Lower Level Protocol." } ] }
{"nom":"<NAME>","circ":"4ème circonscription","dpt":"Alpes-Maritimes","inscrits":2523,"abs":1519,"votants":1004,"blancs":89,"nuls":13,"exp":902,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":501},{"nuance":"FN","nom":"M. <NAME>","voix":401}]}
/** * Copyright (c) 2011-2021, JFXtras * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * Neither the name of the organization nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL JFXTRAS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package jfxtras.icalendarfx; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Parameter; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; import jfxtras.icalendarfx.VCalendar; import jfxtras.icalendarfx.VChild; import jfxtras.icalendarfx.VElement; import jfxtras.icalendarfx.VElementBase; import jfxtras.icalendarfx.VParent; import jfxtras.icalendarfx.VParentBase; import jfxtras.icalendarfx.components.VComponent; import jfxtras.icalendarfx.content.ContentLineStrategy; import jfxtras.icalendarfx.content.Orderer; import jfxtras.icalendarfx.content.OrdererBase; import jfxtras.icalendarfx.content.UnfoldingStringIterator; import jfxtras.icalendarfx.parameters.VParameter; import jfxtras.icalendarfx.properties.VProperty; import jfxtras.icalendarfx.properties.component.recurrence.rrule.RRulePart; import jfxtras.icalendarfx.properties.component.recurrence.rrule.RecurrenceRuleValue; /** * <p>Base class for parent calendar components.</p> * * <p>The order of the children from {@link #childrenUnmodifiable()} equals the order they were added. * Adding children is not exposed by the implementation, but rather handled internally. When a {@link VChild} has its * value set, it's automatically included in the collection of children by the {@link Orderer}.</p> * * <p>The {@link Orderer} requires registering listeners to child properties.</p> * * @author <NAME> */ public abstract class VParentBase<T> extends VElementBase implements VParent { /* Setter, getter maps * The first key is the VParent class * The second key is the VChild of that VParent */ private static final Map<Class<? extends VParent>, Map<Class<? 
extends VChild>, Method>> SETTERS = new HashMap<>(); private static final Map<Class<? extends VParent>, Map<Class<? extends VChild>, Method>> GETTERS = new HashMap<>(); /* * HANDLE SORT ORDER FOR CHILD ELEMENTS */ protected Orderer orderer; /** Return the {@link Orderer} for this {@link VParent} */ @Override public void orderChild(VChild addedChild) { orderer.orderChild(addedChild); } @Override public void orderChild(VChild oldChild, VChild newChild) { orderer.replaceChild(oldChild, newChild); } @Override public void orderChild(int index, VChild addedChild) { orderer.orderChild(index, addedChild); } @Override public void addChild(VChild child) { Method setter = getSetter(child); boolean isList = Collection.class.isAssignableFrom(setter.getParameters()[0].getType()); try { if (isList) { Method getter = getGetter(child); Collection<VChild> list = (Collection<VChild>) getter.invoke(this); if (list == null) { list = (getter.getReturnType() == List.class) ? new ArrayList<>() : (getter.getReturnType() == Set.class) ? new LinkedHashSet<>() : new ArrayList<>(); list.add(child); setter.invoke(this, list); } else { list.add(child); orderChild(child); } } else { setter.invoke(this, child); } } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { e.printStackTrace(); } } @Override public void addChild(int index, VChild child) { addChild(child); orderChild(index, child); } @Override public void addChild(String childContent) { parseContent(childContent); // TODO - Do I want this? } @Override public boolean removeChild(VChild child) { Method setter = getSetter(child); boolean isList = List.class.isAssignableFrom(setter.getParameters()[0].getType()); try { if (isList) { Method getter = getGetter(child); List<VChild> list = (List<VChild>) getter.invoke(this); if (list == null) { return false; } else { boolean result = list.remove(child); orderChild(child, null); // Should I leave empty lists? - below code removes empty lists // if (list.isEmpty()) // { // setter.invoke(this, (Object) null); // } return result; } } else { setter.invoke(this, (Object) null); orderChild(child, null); return true; } } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { e.printStackTrace(); } return false; } @Override public boolean removeChild(int index) { return removeChild(childrenUnmodifiable().get(index)); } @Override public boolean replaceChild(int index, VChild child) { removeChild(index); addChild(index, child); return true; } @Override public boolean replaceChild(VChild oldChild, VChild newChild) { return orderer.replaceChild(oldChild, newChild); } public T withChild(VChild child) { addChild(child); return (T) this; } protected Map<Class<? extends VChild>, Method> getSetters() { if (SETTERS.get(getClass()) == null) { Map<Class<? extends VChild>, Method> setterMap = collectSetterMap(getClass()); SETTERS.put(getClass(), setterMap); return setterMap; } return SETTERS.get(getClass()); } protected Map<Class<? extends VChild>, Method> getGetters() { if (GETTERS.get(getClass()) == null) { Map<Class<? 
extends VChild>, Method> getterMap = collectGetterMap(getClass()); GETTERS.put(getClass(), getterMap); return getterMap; } return GETTERS.get(getClass()); } protected Method getSetter(VChild child) { return getSetters().get(child.getClass()); } protected Method getGetter(VChild child) { return getGetters().get(child.getClass()); } @Override protected List<Message> parseContent(String content) { Iterator<String> i = Arrays.asList(content.split(System.lineSeparator())).iterator(); return parseContent(new UnfoldingStringIterator(i)); } /* * NOTE: PARAMETER AND PROPERTY MUST HAVE OVERRIDDEN PARSECONTENT (to handle value part) */ protected List<Message> parseContent(Iterator<String> unfoldedLineIterator) { final Class<? extends VElement> multilineChildClass; final Class<? extends VElement> singlelineChildClass; if (VCalendar.class.isAssignableFrom(getClass())) { multilineChildClass = VComponent.class; singlelineChildClass = VProperty.class; } else if (VComponent.class.isAssignableFrom(getClass())) { multilineChildClass = VComponent.class; singlelineChildClass = VProperty.class; } else if (VProperty.class.isAssignableFrom(getClass())) { multilineChildClass = null; singlelineChildClass = VParameter.class; } else if (RecurrenceRuleValue.class.isAssignableFrom(getClass())) { multilineChildClass = null; singlelineChildClass = RRulePart.class; } else { throw new RuntimeException("Not supported parent class:" + getClass()); } List<Message> messages = new ArrayList<>(); while (unfoldedLineIterator.hasNext()) { String unfoldedLine = unfoldedLineIterator.next(); if (unfoldedLine.startsWith(END)) return messages; // exit when end found; String childName = elementName(unfoldedLine); if (childName != null) childName = (childName.startsWith("X-")) ? "X-" : childName; boolean isMultiLineElement = unfoldedLine.startsWith(BEGIN); // e.g. vcalendar, vcomponent boolean isMainComponent = name().equals(childName); final VElementBase child; if (isMultiLineElement) { if (! isMainComponent) { child = (VElementBase) VElementBase.newEmptyVElement(multilineChildClass, childName); List<Message> myMessages = ((VParentBase<?>) child).parseContent(unfoldedLineIterator); // recursively parse child parent messages.addAll(myMessages); addChildInternal(messages, unfoldedLine, childName, (VChild) child); } } else { // single line element (e.g. property, parameter, rrule value) if (isMainComponent) { // a main component still needs it value and elements processed in subclasses (e.g property) child = this; } else { child = (VElementBase) VElementBase.newEmptyVElement(singlelineChildClass, childName); } if (child != null) { List<Message> myMessages = ((VParentBase<?>) child).parseContent(unfoldedLine); // recursively parse child parent // don't add single-line children with info or error messages - they have problems and should be ignored if (myMessages.isEmpty()) { addChildInternal(messages, unfoldedLine, childName, (VChild) child); } else { messages.addAll(myMessages); } } else { messages.add(new Message(this, "Unknown element:" + unfoldedLine, MessageEffect.MESSAGE_ONLY)); } } } return messages; } // For Recurrence Rule Value and Properties protected void processInLineChild( List<Message> messages, String childName, String content, Class<? 
extends VElement> singleLineChildClass) { VChild newChild = VElementBase.newEmptyVElement(singleLineChildClass, childName); if (newChild != null) { List<Message> myMessages = ((VElementBase) newChild).parseContent(childName + "=" + content); messages.addAll(myMessages); addChildInternal(messages, content, childName, newChild); } else { messages.add(new Message(this, "Unknown element:" + content, MessageEffect.MESSAGE_ONLY)); } } protected boolean checkChild(List<Message> messages, String content, String elementName, VChild newChild) { int initialMessageSize = messages.size(); if (newChild == null) { Message message = new Message(this, "Ignored invalid element:" + content, MessageEffect.MESSAGE_ONLY); messages.add(message); } Method getter = getGetter(newChild); boolean isChildAllowed = getter != null; if (! isChildAllowed) { Message message = new Message(this, elementName + " not allowed in " + name(), MessageEffect.THROW_EXCEPTION); messages.add(message); } else // Moved to an else block, because getter could be null here { final boolean isChildAlreadyPresent; Object currentParameter = null; try { currentParameter = getter.invoke(this); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { e.printStackTrace(); } if (currentParameter instanceof Collection) { isChildAlreadyPresent = ((Collection<?>) currentParameter).contains(newChild); // TODO contains is expensive - try to find a way to avoid } else { isChildAlreadyPresent = currentParameter != null; } if (isChildAlreadyPresent) { Message message = new Message(this, newChild.getClass().getSimpleName() + " can only occur once in a calendar component. Ignoring instances beyond first.", MessageEffect.MESSAGE_ONLY); messages.add(message); } } return messages.size() == initialMessageSize; } protected void addChildInternal(List<Message> messages, String content, String elementName, VChild newChild) { boolean isOK = checkChild(messages, content, elementName, newChild); if (isOK) { addChild(newChild); } } /* Strategy to build iCalendar content lines */ protected ContentLineStrategy contentLineGenerator; @Override public List<VChild> childrenUnmodifiable() { return orderer.childrenUnmodifiable(); } public void copyChildrenInto(VParent destination) { childrenUnmodifiable().forEach((childSource) -> { try { // use copy constructors to make copy of child VChild newChild = childSource.getClass() .getConstructor(childSource.getClass()) .newInstance(childSource); destination.addChild(newChild); } catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException | SecurityException e) { e.printStackTrace(); } }); } /* * CONSTRUCTOR */ public VParentBase() { orderer = new OrdererBase(this, getGetters()); } // copy constructor public VParentBase(VParentBase<T> source) { this(); source.copyChildrenInto(this); } @Override public List<String> errors() { return childrenUnmodifiable().stream() .flatMap(c -> c.errors().stream()) .collect(Collectors.toList()); } @Override public String toString() { if (contentLineGenerator == null) { throw new RuntimeException("Can't produce content lines because contentLineGenerator isn't set"); // contentLineGenerator MUST be set by subclasses } return contentLineGenerator.execute(); } // Note: can't check equals or hashCode of parents - causes stack overflow @Override public boolean equals(Object obj) { if (obj == this) return true; if((obj == null) || (obj.getClass() != getClass())) { return false; } VParent testObj = 
(VParent) obj; // getter version is slower, but will be correct. Map<Class<? extends VChild>, Method> getters = getGetters(); return getters.entrySet() .stream() .map(e -> e.getValue()) .allMatch(m -> { try { Object v1 = m.invoke(this); Object v2 = m.invoke(testObj); return Objects.equals(v1, v2); } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e1) { e1.printStackTrace(); } return false; }); } @Override public int hashCode() { final int prime = 31; int result = 1; for (VChild child : childrenUnmodifiable()) { result = prime * result + child.hashCode(); } return result; } /* * MAP MAKERS FOR SETTERS AND GETTERS */ public static Map<Class<? extends VChild>, Method> collectGetterMap(Class<?> class1) { Map<Class<? extends VChild>, Method> getters = new HashMap<>(); Iterator<Method> methodIterator = Arrays.stream(class1.getMethods()) .filter(m -> m.getParameters().length == 0) .filter(m -> m.getName().startsWith("get")) .iterator(); while (methodIterator.hasNext()) { Method m = methodIterator.next(); Class<? extends VChild> returnType = (Class<? extends VChild>) m.getReturnType(); if (VChild.class.isAssignableFrom(returnType)) { getters.put(returnType, m); } else if (Collection.class.isAssignableFrom(returnType)) { ParameterizedType pt = (ParameterizedType) m.getGenericReturnType(); Type t = pt.getActualTypeArguments()[0]; if (ParameterizedType.class.isAssignableFrom(t.getClass())) { ParameterizedType t2 = (ParameterizedType) t; t = t2.getRawType(); // Fixes Attachment<?> property } Class<? extends VChild> listType = (Class<? extends VChild>) t; getters.put(listType, m); } } return getters; } public static Map<Class<? extends VChild>, Method> collectSetterMap(Class<?> class1) { Map<Class<? extends VChild>, Method> setters = new HashMap<>(); Iterator<Method> methodIterator = Arrays.stream(class1.getMethods()) .filter(m -> m.getParameters().length == 1) .filter(m -> m.getName().startsWith("set")) .iterator(); while (methodIterator.hasNext()) { Method m = methodIterator.next(); Parameter p = m.getParameters()[0]; Class<? extends VChild> parameterType = (Class<? extends VChild>) p.getType(); if (VChild.class.isAssignableFrom(parameterType)) { setters.put(parameterType, m); } else if (Collection.class.isAssignableFrom(parameterType)) { ParameterizedType pt = (ParameterizedType) p.getParameterizedType(); Type t = pt.getActualTypeArguments()[0]; if (ParameterizedType.class.isAssignableFrom(t.getClass())) { ParameterizedType t2 = (ParameterizedType) t; t = t2.getRawType(); // Fixes Attachment<?> property } Class<? extends VChild> clazz2 = (Class<? extends VChild>) t; boolean isListOfChildren = VChild.class.isAssignableFrom(clazz2); if (isListOfChildren) { setters.put(clazz2, m); } } } return setters; } }
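The reflection-based child lookup above (collectGetterMap / collectSetterMap) can be illustrated in isolation. Below is a minimal, self-contained sketch of the same idea; the Child, Summary and DemoParent names are invented for this example and are not part of iCalendarFX.

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;

// Illustration only: a stripped-down version of the collectGetterMap pattern.
// "Child" stands in for VChild; DemoParent/Summary are invented names.
public class GetterMapDemo {

    interface Child { }                       // stand-in for VChild
    static class Summary implements Child { } // stand-in for a concrete child element

    static class DemoParent {
        private Summary summary;
        public Summary getSummary() { return summary; }
        public void setSummary(Summary summary) { this.summary = summary; }
    }

    // Collect zero-argument getters whose return type implements Child,
    // keyed by that child type - the same shape as VParentBase's getter map.
    static Map<Class<? extends Child>, Method> collectGetterMap(Class<?> parentClass) {
        Map<Class<? extends Child>, Method> getters = new HashMap<>();
        for (Method m : parentClass.getMethods()) {
            if (m.getParameterCount() == 0
                    && m.getName().startsWith("get")
                    && Child.class.isAssignableFrom(m.getReturnType())) {
                @SuppressWarnings("unchecked")
                Class<? extends Child> childType = (Class<? extends Child>) m.getReturnType();
                getters.put(childType, m);
            }
        }
        return getters;
    }

    public static void main(String[] args) {
        Map<Class<? extends Child>, Method> map = collectGetterMap(DemoParent.class);
        System.out.println(map.containsKey(Summary.class)); // true
    }
}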
7,305
3,459
namespace MDFN_IEN_NES
{

class Dis6502
{
 public:

    Dis6502(void);
    virtual ~Dis6502();

    virtual uint8 Read(uint16 A);
    virtual uint8 GetX(void);
    virtual uint8 GetY(void);

    void Disassemble(uint16 &a, uint16 SpecialA, char *);
};

}
103
892
{
  "schema_version": "1.2.0",
  "id": "GHSA-wxvx-3x3r-j3jr",
  "modified": "2022-05-02T06:16:46Z",
  "published": "2022-05-02T06:16:46Z",
  "aliases": [
    "CVE-2010-0895"
  ],
  "details": "Unspecified vulnerability in the Solaris component in Oracle Sun Product Suite OpenSolaris snv_119 allows local users to affect integrity and availability via unknown vectors related to IP Filter.",
  "severity": [],
  "affected": [],
  "references": [
    { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0895" },
    { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/57757" },
    { "type": "WEB", "url": "http://www.oracle.com/technetwork/topics/security/cpuapr2010-099504.html" },
    { "type": "WEB", "url": "http://www.securityfocus.com/bid/39455" },
    { "type": "WEB", "url": "http://www.us-cert.gov/cas/techalerts/TA10-103B.html" }
  ],
  "database_specific": {
    "cwe_ids": [],
    "severity": "LOW",
    "github_reviewed": false
  }
}
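For readers mapping the advisory into code, here is a minimal sketch of a plain Java data holder mirroring the OSV-style fields used above; the class and field names are chosen for illustration and are not part of any advisory-database tooling.

import java.util.List;

// Illustration only: a plain data holder mirroring the OSV-style fields in the advisory above.
public class Advisory {
    public String schemaVersion;   // "schema_version"
    public String id;              // e.g. "GHSA-wxvx-3x3r-j3jr"
    public String modified;        // ISO-8601 timestamp
    public String published;       // ISO-8601 timestamp
    public List<String> aliases;   // e.g. ["CVE-2010-0895"]
    public String details;
    public List<Reference> references;

    public static class Reference {
        public String type;        // "ADVISORY" or "WEB"
        public String url;
    }
}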
518
17,318
/* * Copyright (c) 2011-2018, <NAME>. All Rights Reserved. * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dianping.cat.consumer.state; import java.text.SimpleDateFormat; import java.util.Date; import java.util.TimeZone; import junit.framework.Assert; import org.junit.Before; import org.junit.Test; import org.unidal.helper.Files; import org.unidal.lookup.ComponentTestCase; import com.dianping.cat.Constants; import com.dianping.cat.analysis.MessageAnalyzer; import com.dianping.cat.consumer.state.model.entity.StateReport; public class StateAnalyzerTest extends ComponentTestCase { private StateAnalyzer m_analyzer; private String m_domain = "group"; @Before public void setUp() throws Exception { super.setUp(); TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai")); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd HH:mm:ss:SS"); Date date = sdf.parse("20120101 00:00:00:00"); m_analyzer = (StateAnalyzer) lookup(MessageAnalyzer.class, StateAnalyzer.ID); m_analyzer.initialize(date.getTime(), Constants.HOUR, Constants.MINUTE * 5); } @Test public void testProcess() throws Exception { StateReport report = m_analyzer.getReport(m_domain); String expected = Files.forIO().readFrom(getClass().getResourceAsStream("state_analyzer.xml"), "utf-8"); Assert.assertEquals(expected.replaceAll("\r", ""), report.toString().replaceAll("\r", "")); } }
650
681
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package edu.harvard.iq.dataverse.harvest.server.xoai; import com.lyncode.xoai.dataprovider.model.Item; import com.lyncode.xoai.dataprovider.model.Set; import com.lyncode.xoai.model.oaipmh.About; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.harvest.server.OAIRecord; import edu.harvard.iq.dataverse.util.StringUtil; import java.util.ArrayList; import java.util.Date; import java.util.List; /** * * @author <NAME> * * This is an implemention of an Lyncode XOAI Item; * You can think of it as an XOAI Item wrapper around the * Dataverse OAIRecord entity. */ public class Xitem implements Item { public Xitem(OAIRecord oaiRecord) { super(); this.oaiRecord = oaiRecord; oaisets = new ArrayList<>(); if (!StringUtil.isEmpty(oaiRecord.getSetName())) { oaisets.add(new Set(oaiRecord.getSetName())); } } private OAIRecord oaiRecord; public OAIRecord getOaiRecord() { return oaiRecord; } public void setOaiRecord(OAIRecord oaiRecord) { this.oaiRecord = oaiRecord; } private Dataset dataset; public Dataset getDataset() { return dataset; } public Xitem withDataset(Dataset dataset) { this.dataset = dataset; return this; } @Override public List<About> getAbout() { return null; } @Override public Xmetadata getMetadata() { return new Xmetadata((String)null); } @Override public String getIdentifier() { return oaiRecord.getGlobalId(); } @Override public Date getDatestamp() { return oaiRecord.getLastUpdateTime(); } private List<Set> oaisets; @Override public List<Set> getSets() { return oaisets; } @Override public boolean isDeleted() { return oaiRecord.isRemoved(); } }
892
678
/** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/GMM.framework/GMM */ #import <GMM/GMMTrip.h> #import <GMM/XXUnknownSuperclass.h> @class NSString, NSMutableArray; __attribute__((visibility("hidden"))) @interface GMMTrip : XXUnknownSuperclass { NSMutableArray *_routes; // 4 = 0x4 NSString *_tripSummary; // 8 = 0x8 BOOL _hasDetailLevel; // 12 = 0xc int _detailLevel; // 16 = 0x10 } @property(assign, nonatomic) int detailLevel; // G=0x25ad5; S=0x25af9; @synthesize=_detailLevel @property(assign, nonatomic) BOOL hasDetailLevel; // G=0x25ff9; S=0x26009; @synthesize=_hasDetailLevel @property(retain, nonatomic) NSString *tripSummary; // G=0x25fc5; S=0x25fd5; @synthesize=_tripSummary @property(readonly, assign, nonatomic) BOOL hasTripSummary; // G=0x25abd; @property(retain, nonatomic) NSMutableArray *routes; // G=0x25f91; S=0x25fa1; @synthesize=_routes // declared property setter: - (void)setHasDetailLevel:(BOOL)level; // 0x26009 // declared property getter: - (BOOL)hasDetailLevel; // 0x25ff9 // declared property setter: - (void)setTripSummary:(id)summary; // 0x25fd5 // declared property getter: - (id)tripSummary; // 0x25fc5 // declared property setter: - (void)setRoutes:(id)routes; // 0x25fa1 // declared property getter: - (id)routes; // 0x25f91 - (void)writeTo:(id)to; // 0x25e29 - (BOOL)readFrom:(id)from; // 0x25c65 - (id)dictionaryRepresentation; // 0x25b8d - (id)description; // 0x25b1d // declared property setter: - (void)setDetailLevel:(int)level; // 0x25af9 // declared property getter: - (int)detailLevel; // 0x25ad5 // declared property getter: - (BOOL)hasTripSummary; // 0x25abd - (id)routeAtIndex:(unsigned)index; // 0x25a9d - (unsigned)routesCount; // 0x25a7d - (void)addRoute:(id)route; // 0x25a19 - (void)dealloc; // 0x259c1 @end @interface GMMTrip (Descriptions) - (id)description; // 0xa74d @end @interface GMMTrip (GMMGEODirectionsProvider_Extras) - (id)altDescription; // 0x3e9a1 @end
818
1,444
package mage.abilities.effects.common;

import mage.MageObject;
import mage.ObjectColor;
import mage.abilities.Ability;
import mage.abilities.effects.PreventionEffectImpl;
import mage.constants.Duration;
import mage.game.Game;
import mage.game.events.GameEvent;

/**
 *
 * @author LevelX2
 */
public class PreventDamageByColorEffect extends PreventionEffectImpl {

    private final ObjectColor color;

    public PreventDamageByColorEffect(ObjectColor color, int amount) {
        super(Duration.WhileOnBattlefield, amount, false, false);
        this.color = color;
        this.staticText = "If a " + color.getDescription() + " source would deal damage to you, prevent " + amount + " of that damage";
    }

    public PreventDamageByColorEffect(PreventDamageByColorEffect effect) {
        super(effect);
        this.color = effect.color;
    }

    @Override
    public boolean checksEventType(GameEvent event, Game game) {
        return event.getType() == GameEvent.EventType.DAMAGE_PLAYER;
    }

    @Override
    public boolean applies(GameEvent event, Ability source, Game game) {
        if (event.getTargetId().equals(source.getControllerId())) {
            MageObject sourceObject = game.getObject(event.getSourceId());
            if (sourceObject != null && sourceObject.getColor(game).contains(color)) {
                return super.applies(event, source, game);
            }
        }
        return false;
    }

    @Override
    public PreventDamageByColorEffect copy() {
        return new PreventDamageByColorEffect(this);
    }
}
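The core check in applies() is easy to state on its own: prevent only when the damaged player is the effect's controller and the damage source carries the named color. A self-contained sketch of that filter in plain Java follows; the Color enum and method names are invented stand-ins, not XMage types.

import java.util.EnumSet;
import java.util.Set;
import java.util.UUID;

// Illustration only: the color/target filter from applies(), with invented stand-in types.
public class PreventionCheckDemo {

    enum Color { WHITE, BLUE, BLACK, RED, GREEN }

    static boolean shouldPrevent(UUID damageTargetId, UUID controllerId,
                                 Set<Color> sourceColors, Color protectedAgainst) {
        // Only damage aimed at the effect's controller is considered...
        if (!damageTargetId.equals(controllerId)) {
            return false;
        }
        // ...and only if the damage source has the color the effect names.
        return sourceColors.contains(protectedAgainst);
    }

    public static void main(String[] args) {
        UUID me = UUID.randomUUID();
        System.out.println(shouldPrevent(me, me, EnumSet.of(Color.RED), Color.RED));  // true
        System.out.println(shouldPrevent(me, me, EnumSet.of(Color.BLUE), Color.RED)); // false
    }
}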
549
347
package org.ovirt.engine.core.vdsbroker.irsbroker;

import org.ovirt.engine.core.common.vdscommands.IrsBaseVDSCommandParameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MarkPoolInReconstructModeVDSCommand<P extends IrsBaseVDSCommandParameters> extends IrsBrokerCommand<P> {

    private static final Logger log = LoggerFactory.getLogger(MarkPoolInReconstructModeVDSCommand.class);

    public MarkPoolInReconstructModeVDSCommand(P parameters) {
        super(parameters);
    }

    @Override
    protected void executeVDSCommand() {
        try {
            IrsProxy proxy = getCurrentIrsProxy();
            proxy.clearPoolTimers();
            proxy.clearCache();
        } catch (Exception e) {
            log.error("Could not change timers for pool '{}': {}", getParameters().getStoragePoolId(), e.getMessage());
            log.debug("Exception", e);
        }
        getVDSReturnValue().setSucceeded(true);
    }
}
403
585
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.update.processor; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import java.io.IOException; import com.google.common.collect.ImmutableMap; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.util.NamedList; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.update.AddUpdateCommand; import org.apache.solr.update.processor.SkipExistingDocumentsProcessorFactory.SkipExistingDocumentsUpdateProcessor; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mockito; public class SkipExistingDocumentsProcessorFactoryTest { private BytesRef docId = new BytesRef(); @SuppressWarnings({"rawtypes"}) private SolrQueryRequest defaultRequest = new LocalSolrQueryRequest(null, new NamedList()); @BeforeClass public static void beforeClass() { SolrTestCaseJ4.assumeWorkingMockito(); } // Tests for logic in the factory @Test(expected=SolrException.class) public void testExceptionIfSkipInsertParamNonBoolean() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipInsertIfExists", "false"); factory.init(initArgs); } @Test(expected=SolrException.class) public void testExceptionIfSkipUpdateParamNonBoolean() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipUpdateIfMissing", 0); factory.init(initArgs); } @Test(expected=SolrException.class) public void testExceptionIfNextProcessorIsNull() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); factory.init(initArgs); factory.getInstance(defaultRequest, new SolrQueryResponse(), null); } @Test(expected=SolrException.class) public void testExceptionIfNextProcessorNotDistributed() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); factory.init(initArgs); UpdateRequestProcessor next = new BufferingRequestProcessor(null); factory.getInstance(defaultRequest, new SolrQueryResponse(), next); } @Test public void testNoExceptionIfNextProcessorIsDistributed() { SkipExistingDocumentsProcessorFactory factory = new 
SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); factory.getInstance(defaultRequest, new SolrQueryResponse(), next); } @Test public void testNoExceptionIfNextNextProcessorIsDistributed() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); factory.init(initArgs); UpdateRequestProcessor distProcessor = Mockito.mock(DistributedUpdateProcessor.class); UpdateRequestProcessor next = new BufferingRequestProcessor(distProcessor); factory.getInstance(defaultRequest, new SolrQueryResponse(), next); } @Test public void testSkipInsertsAndUpdatesDefaultToTrueIfNotConfigured() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } @Test public void testSkipInsertsFalseIfInInitArgs() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipInsertIfExists", false); factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertFalse("Expected skipInsertIfExists to be false", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } @Test public void testSkipUpdatesFalseIfInInitArgs() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipUpdateIfMissing", false); factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertFalse("Expected skipUpdateIfMissing to be false", processor.isSkipUpdateIfMissing()); } @Test public void testSkipBothFalseIfInInitArgs() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipInsertIfExists", false); initArgs.add("skipUpdateIfMissing", false); factory.init(initArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(defaultRequest, new SolrQueryResponse(), next); assertFalse("Expected skipInsertIfExists to be false", processor.isSkipInsertIfExists()); assertFalse("Expected skipUpdateIfMissing to be false", processor.isSkipUpdateIfMissing()); } @Test public void testSkipInsertsFalseIfInitArgsTrueButFalseStringInRequest() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); 
initArgs.add("skipInsertIfExists", true); factory.init(initArgs); NamedList<String> requestArgs = new NamedList<>(); requestArgs.add("skipInsertIfExists", "false"); SolrQueryRequest req = new LocalSolrQueryRequest(null, requestArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(req, new SolrQueryResponse(), next); assertFalse("Expected skipInsertIfExists to be false", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } @Test public void testSkipUpdatesFalseIfInitArgsTrueButFalseBooleanInRequest() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipUpdateIfMissing", true); factory.init(initArgs); NamedList<Object> requestArgs = new NamedList<>(); requestArgs.add("skipUpdateIfMissing", false); SolrQueryRequest req = new LocalSolrQueryRequest(null, requestArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(req, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertFalse("Expected skipUpdateIfMissing to be false", processor.isSkipUpdateIfMissing()); } @Test public void testSkipUpdatesTrueIfInitArgsFalseButTrueStringInRequest() { SkipExistingDocumentsProcessorFactory factory = new SkipExistingDocumentsProcessorFactory(); NamedList<Object> initArgs = new NamedList<>(); initArgs.add("skipInsertIfExists", true); initArgs.add("skipUpdateIfMissing", false); factory.init(initArgs); NamedList<Object> requestArgs = new NamedList<>(); requestArgs.add("skipUpdateIfMissing", "true"); SolrQueryRequest req = new LocalSolrQueryRequest(null, requestArgs); UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = factory.getInstance(req, new SolrQueryResponse(), next); assertTrue("Expected skipInsertIfExists to be true", processor.isSkipInsertIfExists()); assertTrue("Expected skipUpdateIfMissing to be true", processor.isSkipUpdateIfMissing()); } // Tests for logic in the processor @Test public void testSkippableInsertIsNotSkippedIfNotLeader() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, true)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(false).when(processor).isLeader(cmd); doReturn(true).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next).processAdd(cmd); } @Test public void testSkippableInsertIsNotSkippedIfSkipInsertsFalse() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, false)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); doReturn(true).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next).processAdd(cmd); } @Test public void testSkippableInsertIsSkippedIfSkipInsertsTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); 
SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, false)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); doReturn(true).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next, never()).processAdd(cmd); } @Test public void testNonSkippableInsertIsNotSkippedIfSkipInsertsTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, false)); AddUpdateCommand cmd = createInsertUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); doReturn(false).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next).processAdd(cmd); } @Test public void testSkippableUpdateIsNotSkippedIfNotLeader() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, true, true)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(false).when(processor).isLeader(cmd); doReturn(false).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next).processAdd(cmd); } @Test public void testSkippableUpdateIsNotSkippedIfSkipUpdatesFalse() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, false)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); doReturn(false).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next).processAdd(cmd); } @Test public void testSkippableUpdateIsSkippedIfSkipUpdatesTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, true)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); doReturn(false).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next, never()).processAdd(cmd); } @Test public void testNonSkippableUpdateIsNotSkippedIfSkipUpdatesTrue() throws IOException { UpdateRequestProcessor next = Mockito.mock(DistributedUpdateProcessor.class); SkipExistingDocumentsUpdateProcessor processor = Mockito.spy(new SkipExistingDocumentsUpdateProcessor(defaultRequest, next, false, true)); AddUpdateCommand cmd = createAtomicUpdateCmd(defaultRequest); doReturn(true).when(processor).isLeader(cmd); doReturn(true).when(processor).doesDocumentExist(docId); processor.processAdd(cmd); verify(next).processAdd(cmd); } private AddUpdateCommand createInsertUpdateCmd(SolrQueryRequest req) { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.setIndexedId(docId); cmd.solrDoc = new SolrInputDocument(); assertFalse(AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)); return cmd; } private AddUpdateCommand createAtomicUpdateCmd(SolrQueryRequest req) { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.setIndexedId(docId); cmd.solrDoc = new SolrInputDocument(); cmd.solrDoc.addField("last_name", ImmutableMap.of("set", "Smith")); 
assertTrue(AtomicUpdateDocumentMerger.isAtomicUpdate(cmd)); return cmd; } }
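The request-parameter tests above all assert the same precedence: a request parameter overrides the factory's init argument, and both default to true. A minimal sketch of that resolution order in plain Java (names invented; this is not the Solr implementation):

import java.util.Map;

// Illustration only: request parameter > init argument > default (true).
public class ParamPrecedenceDemo {

    static boolean resolveFlag(String name, Map<String, ?> initArgs, Map<String, ?> requestParams) {
        Object value = requestParams.containsKey(name) ? requestParams.get(name)
                     : initArgs.containsKey(name) ? initArgs.get(name)
                     : Boolean.TRUE;
        if (value instanceof Boolean) {
            return (Boolean) value;
        }
        return Boolean.parseBoolean(value.toString()); // accepts "true"/"false" strings, as the tests do
    }

    public static void main(String[] args) {
        // Init arg true, request param "false" -> false.
        System.out.println(resolveFlag("skipInsertIfExists",
                Map.of("skipInsertIfExists", true), Map.of("skipInsertIfExists", "false")));
        // Nothing configured -> default true.
        System.out.println(resolveFlag("skipUpdateIfMissing", Map.of(), Map.of()));
    }
}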
4,568
5,447
<filename>ZBLibrary/src/main/java/zuo/biao/library/ui/TopTabView.java<gh_stars>1000+ /*Copyright ©2015 TommyLemon(https://github.com/TommyLemon) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.*/ package zuo.biao.library.ui; import java.util.List; import zuo.biao.library.R; import zuo.biao.library.base.BaseView; import zuo.biao.library.util.StringUtil; import android.annotation.SuppressLint; import android.app.Activity; import android.content.res.Resources; import android.support.annotation.LayoutRes; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.widget.LinearLayout; import android.widget.TextView; /**自定义顶栏切换标签View * @warn 复制到其它工程内使用时务必修改import R文件路径为所在应用包名 * @author Lemon * @use * <br> TopTabView modleView = new TopTabView(context, inflater); * <br> adapter中使用:[具体见.BaseTabActivity] * <br> convertView = modleView.getView(); * <br> 或 其它类中使用 * <br> containerView.addView(modleView.getConvertView()); * <br> 然后 * <br> modleView.bindView(object); * <br> modleView.setOnTabSelectedListener(onItemSelectedListener); */ public class TopTabView extends BaseView<String[]> { private static final String TAG = "TopTabView"; /** */ public interface OnTabSelectedListener { // void beforeTabSelected(TextView tvTab, int position, int id); void onTabSelected(TextView tvTab, int position, int id); } private OnTabSelectedListener onTabSelectedListener; public void setOnTabSelectedListener(OnTabSelectedListener onTabSelectedListener) { this.onTabSelectedListener = onTabSelectedListener; } private LayoutInflater inflater; public TopTabView(Activity context) { super(context, R.layout.top_tab_view); this.inflater = context.getLayoutInflater(); } private int minWidth; public TopTabView(Activity context, int minWidth) { this(context); this.minWidth = minWidth; } public TopTabView(Activity context, int minWidth, @LayoutRes int resource){ super(context, resource); this.minWidth = minWidth; this.inflater = context.getLayoutInflater(); } private int currentPosition = 0; public void setCurrentPosition(int currentPosition) { this.currentPosition = currentPosition; } public TextView tvTopTabViewTabFirst; public TextView tvTopTabViewTabLast; public LinearLayout llTopTabViewContainer; @Override public View createView() { tvTopTabViewTabFirst = findView(R.id.tvTopTabViewTabFirst); tvTopTabViewTabLast = findView(R.id.tvTopTabViewTabLast); llTopTabViewContainer = findView(R.id.llTopTabViewContainer); return super.createView(); } public String[] names;//传进来的数据 public int getCount() { return names.length; } public int getCurrentPosition() { return currentPosition; } public TextView getCurrentTab() { return tvTabs[getCurrentPosition()]; } private int lastPosition = 1; /** * @param nameList */ public void bindView(List<String> nameList){ if (nameList != null) { for (int i = 0; i < nameList.size(); i++) { names[i] = nameList.get(i); } } bindView(names); } private int width; private int maxWidth; @Override public void bindView(String[] names){ if (names == null || names.length < 
2) { Log.e(TAG, "setInerView names == null || names.length < 2 >> return; "); return; } super.bindView(names); this.names = names; this.lastPosition = getCount() - 1; tvTabs = new TextView[getCount()]; tvTabs[0] = tvTopTabViewTabFirst; tvTabs[lastPosition] = tvTopTabViewTabLast; llTopTabViewContainer.removeAllViews(); for (int i = 0; i < tvTabs.length; i++) { final int position = i; if (tvTabs[position] == null) { //viewgroup.addView(child)中的child相同,否则会崩溃 tvTabs[position] = (TextView) inflater.inflate(R.layout.top_tab_tv_center, llTopTabViewContainer, false); llTopTabViewContainer.addView(tvTabs[position]); View divider = inflater.inflate(R.layout.divider_vertical_1dp, llTopTabViewContainer, false); divider.setBackgroundColor(getColor(R.color.white)); llTopTabViewContainer.addView(divider); } tvTabs[position].setText(StringUtil.getTrimedString(names[position])); tvTabs[position].setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { select(position); } }); width = tvTabs[position].getWidth(); if (minWidth < width) { minWidth = width; } } //防止超出 maxWidth = llTopTabViewContainer.getMeasuredWidth() / tvTabs.length; if (minWidth > maxWidth) { minWidth = maxWidth; } for (int i = 0; i < tvTabs.length; i++) { //保持一致 tvTabs[i].setMinWidth(minWidth); //防止超出 if (tvTabs[i].getWidth() > maxWidth) { tvTabs[i].setWidth(maxWidth); } } select(currentPosition); } private TextView[] tvTabs; /**选择tab * @param position */ public void select(int position) { Log.i(TAG, "select position = " + position); if (position < 0 || position >= getCount()) { Log.e(TAG, "select position < 0 || position >= getCount() >> return;"); return; } for (int i = 0; i < tvTabs.length; i++) { tvTabs[i].setSelected(i == position); } if (onTabSelectedListener != null) { onTabSelectedListener.onTabSelected(tvTabs[position] , position, tvTabs[position].getId()); } this.currentPosition = position; } }
2,217
745
from mrq.task import Task
from mrq.context import connections


class EnsureIndexes(Task):

    def run(self, params):

        if connections.mongodb_logs:
            connections.mongodb_logs.mrq_logs.ensure_index(
                [("job", 1)], background=True)
            connections.mongodb_logs.mrq_logs.ensure_index(
                [("worker", 1)], background=True, sparse=True)

        connections.mongodb_jobs.mrq_workers.ensure_index(
            [("status", 1)], background=True)
        connections.mongodb_jobs.mrq_workers.ensure_index(
            [("datereported", 1)], background=True, expireAfterSeconds=3600)

        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("status", 1)], background=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("path", 1)], background=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("worker", 1)], background=True, sparse=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("queue", 1)], background=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("dateexpires", 1)], sparse=True, background=True, expireAfterSeconds=0)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("dateretry", 1)], sparse=True, background=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("datequeued", 1)], background=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("queue", 1), ("status", 1), ("datequeued", 1), ("_id", 1)], background=True)
        connections.mongodb_jobs.mrq_jobs.ensure_index(
            [("status", 1), ("queue", 1), ("path", 1)], background=True)

        connections.mongodb_jobs.mrq_scheduled_jobs.ensure_index(
            [("hash", 1)], unique=True, background=False)

        connections.mongodb_jobs.mrq_agents.ensure_index(
            [("datereported", 1)], background=True)
        connections.mongodb_jobs.mrq_agents.ensure_index(
            [("dateexpires", 1)], background=True, expireAfterSeconds=0)
        connections.mongodb_jobs.mrq_agents.ensure_index(
            [("worker_group", 1)], background=True)
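For comparison, the same kinds of indexes (background, sparse, TTL via expireAfterSeconds, and a compound dequeue index) can be expressed with the MongoDB Java driver. This is a sketch, not part of MRQ; the connection string and database/collection names are assumptions, and the Indexes/IndexOptions helpers are from the official Java driver.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import org.bson.Document;

import java.util.concurrent.TimeUnit;

// Illustration only: a few of the indexes above, expressed with the MongoDB Java driver.
public class EnsureIndexesDemo {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoDatabase db = client.getDatabase("mrq");
            MongoCollection<Document> jobs = db.getCollection("mrq_jobs");

            // Simple background index on status.
            jobs.createIndex(Indexes.ascending("status"), new IndexOptions().background(true));

            // Sparse index: only documents that actually have a "worker" field are indexed.
            jobs.createIndex(Indexes.ascending("worker"), new IndexOptions().background(true).sparse(true));

            // TTL index: documents expire once "dateexpires" is reached (expireAfterSeconds=0).
            jobs.createIndex(Indexes.ascending("dateexpires"),
                    new IndexOptions().background(true).sparse(true).expireAfter(0L, TimeUnit.SECONDS));

            // Compound index matching the queue/status/datequeued/_id dequeue pattern.
            jobs.createIndex(Indexes.ascending("queue", "status", "datequeued", "_id"),
                    new IndexOptions().background(true));
        }
    }
}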
1,007
2,151
// Copyright (c) 2015-2016 The Khronos Group Inc. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and/or associated documentation files (the // "Materials"), to deal in the Materials without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Materials, and to // permit persons to whom the Materials are furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Materials. // // MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS // KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS // SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT // https://www.khronos.org/registry/ // // THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE // MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS. #ifndef LIBSPIRV_UTIL_BITUTILS_H_ #define LIBSPIRV_UTIL_BITUTILS_H_ #include <cstdint> #include <cstring> namespace spvutils { // Performs a bitwise copy of source to the destination type Dest. template <typename Dest, typename Src> Dest BitwiseCast(Src source) { Dest dest; static_assert(sizeof(source) == sizeof(dest), "BitwiseCast: Source and destination must have the same size"); std::memcpy(&dest, &source, sizeof(dest)); return dest; } // SetBits<T, First, Num> returns an integer of type <T> with bits set // for position <First> through <First + Num - 1>, counting from the least // significant bit. In particular when Num == 0, no positions are set to 1. // A static assert will be triggered if First + Num > sizeof(T) * 8, that is, // a bit that will not fit in the underlying type is set. template <typename T, size_t First = 0, size_t Num = 0> struct SetBits { static_assert(First < sizeof(T) * 8, "Tried to set a bit that is shifted too far."); const static T get = (T(1) << First) | SetBits<T, First + 1, Num - 1>::get; }; template <typename T, size_t Last> struct SetBits<T, Last, 0> { const static T get = T(0); }; // This is all compile-time so we can put our tests right here. 
static_assert(SetBits<uint32_t, 0, 0>::get == uint32_t(0x00000000), "SetBits failed"); static_assert(SetBits<uint32_t, 0, 1>::get == uint32_t(0x00000001), "SetBits failed"); static_assert(SetBits<uint32_t, 31, 1>::get == uint32_t(0x80000000), "SetBits failed"); static_assert(SetBits<uint32_t, 1, 2>::get == uint32_t(0x00000006), "SetBits failed"); static_assert(SetBits<uint32_t, 30, 2>::get == uint32_t(0xc0000000), "SetBits failed"); static_assert(SetBits<uint32_t, 0, 31>::get == uint32_t(0x7FFFFFFF), "SetBits failed"); static_assert(SetBits<uint32_t, 0, 32>::get == uint32_t(0xFFFFFFFF), "SetBits failed"); static_assert(SetBits<uint32_t, 16, 16>::get == uint32_t(0xFFFF0000), "SetBits failed"); static_assert(SetBits<uint64_t, 0, 1>::get == uint64_t(0x0000000000000001LL), "SetBits failed"); static_assert(SetBits<uint64_t, 63, 1>::get == uint64_t(0x8000000000000000LL), "SetBits failed"); static_assert(SetBits<uint64_t, 62, 2>::get == uint64_t(0xc000000000000000LL), "SetBits failed"); static_assert(SetBits<uint64_t, 31, 1>::get == uint64_t(0x0000000080000000LL), "SetBits failed"); static_assert(SetBits<uint64_t, 16, 16>::get == uint64_t(0x00000000FFFF0000LL), "SetBits failed"); } // namespace spvutils #endif // LIBSPIRV_UTIL_BITUTILS_H_
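The SetBits template computes a mask of Num bits starting at bit First at compile time. Below is a small run-time Java sketch of the same computation, mirroring a few of the static_asserts above; the method name is invented for illustration, and callers are expected to keep first + num within 64 bits, as the template's static_assert enforces.

// Illustration only: a run-time equivalent of SetBits<T, First, Num>.
public class SetBitsDemo {

    // Returns a long with `num` bits set, starting at bit `first` (LSB = bit 0).
    static long setBits(int first, int num) {
        if (num == 0) {
            return 0L;
        }
        long mask = (num >= 64) ? -1L : ((1L << num) - 1L);
        return mask << first;
    }

    public static void main(String[] args) {
        // Mirrors a few of the static_asserts in BitUtils.h (run with -ea to enable asserts).
        assert setBits(0, 1) == 0x00000001L;
        assert setBits(31, 1) == 0x80000000L;
        assert setBits(1, 2) == 0x00000006L;
        assert setBits(16, 16) == 0xFFFF0000L;
        assert setBits(63, 1) == 0x8000000000000000L;
        System.out.println(Long.toHexString(setBits(16, 16))); // ffff0000
    }
}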
1,526
335
{
  "word": "Undivided",
  "definitions": [
    "Not divided, separated, or broken into parts.",
    "Devoted completely to one object."
  ],
  "parts-of-speech": "Adjective"
}
111
416
#if (defined(USE_UIKIT_PUBLIC_HEADERS) && USE_UIKIT_PUBLIC_HEADERS) || !__has_include(<UIKitCore/UIPointerStyle.h>) // // UIPointerStyle.h // UIKit // // Copyright © 2020 Apple Inc. All rights reserved. // #import <Foundation/Foundation.h> #import <UIKit/UIKitDefines.h> #import <UIKit/UIGeometry.h> #import <CoreGraphics/CGGeometry.h> @class UITargetedPreview, UIBezierPath, UIPointerEffect, UIPointerShape; NS_ASSUME_NONNULL_BEGIN UIKIT_EXTERN API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) @interface UIPointerStyle : NSObject <NSCopying> /*! * @abstract Applies the provided content effect and pointer shape within the current region. * * @param effect The desired pointer effect. * @param shape The desired pointer shape. If omitted, a pointer will be generated automatically from the effect's preview view. */ + (instancetype)styleWithEffect:(UIPointerEffect *)effect shape:(nullable UIPointerShape *)shape NS_REFINED_FOR_SWIFT; /*! * @abstract Morphs the pointer into the provided shape when hovering over the current region. * * @param shape The desired pointer shape. * @param axes Axes along which to recenter the pointer on touch up. */ + (instancetype)styleWithShape:(UIPointerShape *)shape constrainedAxes:(UIAxis)axes NS_REFINED_FOR_SWIFT; /*! * @abstract Hides the pointer when hovering over the current region. */ + (instancetype)hiddenPointerStyle; - (instancetype)init NS_UNAVAILABLE; + (instancetype)new NS_UNAVAILABLE; @end UIKIT_EXTERN API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) NS_REFINED_FOR_SWIFT @interface UIPointerEffect : NSObject <NSCopying> @property (nonatomic, copy, readonly) UITargetedPreview *preview; /*! * @abstract Creates a pointer content effect with the given preview's view. * * @param preview A UITargetedPreview object describing a view with which to construct the effect. * * @discussion UIPointerEffect attempts to determine the appropriate effect for the given preview automatically. * Use one of its subclasses to request a specific system-provided effect. */ + (instancetype)effectWithPreview:(UITargetedPreview *)preview; - (instancetype)init NS_UNAVAILABLE; + (instancetype)new NS_UNAVAILABLE; @end /// Pointer slides under the given view and morphs into the view's shape UIKIT_EXTERN API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) NS_REFINED_FOR_SWIFT @interface UIPointerHighlightEffect : UIPointerEffect @end /// Pointer slides under the given view and disappears as the view scales up and gains a shadow. UIKIT_EXTERN API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) NS_REFINED_FOR_SWIFT @interface UIPointerLiftEffect : UIPointerEffect @end typedef NS_ENUM(NSInteger, UIPointerEffectTintMode) { UIPointerEffectTintModeNone = 0, // Indicates that no tint should be applied to the view. UIPointerEffectTintModeOverlay, // Indicates that a tint overlay should be placed above the view. UIPointerEffectTintModeUnderlay, // Indicates that a tint underlay should be placed below the view. } API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) NS_REFINED_FOR_SWIFT; /// Pointer retains the system shape while over the given view. Visual changes applied to the view are dictated by the effect's properties. 
UIKIT_EXTERN API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) NS_REFINED_FOR_SWIFT @interface UIPointerHoverEffect : UIPointerEffect @property (nonatomic) UIPointerEffectTintMode preferredTintMode; // Defaults to UIPointerEffectTintModeOverlay @property (nonatomic) BOOL prefersShadow; // Defaults to NO @property (nonatomic) BOOL prefersScaledContent; // Defaults to YES @end UIKIT_EXTERN API_AVAILABLE(ios(13.4)) API_UNAVAILABLE(watchos, tvos) NS_REFINED_FOR_SWIFT @interface UIPointerShape : NSObject <NSCopying> /*! * @abstract Morphs the pointer to the given path. */ + (instancetype)shapeWithPath:(UIBezierPath *)path; /*! * @abstract Morphs the pointer to a rounded rectangle with the provided rect and the standard system corner radius. * * @param rect CGRect describing the pointer's frame. If used alongside a content effect, this rect must be in the effect's * preview's container view's coordinate space. Otherwise, it is centered about the pointer's current location * and the rect's origin is interpreted as an offset. */ + (instancetype)shapeWithRoundedRect:(CGRect)rect; /*! * @abstract Morphs the pointer to a rounded rectangle with the provided rect and cornerRadius. * * @param rect CGRect describing the pointer's frame. If used alongside a content effect, this rect must be in * the effect's preview's container view's coordinate space. Otherwise, it is centered about the * pointer's current location and the rect's origin is interpreted as an offset. * @param cornerRadius Corner radius to apply to the pointer. */ + (instancetype)shapeWithRoundedRect:(CGRect)rect cornerRadius:(CGFloat)cornerRadius; /*! * @abstract Morphs the pointer to a beam with the given length and axis. * * @param length The beam's length. Limited to the region's width or height, depending on the beam's axis. * @param axis The axis along which to draw the beam. Axis must be either UIAxisVertical or UIAxisHorizontal. */ + (instancetype)beamWithPreferredLength:(CGFloat)length axis:(UIAxis)axis; - (instancetype)init NS_UNAVAILABLE; + (instancetype)new NS_UNAVAILABLE; @end NS_ASSUME_NONNULL_END #else #import <UIKitCore/UIPointerStyle.h> #endif
1,933
3,012
<reponame>akhakimo/edk2 /** @file AML Node. Copyright (c) 2019 - 2020, Arm Limited. All rights reserved.<BR> SPDX-License-Identifier: BSD-2-Clause-Patent **/ #ifndef AML_NODE_H_ #define AML_NODE_H_ #include <AmlNodeDefines.h> #include <IndustryStandard/Acpi.h> /** Create an AML_ROOT_NODE. This node will be the root of the tree. @param [in] SdtHeader Pointer to an ACPI DSDT/SSDT header to copy the data from. @param [out] NewRootNodePtr If success, contains the created AML_ROOT_NODE. Otherwise reset to NULL. @retval EFI_SUCCESS The function completed successfully. @retval EFI_INVALID_PARAMETER Invalid parameter. @retval EFI_OUT_OF_RESOURCES Could not allocate memory. **/ EFI_STATUS EFIAPI AmlCreateRootNode ( IN CONST EFI_ACPI_DESCRIPTION_HEADER *SdtHeader, OUT AML_ROOT_NODE **NewRootNodePtr ); /** Create an AML_OBJECT_NODE. @param [in] AmlByteEncoding Byte encoding entry. @param [in] PkgLength PkgLength of the node if the AmlByteEncoding has the PkgLen attribute. 0 otherwise. @param [out] NewObjectNodePtr If success, contains the created AML_OBJECT_NODE. Otherwise reset to NULL. @retval EFI_SUCCESS The function completed successfully. @retval EFI_INVALID_PARAMETER Invalid parameter. @retval EFI_OUT_OF_RESOURCES Could not allocate memory. **/ EFI_STATUS EFIAPI AmlCreateObjectNode ( IN CONST AML_BYTE_ENCODING *AmlByteEncoding, IN UINT32 PkgLength, OUT AML_OBJECT_NODE **NewObjectNodePtr ); /** Create an AML_DATA_NODE. @param [in] DataType DataType of the node. @param [in] Data Pointer to the AML bytecode corresponding to this node. Data is copied from there. @param [in] DataSize Number of bytes to consider at the address pointed by Data. @param [out] NewDataNodePtr If success, contains the created AML_DATA_NODE. Otherwise reset to NULL. @retval EFI_SUCCESS The function completed successfully. @retval EFI_INVALID_PARAMETER Invalid parameter. @retval EFI_OUT_OF_RESOURCES Could not allocate memory. **/ EFI_STATUS EFIAPI AmlCreateDataNode ( IN EAML_NODE_DATA_TYPE DataType, IN CONST UINT8 *Data, IN UINT32 DataSize, OUT AML_DATA_NODE **NewDataNodePtr ); /** Delete a Node. @param [in] Node Pointer to a Node. @retval EFI_SUCCESS The function completed successfully. @retval EFI_INVALID_PARAMETER Invalid parameter. **/ EFI_STATUS EFIAPI AmlDeleteNode ( IN AML_NODE_HEADER *Node ); /** Check whether ObjectNode has the input attribute. This function can be used to check ObjectNode is an object node at the same time. @param [in] ObjectNode Pointer to an object node. @param [in] Attribute Attribute to check for. @retval TRUE The node is an AML object and the attribute is present. @retval FALSE Otherwise. **/ BOOLEAN EFIAPI AmlNodeHasAttribute ( IN CONST AML_OBJECT_NODE *ObjectNode, IN AML_OP_ATTRIBUTE Attribute ); /** Check whether ObjectNode has the input OpCode/SubOpcode couple. @param [in] ObjectNode Pointer to an object node. @param [in] OpCode OpCode to check @param [in] SubOpCode SubOpCode to check @retval TRUE The node is an AML object and the Opcode and the SubOpCode match. @retval FALSE Otherwise. **/ BOOLEAN EFIAPI AmlNodeCompareOpCode ( IN CONST AML_OBJECT_NODE *ObjectNode, IN UINT8 OpCode, IN UINT8 SubOpCode ); /** Check whether a Node is an integer node. By integer node we mean an object node having one of the following opcode: - AML_BYTE_PREFIX; - AML_WORD_PREFIX; - AML_DWORD_PREFIX; - AML_QWORD_PREFIX. @param [in] Node The node to check. @retval TRUE The Node is an integer node. @retval FALSE Otherwise. 
*/ BOOLEAN EFIAPI IsIntegerNode ( IN AML_OBJECT_NODE *Node ); /** Check whether a Node is a ZeroOp, a OneOp or a OnesOp. These two objects don't have a data node holding a value. This require special handling. @param [in] Node The node to check. @retval TRUE The Node is a ZeroOp or OneOp. @retval FALSE Otherwise. */ BOOLEAN EFIAPI IsSpecialIntegerNode ( IN AML_OBJECT_NODE *Node ); /** Check whether Node corresponds to a method definition. A method definition can be introduced: - By a method object, having an AML_METHOD_OP OpCode; - By an external definition of a method, having an AML_EXTERNAL_OP OpCode and an ObjectType byte set to the MethodObj. Note: An alias node, having an AML_ALIAS_OP, can be resolved to a method definition. This function doesn't handle this case. @param [in] Node Node to check whether it is a method definition. @retval TRUE The Node is a method definition. @retval FALSE Otherwise. **/ BOOLEAN EFIAPI AmlIsMethodDefinitionNode ( IN CONST AML_OBJECT_NODE *Node ); /** Get the index at which the name of the node is stored. @param [in] ObjectNode Pointer to an object node. Must have the AML_IN_NAMESPACE attribute. @param [out] Index Index of the name in the fixed list of arguments. @retval EFI_SUCCESS The function completed successfully. @retval EFI_INVALID_PARAMETER Invalid parameter. **/ EFI_STATUS AmlNodeGetNameIndex ( IN CONST AML_OBJECT_NODE *ObjectNode, OUT EAML_PARSE_INDEX *Index ); /** Get the name of the Node. Node must be part of the namespace. @param [in] ObjectNode Pointer to an object node, which is part of the namespace. @return A pointer to the name. NULL otherwise. Return NULL for the root node. **/ CHAR8 * EFIAPI AmlNodeGetName ( IN CONST AML_OBJECT_NODE *ObjectNode ); #endif // AML_NODE_H_
<filename>modules/lwjgl/tootle/src/main/c/DirectXMesh/DirectXMesh.h //------------------------------------------------------------------------------------- // DirectXMesh.h // // DirectX Mesh Geometry Library // // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // // http://go.microsoft.com/fwlink/?LinkID=324981 //------------------------------------------------------------------------------------- #pragma once #include <cstdint> #include <functional> #include <memory> #include <string> #include <vector> #if !defined(__d3d11_h__) && !defined(__d3d11_x_h__) && !defined(__d3d12_h__) && !defined(__d3d12_x_h__) && !defined(__XBOX_D3D12_X__) #if defined(_XBOX_ONE) && defined(_TITLE) #include <d3d11_x.h> #else #include <d3d11_1.h> #endif #endif #include <DirectXMath.h> #include <DirectXCollision.h> #include <DirectXPackedVector.h> #define DIRECTX_MESH_VERSION 150 namespace DirectX { //--------------------------------------------------------------------------------- // DXGI Format Utilities bool __cdecl IsValidVB(_In_ DXGI_FORMAT fmt) noexcept; constexpr bool __cdecl IsValidIB(_In_ DXGI_FORMAT fmt) noexcept; size_t __cdecl BytesPerElement(_In_ DXGI_FORMAT fmt) noexcept; //--------------------------------------------------------------------------------- // Input Layout Descriptor Utilities #if defined(__d3d11_h__) || defined(__d3d11_x_h__) bool __cdecl IsValid(_In_reads_(nDecl) const D3D11_INPUT_ELEMENT_DESC* vbDecl, _In_ size_t nDecl) noexcept; void __cdecl ComputeInputLayout( _In_reads_(nDecl) const D3D11_INPUT_ELEMENT_DESC* vbDecl, _In_ size_t nDecl, _Out_writes_opt_(nDecl) uint32_t* offsets, _Out_writes_opt_(D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT) uint32_t* strides) noexcept; #endif #if defined(__d3d12_h__) || defined(__d3d12_x_h__) || defined(__XBOX_D3D12_X__) bool __cdecl IsValid(const D3D12_INPUT_LAYOUT_DESC& vbDecl) noexcept; void __cdecl ComputeInputLayout( const D3D12_INPUT_LAYOUT_DESC& vbDecl, _Out_writes_opt_(vbDecl.NumElements) uint32_t* offsets, _Out_writes_opt_(D3D12_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT) uint32_t* strides) noexcept; #endif //--------------------------------------------------------------------------------- // Attribute Utilities std::vector<std::pair<size_t, size_t>> __cdecl ComputeSubsets(_In_reads_opt_(nFaces) const uint32_t* attributes, _In_ size_t nFaces); // Returns a list of face offset,counts for attribute groups //--------------------------------------------------------------------------------- // Mesh Optimization Utilities void __cdecl ComputeVertexCacheMissRate( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _In_ size_t cacheSize, _Out_ float& acmr, _Out_ float& atvr); void __cdecl ComputeVertexCacheMissRate( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _In_ size_t cacheSize, _Out_ float& acmr, _Out_ float& atvr); // Compute the average cache miss ratio and average triangle vertex reuse for the post-transform vertex cache //--------------------------------------------------------------------------------- // Vertex Buffer Reader/Writer class VBReader { public: VBReader() noexcept(false); VBReader(VBReader&& moveFrom) noexcept; VBReader& operator= (VBReader&& moveFrom) noexcept; VBReader(VBReader const&) = delete; VBReader& operator= (VBReader const&) = delete; ~VBReader(); #if defined(__d3d11_h__) || defined(__d3d11_x_h__) HRESULT __cdecl Initialize(_In_reads_(nDecl) const D3D11_INPUT_ELEMENT_DESC* vbDecl, _In_ size_t nDecl); 
// Does not support VB decls with D3D11_INPUT_PER_INSTANCE_DATA #endif #if defined(__d3d12_h__) || defined(__d3d12_x_h__) || defined(__XBOX_D3D12_X__) HRESULT __cdecl Initialize(const D3D12_INPUT_LAYOUT_DESC& vbDecl); // Does not support VB decls with D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA #endif HRESULT __cdecl AddStream(_In_reads_bytes_(stride*nVerts) const void* vb, _In_ size_t nVerts, _In_ size_t inputSlot, _In_ size_t stride = 0) noexcept; // Add vertex buffer to reader HRESULT __cdecl Read(_Out_writes_(count) XMVECTOR* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; // Extracts data elements from vertex buffer HRESULT __cdecl Read(_Out_writes_(count) float* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; HRESULT __cdecl Read(_Out_writes_(count) XMFLOAT2* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; HRESULT __cdecl Read(_Out_writes_(count) XMFLOAT3* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; HRESULT __cdecl Read(_Out_writes_(count) XMFLOAT4* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; // Helpers for data extraction void __cdecl Release() noexcept; #if defined(__d3d11_h__) || defined(__d3d11_x_h__) const D3D11_INPUT_ELEMENT_DESC* GetElement(_In_z_ const char* semanticName, _In_ unsigned int semanticIndex) const { return GetElement11(semanticName, semanticIndex); } const D3D11_INPUT_ELEMENT_DESC* __cdecl GetElement11(_In_z_ const char* semanticName, _In_ unsigned int semanticIndex) const; #endif #if defined(__d3d12_h__) || defined(__d3d12_x_h__) || defined(__XBOX_D3D12_X__) const D3D12_INPUT_ELEMENT_DESC* __cdecl GetElement12(_In_z_ const char* semanticName, _In_ unsigned int semanticIndex) const; #endif private: // Private implementation. 
class Impl; std::unique_ptr<Impl> pImpl; }; class VBWriter { public: VBWriter() noexcept(false); VBWriter(VBWriter&& moveFrom) noexcept; VBWriter& operator= (VBWriter&& moveFrom) noexcept; VBWriter(VBWriter const&) = delete; VBWriter& operator= (VBWriter const&) = delete; ~VBWriter(); #if defined(__d3d11_h__) || defined(__d3d11_x_h__) HRESULT __cdecl Initialize(_In_reads_(nDecl) const D3D11_INPUT_ELEMENT_DESC* vbDecl, _In_ size_t nDecl); // Does not support VB decls with D3D11_INPUT_PER_INSTANCE_DATA #endif #if defined(__d3d12_h__) || defined(__d3d12_x_h__) || defined(__XBOX_D3D12_X__) HRESULT __cdecl Initialize(const D3D12_INPUT_LAYOUT_DESC& vbDecl); // Does not support VB decls with D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA #endif HRESULT __cdecl AddStream(_Out_writes_bytes_(stride*nVerts) void* vb, _In_ size_t nVerts, _In_ size_t inputSlot, _In_ size_t stride = 0) noexcept; // Add vertex buffer to writer HRESULT __cdecl Write(_In_reads_(count) const XMVECTOR* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; // Inserts data elements into vertex buffer HRESULT __cdecl Write(_In_reads_(count) const float* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; HRESULT __cdecl Write(_In_reads_(count) const XMFLOAT2* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; HRESULT __cdecl Write(_In_reads_(count) const XMFLOAT3* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; HRESULT __cdecl Write(_In_reads_(count) const XMFLOAT4* buffer, _In_z_ const char* semanticName, _In_ unsigned int semanticIndex, _In_ size_t count, bool x2bias = false) const; // Helpers for data insertion void __cdecl Release() noexcept; #if defined(__d3d11_h__) || defined(__d3d11_x_h__) const D3D11_INPUT_ELEMENT_DESC* __cdecl GetElement(_In_z_ const char* semanticName, _In_ unsigned int semanticIndex) const { return GetElement11(semanticName, semanticIndex); } const D3D11_INPUT_ELEMENT_DESC* __cdecl GetElement11(_In_z_ const char* semanticName, _In_ unsigned int semanticIndex) const; #endif #if defined(__d3d12_h__) || defined(__d3d12_x_h__) || defined(__XBOX_D3D12_X__) const D3D12_INPUT_ELEMENT_DESC* __cdecl GetElement12(_In_z_ const char* semanticName, _In_ unsigned int semanticIndex) const; #endif private: // Private implementation. 
class Impl; std::unique_ptr<Impl> pImpl; }; //--------------------------------------------------------------------------------- // Adjacency Computation HRESULT __cdecl GenerateAdjacencyAndPointReps( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_ float epsilon, _Out_writes_opt_(nVerts) uint32_t* pointRep, _Out_writes_opt_(nFaces * 3) uint32_t* adjacency); HRESULT __cdecl GenerateAdjacencyAndPointReps( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_ float epsilon, _Out_writes_opt_(nVerts) uint32_t* pointRep, _Out_writes_opt_(nFaces * 3) uint32_t* adjacency); // If pointRep is null, it still generates them internally as they are needed for the final adjacency computation HRESULT __cdecl ConvertPointRepsToAdjacency( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_opt_(nVerts) const uint32_t* pointRep, _Out_writes_(nFaces * 3) uint32_t* adjacency); HRESULT __cdecl ConvertPointRepsToAdjacency( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_opt_(nVerts) const uint32_t* pointRep, _Out_writes_(nFaces * 3) uint32_t* adjacency); // If pointRep is null, assumes an identity HRESULT __cdecl GenerateGSAdjacency( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const uint32_t* pointRep, _In_reads_(nFaces * 3) const uint32_t* adjacency, _In_ size_t nVerts, _Out_writes_(nFaces * 6) uint16_t* indicesAdj) noexcept; HRESULT __cdecl GenerateGSAdjacency( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const uint32_t* pointRep, _In_reads_(nFaces * 3) const uint32_t* adjacency, _In_ size_t nVerts, _Out_writes_(nFaces * 6) uint32_t* indicesAdj) noexcept; // Generates an IB suitable for Geometry Shader using D3D1x_PRIMITIVE_TOPOLOGY_TRIANGLELIST_ADJ //--------------------------------------------------------------------------------- // Normals, Tangents, and Bi-Tangents Computation enum CNORM_FLAGS : unsigned long { CNORM_DEFAULT = 0x0, // Default is to compute normals using weight-by-angle CNORM_WEIGHT_BY_AREA = 0x1, // Computes normals using weight-by-area CNORM_WEIGHT_EQUAL = 0x2, // Compute normals with equal weights CNORM_WIND_CW = 0x4, // Vertices are clock-wise (defaults to CCW) }; HRESULT __cdecl ComputeNormals( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_ CNORM_FLAGS flags, _Out_writes_(nVerts) XMFLOAT3* normals) noexcept; HRESULT __cdecl ComputeNormals( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_ CNORM_FLAGS flags, _Out_writes_(nVerts) XMFLOAT3* normals) noexcept; // Computes vertex normals HRESULT __cdecl ComputeTangentFrame( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_reads_(nVerts) const XMFLOAT3* normals, _In_reads_(nVerts) const XMFLOAT2* texcoords, _In_ size_t nVerts, _Out_writes_opt_(nVerts) XMFLOAT3* tangents, _Out_writes_opt_(nVerts) XMFLOAT3* bitangents) noexcept; HRESULT __cdecl ComputeTangentFrame( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const 
XMFLOAT3* positions, _In_reads_(nVerts) const XMFLOAT3* normals, _In_reads_(nVerts) const XMFLOAT2* texcoords, _In_ size_t nVerts, _Out_writes_opt_(nVerts) XMFLOAT3* tangents, _Out_writes_opt_(nVerts) XMFLOAT3* bitangents) noexcept; HRESULT __cdecl ComputeTangentFrame( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_reads_(nVerts) const XMFLOAT3* normals, _In_reads_(nVerts) const XMFLOAT2* texcoords, _In_ size_t nVerts, _Out_writes_opt_(nVerts) XMFLOAT4* tangents, _Out_writes_opt_(nVerts) XMFLOAT3* bitangents) noexcept; HRESULT __cdecl ComputeTangentFrame( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_reads_(nVerts) const XMFLOAT3* normals, _In_reads_(nVerts) const XMFLOAT2* texcoords, _In_ size_t nVerts, _Out_writes_opt_(nVerts) XMFLOAT4* tangents, _Out_writes_opt_(nVerts) XMFLOAT3* bitangents) noexcept; HRESULT __cdecl ComputeTangentFrame( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_reads_(nVerts) const XMFLOAT3* normals, _In_reads_(nVerts) const XMFLOAT2* texcoords, _In_ size_t nVerts, _Out_writes_(nVerts) XMFLOAT4* tangents) noexcept; HRESULT __cdecl ComputeTangentFrame( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_reads_(nVerts) const XMFLOAT3* normals, _In_reads_(nVerts) const XMFLOAT2* texcoords, _In_ size_t nVerts, _Out_writes_(nVerts) XMFLOAT4* tangents) noexcept; // Computes tangents and/or bi-tangents (optionally with handedness stored in .w) //--------------------------------------------------------------------------------- // Mesh clean-up and validation enum VALIDATE_FLAGS : unsigned long { VALIDATE_DEFAULT = 0x0, VALIDATE_BACKFACING = 0x1, // Check for duplicate neighbor from triangle (requires adjacency) VALIDATE_BOWTIES = 0x2, // Check for two fans of triangles using the same vertex (requires adjacency) VALIDATE_DEGENERATE = 0x4, // Check for degenerate triangles VALIDATE_UNUSED = 0x8, // Check for issues with 'unused' triangles VALIDATE_ASYMMETRIC_ADJ = 0x10, // Checks that neighbors are symmetric (requires adjacency) }; HRESULT __cdecl Validate( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _In_reads_opt_(nFaces * 3) const uint32_t* adjacency, _In_ VALIDATE_FLAGS flags, _In_opt_ std::wstring* msgs = nullptr); HRESULT __cdecl Validate( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _In_reads_opt_(nFaces * 3) const uint32_t* adjacency, _In_ VALIDATE_FLAGS flags, _In_opt_ std::wstring* msgs = nullptr); // Checks the mesh for common problems, return 'S_OK' if no problems were found HRESULT __cdecl Clean( _Inout_updates_all_(nFaces * 3) uint16_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _Inout_updates_all_opt_(nFaces * 3) uint32_t* adjacency, _In_reads_opt_(nFaces) const uint32_t* attributes, _Inout_ std::vector<uint32_t>& dupVerts, _In_ bool breakBowties = false); HRESULT __cdecl Clean( _Inout_updates_all_(nFaces * 3) uint32_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _Inout_updates_all_opt_(nFaces * 3) uint32_t* adjacency, _In_reads_opt_(nFaces) const uint32_t* attributes, _Inout_ std::vector<uint32_t>& dupVerts, _In_ bool breakBowties = false); // Cleans the mesh, splitting vertices if needed //--------------------------------------------------------------------------------- // Mesh utilities 
HRESULT __cdecl WeldVertices( _Inout_updates_all_(nFaces * 3) uint16_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _In_reads_(nVerts) const uint32_t* pointRep, _Out_writes_opt_(nVerts) uint32_t* vertexRemap, _In_ std::function<bool __cdecl(uint32_t v0, uint32_t v1)> weldTest); HRESULT __cdecl WeldVertices( _Inout_updates_all_(nFaces * 3) uint32_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _In_reads_(nVerts) const uint32_t* pointRep, _Out_writes_opt_(nVerts) uint32_t* vertexRemap, _In_ std::function<bool __cdecl(uint32_t v0, uint32_t v1)> weldTest); // Welds vertices together based on a test function //--------------------------------------------------------------------------------- // Mesh Optimization HRESULT __cdecl AttributeSort( _In_ size_t nFaces, _Inout_updates_all_(nFaces) uint32_t* attributes, _Out_writes_(nFaces) uint32_t* faceRemap); // Reorders faces by attribute id enum OPTFACES : uint32_t { OPTFACES_V_DEFAULT = 12, OPTFACES_R_DEFAULT = 7, // Default vertex cache size and restart threshold which is considered 'device independent' OPTFACES_LRU_DEFAULT = 32, // Default vertex cache size for the LRU algorithm OPTFACES_V_STRIPORDER = 0, // Indicates no vertex cache optimization, only reordering into strips }; HRESULT __cdecl OptimizeFaces( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nFaces * 3) const uint32_t* adjacency, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t vertexCache = OPTFACES_V_DEFAULT, _In_ uint32_t restart = OPTFACES_R_DEFAULT); HRESULT __cdecl OptimizeFaces( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nFaces * 3) const uint32_t* adjacency, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t vertexCache = OPTFACES_V_DEFAULT, _In_ uint32_t restart = OPTFACES_R_DEFAULT); HRESULT __cdecl OptimizeFacesLRU( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t lruCacheSize = OPTFACES_LRU_DEFAULT); HRESULT __cdecl OptimizeFacesLRU( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t lruCacheSize = OPTFACES_LRU_DEFAULT); // Reorders faces to increase hit rate of vertex caches HRESULT __cdecl OptimizeFacesEx( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nFaces * 3) const uint32_t* adjacency, _In_reads_(nFaces) const uint32_t* attributes, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t vertexCache = OPTFACES_V_DEFAULT, _In_ uint32_t restart = OPTFACES_R_DEFAULT); HRESULT __cdecl OptimizeFacesEx( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nFaces * 3) const uint32_t* adjacency, _In_reads_(nFaces) const uint32_t* attributes, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t vertexCache = OPTFACES_V_DEFAULT, _In_ uint32_t restart = OPTFACES_R_DEFAULT); HRESULT __cdecl OptimizeFacesLRUEx( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nFaces) const uint32_t* attributes, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t lruCacheSize = OPTFACES_LRU_DEFAULT); HRESULT __cdecl OptimizeFacesLRUEx( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nFaces) const uint32_t* attributes, _Out_writes_(nFaces) uint32_t* faceRemap, _In_ uint32_t lruCacheSize = OPTFACES_LRU_DEFAULT); // Attribute group version of OptimizeFaces HRESULT __cdecl OptimizeVertices( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ 
size_t nFaces, _In_ size_t nVerts, _Out_writes_(nVerts) uint32_t* vertexRemap, _Out_opt_ size_t* trailingUnused = nullptr) noexcept; HRESULT __cdecl OptimizeVertices( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_ size_t nVerts, _Out_writes_(nVerts) uint32_t* vertexRemap, _Out_opt_ size_t* trailingUnused = nullptr) noexcept; // Reorders vertices in order of use //--------------------------------------------------------------------------------- // Remap functions HRESULT __cdecl ReorderIB( _In_reads_(nFaces * 3) const uint16_t* ibin, _In_ size_t nFaces, _In_reads_(nFaces) const uint32_t* faceRemap, _Out_writes_(nFaces * 3) uint16_t* ibout) noexcept; HRESULT __cdecl ReorderIB( _Inout_updates_all_(nFaces * 3) uint16_t* ib, _In_ size_t nFaces, _In_reads_(nFaces) const uint32_t* faceRemap) noexcept; HRESULT __cdecl ReorderIB( _In_reads_(nFaces * 3) const uint32_t* ibin, _In_ size_t nFaces, _In_reads_(nFaces) const uint32_t* faceRemap, _Out_writes_(nFaces * 3) uint32_t* ibout) noexcept; HRESULT __cdecl ReorderIB( _Inout_updates_all_(nFaces * 3) uint32_t* ib, _In_ size_t nFaces, _In_reads_(nFaces) const uint32_t* faceRemap) noexcept; // Applies a face remap reordering to an index buffer HRESULT __cdecl ReorderIBAndAdjacency( _In_reads_(nFaces * 3) const uint16_t* ibin, _In_ size_t nFaces, _In_reads_(nFaces * 3) const uint32_t* adjin, _In_reads_(nFaces) const uint32_t* faceRemap, _Out_writes_(nFaces * 3) uint16_t* ibout, _Out_writes_(nFaces * 3) uint32_t* adjout) noexcept; HRESULT __cdecl ReorderIBAndAdjacency( _Inout_updates_all_(nFaces * 3) uint16_t* ib, _In_ size_t nFaces, _Inout_updates_all_(nFaces * 3) uint32_t* adj, _In_reads_(nFaces) const uint32_t* faceRemap) noexcept; HRESULT __cdecl ReorderIBAndAdjacency( _In_reads_(nFaces * 3) const uint32_t* ibin, _In_ size_t nFaces, _In_reads_(nFaces * 3) const uint32_t* adjin, _In_reads_(nFaces) const uint32_t* faceRemap, _Out_writes_(nFaces * 3) uint32_t* ibout, _Out_writes_(nFaces * 3) uint32_t* adjout) noexcept; HRESULT __cdecl ReorderIBAndAdjacency( _Inout_updates_all_(nFaces * 3) uint32_t* ib, _In_ size_t nFaces, _Inout_updates_all_(nFaces * 3) uint32_t* adj, _In_reads_(nFaces) const uint32_t* faceRemap) noexcept; // Applies a face remap reordering to an index buffer and adjacency HRESULT __cdecl FinalizeIB( _In_reads_(nFaces * 3) const uint16_t* ibin, _In_ size_t nFaces, _In_reads_(nVerts) const uint32_t* vertexRemap, _In_ size_t nVerts, _Out_writes_(nFaces * 3) uint16_t* ibout) noexcept; HRESULT __cdecl FinalizeIB( _Inout_updates_all_(nFaces * 3) uint16_t* ib, _In_ size_t nFaces, _In_reads_(nVerts) const uint32_t* vertexRemap, _In_ size_t nVerts) noexcept; HRESULT __cdecl FinalizeIB( _In_reads_(nFaces * 3) const uint32_t* ibin, _In_ size_t nFaces, _In_reads_(nVerts) const uint32_t* vertexRemap, _In_ size_t nVerts, _Out_writes_(nFaces * 3) uint32_t* ibout) noexcept; HRESULT __cdecl FinalizeIB( _Inout_updates_all_(nFaces * 3) uint32_t* ib, _In_ size_t nFaces, _In_reads_(nVerts) const uint32_t* vertexRemap, _In_ size_t nVerts) noexcept; // Applies a vertex remap reordering to an index buffer HRESULT __cdecl FinalizeVB( _In_reads_bytes_(nVerts*stride) const void* vbin, _In_ size_t stride, _In_ size_t nVerts, _In_reads_opt_(nDupVerts) const uint32_t* dupVerts, _In_ size_t nDupVerts, _In_reads_opt_(nVerts + nDupVerts) const uint32_t* vertexRemap, _Out_writes_bytes_((nVerts + nDupVerts)*stride) void* vbout) noexcept; HRESULT __cdecl FinalizeVB( _Inout_updates_bytes_all_(nVerts*stride) void* vb, _In_ size_t stride, _In_ 
size_t nVerts, _In_reads_(nVerts) const uint32_t* vertexRemap) noexcept; // Applies a vertex remap and/or a vertex duplication set to a vertex buffer HRESULT __cdecl FinalizeVBAndPointReps( _In_reads_bytes_(nVerts*stride) const void* vbin, _In_ size_t stride, _In_ size_t nVerts, _In_reads_(nVerts) const uint32_t* prin, _In_reads_opt_(nDupVerts) const uint32_t* dupVerts, _In_ size_t nDupVerts, _In_reads_opt_(nVerts + nDupVerts) const uint32_t* vertexRemap, _Out_writes_bytes_((nVerts + nDupVerts)*stride) void* vbout, _Out_writes_(nVerts + nDupVerts) uint32_t* prout) noexcept; HRESULT __cdecl FinalizeVBAndPointReps( _Inout_updates_bytes_all_(nVerts*stride) void* vb, _In_ size_t stride, _In_ size_t nVerts, _Inout_updates_all_(nVerts) uint32_t* pointRep, _In_reads_(nVerts) const uint32_t* vertexRemap) noexcept; // Applies a vertex remap and/or a vertex duplication set to a vertex buffer and point representatives HRESULT __cdecl CompactVB( _In_reads_bytes_(nVerts*stride) const void* vbin, _In_ size_t stride, _In_ size_t nVerts, _In_ size_t trailingUnused, _In_reads_opt_(nVerts) const uint32_t* vertexRemap, _Out_writes_bytes_((nVerts - trailingUnused)*stride) void* vbout) noexcept; // Applies a vertex remap which contains a known number of unused entries at the end //--------------------------------------------------------------------------------- // Meshlet Generation constexpr size_t MESHLET_DEFAULT_MAX_VERTS = 128u; constexpr size_t MESHLET_DEFAULT_MAX_PRIMS = 128u; constexpr size_t MESHLET_MINIMUM_SIZE = 32u; constexpr size_t MESHLET_MAXIMUM_SIZE = 256u; enum MESHLET_FLAGS : unsigned long { MESHLET_DEFAULT = 0x0, MESHLET_WIND_CW = 0x1, // Vertices are clock-wise (defaults to CCW) }; struct Meshlet { uint32_t VertCount; uint32_t VertOffset; uint32_t PrimCount; uint32_t PrimOffset; }; struct MeshletTriangle { uint32_t i0 : 10; uint32_t i1 : 10; uint32_t i2 : 10; }; struct CullData { DirectX::BoundingSphere BoundingSphere; // xyz = center, w = radius DirectX::PackedVector::XMUBYTEN4 NormalCone; // xyz = axis, w = -cos(a + 90) float ApexOffset; // apex = center - axis * offset }; HRESULT __cdecl ComputeMeshlets( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_opt_(nFaces * 3) const uint32_t* adjacency, _Inout_ std::vector<Meshlet>& meshlets, _Inout_ std::vector<uint8_t>& uniqueVertexIB, _Inout_ std::vector<MeshletTriangle>& primitiveIndices, _In_ size_t maxVerts = MESHLET_DEFAULT_MAX_VERTS, _In_ size_t maxPrims = MESHLET_DEFAULT_MAX_PRIMS); HRESULT __cdecl ComputeMeshlets( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_opt_(nFaces * 3) const uint32_t* adjacency, _Inout_ std::vector<Meshlet>& meshlets, _Inout_ std::vector<uint8_t>& uniqueVertexIB, _Inout_ std::vector<MeshletTriangle>& primitiveIndices, _In_ size_t maxVerts = MESHLET_DEFAULT_MAX_VERTS, _In_ size_t maxPrims = MESHLET_DEFAULT_MAX_PRIMS); // Generates meshlets for a single subset mesh HRESULT __cdecl ComputeMeshlets( _In_reads_(nFaces * 3) const uint16_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_(nSubsets) const std::pair<size_t, size_t>* subsets, _In_ size_t nSubsets, _In_reads_opt_(nFaces * 3) const uint32_t* adjacency, _Inout_ std::vector<Meshlet>& meshlets, _Inout_ std::vector<uint8_t>& uniqueVertexIB, _Inout_ std::vector<MeshletTriangle>& primitiveIndices, 
_Out_writes_(nSubsets) std::pair<size_t, size_t>* meshletSubsets, _In_ size_t maxVerts = MESHLET_DEFAULT_MAX_VERTS, _In_ size_t maxPrims = MESHLET_DEFAULT_MAX_PRIMS); HRESULT __cdecl ComputeMeshlets( _In_reads_(nFaces * 3) const uint32_t* indices, _In_ size_t nFaces, _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_(nSubsets) const std::pair<size_t, size_t>* subsets, _In_ size_t nSubsets, _In_reads_opt_(nFaces * 3) const uint32_t* adjacency, _Inout_ std::vector<Meshlet>& meshlets, _Inout_ std::vector<uint8_t>& uniqueVertexIB, _Inout_ std::vector<MeshletTriangle>& primitiveIndices, _Out_writes_(nSubsets) std::pair<size_t, size_t>* meshletSubsets, _In_ size_t maxVerts = MESHLET_DEFAULT_MAX_VERTS, _In_ size_t maxPrims = MESHLET_DEFAULT_MAX_PRIMS); // Generates meshlets for a mesh with several face subsets HRESULT __cdecl ComputeCullData( _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_(nMeshlets) const Meshlet* meshlets, _In_ size_t nMeshlets, _In_reads_(nVertIndices) const uint16_t* uniqueVertexIndices, _In_ size_t nVertIndices, _In_reads_(nPrimIndices) const MeshletTriangle* primitiveIndices, _In_ size_t nPrimIndices, _Out_writes_(nMeshlets) CullData* cullData, _In_ MESHLET_FLAGS flags = MESHLET_DEFAULT) noexcept; HRESULT __cdecl ComputeCullData( _In_reads_(nVerts) const XMFLOAT3* positions, _In_ size_t nVerts, _In_reads_(nMeshlets) const Meshlet* meshlets, _In_ size_t nMeshlets, _In_reads_(nVertIndices) const uint32_t* uniqueVertexIndices, _In_ size_t nVertIndices, _In_reads_(nPrimIndices) const MeshletTriangle* primitiveIndices, _In_ size_t nPrimIndices, _Out_writes_(nMeshlets) CullData* cullData, _In_ MESHLET_FLAGS flags = MESHLET_DEFAULT) noexcept; // Computes culling data for each input meshlet //--------------------------------------------------------------------------------- #ifdef __clang__ #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec" #endif #include "DirectXMesh.inl" #ifdef __clang__ #pragma clang diagnostic pop #endif } // namespace
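The functions declared above compose into a fairly standard clean/optimize pipeline. The sketch below is illustrative only and is not part of DirectXMesh.h: it assumes the header and the usual Windows headers are already included, a 16-bit index buffer, and a position-only vertex buffer of tightly packed XMFLOAT3. It also assumes Clean() did not duplicate any vertices (dupVerts stays empty); if it did, the FinalizeVB overload taking dupVerts would be needed instead.

// Hedged sketch of a clean -> face-optimize -> vertex-optimize pipeline.
bool OptimizeMeshSketch(std::vector<uint16_t>& indices,               // nFaces * 3 entries
                        std::vector<DirectX::XMFLOAT3>& positions)    // nVerts entries
{
    using namespace DirectX;

    const size_t nFaces = indices.size() / 3;
    const size_t nVerts = positions.size();

    // 1. Adjacency (and point representatives) from positions.
    std::vector<uint32_t> pointRep(nVerts);
    std::vector<uint32_t> adjacency(nFaces * 3);
    if (FAILED(GenerateAdjacencyAndPointReps(indices.data(), nFaces,
                                             positions.data(), nVerts,
                                             0.f, pointRep.data(), adjacency.data())))
        return false;

    // 2. Clean the mesh (degenerates, unused entries, ...).
    std::vector<uint32_t> dupVerts;
    if (FAILED(Clean(indices.data(), nFaces, nVerts, adjacency.data(),
                     nullptr, dupVerts)))
        return false;

    // 3. Reorder faces for the post-transform vertex cache, then apply the
    //    face remap to the index buffer and adjacency in place.
    std::vector<uint32_t> faceRemap(nFaces);
    if (FAILED(OptimizeFaces(indices.data(), nFaces, adjacency.data(), faceRemap.data())))
        return false;
    if (FAILED(ReorderIBAndAdjacency(indices.data(), nFaces, adjacency.data(), faceRemap.data())))
        return false;

    // 4. Reorder vertices in order of use, then remap the IB and VB in place.
    std::vector<uint32_t> vertexRemap(nVerts);
    if (FAILED(OptimizeVertices(indices.data(), nFaces, nVerts, vertexRemap.data())))
        return false;
    if (FAILED(FinalizeIB(indices.data(), nFaces, vertexRemap.data(), nVerts)))
        return false;
    return !FAILED(FinalizeVB(positions.data(), sizeof(XMFLOAT3), nVerts, vertexRemap.data()));
}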
// Copyright (c) 2017-2021, Mudit<NAME>. All rights reserved. // For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md #include "gtest/gtest.h" #include "TestBoxLayout.hpp" #include <log/log.hpp> #include <module-gui/test/mock/TestListViewProvider.hpp> #include <gui/input/InputEvent.hpp> namespace testStyle { const inline uint32_t box_x = 0; const inline uint32_t box_y = 0; const inline uint32_t box_w = 200; const inline uint32_t box_h = 600; const inline uint32_t VBox_w = 200; const inline uint32_t VBox_h = 600; const inline uint32_t HBox_w = 200; const inline uint32_t HBox_h = 50; const inline uint32_t VBox_item_w = 200; const inline uint32_t VBox_item_h = 100; const inline uint32_t HBox_item_w = 50; const inline uint32_t HBox_item_h = 50; } // namespace testStyle class TestItem : public gui::Rect { public: unsigned int ID = 0; TestItem(Item *parent, uint32_t x, uint32_t y, uint32_t w, uint32_t h) : Rect(parent, x, y, w, h){}; ~TestItem() = default; }; class BoxLayoutTesting : public ::testing::Test { protected: void SetUp() override { testBoxLayout = new TestBoxLayout(nullptr, testStyle::box_x, testStyle::box_y, testStyle::box_w, testStyle::box_h); testVBoxLayout = new gui::VBox(nullptr, testStyle::box_x, testStyle::box_y, testStyle::VBox_w, testStyle::VBox_h); testHBoxLayout = new gui::HBox(nullptr, testStyle::box_x, testStyle::box_y, testStyle::HBox_w, testStyle::HBox_h); ASSERT_EQ(0, testBoxLayout->children.size()) << "Box should be empty"; ASSERT_EQ(0, testVBoxLayout->children.size()) << "Box should be empty"; ASSERT_EQ(0, testHBoxLayout->children.size()) << "Box should be empty"; } void TearDown() override { delete testBoxLayout; delete testVBoxLayout; delete testHBoxLayout; } void moveNTimes(gui::BoxLayout *Box, unsigned int n, gui::KeyCode key) { for (unsigned int i = 0; i < n; i++) { Box->onInput(gui::InputEvent({}, gui::InputEvent::State::keyReleasedShort, key)); } } void addNItems(gui::BoxLayout *Box, unsigned int n, uint32_t item_w, uint32_t item_h, const gui::Margins &margins = gui::Margins()) { for (unsigned int i = 1; i <= n; i++) { auto item = new TestItem(nullptr, 0, 0, item_w, item_h); item->ID = i; item->visible = true; item->setMargins(margins); Box->addWidget(item); } } TestItem *getNItem(gui::BoxLayout *Box, unsigned int n) { auto item = Box->children.begin(); std::advance(item, n); return dynamic_cast<TestItem *>(*item); } TestBoxLayout *testBoxLayout = nullptr; gui::VBox *testVBoxLayout = nullptr; gui::HBox *testHBoxLayout = nullptr; const unsigned int fillVBoxPage = testStyle::VBox_h / testStyle::VBox_item_h; const unsigned int notFillVBoxPage = fillVBoxPage - 2; const unsigned int fillHBoxPage = testStyle::HBox_w / testStyle::HBox_item_w; const unsigned int notFillHVBoxPage = fillHBoxPage - 1; const unsigned int overflowHBoxPage = fillHBoxPage + 2; }; TEST_F(BoxLayoutTesting, Constructor_Destructor_Test) { // Check that there are no memory leaks - done by fixture setup and teardown. 
} TEST_F(BoxLayoutTesting, Fill_Box_Test) { // Add data to VBox and HBox addNItems(testVBoxLayout, fillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(fillVBoxPage, testVBoxLayout->children.size()) << "VBox should contain 6 elements"; ASSERT_TRUE(testVBoxLayout->children.back()->visible) << "Last element should be visible"; testVBoxLayout->erase(); ASSERT_EQ(0, testVBoxLayout->children.size()) << "VBox should contain 0 elements"; addNItems(testHBoxLayout, overflowHBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h); ASSERT_EQ(overflowHBoxPage, testHBoxLayout->children.size()) << "HBox should contain 6 elements"; ASSERT_FALSE(getNItem(testHBoxLayout, 5)->visible) << "5 element should not be visible - as it not fit"; ASSERT_FALSE(testHBoxLayout->children.back()->visible) << "Last element should not be visible - as it not fit"; testHBoxLayout->erase(); ASSERT_EQ(0, testVBoxLayout->children.size()) << "VBox should contain 0 elements"; } TEST_F(BoxLayoutTesting, Navigate_Test) { // Add data to VBox addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; testVBoxLayout->setFocus(true); ASSERT_EQ(1, dynamic_cast<TestItem *>(testVBoxLayout->getFocusItem())->ID) << "first element should have focus"; moveNTimes(testVBoxLayout, 2, gui::KeyCode::KEY_DOWN); ASSERT_EQ(3, dynamic_cast<TestItem *>(testVBoxLayout->getFocusItem())->ID) << "move down by 2 - third element should have focus"; moveNTimes(testVBoxLayout, 1, gui::KeyCode::KEY_UP); ASSERT_EQ(2, dynamic_cast<TestItem *>(testVBoxLayout->getFocusItem())->ID) << "move up by 1 - second element should have focus"; // Fill HBox addNItems(testHBoxLayout, fillHBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h); ASSERT_EQ(fillHBoxPage, testHBoxLayout->children.size()) << "Box should contain 4 elements"; testHBoxLayout->setFocus(true); ASSERT_EQ(1, dynamic_cast<TestItem *>(testHBoxLayout->getFocusItem())->ID) << "first element should have focus"; moveNTimes(testHBoxLayout, 2, gui::KeyCode::KEY_RIGHT); ASSERT_EQ(3, dynamic_cast<TestItem *>(testHBoxLayout->getFocusItem())->ID) << "move right by 2 - third element should have focus"; moveNTimes(testHBoxLayout, 1, gui::KeyCode::KEY_LEFT); ASSERT_EQ(2, dynamic_cast<TestItem *>(testHBoxLayout->getFocusItem())->ID) << "move left by 1 - second element should have focus"; } TEST_F(BoxLayoutTesting, Border_Callback_Test) { // Add data to VBox addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; testVBoxLayout->setFocus(true); auto borderCallback = false; testVBoxLayout->borderCallback = [&borderCallback](const gui::InputEvent &inputEvent) -> bool { borderCallback = true; return true; }; moveNTimes(testVBoxLayout, 2, gui::KeyCode::KEY_DOWN); ASSERT_FALSE(borderCallback) << "move down by 2 - border callback should not be called"; moveNTimes(testVBoxLayout, 2, gui::KeyCode::KEY_DOWN); ASSERT_TRUE(borderCallback) << "move second down time by 2 - border callback should be called"; // Fill HBox addNItems(testHBoxLayout, fillHBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h); ASSERT_EQ(fillHBoxPage, testHBoxLayout->children.size()) << "Box should contain 4 elements"; testHBoxLayout->setFocus(true); borderCallback = false; testHBoxLayout->borderCallback = [&borderCallback](const gui::InputEvent &inputEvent) -> bool { 
borderCallback = true; return true; }; moveNTimes(testHBoxLayout, 1, gui::KeyCode::KEY_LEFT); ASSERT_TRUE(borderCallback) << "move left by 1 - border callback should be called"; borderCallback = false; moveNTimes(testHBoxLayout, 2, gui::KeyCode::KEY_RIGHT); ASSERT_FALSE(borderCallback) << "move right by 2 - border callback should not be called"; } TEST_F(BoxLayoutTesting, Box_Alignment_Test) { // set no Reverse Order and no Alignment testVBoxLayout->setReverseOrder(false); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::None)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(0, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 0"; ASSERT_EQ(300, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 300"; testVBoxLayout->erase(); // set Reverse Order and no Alignment testVBoxLayout->setReverseOrder(true); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::None)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(500, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 500 - first from bottom"; ASSERT_EQ(200, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 200"; testVBoxLayout->erase(); // set no Reverse Order and set Alignment to Top testVBoxLayout->setReverseOrder(false); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::Top)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(0, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 0"; ASSERT_EQ(300, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 300"; testVBoxLayout->erase(); // set no Reverse Order and set Alignment to Center testVBoxLayout->setReverseOrder(false); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::Center)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(100, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 100"; ASSERT_EQ(400, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 400"; testVBoxLayout->erase(); // set no Reverse Order and set Alignment to Bottom testVBoxLayout->setReverseOrder(false); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::Bottom)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(200, 
testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 200"; ASSERT_EQ(500, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 500"; testVBoxLayout->erase(); // set Reverse Order and set Alignment to Top testVBoxLayout->setReverseOrder(true); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::Top)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(300, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 300"; ASSERT_EQ(0, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 0"; testVBoxLayout->erase(); // set Reverse Order and set Alignment to Center testVBoxLayout->setReverseOrder(true); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::Center)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(400, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 400"; ASSERT_EQ(100, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 100"; testVBoxLayout->erase(); // set Reverse Order and set Alignment to Bottom testVBoxLayout->setReverseOrder(true); testVBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Vertical::Bottom)); // Add 4 elements to VBox - there should be space for 6 addNItems(testVBoxLayout, notFillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h); ASSERT_EQ(notFillVBoxPage, testVBoxLayout->children.size()) << "Box should contain 4 elements"; ASSERT_EQ(500, testVBoxLayout->children.front()->getPosition(gui::Axis::Y)) << "first element should have Y pos 500 - first from bottom"; ASSERT_EQ(200, testVBoxLayout->children.back()->getPosition(gui::Axis::Y)) << "last element should have Y pos 200"; testVBoxLayout->erase(); } TEST_F(BoxLayoutTesting, Box_Widget_Min_Max_Resize_Test) { // Add element to HBox with 0 size addNItems(testHBoxLayout, 1, 0, 0); // Set element minimal size getNItem(testHBoxLayout, 0)->setMinimumSize(testStyle::HBox_item_w / 2, testStyle::HBox_item_h / 3); // Resize Box and check if item has actual size equal to Min size testHBoxLayout->resizeItems(); ASSERT_EQ(testStyle::HBox_item_w / 2, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::X)); ASSERT_EQ(testStyle::HBox_item_h / 3, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::Y)); // Set element maximal size to HBoxSize getNItem(testHBoxLayout, 0)->setMaximumSize(testStyle::HBox_w, testStyle::HBox_h); // Resize Box and check if item has actual size equal to Max size testHBoxLayout->resizeItems(); ASSERT_EQ(testStyle::HBox_w, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::X)); ASSERT_EQ(testStyle::HBox_h, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::Y)); // Set element maximal size to double HBoxSize getNItem(testHBoxLayout, 0)->setMaximumSize(testStyle::HBox_w * 2, testStyle::HBox_h * 2); // Resize Box and check if item has actual size equal to Box Size testHBoxLayout->resizeItems(); ASSERT_EQ(testStyle::HBox_w, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::X)); ASSERT_EQ(testStyle::HBox_h, 
getNItem(testHBoxLayout, 0)->getSize(gui::Axis::Y)); testHBoxLayout->erase(); // Add element to HBox with 0 size addNItems(testHBoxLayout, 1, 0, 0); // set Box to Alignment to Right and Vertical Center testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::Right, gui::Alignment::Vertical::Center)); // Set element maximal size to half HBoxSize getNItem(testHBoxLayout, 0)->setMaximumSize(testStyle::HBox_w / 2, testStyle::HBox_h / 2); // Resize Box and check size testHBoxLayout->resizeItems(); ASSERT_EQ(testStyle::HBox_w / 2, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::X)); ASSERT_EQ(testStyle::HBox_h / 2, getNItem(testHBoxLayout, 0)->getSize(gui::Axis::Y)); ASSERT_EQ(testStyle::HBox_w - testStyle::HBox_w / 2, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)); ASSERT_EQ((testStyle::HBox_h - testStyle::HBox_h / 2) / 2, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)); } TEST_F(BoxLayoutTesting, Box_Widgets_Alignment_Test) { // set Box to None testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::None, gui::Alignment::Vertical::None)); // Add 3 elements to HBox with half parent Horizontal Size addNItems(testHBoxLayout, notFillHVBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h / 2); ASSERT_EQ(notFillHVBoxPage, testHBoxLayout->children.size()) << "Box should contain 3 elements"; // Set first item Alignment to Vertical Top getNItem(testHBoxLayout, 0)->setAlignment(gui::Alignment(gui::Alignment::Vertical::Top)); // Set second item Alignment to Vertical Center getNItem(testHBoxLayout, 1)->setAlignment(gui::Alignment(gui::Alignment::Vertical::Center)); // Set first item Alignment to Vertical Bottom getNItem(testHBoxLayout, 2)->setAlignment(gui::Alignment(gui::Alignment::Vertical::Bottom)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 0 - first from left"; ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos 0 - top"; ASSERT_EQ(testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos testStyle::HBox_item_w - second from left"; ASSERT_EQ(testStyle::HBox_item_h / 4, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::Y)) << "second element should have Y pos testStyle::HBox_item_h/4 - Center"; ASSERT_EQ(2 * testStyle::HBox_item_w, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 2 * testStyle::HBox_item_w - third from left"; ASSERT_EQ(testStyle::HBox_item_h / 2, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos testStyle::HBox_item_h/2 - Bottom"; // Change Box Horizontal (in Axis) Alignment to Center testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::Center, gui::Alignment::Vertical::None)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ((testStyle::HBox_w - notFillHVBoxPage * testStyle::HBox_item_w) / 2, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 25"; ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos 0 - top"; ASSERT_EQ((testStyle::HBox_w - notFillHVBoxPage * testStyle::HBox_item_w) / 2 + testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos 75"; ASSERT_EQ(testStyle::HBox_item_h / 4, getNItem(testHBoxLayout, 
1)->getPosition(gui::Axis::Y)) << "second element should have Y pos testStyle::HBox_item_h/4 - Center"; ASSERT_EQ((testStyle::HBox_w - notFillHVBoxPage * testStyle::HBox_item_w) / 2 + testStyle::HBox_item_w * 2, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 125"; ASSERT_EQ(testStyle::HBox_item_h / 2, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos testStyle::HBox_item_h/2 - Bottom"; // Change Box Horizontal (in Axis) Alignment to Right testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::Right, gui::Alignment::Vertical::None)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ((testStyle::HBox_w - notFillHVBoxPage * testStyle::HBox_item_w), getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 50"; ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos 0 - top"; ASSERT_EQ((testStyle::HBox_w - notFillHVBoxPage * testStyle::HBox_item_w) + testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos 100"; ASSERT_EQ(testStyle::HBox_item_h / 4, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::Y)) << "second element should have Y pos testStyle::HBox_item_h/4 - Center"; ASSERT_EQ((testStyle::HBox_w - notFillHVBoxPage * testStyle::HBox_item_w) + testStyle::HBox_item_w * 2, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 150"; ASSERT_EQ(testStyle::HBox_item_h / 2, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos testStyle::HBox_item_h/2 - Bottom"; // Change Box Vertical (in Axis) Alignment to Top -> so it should override children Alignment. testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::Left, gui::Alignment::Vertical::Top)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 0 - first from left"; ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos 0 - top"; ASSERT_EQ(testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos testStyle::HBox_item_w - second from left"; ASSERT_EQ(0, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::Y)) << "second element should have Y pos 0 - top"; ASSERT_EQ(2 * testStyle::HBox_item_w, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 2 * testStyle::HBox_item_w - third from left"; ASSERT_EQ(0, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos 0 - top"; // Change Box Vertical (in Axis) Alignment to Center -> so it should override children Alignment. 
testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::Left, gui::Alignment::Vertical::Center)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 0 - first from left"; ASSERT_EQ(testStyle::HBox_item_h / 4, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos testStyle::HBox_item_h/4 - Center"; ASSERT_EQ(testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos testStyle::HBox_item_w - second from left"; ASSERT_EQ(testStyle::HBox_item_h / 4, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::Y)) << "second element should have Y pos testStyle::HBox_item_h/4 - Center"; ASSERT_EQ(2 * testStyle::HBox_item_w, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 2 * testStyle::HBox_item_w - third from left"; ASSERT_EQ(testStyle::HBox_item_h / 4, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos testStyle::HBox_item_h/4 - Center"; // Change Box Vertical (in Axis) Alignment to Bottom -> so it should override children Alignment. testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::Left, gui::Alignment::Vertical::Bottom)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 0 - first from left"; ASSERT_EQ(testStyle::HBox_item_h / 2, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos testStyle::HBox_item_h/2 - Bottom"; ASSERT_EQ(testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos testStyle::HBox_item_w - second from left"; ASSERT_EQ(testStyle::HBox_item_h / 2, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::Y)) << "second element should have Y pos testStyle::HBox_item_h/2 - Bottom"; ASSERT_EQ(2 * testStyle::HBox_item_w, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 2 * testStyle::HBox_item_w - third from left"; ASSERT_EQ(testStyle::HBox_item_h / 2, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos testStyle::HBox_item_h/2 - Bottom"; } TEST_F(BoxLayoutTesting, Box_Widgets_Alignment_Magrin_Test) { // set Box to None testHBoxLayout->setAlignment(gui::Alignment(gui::Alignment::Horizontal::None, gui::Alignment::Vertical::None)); // Add 3 elements to HBox with half parent Horizontal Size addNItems(testHBoxLayout, notFillHVBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h / 2); ASSERT_EQ(notFillHVBoxPage, testHBoxLayout->children.size()) << "Box should contain 3 elements"; // Set first item Alignment to Vertical Top getNItem(testHBoxLayout, 0)->setAlignment(gui::Alignment(gui::Alignment::Vertical::Top)); // Set second item Alignment to Vertical Center getNItem(testHBoxLayout, 1)->setAlignment(gui::Alignment(gui::Alignment::Vertical::Center)); // Set first item Alignment to Vertical Bottom getNItem(testHBoxLayout, 2)->setAlignment(gui::Alignment(gui::Alignment::Vertical::Bottom)); auto testSmallMargin = 5; auto testBigMargin = 15; // Set items orthogonal margins getNItem(testHBoxLayout, 0)->setMargins(gui::Margins(0, testBigMargin, 0, 0)); getNItem(testHBoxLayout, 1)->setMargins(gui::Margins(0, testSmallMargin, 0, testBigMargin)); getNItem(testHBoxLayout, 
2)->setMargins(gui::Margins(0, testSmallMargin, 0, testBigMargin)); // Force position recalculation testHBoxLayout->resizeItems(); ASSERT_EQ(0, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::X)) << "first element should have X pos 0 - first from left"; ASSERT_EQ(testBigMargin, getNItem(testHBoxLayout, 0)->getPosition(gui::Axis::Y)) << "first element should have Y pos 0 + top margin"; ASSERT_EQ(testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::X)) << "second element should have X pos testStyle::HBox_item_w - second from left"; ASSERT_EQ(testStyle::HBox_item_h / 4 - ((testSmallMargin + testBigMargin) / 2), getNItem(testHBoxLayout, 1)->getPosition(gui::Axis::Y)) << "second element should have Y pos testStyle::HBox_item_h/4 - margins/2"; ASSERT_EQ(2 * testStyle::HBox_item_w, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::X)) << "third element should have X pos 2 * testStyle::HBox_item_w - third from left"; ASSERT_EQ(testStyle::HBox_item_h / 2 - testBigMargin, getNItem(testHBoxLayout, 2)->getPosition(gui::Axis::Y)) << "third element should have Y pos testStyle::HBox_item_h/2 - bottom margin"; } TEST_F(BoxLayoutTesting, Box_Margins_Test) { auto testTopMargin = 30; auto testBottomMargin = 50; auto testRightMargin = 2; auto testLeftMargin = 5; // Add data to VBox with custom margins top 30, bot 50. addNItems(testVBoxLayout, fillVBoxPage, testStyle::VBox_item_w, testStyle::VBox_item_h, gui::Margins(0, testTopMargin, 0, testBottomMargin)); ASSERT_EQ(testTopMargin, getNItem(testVBoxLayout, 0)->widgetArea.y) << "First element y pos should be 30"; ASSERT_EQ(testTopMargin + testStyle::VBox_item_h + testBottomMargin + testTopMargin, getNItem(testVBoxLayout, 1)->widgetArea.y) << "Second element y pos should be 210"; // Add data to HBox with custom margins right 20, left 50 and with reverse order. testHBoxLayout->setReverseOrder(true); addNItems(testHBoxLayout, fillHBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h, gui::Margins(testLeftMargin, 0, testRightMargin, 0)); ASSERT_EQ(testStyle::HBox_w - testRightMargin - testStyle::HBox_item_w, getNItem(testHBoxLayout, 0)->widgetArea.x) << "First element y pos should be 148"; ASSERT_EQ(testStyle::HBox_w - testRightMargin - testStyle::HBox_item_w - testLeftMargin - testRightMargin - testStyle::HBox_item_w, getNItem(testHBoxLayout, 1)->widgetArea.x) << "Second element y pos should be 91"; } TEST_F(BoxLayoutTesting, Box_Content_Change_Test) { auto thirdBox = new gui::HBox(nullptr, testStyle::box_x, testStyle::box_y, testStyle::HBox_w, testStyle::HBox_h); auto secondBox = new gui::HBox(thirdBox, testStyle::box_x, testStyle::box_y, testStyle::HBox_w, testStyle::HBox_h); auto firstBox = new gui::HBox(secondBox, testStyle::box_x, testStyle::box_y, testStyle::HBox_w, testStyle::HBox_h); // Fill first box with data addNItems(firstBox, fillHBoxPage, testStyle::HBox_item_w, testStyle::HBox_item_h); // Check Boxes content ASSERT_EQ(firstBox->children.size(), 4) << "First box element size should be 4"; ASSERT_TRUE(getNItem(firstBox, 0)->visible) << "First box element 0 should be visible"; ASSERT_TRUE(getNItem(firstBox, 1)->visible) << "First box element 1 should be visible"; ASSERT_TRUE(getNItem(firstBox, 2)->visible) << "First box element 2 should be visible"; ASSERT_TRUE(getNItem(firstBox, 3)->visible) << "First box element 3 should be visible"; // Change first box, first element min/max size to parent size. 
getNItem(firstBox, 0)->setMinimumSize(testStyle::HBox_w, testStyle::HBox_h); getNItem(firstBox, 0)->setMaximumSize(testStyle::HBox_w, testStyle::HBox_h); // Box contents should not change as no automatic recalculate procedure has been called ASSERT_EQ(firstBox->children.size(), 4) << "First box element size should be 4"; ASSERT_TRUE(getNItem(firstBox, 0)->visible) << "First box element 0 should be visible"; ASSERT_TRUE(getNItem(firstBox, 1)->visible) << "First box element 1 should be visible"; ASSERT_TRUE(getNItem(firstBox, 2)->visible) << "First box element 2 should be visible"; ASSERT_TRUE(getNItem(firstBox, 3)->visible) << "First box element 3 should be visible"; // Call content change method on firstBox firstBox->informContentChanged(); // Box contents should have changed and only one element should be visible ASSERT_EQ(firstBox->children.size(), 4) << "First box element size should be 4"; ASSERT_TRUE(getNItem(firstBox, 0)->visible) << "First box element 0 should be visible"; ASSERT_FALSE(getNItem(firstBox, 1)->visible) << "First box element 1 should not be visible"; ASSERT_FALSE(getNItem(firstBox, 2)->visible) << "First box element 2 should not be visible"; ASSERT_FALSE(getNItem(firstBox, 3)->visible) << "First box element 3 should not be visible"; delete thirdBox; }
package com.zfoo.net.zookeeper.curator.cache;

import com.zfoo.util.ThreadUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.cache.ChildData;
import org.apache.curator.framework.recipes.cache.CuratorCache;
import org.apache.curator.framework.recipes.cache.CuratorCacheListener;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.junit.Ignore;
import org.junit.Test;

/**
 * @author jaysunxiao
 * @version 1.0
 * @since 2019-08-25 09:43
 */
@Ignore
public class CuratorCacheTest {

    private static CuratorFramework curator = CuratorFrameworkFactory.builder()
            .connectString("localhost:2181")
            .sessionTimeoutMs(5000)
            .retryPolicy(new ExponentialBackoffRetry(1000, 3))
            // .retryPolicy(new RetryNTimes(1, 1000))
            .build();

    @Test
    public void test() {
        curator.start();

        var curatorCache = CuratorCache.builder(curator, "/test")
                .withExceptionHandler(e -> e.printStackTrace())
                .build();
        curatorCache.start();

        // Listener for node change events
        curatorCache.listenable().addListener(new CuratorCacheListener() {
            @Override
            public void event(Type type, ChildData oldData, ChildData data) {
                System.out.println("pathCache ------ Type:" + type + ",");
                System.out.println(data);
            }
        });

        ThreadUtils.sleep(Long.MAX_VALUE);
    }
}
654
190,993
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_CORE_KERNELS_LINALG_EIG_OP_IMPL_H_ #define TENSORFLOW_CORE_KERNELS_LINALG_EIG_OP_IMPL_H_ // See docs in ../ops/linalg_ops.cc. #include "third_party/eigen3/Eigen/Core" #include "third_party/eigen3/Eigen/Eigenvalues" #include "tensorflow/core/framework/kernel_def_builder.h" #include "tensorflow/core/framework/op_kernel.h" #include "tensorflow/core/framework/tensor_shape.h" #include "tensorflow/core/kernels/linalg/linalg_ops_common.h" #include "tensorflow/core/lib/core/errors.h" #include "tensorflow/core/platform/denormal.h" #include "tensorflow/core/platform/logging.h" #include "tensorflow/core/platform/types.h" namespace tensorflow { template <class InputScalar, class OutputScalar> class EigOp : public LinearAlgebraOp<InputScalar, OutputScalar> { public: typedef LinearAlgebraOp<InputScalar, OutputScalar> Base; explicit EigOp(OpKernelConstruction* context) : Base(context) { OP_REQUIRES_OK(context, context->GetAttr("compute_v", &compute_v_)); } using TensorShapes = typename Base::TensorShapes; using InputMatrix = typename Base::InputMatrix; using InputMatrixMaps = typename Base::InputMatrixMaps; using InputConstMatrixMap = typename Base::InputConstMatrixMap; using InputConstMatrixMaps = typename Base::InputConstMatrixMaps; using OutputMatrix = typename Base::OutputMatrix; using OutputMatrixMaps = typename Base::OutputMatrixMaps; using OutputConstMatrixMap = typename Base::OutputConstMatrixMap; using OutputConstMatrixMaps = typename Base::OutputConstMatrixMaps; TensorShapes GetOutputMatrixShapes( const TensorShapes& input_matrix_shapes) const final { int64_t n = input_matrix_shapes[0].dim_size(0); if (compute_v_) { return TensorShapes({TensorShape({n}), TensorShape({n, n})}); } else { return TensorShapes({TensorShape({n})}); } } void ComputeMatrix(OpKernelContext* context, const InputConstMatrixMaps& inputs, OutputMatrixMaps* outputs) final { const int64_t rows = inputs[0].rows(); if (rows == 0) { // If X is an empty matrix (0 rows, 0 col), X * X' == X. // Therefore, we return X. return; } // This algorithm relies on denormals, so switch them back on locally. port::ScopedDontFlushDenormal dont_flush_denormals; Eigen::ComplexEigenSolver<OutputMatrix> eig( inputs[0], compute_v_ ? Eigen::ComputeEigenvectors : Eigen::EigenvaluesOnly); // TODO(rmlarsen): Output more detailed error info on failure. OP_REQUIRES( context, eig.info() == Eigen::Success, errors::InvalidArgument("Eigen decomposition was not " "successful. The input might not be valid.")); outputs->at(0) = eig.eigenvalues().template cast<OutputScalar>(); if (compute_v_) { outputs->at(1) = eig.eigenvectors(); } } private: bool compute_v_; }; } // namespace tensorflow #endif // TENSORFLOW_CORE_KERNELS_LINALG_EIG_OP_IMPL_H_
1,325
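The kernel above delegates all numerical work to Eigen's ComplexEigenSolver; everything else is shape bookkeeping and error propagation. Below is a self-contained sketch of that same call outside the TensorFlow kernel framework, using an arbitrary 2x2 example matrix chosen for illustration:

#include <complex>
#include <iostream>
#include <Eigen/Core>
#include <Eigen/Eigenvalues>

int main() {
  // A small rotation-like matrix whose eigenvalues are +i and -i.
  Eigen::MatrixXcd m(2, 2);
  m << std::complex<double>(0, 0), std::complex<double>(-1, 0),
       std::complex<double>(1, 0), std::complex<double>(0, 0);

  // Passing true mirrors the compute_v_ == true path in the kernel above.
  Eigen::ComplexEigenSolver<Eigen::MatrixXcd> eig(m, /*computeEigenvectors=*/true);
  if (eig.info() != Eigen::Success) {
    std::cerr << "Eigen decomposition was not successful.\n";
    return 1;
  }
  std::cout << "eigenvalues:\n" << eig.eigenvalues() << "\n";
  std::cout << "eigenvectors:\n" << eig.eigenvectors() << "\n";
  return 0;
}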
8,805
<filename>ios/Pods/boost-for-react-native/boost/align/detail/max_size.hpp /* (c) 2014-2015 <NAME> <glenjofe -at- gmail.com> Distributed under the Boost Software License, Version 1.0. http://boost.org/LICENSE_1_0.txt */ #ifndef BOOST_ALIGN_DETAIL_MAX_SIZE_HPP #define BOOST_ALIGN_DETAIL_MAX_SIZE_HPP #include <boost/align/detail/integral_constant.hpp> #include <cstddef> namespace boost { namespace alignment { namespace detail { template<std::size_t A, std::size_t B> struct max_size : integral_constant<std::size_t, (A > B) ? A : B> { }; } /* .detail */ } /* .alignment */ } /* .boost */ #endif
279
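max_size is simply a compile-time maximum over two std::size_t arguments, exposed as an integral_constant. A short usage check follows; note this is a detail header, so ordinary user code would not normally include it directly:

#include <boost/align/detail/max_size.hpp>

// Compile-time checks of the metafunction: the larger argument wins.
static_assert(boost::alignment::detail::max_size<4, 8>::value == 8,
              "max_size<4, 8> should be 8");
static_assert(boost::alignment::detail::max_size<16, 2>::value == 16,
              "max_size<16, 2> should be 16");

int main() { return 0; }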
634
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.mailbox.quota.mailing.commands; import java.util.List; import org.apache.james.eventsourcing.CommandHandler; import org.apache.james.eventsourcing.Event; import org.apache.james.eventsourcing.eventstore.EventStore; import org.apache.james.mailbox.quota.mailing.QuotaMailingListenerConfiguration; import org.apache.james.mailbox.quota.mailing.aggregates.UserQuotaThresholds; import org.reactivestreams.Publisher; import reactor.core.publisher.Mono; public class DetectThresholdCrossingHandler implements CommandHandler<DetectThresholdCrossing> { private final EventStore eventStore; private final QuotaMailingListenerConfiguration quotaMailingListenerConfiguration; private final String listenerName; public DetectThresholdCrossingHandler(EventStore eventStore, QuotaMailingListenerConfiguration quotaMailingListenerConfiguration) { this.eventStore = eventStore; this.quotaMailingListenerConfiguration = quotaMailingListenerConfiguration; this.listenerName = quotaMailingListenerConfiguration.getName(); } @Override public Publisher<List<? extends Event>> handle(DetectThresholdCrossing command) { return loadAggregate(command) .map(aggregate -> aggregate.detectThresholdCrossing(quotaMailingListenerConfiguration, command)); } private Mono<UserQuotaThresholds> loadAggregate(DetectThresholdCrossing command) { UserQuotaThresholds.Id aggregateId = UserQuotaThresholds.Id.from(command.getUsername(), listenerName); return Mono.from(eventStore.getEventsOfAggregate(aggregateId)) .map(history -> UserQuotaThresholds.fromEvents(aggregateId, history)); } @Override public Class<DetectThresholdCrossing> handledClass() { return DetectThresholdCrossing.class; } }
1,014
3,301
package com.alibaba.alink.params.io.shared; import org.apache.flink.ml.api.misc.param.ParamInfo; import org.apache.flink.ml.api.misc.param.ParamInfoFactory; import org.apache.flink.ml.api.misc.param.WithParams; public interface HasPassword<T> extends WithParams <T> { ParamInfo <String> PASSWORD = ParamInfoFactory .createParamInfo("password", String.class) .setDescription("password") .setRequired() .build(); default String getPassword() { return get(PASSWORD); } default T setPassword(String value) { return set(PASSWORD, value); } }
195
310
<reponame>orekyuu/doma package org.seasar.doma.jdbc.tx; import static org.seasar.doma.internal.util.AssertionUtil.assertNotNull; import java.sql.Savepoint; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Supplier; import org.seasar.doma.message.Message; /** A context for a local transaction. */ public class LocalTransactionContext { private final List<String> savepointNames = new ArrayList<>(); private final Map<String, Savepoint> savepointMap = new HashMap<>(); private LocalTransactionConnection localTxConnection; private Supplier<LocalTransactionConnection> localTxConnectionSupplier; private String id; private boolean rollbackOnly; void begin(Supplier<LocalTransactionConnection> localTxConnectionSupplier) { assertNotNull(localTxConnectionSupplier); id = String.valueOf(System.identityHashCode(localTxConnectionSupplier)); this.localTxConnectionSupplier = localTxConnectionSupplier; } void end() { id = null; localTxConnectionSupplier = null; } LocalTransactionConnection getConnection() { if (localTxConnection == null) { if (localTxConnectionSupplier == null) { throw new TransactionNotYetBegunException(Message.DOMA2048); } localTxConnection = localTxConnectionSupplier.get(); } return localTxConnection; } boolean hasConnection() { return localTxConnection != null; } Savepoint getSavepoint(String savepointName) { assertNotNull(savepointName); return savepointMap.get(savepointName); } void addSavepoint(String savepointName, Savepoint savepoint) { assertNotNull(savepointName, savepoint); savepointNames.add(savepointName); savepointMap.put(savepointName, savepoint); } Savepoint releaseAndGetSavepoint(String savepointName) { assertNotNull(savepointName); Savepoint result = savepointMap.get(savepointName); if (result == null) { return null; } int pos = savepointNames.lastIndexOf(savepointName); if (pos > -1) { List<String> subList = savepointNames.subList(pos, savepointNames.size()); for (String name : subList) { savepointMap.remove(name); } subList.clear(); } return result; } String getId() { return id; } void setRollbackOnly() { this.rollbackOnly = true; } boolean isRollbackOnly() { return rollbackOnly; } }
822
88,283
// Copyright (c) 2019 GitHub, Inc. // Use of this source code is governed by the MIT license that can be // found in the LICENSE file. #include "shell/browser/ui/accelerator_util.h" #include "testing/gtest/include/gtest/gtest.h" namespace accelerator_util { TEST(AcceleratorUtilTest, StringToAccelerator) { struct { const std::string& description; bool expected_success; } keys[] = { {"♫♫♫♫♫♫♫", false}, {"Cmd+Plus", true}, {"Ctrl+Space", true}, {"CmdOrCtrl", false}, {"Alt+Tab", true}, {"AltGr+Backspace", true}, {"Super+Esc", true}, {"Super+X", true}, {"Shift+1", true}, }; for (const auto& key : keys) { // Initialize empty-but-not-null accelerator ui::Accelerator out = ui::Accelerator(ui::VKEY_UNKNOWN, ui::EF_NONE); bool success = StringToAccelerator(key.description, &out); EXPECT_EQ(success, key.expected_success); } } } // namespace accelerator_util
349
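StringToAccelerator itself lives elsewhere in Electron's shell code and is not shown here. As a rough illustration of the '+'-separated descriptions the test feeds it, here is a minimal, self-contained parsing sketch with an assumed (and much reduced) modifier table; it is a stand-in for explanation only, not Electron's actual implementation:

#include <cassert>
#include <sstream>
#include <string>
#include <vector>

// Split an accelerator description such as "Ctrl+Shift+Z" on '+'.
std::vector<std::string> SplitAccelerator(const std::string& description) {
  std::vector<std::string> tokens;
  std::stringstream stream(description);
  std::string token;
  while (std::getline(stream, token, '+'))
    tokens.push_back(token);
  return tokens;
}

// Assumed, abbreviated modifier table for illustration.
bool IsModifier(const std::string& token) {
  return token == "Ctrl" || token == "Cmd" || token == "CmdOrCtrl" ||
         token == "Alt" || token == "AltGr" || token == "Shift" ||
         token == "Super";
}

// A description is only plausible if it names at least one non-modifier key,
// which is why "CmdOrCtrl" on its own fails in the test table above.
bool LooksLikeAccelerator(const std::string& description) {
  bool has_key = false;
  for (const auto& token : SplitAccelerator(description))
    if (!token.empty() && !IsModifier(token))
      has_key = true;
  return has_key;
}

int main() {
  assert(LooksLikeAccelerator("Ctrl+Space"));
  assert(!LooksLikeAccelerator("CmdOrCtrl"));  // modifiers only
  return 0;
}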
683
<filename>test/test_function_linedisplay.py<gh_stars>100-1000 #!/usr/bin/python """tests for line display :author: `<NAME> <<EMAIL>>`_ :organization: `python-escpos <https://github.com/python-escpos>`_ :copyright: Copyright (c) 2017 `python-escpos <https://github.com/python-escpos>`_ :license: MIT """ import escpos.printer as printer def test_function_linedisplay_select_on(): """test the linedisplay_select function (activate)""" instance = printer.Dummy() instance.linedisplay_select(select_display=True) assert(instance.output == b'\x1B\x3D\x02') def test_function_linedisplay_select_off(): """test the linedisplay_select function (deactivate)""" instance = printer.Dummy() instance.linedisplay_select(select_display=False) assert(instance.output == b'\x1B\x3D\x01') def test_function_linedisplay_clear(): """test the linedisplay_clear function""" instance = printer.Dummy() instance.linedisplay_clear() assert(instance.output == b'\x1B\x40')
363
419
<gh_stars>100-1000 #if KRG_ENABLE_NAVPOWER #include "NavmeshBuilder.h" #include "Tools/Physics/ResourceDescriptors/ResourceDescriptor_PhysicsMesh.h" #include "Tools/Core/Resource/RawAssets/RawAssetReader.h" #include "Tools/Core/Resource/RawAssets/RawMesh.h" #include "Engine/Navmesh/NavPower.h" #include "Engine/Navmesh/NavmeshData.h" #include "Engine/Navmesh/Components/Component_Navmesh.h" #include "Engine/Physics/Components/Component_PhysicsMesh.h" #include "Engine/Core/Entity/EntityAccessor.h" #include "Engine/Core/Entity/EntityDescriptors.h" #include "System/Core/FileSystem/FileSystem.h" #include "System/Core/Serialization/BinaryArchive.h" #include <bfxSystem.h> //------------------------------------------------------------------------- namespace KRG { template<> struct TEntityAccessor<Physics::PhysicsMeshComponent> { TEntityAccessor( Physics::PhysicsMeshComponent* pType ) : m_pType( pType ) {} inline ResourceID const& GetMeshResourceID() { return m_pType->m_pPhysicsMesh.GetResourceID(); } protected: Physics::PhysicsMeshComponent* m_pType = nullptr; }; } //------------------------------------------------------------------------- namespace KRG::Navmesh { bool NavmeshBuilder::Error( char const* pFormat, ... ) const { va_list args; va_start( args, pFormat ); Log::AddEntryVarArgs( Log::Severity::Error, "Navmesh Builder", __FILE__, __LINE__, pFormat, args ); va_end( args ); return false; } void NavmeshBuilder::Warning( char const* pFormat, ... ) const { va_list args; va_start( args, pFormat ); Log::AddEntryVarArgs( Log::Severity::Warning, "Navmesh Builder", __FILE__, __LINE__, pFormat, args ); va_end( args ); } void NavmeshBuilder::Message( char const* pFormat, ... ) const { va_list args; va_start( args, pFormat ); Log::AddEntryVarArgs( Log::Severity::Message, "Navmesh Builder", __FILE__, __LINE__, pFormat, args ); va_end( args ); } //------------------------------------------------------------------------- bool NavmeshBuilder::Build( Resource::CompileContext const& ctx, EntityModel::EntityCollectionDescriptor const& entityCollectionDesc, FileSystem::Path const& navmeshResourcePath, NavmeshComponent const* pNavmeshComponent ) { THashMap<ResourcePath, TVector<Transform>> collisionPrimitives; if ( !CollectCollisionPrimitives( ctx, entityCollectionDesc, collisionPrimitives ) ) { return false; } //------------------------------------------------------------------------- if ( !CollectTriangles( ctx, collisionPrimitives ) ) { return false; } //------------------------------------------------------------------------- KRG_ASSERT( pNavmeshComponent != nullptr ); NavmeshData navmeshData; if ( !BuildNavmesh( ctx, pNavmeshComponent, navmeshData ) ) { return false; } //------------------------------------------------------------------------- // Serialize //------------------------------------------------------------------------- FileSystem::EnsurePathExists( navmeshResourcePath ); Serialization::BinaryFileArchive archive( Serialization::Mode::Write, navmeshResourcePath ); if ( archive.IsValid() ) { archive << Resource::ResourceHeader( s_version, NavmeshData::GetStaticResourceTypeID() ) << navmeshData; return true; } else { return false; } } bool NavmeshBuilder::CollectCollisionPrimitives( Resource::CompileContext const& ctx, EntityModel::EntityCollectionDescriptor const& entityCollectionDesc, THashMap<ResourcePath, TVector<Transform>>& collisionPrimitives ) { TVector<Entity*> createdEntities = entityCollectionDesc.InstantiateCollection( nullptr, ctx.m_typeRegistry ); // Update all spatial transforms 
//------------------------------------------------------------------------- // We need to do this since we don't update world transforms when loading a collection for ( auto pEntity : createdEntities ) { if ( pEntity->IsSpatialEntity() ) { pEntity->SetWorldTransform( pEntity->GetWorldTransform() ); } } // Collect all collision geometry //------------------------------------------------------------------------- auto foundPhysicsComponents = entityCollectionDesc.GetComponentsOfType( ctx.m_typeRegistry, Physics::PhysicsMeshComponent::GetStaticTypeID() ); for ( auto const& searchResult : foundPhysicsComponents ) { int32 const entityIdx = entityCollectionDesc.FindEntityIndex( searchResult.m_pEntity->m_name ); KRG_ASSERT( entityIdx != InvalidIndex ); int32 const componentIdx = entityCollectionDesc.GetEntityDescriptors()[entityIdx].FindComponentIndex( searchResult.m_pComponent->m_name ); KRG_ASSERT( componentIdx != InvalidIndex ); Entity const* pEntity = createdEntities[entityIdx]; KRG_ASSERT( pEntity != nullptr ); auto pPhysicsComponent = Cast<Physics::PhysicsMeshComponent>( pEntity->GetComponents()[componentIdx] ); KRG_ASSERT( pPhysicsComponent != nullptr ); TEntityAccessor<Physics::PhysicsMeshComponent> accessor( pPhysicsComponent ); ResourceID geometryResourceID = accessor.GetMeshResourceID(); if ( geometryResourceID.IsValid() ) { collisionPrimitives[geometryResourceID.GetResourcePath()].emplace_back( pPhysicsComponent->GetWorldTransform() ); } } //------------------------------------------------------------------------- for ( auto& pEntity : createdEntities ) { KRG::Delete( pEntity ); } return true; } bool NavmeshBuilder::CollectTriangles( Resource::CompileContext const& ctx, THashMap<ResourcePath, TVector<Transform>> const& collisionPrimitives ) { for ( auto const& primitiveDesc : collisionPrimitives ) { // Load descriptor //------------------------------------------------------------------------- FileSystem::Path descFilePath; if ( !ctx.ConvertResourcePathToFilePath( primitiveDesc.first, descFilePath ) ) { return Error( "Invalid source data path (%s) for physics mesh descriptor", primitiveDesc.first.c_str() ); } Physics::PhysicsMeshResourceDescriptor resourceDescriptor; if ( !Resource::ResourceDescriptor::TryReadFromFile( ctx.m_typeRegistry, descFilePath, resourceDescriptor ) ) { return Error( "Failed to read physics mesh resource descriptor from file: %s", descFilePath.c_str() ); } // Load mesh //------------------------------------------------------------------------- FileSystem::Path meshFilePath; if ( !ctx.ConvertResourcePathToFilePath( resourceDescriptor.m_meshPath, meshFilePath ) ) { return Error( "Invalid source data path (%s) in physics mesh descriptor: %s", resourceDescriptor.m_meshPath.c_str(), descFilePath.c_str() ); } RawAssets::ReaderContext readerCtx = { [this]( char const* pString ) { Warning( pString ); }, [this] ( char const* pString ) { Error( pString ); } }; TUniquePtr<RawAssets::RawMesh> pRawMesh = RawAssets::ReadStaticMesh( readerCtx, meshFilePath, resourceDescriptor.m_meshName ); if ( pRawMesh == nullptr ) { return Error( "Failed to read mesh from source file: %s", meshFilePath.c_str() ); } KRG_ASSERT( pRawMesh->IsValid() ); // Add triangles //------------------------------------------------------------------------- for ( auto const& transform : primitiveDesc.second ) { auto const scale = transform.GetScale(); int32 numNegativelyScaledAxes = ( scale.m_x < 0 ) ? 1 : 0; numNegativelyScaledAxes += ( scale.m_y < 0 ) ? 1 : 0; numNegativelyScaledAxes += ( scale.m_z < 0 ) ?
1 : 0; bool const flipWindingDueToScale = Math::IsOdd( numNegativelyScaledAxes ); //------------------------------------------------------------------------- for ( auto const& geometrySection : pRawMesh->GetGeometrySections() ) { // NavPower expects counterclockwise winding bool flipWinding = geometrySection.m_clockwiseWinding ? true : false; if ( flipWindingDueToScale ) { flipWinding = !flipWinding; } //------------------------------------------------------------------------- int32 const numTriangles = geometrySection.GetNumTriangles(); int32 const numIndices = (int32) geometrySection.m_indices.size(); for ( auto t = 0; t < numTriangles; t++ ) { int32 const i = t * 3; KRG_ASSERT( i <= numIndices - 3 ); // NavPower expects counterclockwise winding int32 const index0 = geometrySection.m_indices[flipWinding ? i + 2 : i]; int32 const index1 = geometrySection.m_indices[i + 1]; int32 const index2 = geometrySection.m_indices[flipWinding ? i : i + 2]; // Add triangle auto& buildFace = m_buildFaces.emplace_back( bfx::BuildFace() ); buildFace.m_type = bfx::WALKABLE_FACE; buildFace.m_verts[0] = ToBfx( transform.TransformPoint( geometrySection.m_vertices[index0].m_position ) ); buildFace.m_verts[1] = ToBfx( transform.TransformPoint( geometrySection.m_vertices[index1].m_position ) ); buildFace.m_verts[2] = ToBfx( transform.TransformPoint( geometrySection.m_vertices[index2].m_position ) ); } } } } return true; } bool NavmeshBuilder::BuildNavmesh( Resource::CompileContext const& ctx, NavmeshComponent const* pNavmeshComponent, NavmeshData& navmeshData ) { KRG_ASSERT( pNavmeshComponent != nullptr ); if ( m_buildFaces.empty() ) { return true; } //------------------------------------------------------------------------- bfx::CustomAllocator* pAllocator = bfx::CreateDLMallocAllocator(); bfx::SystemCreate( bfx::SystemParams( 2.0f, bfx::Z_UP ), pAllocator ); bfx::RegisterBuilderSystem(); bfx::SystemStart(); //------------------------------------------------------------------------- TVector<bfx::BuildParams> layerBuildParams; // Default Layer auto& defaultLayerBuildParams = layerBuildParams.emplace_back( bfx::BuildParams() ); defaultLayerBuildParams.m_voxSize = pNavmeshComponent->m_defaultLayerBuildSettings.m_voxSize; defaultLayerBuildParams.m_height = pNavmeshComponent->m_defaultLayerBuildSettings.m_height; defaultLayerBuildParams.m_radius = pNavmeshComponent->m_defaultLayerBuildSettings.m_radius; defaultLayerBuildParams.m_step = pNavmeshComponent->m_defaultLayerBuildSettings.m_step; defaultLayerBuildParams.m_additionalInwardsSmoothingDist = pNavmeshComponent->m_defaultLayerBuildSettings.m_additionalInwardsSmoothingDist; defaultLayerBuildParams.m_optimizeForAxisAligned = pNavmeshComponent->m_defaultLayerBuildSettings.m_optimizeForAxisAligned; defaultLayerBuildParams.m_dropOffRadius = pNavmeshComponent->m_defaultLayerBuildSettings.m_dropOffRadius; defaultLayerBuildParams.m_maxWalkableSlope = pNavmeshComponent->m_defaultLayerBuildSettings.m_maxWalkableSlope; defaultLayerBuildParams.m_leaveSmallIslandsTouchingPortals = pNavmeshComponent->m_defaultLayerBuildSettings.m_leaveSmallIslandsTouchingPortals; defaultLayerBuildParams.m_minIslandSurfaceArea = pNavmeshComponent->m_defaultLayerBuildSettings.m_minIslandSurfaceArea; defaultLayerBuildParams.m_useEnhancedTerrainTracking = pNavmeshComponent->m_defaultLayerBuildSettings.m_useEnhancedTerrainTracking; defaultLayerBuildParams.m_tessellateForPathingAccuracy = pNavmeshComponent->m_defaultLayerBuildSettings.m_tessellateForPathingAccuracy; // Additional Layers for ( auto 
const& layerSettings : pNavmeshComponent->m_additionalLayerBuildSettings ) { auto& buildParams = layerBuildParams.emplace_back( bfx::BuildParams() ); buildParams.m_voxSize = layerSettings.m_voxSize; buildParams.m_height = layerSettings.m_height; buildParams.m_radius = layerSettings.m_radius; buildParams.m_step = layerSettings.m_step; buildParams.m_additionalInwardsSmoothingDist = layerSettings.m_additionalInwardsSmoothingDist; buildParams.m_optimizeForAxisAligned = layerSettings.m_optimizeForAxisAligned; buildParams.m_dropOffRadius = layerSettings.m_dropOffRadius; buildParams.m_maxWalkableSlope = layerSettings.m_maxWalkableSlope; buildParams.m_leaveSmallIslandsTouchingPortals = layerSettings.m_leaveSmallIslandsTouchingPortals; buildParams.m_minIslandSurfaceArea = layerSettings.m_minIslandSurfaceArea; buildParams.m_useEnhancedTerrainTracking = layerSettings.m_useEnhancedTerrainTracking; buildParams.m_tessellateForPathingAccuracy = layerSettings.m_tessellateForPathingAccuracy; } //------------------------------------------------------------------------- bfx::SurfaceNavigationInput surfaceInput; surfaceInput.m_globalParams.m_maxNumCores = 16; surfaceInput.m_pFaces = m_buildFaces.data(); surfaceInput.m_numFaces = (uint32) m_buildFaces.size(); surfaceInput.m_pParams = layerBuildParams.data(); surfaceInput.m_numParams = (uint32) layerBuildParams.size(); surfaceInput.m_globalParams.m_enableMulticoreBuild = true; surfaceInput.m_globalParams.m_maxNumCores = Threading::GetProcessorInfo().m_numPhysicalCores; //------------------------------------------------------------------------- if ( pNavmeshComponent->m_enableBuildLogging ) { bfx::EnableBuildLog( "D:\\buildlog.bfx_log", true ); } bfx::NavGraphImage* pGraphImage = bfx::CreateNavGraphImage( surfaceInput, bfx::PlatformParams() ); // Copy graph image data into the resource navmeshData.m_graphImage.resize( pGraphImage->GetNumBytes() ); memcpy( navmeshData.m_graphImage.data(), pGraphImage->GetPtr(), pGraphImage->GetNumBytes() ); bfx::DestroyNavGraphImage( pGraphImage ); //------------------------------------------------------------------------- bfx::SystemStop(); bfx::SystemDestroy(); bfx::DestroyAllocator( pAllocator ); return true; } } #endif
6,524
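The winding correction in CollectTriangles rests on a parity rule: a transform with an odd number of negatively scaled axes mirrors the geometry and therefore reverses triangle winding, on top of any flip already needed to convert clockwise source data to the counterclockwise order NavPower expects. A standalone sketch of just that rule, with hypothetical helper names and simplified types:

#include <cassert>

struct Scale { float x, y, z; };

// An odd number of negative scale axes mirrors the geometry, which flips
// the triangle winding order. Combined with the source winding, this decides
// whether the index order must be reversed when emitting faces.
bool ShouldFlipWinding(const Scale& scale, bool sourceIsClockwise)
{
    int negativeAxes = (scale.x < 0.0f) + (scale.y < 0.0f) + (scale.z < 0.0f);
    bool flip = sourceIsClockwise;   // target winding is counterclockwise
    if (negativeAxes % 2 == 1)       // mirrored transform flips winding again
        flip = !flip;
    return flip;
}

int main()
{
    // Clockwise source, no mirroring: indices must be flipped once.
    assert(ShouldFlipWinding({1.0f, 1.0f, 1.0f}, true) == true);
    // Clockwise source mirrored on one axis: the two flips cancel out.
    assert(ShouldFlipWinding({-1.0f, 1.0f, 1.0f}, true) == false);
    // Counterclockwise source mirrored on two axes: still no flip needed.
    assert(ShouldFlipWinding({-1.0f, -1.0f, 1.0f}, false) == false);
    return 0;
}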