Dataset columns:
  max_stars_count (int64): 301 to 224k
  text (string): lengths 6 to 1.05M
  token_count (int64): 3 to 727k
max_stars_count: 549
<reponame>aleanza/notejam<filename>spring/src/test/java/net/notejam/spring/note/CreateNoteControllerTest.java package net.notejam.spring.note; import static net.notejam.spring.test.UriUtil.getPathVariable; import static net.notejam.spring.test.UriUtil.redirectToAuthentication; import static org.hamcrest.Matchers.empty; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.user; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.view; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.test.context.support.WithMockUser; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.web.servlet.MvcResult; import net.notejam.spring.URITemplates; import net.notejam.spring.note.controller.CreateNoteController; import net.notejam.spring.pad.Pad; import net.notejam.spring.pad.PadService; import net.notejam.spring.test.IntegrationTest; import net.notejam.spring.test.MockMvcProvider; import net.notejam.spring.user.SignedUpUserProvider; import net.notejam.spring.user.UserService; /** * An integration test for the {@link CreateNoteController}. * * @author <EMAIL> * @see <a href="bitcoin:1335STSwu9hST4vcMRppEPgENMHD2r1REK">Donations</a> */ @IntegrationTest @RunWith(SpringJUnit4ClassRunner.class) public class CreateNoteControllerTest { @Autowired private UserService userService; @Autowired private PadService padService; @Autowired private NoteRepository repository; @Rule @Autowired public MockMvcProvider mockMvcProvider; @Rule @Autowired public SignedUpUserProvider userProvider; /** * Note can be successfully created. */ @Test @WithMockUser(SignedUpUserProvider.EMAIL) public void noteCanBeCreated() throws Exception { final String name = "name"; final String text = "text"; mockMvcProvider.getMockMvc().perform(post(URITemplates.CREATE_NOTE) .param("name", name) .param("text", text) .with(csrf())) .andExpect(model().hasNoErrors()) .andExpect((MvcResult result) -> { int id = Integer.parseInt(getPathVariable("id", URITemplates.VIEW_NOTE, result.getResponse().getRedirectedUrl())); Note note = repository.findOne(id); assertEquals(name, note.getName()); assertEquals(text, note.getText()); assertEquals(SignedUpUserProvider.EMAIL, note.getUser().getEmail()); assertNull(note.getPad()); }); } /** * Note can't be created by anonymous user. */ @Test public void noteCannotBeCreatedByAnonymous() throws Exception { mockMvcProvider.getMockMvc().perform(post(URITemplates.CREATE_NOTE) .param("name", "name") .param("text", "text") .with(csrf())) .andExpect(redirectToAuthentication()); assertThat(repository.findAll(), empty()); } /** * Note can't be created if required fields are missing. 
*/ @Test @WithMockUser(SignedUpUserProvider.EMAIL) public void noteCannotBeCreatedIfFieldsAreMissing() throws Exception { mockMvcProvider.getMockMvc().perform(post(URITemplates.CREATE_NOTE) .param("text", "text") .with(csrf())) .andExpect(model().attributeHasFieldErrors("note", "name")) .andExpect(view().name("note/create")); assertThat(repository.findAll(), empty()); } /** * Note can't be added into another's user pad. */ @Test @WithMockUser(SignedUpUserProvider.EMAIL) public void noteCannotBeAddedIntoAnotherUserPad() throws Exception { final String otherUser = "<EMAIL>"; userService.signUp(otherUser, "password"); final Pad pad = padService.buildPad(); pad.setName("name"); padService.savePad(pad); mockMvcProvider.getMockMvc().perform(post(URITemplates.CREATE_NOTE) .param("name", "name") .param("text", "text") .param("pad", pad.getId().toString()) .with(csrf()) .with(user(otherUser))) .andExpect(status().is(403)); assertThat(repository.findAll(), empty()); } }
token_count: 2,139
max_stars_count: 2,261
{ "preserveFontFaces": true, "preserveMediaQueries": true, "insertPreservedExtraCss": "div", "extraCss": "@font-face { font-family: 'Extra'; }" }
token_count: 68
max_stars_count: 465
package com.ansel.service.impl; import java.util.Date; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; import com.ansel.bean.BillInfo; import com.ansel.bean.BillRelease; import com.ansel.bean.GoodsReceiptInfo; import com.ansel.dao.IBillInfoDao; import com.ansel.dao.IBillReleaseDao; import com.ansel.dao.ICargoReceiptDao; import com.ansel.dao.ICargoReceiptDetailDao; import com.ansel.dao.IGoodsBillDao; import com.ansel.dao.IGoodsBillEventDao; import com.ansel.dao.IGoodsReceiptInfoDao; import com.ansel.service.IBillService; @Service(value = "billService") @Transactional(propagation = Propagation.REQUIRED) public class BillServiceImpl implements IBillService { @Autowired private IBillInfoDao billInfoDao; @Autowired private IBillReleaseDao billReleaseDao; @Autowired private IGoodsBillEventDao goodsBillEventDao; @Autowired private IGoodsBillDao goodsBillDao; @Autowired private ICargoReceiptDao cargoReceiptDao; @Autowired private ICargoReceiptDetailDao cargoReceiptDetailDao; @Autowired private IGoodsReceiptInfoDao goodsReceiptInfoDao; @Override public Page<BillInfo> findAllByPage(Pageable pageable) { // TODO Auto-generated method stub return billInfoDao.findAll(pageable); } @Override public Page<BillInfo> findNotRelease(Pageable pageable) { // TODO Auto-generated method stub return billInfoDao.findNotRelease(pageable); } @Override public boolean addRelease(BillRelease billRelease) { // TODO Auto-generated method stub billRelease.setBillType("货运单"); String billCode = billRelease.getBillCode(); try { billReleaseDao.save(billRelease); goodsBillEventDao.updateEventName("未到", new Date(), billCode); String goodsRevertBillId = cargoReceiptDetailDao.findByGoodsBillDetailId(billCode).getGoodsRevertBillId(); cargoReceiptDao.updateRelease(billRelease.getReceiveBillTime(), billRelease.getReceiveBillPerson(), "未到车辆", goodsRevertBillId); return true; } catch (Exception e) { // TODO: handle exception e.printStackTrace(); System.err.println("单据明细表插入失败 | 货运单 & 货运回执信息 更新失败"); return false; } } @Override public boolean addGoodsReceipt(GoodsReceiptInfo goodsReceiptInfo) { // TODO Auto-generated method stub String goodsRevertBillId = goodsReceiptInfo.getGoodsRevertCode(); String billId = cargoReceiptDetailDao.findByGoodsRevertBillId(goodsRevertBillId).getGoodsBillDetailId(); try { goodsReceiptInfoDao.save(goodsReceiptInfo); goodsBillEventDao.updateEventName("未结", new Date(), billId); goodsBillDao.updateFactDealDate(goodsReceiptInfo.getRceiveGoodsDate(), billId); cargoReceiptDao.updateArriveTime(goodsReceiptInfo.getRceiveGoodsDate(), "未结合同", goodsRevertBillId); return true; } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); System.err.println("货物回执信息添加失败"); return false; } } }
token_count: 1,257
max_stars_count: 852
import FWCore.ParameterSet.Config as cms
import RecoTracker.TransientTrackingRecHit.TransientTrackingRecHitBuilder_cfi

myTTRHBuilderWithoutAngle4PixelPairs = RecoTracker.TransientTrackingRecHit.TransientTrackingRecHitBuilder_cfi.ttrhbwr.clone(
    StripCPE = 'Fake',
    ComponentName = 'TTRHBuilderWithoutAngle4PixelPairs'
)
token_count: 118
max_stars_count: 1,682
/* Copyright (c) 2019 LinkedIn Corp. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.linkedin.restli.internal.server.model; import com.linkedin.restli.common.EmptyRecord; import com.linkedin.restli.server.errors.ServiceError; import java.util.ArrayList; import java.util.Arrays; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; /** * Tests for {@link ResourceModel}. * * @author <NAME> */ public class TestResourceModel { @DataProvider(name = "isAnyServiceErrorListDefinedData") public Object[][] providesIsAnyServiceErrorListDefinedData() { return new Object[][] { // No service errors or resource methods at all { null, makeMockResourceMethodDescriptors(), false }, // Empty resource-level service errors list but not resource methods { new ServiceError[] {}, makeMockResourceMethodDescriptors(), true }, // One resource-level service error but no resource methods { new ServiceError[] { SampleResources.SampleServiceError.ERROR_A }, makeMockResourceMethodDescriptors(), true }, // No resource-level service errors but one resource method with a service error { null, makeMockResourceMethodDescriptors(true), true }, // No resource-level service errors and two resource methods without service errors { null, makeMockResourceMethodDescriptors(false, false), false }, // No resource-level service errors but three resource methods with mixed service errors { null, makeMockResourceMethodDescriptors(false, true, false), true }, // Two resource-level service errors and two resource methods with service errors { new ServiceError[] { SampleResources.SampleServiceError.ERROR_A, SampleResources.SampleServiceError.ERROR_B }, makeMockResourceMethodDescriptors(true, true), true } }; } /** * Creates an array of mock {@link ResourceMethodDescriptor} objects by mocking the result of the method call * {@link ResourceMethodDescriptor#getServiceErrors()} for each. * * @param definesServiceErrorsArray whether each mock method descriptor defines service errors * @return array of mocked objects */ private ResourceMethodDescriptor[] makeMockResourceMethodDescriptors(Boolean ... definesServiceErrorsArray) { final ResourceMethodDescriptor[] resourceMethodDescriptors = new ResourceMethodDescriptor[definesServiceErrorsArray.length]; int i = 0; for (boolean definesServiceErrors : definesServiceErrorsArray) { final ResourceMethodDescriptor resourceMethodDescriptor = Mockito.mock(ResourceMethodDescriptor.class); Mockito.when(resourceMethodDescriptor.getServiceErrors()).thenReturn(definesServiceErrors ? new ArrayList<>() : null); resourceMethodDescriptors[i++] = resourceMethodDescriptor; } return resourceMethodDescriptors; } /** * Ensures that the logic in {@link ResourceModel#isAnyServiceErrorListDefined()} is correct. 
* * @param resourceLevelServiceErrors resource-level service errors * @param resourceMethodDescriptors resource method descriptors possibly containing method-level service errors * @param expected expected result of the method call */ @Test(dataProvider = "isAnyServiceErrorListDefinedData") public void testIsAnyServiceErrorListDefined(ServiceError[] resourceLevelServiceErrors, ResourceMethodDescriptor[] resourceMethodDescriptors, boolean expected) { // Create dummy resource model final ResourceModel resourceModel = new ResourceModel(EmptyRecord.class, SampleResources.CollectionCollectionResource.class, null, "collectionCollection", ResourceType.COLLECTION, "com.linkedin.restli.internal.server.model", "collectionCollection"); // Add resource-level service errors if (resourceLevelServiceErrors == null) { resourceModel.setServiceErrors(null); } else { resourceModel.setServiceErrors(Arrays.asList(resourceLevelServiceErrors)); } // Add mock resource method descriptors for (ResourceMethodDescriptor resourceMethodDescriptor : resourceMethodDescriptors) { resourceModel.addResourceMethodDescriptor(resourceMethodDescriptor); } Assert.assertEquals(expected, resourceModel.isAnyServiceErrorListDefined(), "Cannot correctly compute whether resource model defines resource-level or method-level service errors."); } }
token_count: 2,119
max_stars_count: 1,745
<reponame>bsf2dev/bsf //************************************ bs::framework - Copyright 2018 <NAME> **************************************// //*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********// #include "BsD3D11TextureView.h" #include "BsD3D11RenderAPI.h" #include "BsD3D11Device.h" #include "BsD3D11Texture.h" #include "Profiling/BsRenderStats.h" #include "BsD3D11Mappings.h" #include "Error/BsException.h" namespace bs { namespace ct { D3D11TextureView::D3D11TextureView(const D3D11Texture* texture, const TEXTURE_VIEW_DESC& desc) : TextureView(desc) { if ((mDesc.usage & GVU_RANDOMWRITE) != 0) mUAV = createUAV(texture, mDesc.mostDetailMip, mDesc.firstArraySlice, mDesc.numArraySlices); else if ((mDesc.usage & GVU_RENDERTARGET) != 0) mRTV = createRTV(texture, mDesc.mostDetailMip, mDesc.firstArraySlice, mDesc.numArraySlices); else if ((mDesc.usage & GVU_DEPTHSTENCIL) != 0) { mWDepthWStencilView = createDSV(texture, mDesc.mostDetailMip, mDesc.firstArraySlice, mDesc.numArraySlices, false, false); mRODepthWStencilView = createDSV(texture, mDesc.mostDetailMip, mDesc.firstArraySlice, mDesc.numArraySlices, true, false); mRODepthROStencilView = createDSV(texture, mDesc.mostDetailMip, mDesc.firstArraySlice, mDesc.numArraySlices, true, true); mWDepthROStencilView = createDSV(texture, mDesc.mostDetailMip, mDesc.firstArraySlice, mDesc.numArraySlices, false, true); } else mSRV = createSRV(texture, mDesc.mostDetailMip, mDesc.numMips, mDesc.firstArraySlice, mDesc.numArraySlices); } D3D11TextureView::~D3D11TextureView() { SAFE_RELEASE(mSRV); SAFE_RELEASE(mUAV); SAFE_RELEASE(mRTV); SAFE_RELEASE(mWDepthWStencilView); SAFE_RELEASE(mWDepthROStencilView); SAFE_RELEASE(mRODepthWStencilView); SAFE_RELEASE(mRODepthROStencilView); } ID3D11DepthStencilView* D3D11TextureView::getDSV(bool readOnlyDepth, bool readOnlyStencil) const { if(readOnlyDepth) { if (readOnlyStencil) return mRODepthROStencilView; else return mRODepthWStencilView; } else { if (readOnlyStencil) return mWDepthROStencilView; else return mWDepthWStencilView; } } ID3D11ShaderResourceView* D3D11TextureView::createSRV(const D3D11Texture* texture, UINT32 mostDetailMip, UINT32 numMips, UINT32 firstArraySlice, UINT32 numArraySlices) { D3D11_SHADER_RESOURCE_VIEW_DESC desc; ZeroMemory(&desc, sizeof(desc)); const TextureProperties& texProps = texture->getProperties(); UINT32 numFaces = texProps.getNumFaces(); switch (texProps.getTextureType()) { case TEX_TYPE_1D: if (numFaces <= 1) { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE1D; desc.Texture1D.MipLevels = numMips; desc.Texture1D.MostDetailedMip = mostDetailMip; } else { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE1DARRAY; desc.Texture1DArray.MipLevels = numMips; desc.Texture1DArray.MostDetailedMip = mostDetailMip; desc.Texture1DArray.FirstArraySlice = firstArraySlice; desc.Texture1DArray.ArraySize = numArraySlices; } break; case TEX_TYPE_2D: if (texProps.getNumSamples() > 1) { if (numFaces <= 1) { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DMS; } else { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DMSARRAY; desc.Texture2DMSArray.FirstArraySlice = firstArraySlice; desc.Texture2DMSArray.ArraySize = numArraySlices; } } else { if (numFaces <= 1) { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; desc.Texture2D.MipLevels = numMips; desc.Texture2D.MostDetailedMip = mostDetailMip; } else { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.MipLevels = numMips; desc.Texture2DArray.MostDetailedMip = 
mostDetailMip; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; } } break; case TEX_TYPE_3D: desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE3D; desc.Texture3D.MipLevels = numMips; desc.Texture3D.MostDetailedMip = mostDetailMip; break; case TEX_TYPE_CUBE_MAP: if(numFaces % 6 == 0) { if (numFaces == 6) { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURECUBE; desc.TextureCube.MipLevels = numMips; desc.TextureCube.MostDetailedMip = mostDetailMip; } else { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURECUBEARRAY; desc.TextureCubeArray.MipLevels = numMips; desc.TextureCubeArray.MostDetailedMip = mostDetailMip; desc.TextureCubeArray.First2DArrayFace = firstArraySlice; desc.TextureCubeArray.NumCubes = numArraySlices / 6; } } else { desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.MipLevels = numMips; desc.Texture2DArray.MostDetailedMip = mostDetailMip; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; } break; default: BS_EXCEPT(InvalidParametersException, "Invalid texture type for this view type."); } desc.Format = texture->getColorFormat(); ID3D11ShaderResourceView* srv = nullptr; D3D11RenderAPI* d3d11rs = static_cast<D3D11RenderAPI*>(D3D11RenderAPI::instancePtr()); HRESULT hr = d3d11rs->getPrimaryDevice().getD3D11Device()->CreateShaderResourceView(texture->getDX11Resource(), &desc, &srv); if (FAILED(hr) || d3d11rs->getPrimaryDevice().hasError()) { String msg = d3d11rs->getPrimaryDevice().getErrorDescription(); BS_EXCEPT(RenderingAPIException, "Cannot create ShaderResourceView: " + msg); } return srv; } ID3D11RenderTargetView* D3D11TextureView::createRTV(const D3D11Texture* texture, UINT32 mipSlice, UINT32 firstArraySlice, UINT32 numArraySlices) { D3D11_RENDER_TARGET_VIEW_DESC desc; ZeroMemory(&desc, sizeof(desc)); const TextureProperties& texProps = texture->getProperties(); UINT32 numFaces = texProps.getNumFaces(); switch (texProps.getTextureType()) { case TEX_TYPE_1D: if (numFaces <= 1) { desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE1D; desc.Texture1D.MipSlice = mipSlice; } else { desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE1DARRAY; desc.Texture1DArray.MipSlice = mipSlice; desc.Texture1DArray.FirstArraySlice = firstArraySlice; desc.Texture1DArray.ArraySize = numArraySlices; } break; case TEX_TYPE_2D: if (texProps.getNumSamples() > 1) { if (numFaces <= 1) { desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DMS; } else { desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DMSARRAY; desc.Texture2DMSArray.FirstArraySlice = firstArraySlice; desc.Texture2DMSArray.ArraySize = numArraySlices; } } else { if (numFaces <= 1) { desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; desc.Texture2D.MipSlice = mipSlice; } else { desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.MipSlice = mipSlice; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; } } break; case TEX_TYPE_3D: desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE3D; desc.Texture3D.MipSlice = mipSlice; desc.Texture3D.FirstWSlice = 0; desc.Texture3D.WSize = texProps.getDepth(); break; case TEX_TYPE_CUBE_MAP: desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; desc.Texture2DArray.MipSlice = mipSlice; break; default: BS_EXCEPT(InvalidParametersException, "Invalid texture type for this view type."); } desc.Format = 
texture->getColorFormat(); ID3D11RenderTargetView* rtv = nullptr; D3D11RenderAPI* d3d11rs = static_cast<D3D11RenderAPI*>(D3D11RenderAPI::instancePtr()); HRESULT hr = d3d11rs->getPrimaryDevice().getD3D11Device()->CreateRenderTargetView(texture->getDX11Resource(), &desc, &rtv); if (FAILED(hr) || d3d11rs->getPrimaryDevice().hasError()) { String msg = d3d11rs->getPrimaryDevice().getErrorDescription(); BS_EXCEPT(RenderingAPIException, "Cannot create RenderTargetView: " + msg); } return rtv; } ID3D11UnorderedAccessView* D3D11TextureView::createUAV(const D3D11Texture* texture, UINT32 mipSlice, UINT32 firstArraySlice, UINT32 numArraySlices) { D3D11_UNORDERED_ACCESS_VIEW_DESC desc; ZeroMemory(&desc, sizeof(desc)); const TextureProperties& texProps = texture->getProperties(); UINT32 numFaces = texProps.getNumFaces(); switch (texProps.getTextureType()) { case TEX_TYPE_1D: if (numFaces <= 1) { desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE1D; desc.Texture1D.MipSlice = mipSlice; } else { desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE1DARRAY; desc.Texture1DArray.MipSlice = mipSlice; desc.Texture1DArray.FirstArraySlice = firstArraySlice; desc.Texture1DArray.ArraySize = numArraySlices; } break; case TEX_TYPE_2D: if (numFaces <= 1) { desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2D; desc.Texture2D.MipSlice = mipSlice; } else { desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.MipSlice = mipSlice; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; } break; case TEX_TYPE_3D: desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE3D; desc.Texture3D.MipSlice = mipSlice; desc.Texture3D.FirstWSlice = 0; desc.Texture3D.WSize = texProps.getDepth(); break; case TEX_TYPE_CUBE_MAP: desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; desc.Texture2DArray.MipSlice = mipSlice; break; default: BS_EXCEPT(InvalidParametersException, "Invalid texture type for this view type."); } desc.Format = texture->getDXGIFormat(); ID3D11UnorderedAccessView* uav = nullptr; D3D11RenderAPI* d3d11rs = static_cast<D3D11RenderAPI*>(D3D11RenderAPI::instancePtr()); HRESULT hr = d3d11rs->getPrimaryDevice().getD3D11Device()->CreateUnorderedAccessView(texture->getDX11Resource(), &desc, &uav); if (FAILED(hr) || d3d11rs->getPrimaryDevice().hasError()) { String msg = d3d11rs->getPrimaryDevice().getErrorDescription(); BS_EXCEPT(RenderingAPIException, "Cannot create UnorderedAccessView: " + msg); } return uav; } ID3D11DepthStencilView* D3D11TextureView::createDSV(const D3D11Texture* texture, UINT32 mipSlice, UINT32 firstArraySlice, UINT32 numArraySlices, bool readOnlyDepth, bool readOnlyStencil) { D3D11_DEPTH_STENCIL_VIEW_DESC desc; ZeroMemory(&desc, sizeof(desc)); const TextureProperties& texProps = texture->getProperties(); UINT32 numFaces = texProps.getNumFaces(); switch (texProps.getTextureType()) { case TEX_TYPE_1D: if (numFaces <= 1) { desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE1D; desc.Texture1D.MipSlice = mipSlice; } else { desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE1DARRAY; desc.Texture1DArray.MipSlice = mipSlice; desc.Texture1DArray.FirstArraySlice = firstArraySlice; desc.Texture1DArray.ArraySize = numArraySlices; } break; case TEX_TYPE_2D: if (texProps.getNumSamples() > 1) { if (numFaces <= 1) { desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS; } else { desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMSARRAY; 
desc.Texture2DMSArray.FirstArraySlice = firstArraySlice; desc.Texture2DMSArray.ArraySize = numArraySlices; } } else { if (numFaces <= 1) { desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D; desc.Texture2D.MipSlice = mipSlice; } else { desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.MipSlice = mipSlice; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; } } break; case TEX_TYPE_3D: desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.FirstArraySlice = 0; desc.Texture2DArray.ArraySize = texProps.getDepth(); desc.Texture2DArray.MipSlice = mipSlice; break; case TEX_TYPE_CUBE_MAP: desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DARRAY; desc.Texture2DArray.FirstArraySlice = firstArraySlice; desc.Texture2DArray.ArraySize = numArraySlices; desc.Texture2DArray.MipSlice = mipSlice; break; default: BS_EXCEPT(InvalidParametersException, "Invalid texture type for this view type."); } desc.Format = texture->getDepthStencilFormat(); if (readOnlyDepth) desc.Flags = D3D11_DSV_READ_ONLY_DEPTH; bool hasStencil = desc.Format == DXGI_FORMAT_D32_FLOAT_S8X24_UINT || desc.Format == DXGI_FORMAT_D24_UNORM_S8_UINT; if (readOnlyStencil && hasStencil) desc.Flags |= D3D11_DSV_READ_ONLY_STENCIL; ID3D11DepthStencilView* dsv = nullptr; D3D11RenderAPI* d3d11rs = static_cast<D3D11RenderAPI*>(D3D11RenderAPI::instancePtr()); HRESULT hr = d3d11rs->getPrimaryDevice().getD3D11Device()->CreateDepthStencilView(texture->getDX11Resource(), &desc, &dsv); if (FAILED(hr) || d3d11rs->getPrimaryDevice().hasError()) { String msg = d3d11rs->getPrimaryDevice().getErrorDescription(); BS_EXCEPT(RenderingAPIException, "Cannot create DepthStencilView: " + msg); } return dsv; } }}
token_count: 6,117
max_stars_count: 1,469
/*- * ========================LICENSE_START================================= * Bucket4j * %% * Copyright (C) 2015 - 2020 <NAME> * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * =========================LICENSE_END================================== */ package io.github.bucket4j; import io.github.bucket4j.serialization.DeserializationAdapter; import io.github.bucket4j.serialization.SerializationAdapter; import io.github.bucket4j.serialization.SerializationHandle; import java.io.IOException; import java.io.Serializable; /** * Describes the estimation result. * * @see Bucket#estimateAbilityToConsume(long) * @see AsyncBucket#estimateAbilityToConsume(long) */ public class EstimationProbe implements Serializable { private static final long serialVersionUID = 42L; private final boolean canBeConsumed; private final long remainingTokens; private final long nanosToWaitForRefill; public static final SerializationHandle<EstimationProbe> SERIALIZATION_HANDLE = new SerializationHandle<EstimationProbe>() { @Override public <S> EstimationProbe deserialize(DeserializationAdapter<S> adapter, S input) throws IOException { boolean canBeConsumed = adapter.readBoolean(input); long remainingTokens = adapter.readLong(input); long nanosToWaitForRefill = adapter.readLong(input); return new EstimationProbe(canBeConsumed, remainingTokens, nanosToWaitForRefill); } @Override public <O> void serialize(SerializationAdapter<O> adapter, O output, EstimationProbe probe) throws IOException { adapter.writeBoolean(output, probe.canBeConsumed); adapter.writeLong(output, probe.remainingTokens); adapter.writeLong(output, probe.nanosToWaitForRefill); } @Override public int getTypeId() { return 16; } @Override public Class<EstimationProbe> getSerializedType() { return EstimationProbe.class; } }; public static EstimationProbe canBeConsumed(long remainingTokens) { return new EstimationProbe(true, remainingTokens, 0); } public static EstimationProbe canNotBeConsumed(long remainingTokens, long nanosToWaitForRefill) { return new EstimationProbe(false, remainingTokens, nanosToWaitForRefill); } private EstimationProbe(boolean canBeConsumed, long remainingTokens, long nanosToWaitForRefill) { this.canBeConsumed = canBeConsumed; this.remainingTokens = Math.max(0L, remainingTokens); this.nanosToWaitForRefill = nanosToWaitForRefill; } /** * Flag describes result of consumption operation. 
* * @return true if requested tokens can be consumed */ public boolean canBeConsumed() { return canBeConsumed; } /** * Return the tokens remaining in the bucket * * @return the tokens remaining in the bucket */ public long getRemainingTokens() { return remainingTokens; } /** * Returns zero if {@link #canBeConsumed()} returns true, else time in nanos which need to wait until requested amount of tokens will be refilled * * @return Zero if {@link #canBeConsumed()} returns true, else time in nanos which need to wait until requested amount of tokens will be refilled */ public long getNanosToWaitForRefill() { return nanosToWaitForRefill; } @Override public String toString() { return "ConsumptionResult{" + "isConsumed=" + canBeConsumed + ", remainingTokens=" + remainingTokens + ", nanosToWaitForRefill=" + nanosToWaitForRefill + '}'; } }
token_count: 1,492
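The EstimationProbe sample above is the value returned by Bucket4j's Bucket#estimateAbilityToConsume(long), as its own javadoc states. A minimal usage sketch follows, assuming the pre-7.0 Bucket4j builder API (Bucket4j.builder() and Bandwidth.simple), which matches this file's 2015-2020 copyright range; the bucket limit chosen here is illustrative only.

import io.github.bucket4j.Bandwidth;
import io.github.bucket4j.Bucket;
import io.github.bucket4j.Bucket4j;
import io.github.bucket4j.EstimationProbe;

import java.time.Duration;

public class EstimationProbeExample {
    public static void main(String[] args) {
        // Assumed pre-7.0 builder API: a bucket holding at most 10 tokens, refilled every minute.
        Bucket bucket = Bucket4j.builder()
                .addLimit(Bandwidth.simple(10, Duration.ofMinutes(1)))
                .build();

        // Estimation does not consume tokens; it only reports whether consumption would succeed.
        EstimationProbe probe = bucket.estimateAbilityToConsume(5);
        if (probe.canBeConsumed()) {
            System.out.println("5 tokens can be consumed; remaining: " + probe.getRemainingTokens());
        } else {
            System.out.println("need to wait ~" + probe.getNanosToWaitForRefill() + " ns for refill");
        }
    }
}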
max_stars_count: 963
<filename>core/src/test/java/com/vladmihalcea/book/hpjp/hibernate/transaction/identifier/TransactionIdTest.java package com.vladmihalcea.book.hpjp.hibernate.transaction.identifier; import com.vladmihalcea.book.hpjp.util.AbstractTest; import com.vladmihalcea.book.hpjp.util.providers.Database; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Table; import javax.persistence.Version; import java.util.Arrays; import java.util.Collection; /** * @author <NAME> */ @RunWith(Parameterized.class) public class TransactionIdTest extends AbstractTest { private Database database; public TransactionIdTest(Database database) { this.database = database; } @Override protected Database database() { return database; } @Override protected Class<?>[] entities() { return new Class<?>[]{ Post.class }; } @Parameterized.Parameters public static Collection<Object[]> databases() { return Arrays.asList( new Object[]{Database.HSQLDB}, new Object[]{Database.ORACLE}, new Object[]{Database.SQLSERVER}, new Object[]{Database.POSTGRESQL}, new Object[]{Database.MYSQL} ); } @Test public void test() { doInJPA(entityManager -> { Post post = new Post(); post.setId(1L); post.setTitle("High-Performance Java Persistence"); entityManager.persist(post); LOGGER.info("Current transaction id: {}", transactionId(entityManager)); }); } @Entity(name = "Post") @Table(name = "post") public static class Post { @Id private Long id; private String title; @Version private int version; public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } } }
token_count: 984
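The TransactionIdTest above logs transactionId(entityManager), a helper inherited from AbstractTest whose body is not included in this sample. Below is a hypothetical sketch of what such a helper could look like for the PostgreSQL case only; the class name, method name, and the assumption that a native query against txid_current() is used are mine, not taken from the sampled repository.

import javax.persistence.EntityManager;

public final class TransactionIdUtil {

    private TransactionIdUtil() {
    }

    // Hypothetical helper: returns the current PostgreSQL transaction id as text.
    // The real AbstractTest#transactionId is database-specific and not shown in the sample.
    public static String postgresTransactionId(EntityManager entityManager) {
        return (String) entityManager
                .createNativeQuery("SELECT CAST(txid_current() AS text)")
                .getSingleResult();
    }
}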
max_stars_count: 751
<reponame>FDio/vpp<filename>src/vnet/devices/tap/tap.c /* *------------------------------------------------------------------ * Copyright (c) 2017 Cisco and/or its affiliates. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *------------------------------------------------------------------ */ #define _GNU_SOURCE #include <sys/types.h> #include <sys/stat.h> #include <sys/socket.h> #include <fcntl.h> #include <net/if.h> #include <linux/if_tun.h> #include <sys/ioctl.h> #include <linux/ethtool.h> #include <linux/sockios.h> #include <sys/eventfd.h> #include <net/if_arp.h> #include <limits.h> #include <linux/netlink.h> #include <linux/rtnetlink.h> #include <vlib/vlib.h> #include <vlib/physmem.h> #include <vlib/unix/unix.h> #include <vppinfra/linux/netns.h> #include <vnet/ethernet/ethernet.h> #include <vnet/ip/ip4_packet.h> #include <vnet/ip/ip6_packet.h> #include <vnet/devices/netlink.h> #include <vnet/devices/virtio/virtio.h> #include <vnet/devices/tap/tap.h> tap_main_t tap_main; #define tap_log_err(dev, f, ...) \ vlib_log (VLIB_LOG_LEVEL_ERR, tap_main.log_default, "tap%u: " f, dev->dev_instance, ## __VA_ARGS__) #define tap_log_dbg(dev, f, ...) \ vlib_log (VLIB_LOG_LEVEL_DEBUG, tap_main.log_default, "tap%u: " f, dev->dev_instance, ## __VA_ARGS__) #define _IOCTL(fd,a,...) 
\ if (ioctl (fd, a, __VA_ARGS__) < 0) \ { \ err = clib_error_return_unix (0, "ioctl(" #a ")"); \ tap_log_err (vif, "%U", format_clib_error, err); \ goto error; \ } /* *INDENT-OFF* */ VNET_HW_INTERFACE_CLASS (tun_device_hw_interface_class, static) = { .name = "tun-device", .flags = VNET_HW_INTERFACE_CLASS_FLAG_P2P, }; /* *INDENT-ON* */ #define TUN_MAX_PACKET_BYTES 65355 #define TUN_MIN_PACKET_BYTES 64 #define TUN_DEFAULT_PACKET_BYTES 1500 static u32 virtio_eth_flag_change (vnet_main_t * vnm, vnet_hw_interface_t * hi, u32 flags) { /* nothing for now */ //TODO On MTU change call vnet_netlink_set_if_mtu return 0; } #define TAP_MAX_INSTANCE 1024 static void tap_free (vlib_main_t * vm, virtio_if_t * vif) { virtio_main_t *mm = &virtio_main; tap_main_t *tm = &tap_main; clib_error_t *err = 0; int i; /* *INDENT-OFF* */ vec_foreach_index (i, vif->vhost_fds) if (vif->vhost_fds[i] != -1) close (vif->vhost_fds[i]); vec_foreach_index (i, vif->rxq_vrings) virtio_vring_free_rx (vm, vif, RX_QUEUE (i)); vec_foreach_index (i, vif->txq_vrings) virtio_vring_free_tx (vm, vif, TX_QUEUE (i)); /* *INDENT-ON* */ if (vif->tap_fds) { _IOCTL (vif->tap_fds[0], TUNSETPERSIST, (void *) (uintptr_t) 0); tap_log_dbg (vif, "TUNSETPERSIST: unset"); } error: vec_foreach_index (i, vif->tap_fds) close (vif->tap_fds[i]); vec_free (vif->tap_fds); vec_free (vif->vhost_fds); vec_free (vif->rxq_vrings); vec_free (vif->txq_vrings); vec_free (vif->host_if_name); vec_free (vif->net_ns); vec_free (vif->host_bridge); clib_error_free (vif->error); tm->tap_ids = clib_bitmap_set (tm->tap_ids, vif->id, 0); clib_memset (vif, 0, sizeof (*vif)); pool_put (mm->interfaces, vif); } void tap_create_if (vlib_main_t * vm, tap_create_if_args_t * args) { vlib_thread_main_t *thm = vlib_get_thread_main (); vlib_physmem_main_t *vpm = &vm->physmem_main; vnet_main_t *vnm = vnet_get_main (); virtio_main_t *vim = &virtio_main; tap_main_t *tm = &tap_main; vnet_sw_interface_t *sw; vnet_hw_interface_t *hw; int i, num_vhost_queues; int old_netns_fd = -1; struct ifreq ifr = {.ifr_flags = IFF_NO_PI | IFF_VNET_HDR }; struct ifreq get_ifr = {.ifr_flags = 0 }; size_t hdrsz; vhost_memory_t *vhost_mem = 0; virtio_if_t *vif = 0; clib_error_t *err = 0; unsigned int tap_features; int tfd = -1, qfd = -1, vfd = -1, nfd = -1; char *host_if_name = 0; unsigned int offload = 0; int sndbuf = 0; if (args->id != ~0) { if (clib_bitmap_get (tm->tap_ids, args->id)) { args->rv = VNET_API_ERROR_INVALID_INTERFACE; args->error = clib_error_return (0, "interface already exists"); return; } } else { args->id = clib_bitmap_first_clear (tm->tap_ids); } if (args->id > TAP_MAX_INSTANCE) { args->rv = VNET_API_ERROR_UNSPECIFIED; args->error = clib_error_return (0, "cannot find free interface id"); return; } pool_get_zero (vim->interfaces, vif); if (args->tap_flags & TAP_FLAG_TUN) { vif->type = VIRTIO_IF_TYPE_TUN; ifr.ifr_flags |= IFF_TUN; /* * From kernel 4.20, xdp support has been added in tun_sendmsg. * If sndbuf == INT_MAX, vhost batches the packet and processes * them using xdp data path for tun driver. It assumes packets * are ethernet frames (It needs to be fixed). * To avoid xdp data path in tun driver, sndbuf value should * be < INT_MAX. 
*/ sndbuf = INT_MAX - 1; } else { vif->type = VIRTIO_IF_TYPE_TAP; ifr.ifr_flags |= IFF_TAP; sndbuf = INT_MAX; } vif->dev_instance = vif - vim->interfaces; vif->id = args->id; vif->num_txqs = thm->n_vlib_mains; vif->num_rxqs = clib_max (args->num_rx_queues, 1); if (args->tap_flags & TAP_FLAG_ATTACH) { if (args->host_if_name == NULL) { args->rv = VNET_API_ERROR_NO_MATCHING_INTERFACE; err = clib_error_return (0, "host_if_name is not provided"); goto error; } } /* if namespace is specified, all further netlink messages should be executed * after we change our net namespace */ if (args->host_namespace) { old_netns_fd = clib_netns_open (NULL /* self */); if ((nfd = clib_netns_open (args->host_namespace)) == -1) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return_unix (0, "clib_netns_open '%s'", args->host_namespace); goto error; } if (clib_setns (nfd) == -1) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_3; args->error = clib_error_return_unix (0, "setns '%s'", args->host_namespace); goto error; } } if (args->host_if_name != NULL) { host_if_name = (char *) args->host_if_name; clib_memcpy (ifr.ifr_name, host_if_name, clib_min (IFNAMSIZ, vec_len (host_if_name))); } if ((tfd = open ("/dev/net/tun", O_RDWR | O_NONBLOCK)) < 0) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return_unix (0, "open '/dev/net/tun'"); goto error; } vec_add1 (vif->tap_fds, tfd); tap_log_dbg (vif, "open tap fd %d", tfd); _IOCTL (tfd, TUNGETFEATURES, &tap_features); tap_log_dbg (vif, "TUNGETFEATURES: features 0x%lx", tap_features); if ((tap_features & IFF_VNET_HDR) == 0) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return (0, "vhost-net backend not available"); goto error; } if ((tap_features & IFF_MULTI_QUEUE) == 0) { if (vif->num_rxqs > 1) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return (0, "multiqueue not supported"); goto error; } vif->num_rxqs = vif->num_txqs = 1; } else ifr.ifr_flags |= IFF_MULTI_QUEUE; hdrsz = sizeof (virtio_net_hdr_v1_t); if (args->tap_flags & TAP_FLAG_GSO) { offload = TUN_F_CSUM | TUN_F_TSO4 | TUN_F_TSO6; vif->gso_enabled = 1; } else if (args->tap_flags & TAP_FLAG_CSUM_OFFLOAD) { offload = TUN_F_CSUM; vif->csum_offload_enabled = 1; } _IOCTL (tfd, TUNSETIFF, (void *) &ifr); tap_log_dbg (vif, "TUNSETIFF fd %d name %s flags 0x%x", tfd, ifr.ifr_ifrn.ifrn_name, ifr.ifr_flags); vif->ifindex = if_nametoindex (ifr.ifr_ifrn.ifrn_name); tap_log_dbg (vif, "ifindex %d", vif->ifindex); if (!args->host_if_name) host_if_name = ifr.ifr_ifrn.ifrn_name; else host_if_name = (char *) args->host_if_name; /* * unset the persistence when attaching to existing * interface */ if (args->tap_flags & TAP_FLAG_ATTACH) { _IOCTL (tfd, TUNSETPERSIST, (void *) (uintptr_t) 0); tap_log_dbg (vif, "TUNSETPERSIST: unset"); } /* set the persistence */ if (args->tap_flags & TAP_FLAG_PERSIST) { _IOCTL (tfd, TUNSETPERSIST, (void *) (uintptr_t) 1); tap_log_dbg (vif, "TUNSETPERSIST: set"); /* verify persistence is set, read the flags */ _IOCTL (tfd, TUNGETIFF, (void *) &get_ifr); tap_log_dbg (vif, "TUNGETIFF: flags 0x%lx", get_ifr.ifr_flags); if ((get_ifr.ifr_flags & IFF_PERSIST) == 0) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return (0, "persistence not supported"); goto error; } } /* create additional queues on the linux side. 
* we create as many linux queue pairs as we have rx queues */ for (i = 1; i < vif->num_rxqs; i++) { if ((qfd = open ("/dev/net/tun", O_RDWR | O_NONBLOCK)) < 0) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return_unix (0, "open '/dev/net/tun'"); goto error; } _IOCTL (qfd, TUNSETIFF, (void *) &ifr); tap_log_dbg (vif, "TUNSETIFF fd %d name %s flags 0x%x", qfd, ifr.ifr_ifrn.ifrn_name, ifr.ifr_flags); vec_add1 (vif->tap_fds, qfd); } for (i = 0; i < vif->num_rxqs; i++) { tap_log_dbg (vif, "TUNSETVNETHDRSZ: fd %d vnet_hdr_sz %u", vif->tap_fds[i], hdrsz); _IOCTL (vif->tap_fds[i], TUNSETVNETHDRSZ, &hdrsz); tap_log_dbg (vif, "TUNSETSNDBUF: fd %d sndbuf %d", vif->tap_fds[i], sndbuf); _IOCTL (vif->tap_fds[i], TUNSETSNDBUF, &sndbuf); tap_log_dbg (vif, "TUNSETOFFLOAD: fd %d offload 0x%lx", vif->tap_fds[i], offload); _IOCTL (vif->tap_fds[i], TUNSETOFFLOAD, offload); if (fcntl (vif->tap_fds[i], F_SETFL, O_NONBLOCK) < 0) { err = clib_error_return_unix (0, "fcntl(tfd, F_SETFL, O_NONBLOCK)"); tap_log_err (vif, "set nonblocking: %U", format_clib_error, err); goto error; } } /* open as many vhost-net fds as required and set ownership */ num_vhost_queues = clib_max (vif->num_rxqs, vif->num_txqs); for (i = 0; i < num_vhost_queues; i++) { if ((vfd = open ("/dev/vhost-net", O_RDWR | O_NONBLOCK)) < 0) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_1; args->error = clib_error_return_unix (0, "open '/dev/vhost-net'"); goto error; } vec_add1 (vif->vhost_fds, vfd); virtio_log_debug (vif, "open vhost-net fd %d qpair %u", vfd, i); _IOCTL (vfd, VHOST_SET_OWNER, 0); virtio_log_debug (vif, "VHOST_SET_OWNER: fd %u", vfd); } _IOCTL (vif->vhost_fds[0], VHOST_GET_FEATURES, &vif->remote_features); virtio_log_debug (vif, "VHOST_GET_FEATURES: features 0x%lx", vif->remote_features); if ((vif->remote_features & VIRTIO_FEATURE (VIRTIO_NET_F_MRG_RXBUF)) == 0) { args->rv = VNET_API_ERROR_UNSUPPORTED; args->error = clib_error_return (0, "vhost-net backend doesn't support " "VIRTIO_NET_F_MRG_RXBUF feature"); goto error; } if ((vif->remote_features & VIRTIO_FEATURE (VIRTIO_RING_F_INDIRECT_DESC)) == 0) { args->rv = VNET_API_ERROR_UNSUPPORTED; args->error = clib_error_return (0, "vhost-net backend doesn't support " "VIRTIO_RING_F_INDIRECT_DESC feature"); goto error; } if ((vif->remote_features & VIRTIO_FEATURE (VIRTIO_F_VERSION_1)) == 0) { args->rv = VNET_API_ERROR_UNSUPPORTED; args->error = clib_error_return (0, "vhost-net backend doesn't support " "VIRTIO_F_VERSION_1 features"); goto error; } vif->features |= VIRTIO_FEATURE (VIRTIO_NET_F_MRG_RXBUF); vif->features |= VIRTIO_FEATURE (VIRTIO_F_VERSION_1); vif->features |= VIRTIO_FEATURE (VIRTIO_RING_F_INDIRECT_DESC); virtio_set_net_hdr_size (vif); if (vif->type == VIRTIO_IF_TYPE_TAP) { if (ethernet_mac_address_is_zero (args->host_mac_addr.bytes)) ethernet_mac_address_generate (args->host_mac_addr.bytes); args->error = vnet_netlink_set_link_addr (vif->ifindex, args->host_mac_addr.bytes); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } if (args->host_bridge) { args->error = vnet_netlink_set_link_master (vif->ifindex, (char *) args->host_bridge); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } } } if (args->host_ip4_prefix_len) { args->error = vnet_netlink_add_ip4_addr (vif->ifindex, &args->host_ip4_addr, args->host_ip4_prefix_len); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } } if (args->host_ip6_prefix_len) { args->error = vnet_netlink_add_ip6_addr (vif->ifindex, &args->host_ip6_addr, 
args->host_ip6_prefix_len); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } } args->error = vnet_netlink_set_link_state (vif->ifindex, 1 /* UP */ ); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } if (args->host_ip4_gw_set) { args->error = vnet_netlink_add_ip4_route (0, 0, &args->host_ip4_gw); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } } if (args->host_ip6_gw_set) { args->error = vnet_netlink_add_ip6_route (0, 0, &args->host_ip6_gw); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } } if (args->host_mtu_set) { args->error = vnet_netlink_set_link_mtu (vif->ifindex, args->host_mtu_size); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } } else if (tm->host_mtu_size != 0) { args->error = vnet_netlink_set_link_mtu (vif->ifindex, tm->host_mtu_size); if (args->error) { args->rv = VNET_API_ERROR_NETLINK_ERROR; goto error; } args->host_mtu_set = 1; args->host_mtu_size = tm->host_mtu_size; } /* switch back to old net namespace */ if (args->host_namespace) { if (clib_setns (old_netns_fd) == -1) { args->rv = VNET_API_ERROR_SYSCALL_ERROR_2; args->error = clib_error_return_unix (0, "setns '%s'", args->host_namespace); goto error; } } for (i = 0; i < num_vhost_queues; i++) { if (i < vif->num_rxqs && (args->error = virtio_vring_init (vm, vif, RX_QUEUE (i), args->rx_ring_sz))) { args->rv = VNET_API_ERROR_INIT_FAILED; goto error; } if (i < vif->num_txqs && (args->error = virtio_vring_init (vm, vif, TX_QUEUE (i), args->tx_ring_sz))) { args->rv = VNET_API_ERROR_INIT_FAILED; goto error; } } /* setup features and memtable */ i = sizeof (vhost_memory_t) + sizeof (vhost_memory_region_t); vhost_mem = clib_mem_alloc (i); clib_memset (vhost_mem, 0, i); vhost_mem->nregions = 1; vhost_mem->regions[0].memory_size = vpm->max_size; vhost_mem->regions[0].guest_phys_addr = vpm->base_addr; vhost_mem->regions[0].userspace_addr = vhost_mem->regions[0].guest_phys_addr; for (i = 0; i < vhost_mem->nregions; i++) virtio_log_debug (vif, "memtable region %u memory_size 0x%lx " "guest_phys_addr 0x%lx userspace_addr 0x%lx", i, vhost_mem->regions[0].memory_size, vhost_mem->regions[0].guest_phys_addr, vhost_mem->regions[0].userspace_addr); for (i = 0; i < num_vhost_queues; i++) { int fd = vif->vhost_fds[i]; _IOCTL (fd, VHOST_SET_FEATURES, &vif->features); virtio_log_debug (vif, "VHOST_SET_FEATURES: fd %u features 0x%lx", fd, vif->features); _IOCTL (fd, VHOST_SET_MEM_TABLE, vhost_mem); virtio_log_debug (vif, "VHOST_SET_MEM_TABLE: fd %u", fd); } /* finish initializing queue pair */ for (i = 0; i < num_vhost_queues * 2; i++) { vhost_vring_addr_t addr = { 0 }; vhost_vring_state_t state = { 0 }; vhost_vring_file_t file = { 0 }; virtio_vring_t *vring; u16 qp = i >> 1; int fd = vif->vhost_fds[qp]; if (i & 1) { if (qp >= vif->num_txqs) continue; vring = vec_elt_at_index (vif->txq_vrings, qp); } else { if (qp >= vif->num_rxqs) continue; vring = vec_elt_at_index (vif->rxq_vrings, qp); } addr.index = state.index = file.index = vring->queue_id & 1; state.num = vring->size; virtio_log_debug (vif, "VHOST_SET_VRING_NUM fd %d index %u num %u", fd, state.index, state.num); _IOCTL (fd, VHOST_SET_VRING_NUM, &state); addr.flags = 0; addr.desc_user_addr = pointer_to_uword (vring->desc); addr.avail_user_addr = pointer_to_uword (vring->avail); addr.used_user_addr = pointer_to_uword (vring->used); virtio_log_debug (vif, "VHOST_SET_VRING_ADDR fd %d index %u flags 0x%x " "desc_user_addr 0x%lx avail_user_addr 0x%lx " "used_user_addr 
0x%lx", fd, addr.index, addr.flags, addr.desc_user_addr, addr.avail_user_addr, addr.used_user_addr); _IOCTL (fd, VHOST_SET_VRING_ADDR, &addr); file.fd = vring->call_fd; virtio_log_debug (vif, "VHOST_SET_VRING_CALL fd %d index %u call_fd %d", fd, file.index, file.fd); _IOCTL (fd, VHOST_SET_VRING_CALL, &file); file.fd = vring->kick_fd; virtio_log_debug (vif, "VHOST_SET_VRING_KICK fd %d index %u kick_fd %d", fd, file.index, file.fd); _IOCTL (fd, VHOST_SET_VRING_KICK, &file); file.fd = vif->tap_fds[qp % vif->num_rxqs]; virtio_log_debug (vif, "VHOST_NET_SET_BACKEND fd %d index %u tap_fd %d", fd, file.index, file.fd); _IOCTL (fd, VHOST_NET_SET_BACKEND, &file); } if (vif->type == VIRTIO_IF_TYPE_TAP) { if (!args->mac_addr_set) ethernet_mac_address_generate (args->mac_addr.bytes); clib_memcpy (vif->mac_addr, args->mac_addr.bytes, 6); vif->host_bridge = format (0, "%s%c", args->host_bridge, 0); } vif->host_if_name = format (0, "%s%c", host_if_name, 0); vif->net_ns = format (0, "%s%c", args->host_namespace, 0); vif->host_mtu_size = args->host_mtu_size; vif->tap_flags = args->tap_flags; clib_memcpy (vif->host_mac_addr, args->host_mac_addr.bytes, 6); vif->host_ip4_prefix_len = args->host_ip4_prefix_len; vif->host_ip6_prefix_len = args->host_ip6_prefix_len; if (args->host_ip4_prefix_len) clib_memcpy (&vif->host_ip4_addr, &args->host_ip4_addr, 4); if (args->host_ip6_prefix_len) clib_memcpy (&vif->host_ip6_addr, &args->host_ip6_addr, 16); if (vif->type != VIRTIO_IF_TYPE_TUN) { args->error = ethernet_register_interface (vnm, virtio_device_class.index, vif->dev_instance, vif->mac_addr, &vif->hw_if_index, virtio_eth_flag_change); if (args->error) { args->rv = VNET_API_ERROR_INVALID_REGISTRATION; goto error; } } else { vif->hw_if_index = vnet_register_interface (vnm, virtio_device_class.index, vif->dev_instance /* device instance */ , tun_device_hw_interface_class.index, vif->dev_instance); } tm->tap_ids = clib_bitmap_set (tm->tap_ids, vif->id, 1); sw = vnet_get_hw_sw_interface (vnm, vif->hw_if_index); vif->sw_if_index = sw->sw_if_index; args->sw_if_index = vif->sw_if_index; args->rv = 0; hw = vnet_get_hw_interface (vnm, vif->hw_if_index); hw->caps |= VNET_HW_INTERFACE_CAP_SUPPORTS_INT_MODE; if (args->tap_flags & TAP_FLAG_GSO) { hw->caps |= VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO | VNET_HW_INTERFACE_CAP_SUPPORTS_TX_TCP_CKSUM | VNET_HW_INTERFACE_CAP_SUPPORTS_TX_UDP_CKSUM; } else if (args->tap_flags & TAP_FLAG_CSUM_OFFLOAD) { hw->caps |= VNET_HW_INTERFACE_CAP_SUPPORTS_TX_TCP_CKSUM | VNET_HW_INTERFACE_CAP_SUPPORTS_TX_UDP_CKSUM; } if ((args->tap_flags & TAP_FLAG_GSO) && (args->tap_flags & TAP_FLAG_GRO_COALESCE)) { virtio_set_packet_coalesce (vif); } if (vif->type == VIRTIO_IF_TYPE_TUN) { hw->max_supported_packet_bytes = TUN_MAX_PACKET_BYTES; hw->min_packet_bytes = hw->min_supported_packet_bytes = TUN_MIN_PACKET_BYTES; hw->max_packet_bytes = args->host_mtu_size ? args->host_mtu_size : TUN_DEFAULT_PACKET_BYTES; vnet_sw_interface_set_mtu (vnm, hw->sw_if_index, hw->max_packet_bytes); } virtio_vring_set_rx_queues (vm, vif); vif->per_interface_next_index = ~0; vif->flags |= VIRTIO_IF_FLAG_ADMIN_UP; vnet_hw_interface_set_flags (vnm, vif->hw_if_index, VNET_HW_INTERFACE_FLAG_LINK_UP); /* * Host tun/tap driver link carrier state is "up" at creation. The * driver never changes this unless the backend (VPP) changes it using * TUNSETCARRIER ioctl(). See tap_set_carrier(). 
*/ vif->host_carrier_up = 1; goto done; error: if (err) { ASSERT (args->error == 0); args->error = err; args->rv = VNET_API_ERROR_SYSCALL_ERROR_3; } tap_log_err (vif, "%U", format_clib_error, args->error); tap_free (vm, vif); done: if (vhost_mem) clib_mem_free (vhost_mem); if (old_netns_fd != -1) { /* in case we errored with a switched netns */ clib_setns (old_netns_fd); close (old_netns_fd); } if (nfd != -1) close (nfd); } int tap_delete_if (vlib_main_t * vm, u32 sw_if_index) { vnet_main_t *vnm = vnet_get_main (); virtio_main_t *mm = &virtio_main; virtio_if_t *vif; vnet_hw_interface_t *hw; hw = vnet_get_sup_hw_interface_api_visible_or_null (vnm, sw_if_index); if (hw == NULL || virtio_device_class.index != hw->dev_class_index) return VNET_API_ERROR_INVALID_SW_IF_INDEX; vif = pool_elt_at_index (mm->interfaces, hw->dev_instance); if ((vif->type != VIRTIO_IF_TYPE_TAP) && (vif->type != VIRTIO_IF_TYPE_TUN)) return VNET_API_ERROR_INVALID_INTERFACE; /* bring down the interface */ vnet_hw_interface_set_flags (vnm, vif->hw_if_index, 0); vnet_sw_interface_set_flags (vnm, vif->sw_if_index, 0); if (vif->type == VIRTIO_IF_TYPE_TAP) ethernet_delete_interface (vnm, vif->hw_if_index); else /* VIRTIO_IF_TYPE_TUN */ vnet_delete_hw_interface (vnm, vif->hw_if_index); vif->hw_if_index = ~0; tap_free (vm, vif); return 0; } int tap_csum_offload_enable_disable (vlib_main_t * vm, u32 sw_if_index, int enable_disable) { vnet_main_t *vnm = vnet_get_main (); virtio_main_t *mm = &virtio_main; virtio_if_t *vif; vnet_hw_interface_t *hw; clib_error_t *err = 0; int i = 0; hw = vnet_get_sup_hw_interface_api_visible_or_null (vnm, sw_if_index); if (hw == NULL || virtio_device_class.index != hw->dev_class_index) return VNET_API_ERROR_INVALID_SW_IF_INDEX; vif = pool_elt_at_index (mm->interfaces, hw->dev_instance); const unsigned int csum_offload_on = TUN_F_CSUM; const unsigned int csum_offload_off = 0; unsigned int offload = enable_disable ? csum_offload_on : csum_offload_off; vec_foreach_index (i, vif->tap_fds) _IOCTL (vif->tap_fds[i], TUNSETOFFLOAD, offload); vif->gso_enabled = 0; vif->packet_coalesce = 0; vif->csum_offload_enabled = enable_disable ? 1 : 0; if ((hw->caps & VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO) != 0) { hw->caps &= ~VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO; } if (enable_disable) { hw->caps |= VNET_HW_INTERFACE_CAP_SUPPORTS_L4_TX_CKSUM; } else { hw->caps &= ~VNET_HW_INTERFACE_CAP_SUPPORTS_L4_TX_CKSUM; } error: if (err) { clib_warning ("Error %s checksum offload on sw_if_index %d", enable_disable ? "enabling" : "disabling", sw_if_index); return VNET_API_ERROR_SYSCALL_ERROR_3; } return 0; } int tap_gso_enable_disable (vlib_main_t * vm, u32 sw_if_index, int enable_disable, int is_packet_coalesce) { vnet_main_t *vnm = vnet_get_main (); virtio_main_t *mm = &virtio_main; virtio_if_t *vif; vnet_hw_interface_t *hw; clib_error_t *err = 0; int i = 0; hw = vnet_get_sup_hw_interface_api_visible_or_null (vnm, sw_if_index); if (hw == NULL || virtio_device_class.index != hw->dev_class_index) return VNET_API_ERROR_INVALID_SW_IF_INDEX; vif = pool_elt_at_index (mm->interfaces, hw->dev_instance); const unsigned int gso_on = TUN_F_CSUM | TUN_F_TSO4 | TUN_F_TSO6; const unsigned int gso_off = 0; unsigned int offload = enable_disable ? gso_on : gso_off; vec_foreach_index (i, vif->tap_fds) _IOCTL (vif->tap_fds[i], TUNSETOFFLOAD, offload); vif->gso_enabled = enable_disable ? 
1 : 0; vif->csum_offload_enabled = 0; if (enable_disable) { if ((hw->caps & VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO) == 0) { hw->caps |= VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO | VNET_HW_INTERFACE_CAP_SUPPORTS_L4_TX_CKSUM; } if (is_packet_coalesce) { virtio_set_packet_coalesce (vif); } } else { if ((hw->caps & VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO) != 0) { hw->caps &= ~(VNET_HW_INTERFACE_CAP_SUPPORTS_TCP_GSO | VNET_HW_INTERFACE_CAP_SUPPORTS_L4_TX_CKSUM); } vif->packet_coalesce = 0; } error: if (err) { clib_warning ("Error %s gso on sw_if_index %d", enable_disable ? "enabling" : "disabling", sw_if_index); return VNET_API_ERROR_SYSCALL_ERROR_3; } return 0; } int tap_dump_ifs (tap_interface_details_t ** out_tapids) { vnet_main_t *vnm = vnet_get_main (); virtio_main_t *mm = &virtio_main; virtio_if_t *vif; virtio_vring_t *vring; vnet_hw_interface_t *hi; tap_interface_details_t *r_tapids = NULL; tap_interface_details_t *tapid = NULL; /* *INDENT-OFF* */ pool_foreach (vif, mm->interfaces) { if ((vif->type != VIRTIO_IF_TYPE_TAP) && (vif->type != VIRTIO_IF_TYPE_TUN)) continue; vec_add2(r_tapids, tapid, 1); clib_memset (tapid, 0, sizeof (*tapid)); tapid->id = vif->id; tapid->sw_if_index = vif->sw_if_index; hi = vnet_get_hw_interface (vnm, vif->hw_if_index); clib_memcpy(tapid->dev_name, hi->name, MIN (ARRAY_LEN (tapid->dev_name) - 1, vec_len (hi->name))); vring = vec_elt_at_index (vif->rxq_vrings, RX_QUEUE_ACCESS(0)); tapid->rx_ring_sz = vring->size; vring = vec_elt_at_index (vif->txq_vrings, TX_QUEUE_ACCESS(0)); tapid->tx_ring_sz = vring->size; tapid->tap_flags = vif->tap_flags; clib_memcpy(&tapid->host_mac_addr, vif->host_mac_addr, 6); if (vif->host_if_name) { clib_memcpy(tapid->host_if_name, vif->host_if_name, MIN (ARRAY_LEN (tapid->host_if_name) - 1, vec_len (vif->host_if_name))); } if (vif->net_ns) { clib_memcpy(tapid->host_namespace, vif->net_ns, MIN (ARRAY_LEN (tapid->host_namespace) - 1, vec_len (vif->net_ns))); } if (vif->host_bridge) { clib_memcpy(tapid->host_bridge, vif->host_bridge, MIN (ARRAY_LEN (tapid->host_bridge) - 1, vec_len (vif->host_bridge))); } if (vif->host_ip4_prefix_len) clib_memcpy(tapid->host_ip4_addr.as_u8, &vif->host_ip4_addr, 4); tapid->host_ip4_prefix_len = vif->host_ip4_prefix_len; if (vif->host_ip6_prefix_len) clib_memcpy(tapid->host_ip6_addr.as_u8, &vif->host_ip6_addr, 16); tapid->host_ip6_prefix_len = vif->host_ip6_prefix_len; tapid->host_mtu_size = vif->host_mtu_size; } /* *INDENT-ON* */ *out_tapids = r_tapids; return 0; } /* * Set host tap/tun interface carrier state so it will appear to host * applications that the interface's link state changed. * * If the kernel we're building against does not have support for the * TUNSETCARRIER ioctl command, do nothing. 
*/ int tap_set_carrier (u32 hw_if_index, u32 carrier_up) { int ret = 0; #ifdef TUNSETCARRIER vnet_main_t *vnm = vnet_get_main (); vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index); virtio_main_t *mm = &virtio_main; virtio_if_t *vif; int *fd; vif = pool_elt_at_index (mm->interfaces, hi->dev_instance); vec_foreach (fd, vif->tap_fds) { ret = ioctl (*fd, TUNSETCARRIER, &carrier_up); if (ret < 0) { clib_warning ("ioctl (TUNSETCARRIER) returned %d", ret); break; } } if (!ret) vif->host_carrier_up = (carrier_up != 0); #endif return ret; } static clib_error_t * tap_mtu_config (vlib_main_t * vm, unformat_input_t * input) { tap_main_t *tm = &tap_main; while (unformat_check_input (input) != UNFORMAT_END_OF_INPUT) { if (unformat (input, "host-mtu %d", &tm->host_mtu_size)) ; else return clib_error_return (0, "unknown input `%U'", format_unformat_error, input); } return 0; } /* * Set host tap/tun interface speed in Mbps. */ int tap_set_speed (u32 hw_if_index, u32 speed) { vnet_main_t *vnm = vnet_get_main (); vnet_hw_interface_t *hi = vnet_get_hw_interface (vnm, hw_if_index); virtio_main_t *mm = &virtio_main; virtio_if_t *vif; int old_netns_fd = -1; int nfd = -1; int ctl_fd = -1; struct ifreq ifr; struct ethtool_cmd ecmd; int ret = -1; vif = pool_elt_at_index (mm->interfaces, hi->dev_instance); if (vif->net_ns) { old_netns_fd = clib_netns_open (NULL /* self */); if ((nfd = clib_netns_open (vif->net_ns)) == -1) { clib_warning ("Cannot open netns"); goto done; } if (clib_setns (nfd) == -1) { clib_warning ("Cannot set ns"); goto done; } } if ((ctl_fd = socket (AF_INET, SOCK_STREAM, 0)) == -1) { clib_warning ("Cannot open control socket"); goto done; } ecmd.cmd = ETHTOOL_GSET; clib_memset (&ifr, 0, sizeof (ifr)); clib_memcpy (ifr.ifr_name, vif->host_if_name, strlen ((const char *) vif->host_if_name)); ifr.ifr_data = (void *) &ecmd; if ((ret = ioctl (ctl_fd, SIOCETHTOOL, &ifr)) < 0) { clib_warning ("Cannot get device settings"); goto done; } if (ethtool_cmd_speed (&ecmd) != speed) { ecmd.cmd = ETHTOOL_SSET; ethtool_cmd_speed_set (&ecmd, speed); if ((ret = ioctl (ctl_fd, SIOCETHTOOL, &ifr)) < 0) { clib_warning ("Cannot set device settings"); goto done; } } done: if (old_netns_fd != -1) { if (clib_setns (old_netns_fd) == -1) { clib_warning ("Cannot set old ns"); } close (old_netns_fd); } if (nfd != -1) close (nfd); if (ctl_fd != -1) close (ctl_fd); return ret; } /* tap { host-mtu <size> } configuration. */ VLIB_CONFIG_FUNCTION (tap_mtu_config, "tap"); static clib_error_t * tap_init (vlib_main_t * vm) { tap_main_t *tm = &tap_main; clib_error_t *error = 0; tm->log_default = vlib_log_register_class ("tap", 0); vlib_log_debug (tm->log_default, "initialized"); tm->host_mtu_size = 0; return error; } VLIB_INIT_FUNCTION (tap_init); /* * fd.io coding-style-patch-verification: ON * * Local Variables: * eval: (c-set-style "gnu") * End: */
15,127
324
<filename>tests/com/google/common/css/compiler/passes/CssClassRenamingTest.java<gh_stars>100-1000 /* * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.css.compiler.passes; import static com.google.common.truth.Truth.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import com.google.common.css.SubstitutionMap; import com.google.common.css.compiler.ast.BackDoorNodeMutation; import com.google.common.css.compiler.ast.CssBlockNode; import com.google.common.css.compiler.ast.CssClassSelectorNode; import com.google.common.css.compiler.ast.CssIdSelectorNode; import com.google.common.css.compiler.ast.CssRefinerListNode; import com.google.common.css.compiler.ast.CssRootNode; import com.google.common.css.compiler.ast.CssRulesetNode; import com.google.common.css.compiler.ast.CssSelectorNode; import com.google.common.css.compiler.ast.CssTree; import com.google.common.css.compiler.ast.MutatingVisitController; import com.google.common.css.compiler.passes.testing.AstPrinter; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests the {@link CssClassRenaming} compiler pass. * * @author <EMAIL> (<NAME>) */ @RunWith(JUnit4.class) public class CssClassRenamingTest { @Test public void testRunPass() { MutatingVisitController visitController = mock(MutatingVisitController.class); CssClassRenaming pass = new CssClassRenaming(visitController, null, null); visitController.startVisit(pass); pass.runPass(); } @Test public void testNoSubstitutionWithNullMap() { CssClassSelectorNode node = new CssClassSelectorNode("FOO", null); CssClassRenaming pass = new CssClassRenaming(null, null, null); pass.enterClassSelector(node); } @Test public void testNoClassSubstitutionWhenClassNotFoundInMap() { CssClassSelectorNode refinerNode = new CssClassSelectorNode("FOO", null); SubstitutionMap cssClassRenamingMap = mock(SubstitutionMap.class); CssClassRenaming pass = new CssClassRenaming(null, cssClassRenamingMap, null); pass.enterClassSelector(refinerNode); verify(cssClassRenamingMap).get("FOO"); } @Test public void testNoIdSubstitutionWhenIdNotFoundInMap() { CssIdSelectorNode refinerNode = new CssIdSelectorNode("ID", null); SubstitutionMap idRenamingMap = mock(SubstitutionMap.class); CssClassRenaming pass = new CssClassRenaming(null, null, idRenamingMap); pass.enterIdSelector(refinerNode); verify(idRenamingMap).get("ID"); } @Test public void testEnterClassRefiner() { CssClassSelectorNode refinerNode = new CssClassSelectorNode("CSS_FOO", null); CssRefinerListNode refiners = new CssRefinerListNode(); BackDoorNodeMutation.addChildToBack(refiners, refinerNode); CssRulesetNode ruleset = new CssRulesetNode(); CssSelectorNode sel = new CssSelectorNode("", null); sel.setRefiners(refiners); ruleset.addSelector(sel); CssBlockNode body = new CssBlockNode(false); BackDoorNodeMutation.addChildToBack(body, ruleset); CssRootNode root = new CssRootNode(body); CssTree tree = new CssTree(null, root); SubstitutionMap 
classMap = new SubstitutionMap() { /** {@inheritDoc} */ @Override public String get(String key) { return key.startsWith("CSS_") ? key + '_' : key; } }; CssClassRenaming pass = new CssClassRenaming( tree.getMutatingVisitController(), classMap, null); pass.runPass(); assertThat(AstPrinter.print(tree)).isEqualTo("[[.CSS_FOO_]{[]}]"); } @Test public void testEnterIdRefiner() { CssIdSelectorNode refinerNode = new CssIdSelectorNode("ID_FOO", null); CssRefinerListNode refiners = new CssRefinerListNode(); BackDoorNodeMutation.addChildToBack(refiners, refinerNode); CssRulesetNode ruleset = new CssRulesetNode(); CssSelectorNode sel = new CssSelectorNode("", null); sel.setRefiners(refiners); ruleset.addSelector(sel); CssBlockNode body = new CssBlockNode(false); BackDoorNodeMutation.addChildToBack(body, ruleset); CssRootNode root = new CssRootNode(body); CssTree tree = new CssTree(null, root); SubstitutionMap idMap = new SubstitutionMap() { /** {@inheritDoc} */ @Override public String get(String key) { return key.startsWith("ID_") ? key + '_' : key; } }; CssClassRenaming pass = new CssClassRenaming(tree.getMutatingVisitController(), null, idMap); pass.runPass(); assertThat(AstPrinter.print(tree)).isEqualTo("[[#ID_FOO_]{[]}]"); } }
1,908
435
{ "alias": "video/695/8-boolpy-using-python-for-symbolic-boolean-alge", "category": "PyCon US 2012", "copyright_text": "", "description": "", "duration": null, "id": 695, "language": "eng", "quality_notes": "", "recorded": "2012-03-11", "slug": "8-boolpy-using-python-for-symbolic-boolean-alge", "speakers": [ "<NAME>" ], "summary": "BoolPy is a Python module that iteratively simplifies Boolean Algebra\nexpressions according to the Boolean Theorems. The purpose of this talk\nis twofold: (1) To present the details of an Object-Oriented solution to\nthe symbolic computing problem and (2) to present the BoolPy module as a\nuseful classroom tool for teaching Boolean Algebra in college-level\nmathematics and computer science courses.\n", "tags": [], "thumbnail_url": "https://img.youtube.com/vi/FCiA6e44aOI/hqdefault.jpg", "title": "8. BoolPy: Using Python for Symbolic Boolean Algebra Simplification", "videos": [ { "length": 0, "type": "youtube", "url": "https://www.youtube.com/watch?v=FCiA6e44aOI" } ] }
397
376
extern zend_class_entry *ice_crypt_ce; ZEPHIR_INIT_CLASS(Ice_Crypt); PHP_METHOD(Ice_Crypt, setKey); PHP_METHOD(Ice_Crypt, setCipher); PHP_METHOD(Ice_Crypt, setMode); PHP_METHOD(Ice_Crypt, setBlock); PHP_METHOD(Ice_Crypt, __construct); PHP_METHOD(Ice_Crypt, encrypt); PHP_METHOD(Ice_Crypt, generateInputVector); PHP_METHOD(Ice_Crypt, doEncrypt); PHP_METHOD(Ice_Crypt, decrypt); PHP_METHOD(Ice_Crypt, doDecrypt); PHP_METHOD(Ice_Crypt, getJsonPayload); PHP_METHOD(Ice_Crypt, hash); PHP_METHOD(Ice_Crypt, addPadding); PHP_METHOD(Ice_Crypt, stripPadding); PHP_METHOD(Ice_Crypt, paddingIsValid); PHP_METHOD(Ice_Crypt, invalidPayload); PHP_METHOD(Ice_Crypt, getIvSize); ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_setkey, 0, 0, 1) ZEND_ARG_INFO(0, key) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_setcipher, 0, 0, 1) ZEND_ARG_INFO(0, cipher) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_setmode, 0, 0, 1) ZEND_ARG_INFO(0, mode) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_setblock, 0, 0, 1) ZEND_ARG_INFO(0, block) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt___construct, 0, 0, 0) ZEND_ARG_INFO(0, key) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_encrypt, 0, 0, 1) ZEND_ARG_INFO(0, text) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_doencrypt, 0, 0, 2) ZEND_ARG_INFO(0, value) ZEND_ARG_INFO(0, iv) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_decrypt, 0, 0, 1) ZEND_ARG_INFO(0, text) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_dodecrypt, 0, 0, 2) ZEND_ARG_INFO(0, value) ZEND_ARG_INFO(0, iv) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_getjsonpayload, 0, 0, 1) ZEND_ARG_INFO(0, text) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_hash, 0, 0, 1) ZEND_ARG_INFO(0, value) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_addpadding, 0, 0, 1) ZEND_ARG_INFO(0, value) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_strippadding, 0, 0, 1) ZEND_ARG_INFO(0, value) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_paddingisvalid, 0, 0, 2) ZEND_ARG_INFO(0, pad) ZEND_ARG_INFO(0, value) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_ice_crypt_invalidpayload, 0, 0, 1) ZEND_ARG_ARRAY_INFO(0, data, 0) ZEND_END_ARG_INFO() ZEPHIR_INIT_FUNCS(ice_crypt_method_entry) { PHP_ME(Ice_Crypt, setKey, arginfo_ice_crypt_setkey, ZEND_ACC_PUBLIC) PHP_ME(Ice_Crypt, setCipher, arginfo_ice_crypt_setcipher, ZEND_ACC_PUBLIC) PHP_ME(Ice_Crypt, setMode, arginfo_ice_crypt_setmode, ZEND_ACC_PUBLIC) PHP_ME(Ice_Crypt, setBlock, arginfo_ice_crypt_setblock, ZEND_ACC_PUBLIC) PHP_ME(Ice_Crypt, __construct, arginfo_ice_crypt___construct, ZEND_ACC_PUBLIC|ZEND_ACC_CTOR) PHP_ME(Ice_Crypt, encrypt, arginfo_ice_crypt_encrypt, ZEND_ACC_PUBLIC) PHP_ME(Ice_Crypt, generateInputVector, NULL, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, doEncrypt, arginfo_ice_crypt_doencrypt, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, decrypt, arginfo_ice_crypt_decrypt, ZEND_ACC_PUBLIC) PHP_ME(Ice_Crypt, doDecrypt, arginfo_ice_crypt_dodecrypt, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, getJsonPayload, arginfo_ice_crypt_getjsonpayload, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, hash, arginfo_ice_crypt_hash, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, addPadding, arginfo_ice_crypt_addpadding, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, stripPadding, arginfo_ice_crypt_strippadding, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, paddingIsValid, arginfo_ice_crypt_paddingisvalid, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, invalidPayload, 
arginfo_ice_crypt_invalidpayload, ZEND_ACC_PROTECTED) PHP_ME(Ice_Crypt, getIvSize, NULL, ZEND_ACC_PROTECTED) PHP_FE_END };
1,779
2,268
<filename>sp/src/game/client/fx_fleck.cpp //========= Copyright Valve Corporation, All rights reserved. ============// // // Purpose: // // $Workfile: $ // $NoKeywords: $ //=============================================================================// #include "cbase.h" #include "fx_fleck.h" // memdbgon must be the last include file in a .cpp file!!! #include "tier0/memdbgon.h" // enable this to have the fleck_merge cvar as well as the current system count displayed as it changes (for profiling) #define REPORT_MERGED_FLECKS 0 // // class PARTICLE_MERGE //{ //public: // bool MergeParticleSystems( CFleckParticles *pSystem, const char *pEffectName, const Vector &center, const Vector &extents ) // { merge; return true; } //}; // a singly linked list through all particle effects of a specific type // with a specific rule for sharing them. // Needs a hook to the particle effect's constructor/destructor and factory method // The factory needs to support optionally merging the new particles into a previously built particle effect // this cuts down on lots of scene management overhead as well as rendering/batch overhead template< class PARTICLE_EFFECT, class PARTICLE_MERGE > class CParticleMergeList { public: CParticleMergeList() : m_pHead(NULL) {} void AddParticleSystem( PARTICLE_EFFECT *pSystem ); void RemoveParticleSystem( PARTICLE_EFFECT *pRemove ); PARTICLE_EFFECT *FindAndMergeParticleSystem( const char *pEffectName, const Vector &center, const Vector &extents ); bool MergeParticleSystems( PARTICLE_EFFECT *pSystem, const char *pEffectName, const Vector &center, const Vector &extents ); private: PARTICLE_EFFECT *m_pHead; PARTICLE_MERGE m_merge; }; #if REPORT_MERGED_FLECKS ConVar fleck_merge("fleck_merge","1"); int g_PCount = 0; #endif template< class PARTICLE_EFFECT, class PARTICLE_MERGE > void CParticleMergeList<PARTICLE_EFFECT,PARTICLE_MERGE>::AddParticleSystem( PARTICLE_EFFECT *pSystem ) { #if REPORT_MERGED_FLECKS g_PCount++; Msg("PS: %d\n", g_PCount); #endif pSystem->m_pNextParticleSystem = m_pHead; m_pHead = pSystem; } template< class PARTICLE_EFFECT, class PARTICLE_MERGE > void CParticleMergeList<PARTICLE_EFFECT,PARTICLE_MERGE>::RemoveParticleSystem( PARTICLE_EFFECT *pRemove ) { #if REPORT_MERGED_FLECKS g_PCount--; Msg("PS: %d\n", g_PCount); #endif PARTICLE_EFFECT **pPrev = &m_pHead; PARTICLE_EFFECT *pCur = *pPrev; while ( pCur ) { if ( pCur == pRemove ) { *pPrev = pCur->m_pNextParticleSystem; return; } pPrev = &pCur->m_pNextParticleSystem; pCur = *pPrev; } } template< class PARTICLE_EFFECT, class PARTICLE_MERGE > PARTICLE_EFFECT *CParticleMergeList<PARTICLE_EFFECT,PARTICLE_MERGE>::FindAndMergeParticleSystem( const char *pEffectName, const Vector &center, const Vector &extents ) { #if REPORT_MERGED_FLECKS if ( !fleck_merge.GetBool() ) return NULL; #endif for ( PARTICLE_EFFECT *pMerge = m_pHead; pMerge != NULL; pMerge = pMerge->m_pNextParticleSystem ) { if ( m_merge.MergeParticleSystems( pMerge, pEffectName, center, extents ) ) return pMerge; } return NULL; } // merge anything within 10 feet const float MAX_RADIUS_BBOX_MERGE = 120.0f; template< class PARTICLE_EFFECT > class CMergeSameNameBbox { public: bool MergeParticleSystems( PARTICLE_EFFECT *pSystem, const char *pEffectName, const Vector &center, const Vector &extents ) { // by default, match names if ( !Q_stricmp(pSystem->GetEffectName(), pEffectName) ) { Vector mins, maxs; pSystem->GetBinding().GetWorldspaceBounds( &mins, &maxs ); AddPointToBounds( center - extents, mins, maxs ); AddPointToBounds( center + extents, mins, maxs ); 
Vector size = maxs - mins; float radius = size.Length(); if ( radius < MAX_RADIUS_BBOX_MERGE ) { pSystem->GetBinding().SetBBox( mins, maxs ); // put sort origin at center of the new box Vector sortOrigin = 0.5f * (mins+maxs); pSystem->SetSortOrigin(sortOrigin); return true; } } return false; } }; CParticleMergeList< CFleckParticles, CMergeSameNameBbox<CFleckParticles> > g_FleckMergeList; // // CFleckParticles // CSmartPtr<CFleckParticles> CFleckParticles::Create( const char *pDebugName, const Vector &vCenter, const Vector &extents ) { CFleckParticles *pMerge = g_FleckMergeList.FindAndMergeParticleSystem( pDebugName, vCenter, extents ); if ( pMerge ) return pMerge; CFleckParticles *pRet = new CFleckParticles( pDebugName ); if ( pRet ) { pRet->GetBinding().SetBBox( vCenter - extents, vCenter + extents ); pRet->SetSortOrigin(vCenter); } return pRet; } CFleckParticles::CFleckParticles( const char *pDebugName ) : CSimpleEmitter( pDebugName ), m_pNextParticleSystem(NULL) { g_FleckMergeList.AddParticleSystem(this); } CFleckParticles::~CFleckParticles() { g_FleckMergeList.RemoveParticleSystem(this); } //----------------------------------------------------------------------------- // Purpose: Test for surrounding collision surfaces for quick collision testing for the particle system // Input : &origin - starting position // *dir - direction of movement (if NULL, will do a point emission test in four directions) // angularSpread - looseness of the spread // minSpeed - minimum speed // maxSpeed - maximum speed // gravity - particle gravity for the sytem // dampen - dampening amount on collisions // flags - extra information //----------------------------------------------------------------------------- void CFleckParticles::Setup( const Vector &origin, const Vector *direction, float angularSpread, float minSpeed, float maxSpeed, float gravity, float dampen, int flags ) { //See if we've specified a direction m_ParticleCollision.Setup( origin, direction, angularSpread, minSpeed, maxSpeed, gravity, dampen ); } void CFleckParticles::RenderParticles( CParticleRenderIterator *pIterator ) { const FleckParticle *pParticle = (const FleckParticle*)pIterator->GetFirst(); while ( pParticle ) { Vector tPos; TransformParticle( ParticleMgr()->GetModelView(), pParticle->m_Pos, tPos ); float sortKey = (int) tPos.z; Vector color; color[0] = pParticle->m_uchColor[0] / 255.0f; color[1] = pParticle->m_uchColor[1] / 255.0f; color[2] = pParticle->m_uchColor[2] / 255.0f; //Render it RenderParticle_ColorSizeAngle( pIterator->GetParticleDraw(), tPos, color, 1.0f - (pParticle->m_flLifetime / pParticle->m_flDieTime), pParticle->m_uchSize, pParticle->m_flRoll ); pParticle = (const FleckParticle*)pIterator->GetNext( sortKey ); } } void CFleckParticles::SimulateParticles( CParticleSimulateIterator *pIterator ) { FleckParticle *pParticle = (FleckParticle*)pIterator->GetFirst(); while ( pParticle ) { const float timeDelta = pIterator->GetTimeDelta(); //Should this particle die? 
pParticle->m_flLifetime += timeDelta; if ( pParticle->m_flLifetime >= pParticle->m_flDieTime ) { pIterator->RemoveParticle( pParticle ); } else { pParticle->m_flRoll += pParticle->m_flRollDelta * timeDelta; //Simulate the movement with collision trace_t trace; m_ParticleCollision.MoveParticle( pParticle->m_Pos, pParticle->m_vecVelocity, &pParticle->m_flRollDelta, timeDelta, &trace ); // If we're in solid, then stop moving if ( trace.allsolid ) { pParticle->m_vecVelocity = vec3_origin; pParticle->m_flRollDelta = 0.0f; } } pParticle = (FleckParticle*)pIterator->GetNext(); } }
2,638
350
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log

from ironic.common import utils

LOG = log.getLogger(__name__)


class JsonExtensionMiddleware(object):
    """Simplified processing of .json extension.

    Previously Ironic API used the "guess_content_type_from_ext" feature.
    It was never needed, as we never allowed non-JSON content types anyway.
    Now that it is removed, this middleware strips .json extension for
    backward compatibility.
    """

    def __init__(self, app):
        self.app = app

    def __call__(self, env, start_response):
        path = utils.safe_rstrip(env.get('PATH_INFO'), '/')
        if path and path.endswith('.json'):
            LOG.debug('Stripping .json prefix from %s for compatibility '
                      'with pecan', path)
            env['PATH_INFO'] = path[:-5]
            env['HAS_JSON_SUFFIX'] = True
        else:
            env['HAS_JSON_SUFFIX'] = False

        return self.app(env, start_response)
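Since the middleware above depends on oslo_log and ironic.common.utils, a quick way to see what its __call__ does to the WSGI environ is a self-contained stand-in that mirrors just the PATH_INFO handling. The helper name and the example environ below are made up for illustration and are not part of Ironic.

def strip_json_suffix(env):
    # Mirrors the PATH_INFO logic of JsonExtensionMiddleware.__call__ above.
    path = (env.get('PATH_INFO') or '').rstrip('/')
    if path and path.endswith('.json'):
        env['PATH_INFO'] = path[:-5]       # drop the trailing ".json"
        env['HAS_JSON_SUFFIX'] = True
    else:
        env['HAS_JSON_SUFFIX'] = False
    return env

env = {'PATH_INFO': '/v1/nodes/1234.json'}
print(strip_json_suffix(env))
# {'PATH_INFO': '/v1/nodes/1234', 'HAS_JSON_SUFFIX': True}

The real class then forwards the rewritten environ to the wrapped WSGI app, so downstream routing in pecan never sees the .json suffix.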
525
2,542
// ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #pragma once namespace Reliability { namespace FailoverManagerComponent { class FabricUpgradeContext : public UpgradeContext { DENY_COPY(FabricUpgradeContext); public: FabricUpgradeContext( std::wstring const& fmId, FabricUpgrade const& upgrade, std::wstring const & currentDomain); FabricUpgradeContext( std::wstring const& fmId, FabricUpgrade const& upgrade, std::wstring const & currentDomain, int voterCount); virtual BackgroundThreadContextUPtr CreateNewContext() const; // Initialize does the following: // // 1. Adds all the nodes in the current domain to the ready list. // This is because it is possible that there are some node that // do not have any replica hosted on them. If this is the the case // then these nodes are not added during background processing. // However, if there are any exceptions (because of inducing quorum // loss), such nodes are moved to the pending list during Merge. // // 2. Processes the seed nodes to ensure that fabric upgrade does not // cause global lease loss. // // 3. Try to lock all the nodes that the in the current UD. This is to // ensure that there is no node which was locked when the upgrade was // in the previous UD and that could have incorrectly passed the // gatekeeping. bool Initialize(FailoverManager & fm); virtual void Process(FailoverManager const& fm, FailoverUnit const& failoverUnit); virtual void Merge(BackgroundThreadContext const& context); virtual void Complete(FailoverManager & fm, bool isContextCompleted, bool isEnumerationAborted); private: virtual bool IsReplicaSetCheckNeeded() const; virtual bool IsReplicaWaitNeeded(Replica const& replica) const; virtual bool IsReplicaMoveNeeded(Replica const& replica) const; void ProcessSeedNodes(FailoverManager & fm); std::wstring fmId_; FabricUpgrade upgrade_; bool isSafeToUpgrade_; int voterCount_; static std::wstring const SeedNodeTag; }; } }
1,084
679
<reponame>Grosskopf/openoffice<filename>main/sc/source/filter/lotus/op.cxx /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_scfilt.hxx" //------------------------------------------------------------------------ #include <tools/solar.h> #include <rtl/math.hxx> #include <stdio.h> #include <string.h> #include <math.h> #include <ctype.h> #include <stdlib.h> #include "scitems.hxx" #include "patattr.hxx" #include "docpool.hxx" #include <svx/algitem.hxx> #include <editeng/postitem.hxx> #include <editeng/udlnitem.hxx> #include <editeng/wghtitem.hxx> #include "cell.hxx" #include "rangenam.hxx" #include "document.hxx" #include "postit.hxx" #include "op.h" #include "optab.h" #include "tool.h" #include "decl.h" #include "lotform.hxx" #include "lotrange.hxx" #include "root.hxx" #include "ftools.hxx" #include <vector> #include <map> extern WKTYP eTyp; // -> filter.cxx, aktueller Dateityp extern sal_Bool bEOF; // -> filter.cxx, zeigt Dateiende an extern sal_uInt8 nDefaultFormat; // -> tool.cxx, Default-Zellenformat extern ScDocument* pDoc; // -> filter.cxx, Aufhaenger zum Dokumentzugriff extern CharSet eCharVon; // -> filter.cxx, character set specified static sal_uInt16 nDefWidth = ( sal_uInt16 ) ( TWIPS_PER_CHAR * 10 ); extern std::map<sal_uInt16, ScPatternAttr> aLotusPatternPool; void NI( SvStream& r, sal_uInt16 n ) { r.SeekRel( n ); } void OP_BOF( SvStream& r, sal_uInt16 /*n*/ ) { r.SeekRel( 2 ); // Versionsnummer ueberlesen } void OP_EOF( SvStream& /*r*/, sal_uInt16 /*n*/ ) { bEOF = sal_True; } void OP_Integer( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt8 nFormat; sal_uInt16 nCol, nRow; SCTAB nTab = 0; sal_Int16 nValue; r >> nFormat >> nCol >> nRow >> nValue; if (ValidColRow( static_cast<SCCOL>(nCol), nRow)) { ScValueCell* pZelle = new ScValueCell( ( double ) nValue ); pDoc->PutCell( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, pZelle, ( sal_Bool ) sal_True ); // 0 Stellen nach'm Komma! 
SetFormat( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, nFormat, 0 ); } } void OP_Number( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt8 nFormat; sal_uInt16 nCol, nRow; SCTAB nTab = 0; double fValue; r >> nFormat >> nCol >> nRow >> fValue; if (ValidColRow( static_cast<SCCOL>(nCol), nRow)) { fValue = ::rtl::math::round( fValue, 15 ); ScValueCell* pZelle = new ScValueCell( fValue ); pDoc->PutCell( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, pZelle, ( sal_Bool ) sal_True ); SetFormat( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, nFormat, nDezFloat ); } } void OP_Label( SvStream& r, sal_uInt16 n ) { sal_uInt8 nFormat; sal_uInt16 nCol, nRow; SCTAB nTab = 0; r >> nFormat >> nCol >> nRow; n -= (n > 5) ? 5 : n; sal_Char* pText = new sal_Char[n + 1]; r.Read( pText, n ); pText[n] = 0; if (ValidColRow( static_cast<SCCOL>(nCol), nRow)) { nFormat &= 0x80; // Bit 7 belassen nFormat |= 0x75; // protected egal, special-text gesetzt PutFormString( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, pText ); SetFormat( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, nFormat, nDezStd ); } delete [] pText; } void OP_Formula( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt8 nFormat; sal_uInt16 nCol, nRow, nFormulaSize; SCTAB nTab = 0; r >> nFormat >> nCol >> nRow; r.SeekRel( 8 ); // Ergebnis ueberspringen r >> nFormulaSize; const ScTokenArray* pErg; sal_Int32 nBytesLeft = nFormulaSize; ScAddress aAddress( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab ); LotusToSc aConv( r, pLotusRoot->eCharsetQ, sal_False ); aConv.Reset( aAddress ); aConv.Convert( pErg, nBytesLeft ); if (ValidColRow( static_cast<SCCOL>(nCol), nRow)) { ScFormulaCell* pZelle = new ScFormulaCell( pLotusRoot->pDoc, aAddress, pErg ); pZelle->AddRecalcMode( RECALCMODE_ONLOAD_ONCE ); pDoc->PutCell( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, pZelle, ( sal_Bool ) sal_True ); // nFormat = Standard -> Nachkommastellen wie Float SetFormat( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), nTab, nFormat, nDezFloat ); } } void OP_ColumnWidth( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt16 nCol, nBreite; sal_uInt8 nWidthSpaces; SCTAB nTab = 0; r >> nCol >> nWidthSpaces; if (ValidCol( static_cast<SCCOL>(nCol))) { if( nWidthSpaces ) // Annahme: 10cpi-Zeichensatz nBreite = ( sal_uInt16 ) ( TWIPS_PER_CHAR * nWidthSpaces ); else { pDoc->SetColHidden(static_cast<SCCOL>(nCol), static_cast<SCCOL>(nCol), 0, true); nBreite = nDefWidth; } pDoc->SetColWidth( static_cast<SCCOL> (nCol), nTab, nBreite ); } } void OP_NamedRange( SvStream& r, sal_uInt16 /*n*/ ) { // POST: waren Koordinaten ungueltig, wird nicht gespeichert sal_uInt16 nColSt, nRowSt, nColEnd, nRowEnd; sal_Char cPuffer[ 16+1 ]; r.Read( cPuffer, 16 ); cPuffer[ 16 ] = 0; r >> nColSt >> nRowSt >> nColEnd >> nRowEnd; if (ValidColRow( static_cast<SCCOL>(nColSt), nRowSt) && ValidColRow( static_cast<SCCOL>(nColEnd), nRowEnd)) { LotusRange* pRange; if( nColSt == nColEnd && nRowSt == nRowEnd ) pRange = new LotusRange( static_cast<SCCOL> (nColSt), static_cast<SCROW> (nRowSt) ); else pRange = new LotusRange( static_cast<SCCOL> (nColSt), static_cast<SCROW> (nRowSt), static_cast<SCCOL> (nColEnd), static_cast<SCROW> (nRowEnd) ); sal_Char cBuf[sizeof(cPuffer)+1]; if( isdigit( *cPuffer ) ) { // erstes Zeichen im Namen eine Zahl -> 'A' vor Namen setzen cBuf[0] = 'A'; strcpy( cBuf + 1, cPuffer ); // #100211# - checked } else strcpy( cBuf, cPuffer ); // #100211# - checked String aTmp( cBuf, pLotusRoot->eCharsetQ ); 
ScfTools::ConvertToScDefinedName( aTmp ); pLotusRoot->pRangeNames->Append( pRange, aTmp ); } } void OP_SymphNamedRange( SvStream& r, sal_uInt16 /*n*/ ) { // POST: waren Koordinaten ungueltig, wird nicht gespeichert sal_uInt16 nColSt, nRowSt, nColEnd, nRowEnd; sal_uInt8 nType; sal_Char cPuffer[ 16+1 ]; r.Read( cPuffer, 16 ); cPuffer[ 16 ] = 0; r >> nColSt >> nRowSt >> nColEnd >> nRowEnd >> nType; if (ValidColRow( static_cast<SCCOL>(nColSt), nRowSt) && ValidColRow( static_cast<SCCOL>(nColEnd), nRowEnd)) { LotusRange* pRange; if( nType ) pRange = new LotusRange( static_cast<SCCOL> (nColSt), static_cast<SCROW> (nRowSt) ); else pRange = new LotusRange( static_cast<SCCOL> (nColSt), static_cast<SCROW> (nRowSt), static_cast<SCCOL> (nColEnd), static_cast<SCROW> (nRowEnd) ); sal_Char cBuf[sizeof(cPuffer)+1]; if( isdigit( *cPuffer ) ) { // erstes Zeichen im Namen eine Zahl -> 'A' vor Namen setzen cBuf[0] = 'A'; strcpy( cBuf + 1, cPuffer ); // #100211# - checked } else strcpy( cBuf, cPuffer ); // #100211# - checked String aTmp( cBuf, pLotusRoot->eCharsetQ ); ScfTools::ConvertToScDefinedName( aTmp ); pLotusRoot->pRangeNames->Append( pRange, aTmp ); } } void OP_Footer( SvStream& r, sal_uInt16 n ) { r.SeekRel( n ); } void OP_Header( SvStream& r, sal_uInt16 n ) { r.SeekRel( n ); } void OP_Margins( SvStream& r, sal_uInt16 n ) { r.SeekRel( n ); } void OP_HiddenCols( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt16 nByte, nBit; SCCOL nCount; sal_uInt8 nAkt; nCount = 0; for( nByte = 0 ; nByte < 32 ; nByte++ ) // 32 Bytes mit ... { r >> nAkt; for( nBit = 0 ; nBit < 8 ; nBit++ ) // ...jeweils 8 Bits = 256 Bits { if( nAkt & 0x01 ) // unterstes Bit gesetzt? // -> Hidden Col pDoc->SetColHidden(nCount, nCount, 0, true); nCount++; nAkt = nAkt / 2; // der Naechste bitte... } } } void OP_Window1( SvStream& r, sal_uInt16 n ) { r.SeekRel( 4 ); // Cursor Pos ueberspringen r >> nDefaultFormat; r.SeekRel( 1 ); // 'unused' ueberspringen r >> nDefWidth; r.SeekRel( n - 8 ); // und den Rest ueberspringen nDefWidth = ( sal_uInt16 ) ( TWIPS_PER_CHAR * nDefWidth ); // statt Defaulteinstellung in SC alle Cols zu Fuss setzen for( SCCOL nCol = 0 ; nCol <= MAXCOL ; nCol++ ) pDoc->SetColWidth( nCol, 0, nDefWidth ); } void OP_Blank( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt16 nCol, nRow; sal_uInt8 nFormat; r >> nFormat >> nCol >> nRow; SetFormat( static_cast<SCCOL> (nCol), static_cast<SCROW> (nRow), 0, nFormat, nDezFloat ); } void OP_BOF123( SvStream& r, sal_uInt16 /*n*/ ) { r.SeekRel( 26 ); } void OP_EOF123( SvStream& /*r*/, sal_uInt16 /*n*/ ) { bEOF = sal_True; } void OP_Label123( SvStream& r, sal_uInt16 n ) { sal_uInt8 nTab, nCol; sal_uInt16 nRow; r >> nRow >> nTab >> nCol; n -= (n > 4) ? 
4 : n; sal_Char* pText = new sal_Char[n + 1]; r.Read( pText, n ); pText[ n ] = 0; PutFormString( static_cast<SCCOL>(nCol), static_cast<SCROW>(nRow), static_cast<SCTAB>(nTab), pText ); delete []pText; } void OP_Number123( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt8 nCol,nTab; sal_uInt16 nRow; sal_uInt32 nValue; r >> nRow >> nTab >> nCol >> nValue; if (ValidColRow( static_cast<SCCOL>(nCol), nRow) && nTab < pDoc->GetMaxTableNumber()) { double fValue = Snum32ToDouble( nValue ); ScValueCell *pCell = new ScValueCell( fValue ); pDoc->PutCell( static_cast<SCCOL>(nCol), static_cast<SCROW>(nRow), static_cast<SCTAB>(nTab), pCell, (sal_Bool) sal_True ); } } void OP_Formula123( SvStream& r, sal_uInt16 n ) { sal_uInt8 nCol,nTab; sal_uInt16 nRow; r >> nRow >> nTab >> nCol; r.SeekRel( 8 ); // Result- jump over const ScTokenArray* pErg; sal_Int32 nBytesLeft = (n > 12) ? n - 12 : 0; ScAddress aAddress( nCol, nRow, nTab ); LotusToSc aConv( r, pLotusRoot->eCharsetQ, sal_True ); aConv.Reset( aAddress ); aConv.Convert( pErg, nBytesLeft ); if (ValidColRow( static_cast<SCCOL>(nCol), nRow) && nTab < pDoc->GetMaxTableNumber()) { ScFormulaCell* pCell = new ScFormulaCell( pLotusRoot->pDoc, aAddress, pErg ); pCell->AddRecalcMode( RECALCMODE_ONLOAD_ONCE ); pDoc->PutCell( static_cast<SCCOL>(nCol), static_cast<SCROW>(nRow), static_cast<SCTAB>(nTab), pCell, (sal_Bool) sal_True ); } } void OP_IEEENumber123( SvStream& r, sal_uInt16 /*n*/ ) { sal_uInt8 nCol,nTab; sal_uInt16 nRow; double dValue; r >> nRow >> nTab >> nCol >> dValue; if (ValidColRow( static_cast<SCCOL>(nCol), nRow) && nTab < pDoc->GetMaxTableNumber()) { ScValueCell *pCell = new ScValueCell(dValue); pDoc->PutCell( static_cast<SCCOL>(nCol), static_cast<SCROW>(nRow), static_cast<SCTAB>(nTab), pCell, (sal_Bool) sal_True ); } } void OP_Note123( SvStream& r, sal_uInt16 n) { sal_uInt8 nTab, nCol; sal_uInt16 nRow; r >> nRow >> nTab >> nCol; n -= (n > 4) ? 4 : n; sal_Char* pText = new sal_Char[n + 1]; r.Read( pText, n ); pText[ n ] = 0; String aNoteText(pText,pLotusRoot->eCharsetQ); delete [] pText; ScAddress aPos( static_cast<SCCOL>(nCol), static_cast<SCROW>(nRow), static_cast<SCTAB>(nTab) ); ScNoteUtil::CreateNoteFromString( *pDoc, aPos, aNoteText, false, false ); } void OP_HorAlign123( sal_uInt8 nAlignPattern, SfxItemSet& rPatternItemSet ) { // pre: Pattern is stored in the last 3 bites of the 21st byte // post: Appropriate Horizontal Alignement is set in rPattern according to the bit pattern. // // LEFT:001, RIGHT:010, CENTER:011, JUSTIFY:110, // LEFT-Text/RIGHT-NUMBER:100, DEFAULT:000 nAlignPattern = ( nAlignPattern & 0x07); switch (nAlignPattern) { case 1: rPatternItemSet.Put( SvxHorJustifyItem( SVX_HOR_JUSTIFY_LEFT, ATTR_HOR_JUSTIFY ) ); break; case 2: rPatternItemSet.Put( SvxHorJustifyItem( SVX_HOR_JUSTIFY_RIGHT, ATTR_HOR_JUSTIFY ) ); break; case 3: rPatternItemSet.Put( SvxHorJustifyItem( SVX_HOR_JUSTIFY_CENTER, ATTR_HOR_JUSTIFY) ); break; case 4: rPatternItemSet.Put( SvxHorJustifyItem( SVX_HOR_JUSTIFY_STANDARD, ATTR_HOR_JUSTIFY ) ); break; case 6: rPatternItemSet.Put( SvxHorJustifyItem( SVX_HOR_JUSTIFY_BLOCK, ATTR_HOR_JUSTIFY ) ); break; default: rPatternItemSet.Put( SvxHorJustifyItem( SVX_HOR_JUSTIFY_STANDARD, ATTR_HOR_JUSTIFY ) ); break; } } void OP_VerAlign123( sal_uInt8 nAlignPattern,SfxItemSet& rPatternItemSet ) { // pre: Pattern is stored in the last 3 bites of the 22nd byte // post: Appropriate Verticle Alignement is set in rPattern according to the bit pattern. 
// // TOP:001, MIDDLE:010, DOWN:100, DEFAULT:000 nAlignPattern = ( nAlignPattern & 0x07); switch (nAlignPattern) { case 0: rPatternItemSet.Put( SvxVerJustifyItem(SVX_VER_JUSTIFY_STANDARD, ATTR_VER_JUSTIFY) ); break; case 1: rPatternItemSet.Put( SvxVerJustifyItem(SVX_VER_JUSTIFY_TOP, ATTR_VER_JUSTIFY) ); break; case 2: rPatternItemSet.Put( SvxVerJustifyItem(SVX_VER_JUSTIFY_CENTER, ATTR_VER_JUSTIFY) ); break; case 4: rPatternItemSet.Put( SvxVerJustifyItem(SVX_VER_JUSTIFY_BOTTOM, ATTR_VER_JUSTIFY) ); break; default: rPatternItemSet.Put( SvxVerJustifyItem(SVX_VER_JUSTIFY_STANDARD, ATTR_VER_JUSTIFY) ); break; } } void OP_CreatePattern123( SvStream& r, sal_uInt16 n) { sal_uInt16 nCode,nPatternId; ScPatternAttr aPattern(pDoc->GetPool()); SfxItemSet& rItemSet = aPattern.GetItemSet(); r >> nCode; n -= (n > 2) ? 2 : n; if ( nCode == 0x0fd2 ) { r >> nPatternId; sal_uInt8 Hor_Align, Ver_Align, temp; sal_Bool bIsBold,bIsUnderLine,bIsItalics; r.SeekRel(12); // Read 17th Byte r >> temp; bIsBold = (temp & 0x01); bIsItalics = (temp & 0x02); bIsUnderLine = (temp & 0x04); if ( bIsBold ) rItemSet.Put( SvxWeightItem(WEIGHT_BOLD,ATTR_FONT_WEIGHT) ); if ( bIsItalics ) rItemSet.Put( SvxPostureItem(ITALIC_NORMAL, ATTR_FONT_POSTURE ) ); if ( bIsUnderLine ) rItemSet.Put( SvxUnderlineItem( UNDERLINE_SINGLE, ATTR_FONT_UNDERLINE ) ); r.SeekRel(3); // Read 21st Byte r >> Hor_Align; OP_HorAlign123( Hor_Align, rItemSet ); r >> Ver_Align; OP_VerAlign123( Ver_Align, rItemSet ); aLotusPatternPool.insert( std::map<sal_uInt16, ScPatternAttr>::value_type( nPatternId, aPattern ) ); n -= (n > 20) ? 20 : n; } r.SeekRel(n); } void OP_SheetName123( SvStream& rStream, sal_uInt16 nLength ) { if (nLength <= 4) { rStream.SeekRel(nLength); return; } // B0 36 [sheet number (2 bytes?)] [sheet name (null terminated char array)] sal_uInt16 nDummy; rStream >> nDummy; // ignore the first 2 bytes (B0 36). 
rStream >> nDummy; SCTAB nSheetNum = static_cast<SCTAB>(nDummy); pDoc->MakeTable(nSheetNum); ::std::vector<sal_Char> sSheetName; sSheetName.reserve(nLength-4); for (sal_uInt16 i = 4; i < nLength; ++i) { sal_Char c; rStream >> c; sSheetName.push_back(c); } if (!sSheetName.empty()) { String aName(&sSheetName[0], eCharVon); pDoc->RenameTab(nSheetNum, aName); } } void OP_ApplyPatternArea123( SvStream& rStream ) { sal_uInt16 nOpcode, nLength; sal_uInt16 nCol = 0, nColCount = 0, nRow = 0, nRowCount = 0, nTab = 0, nData, nTabCount = 0, nLevel = 0; do { rStream >> nOpcode >> nLength; switch ( nOpcode ) { case ROW_FORMAT_MARKER: nLevel++; break; case COL_FORMAT_MARKER: nLevel--; if( nLevel == 1 ) { nTab = nTab + nTabCount; nCol = 0; nColCount = 0; nRow = 0; nRowCount = 0; } break; case LOTUS_FORMAT_INDEX: if( nLength >= 2 ) { rStream >> nData; rStream.SeekRel( nLength - 2 ); if( nLevel == 1 ) nTabCount = nData; else if( nLevel == 2 ) { nCol = nCol + nColCount; nColCount = nData; if ( nCol > 0xff ) // 256 is the max col size supported by 123 nCol = 0; } else if( nLevel == 3 ) { nRow = nRow + nRowCount; nRowCount = nData; if ( nRow > 0x1fff ) // 8192 is the max row size supported by 123 nRow = 0; } } else rStream.SeekRel( nLength ); break; case LOTUS_FORMAT_INFO: if( nLength >= 2 ) { rStream >> nData; rStream.SeekRel( nLength - 2 ); std::map<sal_uInt16, ScPatternAttr>::iterator loc = aLotusPatternPool.find( nData ); // #126338# apparently, files with invalid index occur in the wild -> don't crash then if ( loc != aLotusPatternPool.end() ) for( int i = 0; i < nTabCount; i++) { pDoc->ApplyPatternAreaTab( nCol, nRow, nCol + nColCount - 1, nRow + nRowCount - 1, static_cast< SCTAB >( nTab + i ), loc->second ); } } else rStream.SeekRel( nLength ); break; default: rStream.SeekRel( nLength ); break; } } while( nLevel && !rStream.IsEof() ); aLotusPatternPool.clear(); }
9,678
1,444
<reponame>amc8391/mage package mage.cards.l; import mage.MageInt; import mage.Mana; import mage.abilities.condition.Condition; import mage.abilities.condition.common.PermanentsOnTheBattlefieldCondition; import mage.abilities.costs.common.TapSourceCost; import mage.abilities.decorator.ConditionalManaEffect; import mage.abilities.effects.mana.BasicManaEffect; import mage.abilities.mana.SimpleManaAbility; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.ComparisonType; import mage.constants.SubType; import mage.constants.Zone; import mage.filter.StaticFilters; import java.util.UUID; /** * @author TheElk801 */ public final class LeafkinDruid extends CardImpl { private static final Condition condition = new PermanentsOnTheBattlefieldCondition( StaticFilters.FILTER_CONTROLLED_CREATURE, ComparisonType.MORE_THAN, 3 ); public LeafkinDruid(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{1}{G}"); this.subtype.add(SubType.ELEMENTAL); this.subtype.add(SubType.DRUID); this.power = new MageInt(0); this.toughness = new MageInt(3); // {T}: Add {G}. If you control four or more creatures, add {G}{G} instead. this.addAbility(new SimpleManaAbility( Zone.BATTLEFIELD, new ConditionalManaEffect( new BasicManaEffect(Mana.GreenMana(2)), new BasicManaEffect(Mana.GreenMana(1)), condition, "Add {G}. If you control " + "four or more creatures, add {G}{G} instead." ), new TapSourceCost() )); } private LeafkinDruid(final LeafkinDruid card) { super(card); } @Override public LeafkinDruid copy() { return new LeafkinDruid(this); } }
796
487
/* Copyright 2013-present Barefoot Networks, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #include "config.h" #if HAVE_LIBGC #include <gc/gc_cpp.h> #define NOGC_ARGS (NoGC, 0, 0) #else #define NOGC_ARGS #endif /* HAVE_LIBGC */ #include "log.h" #include <string.h> #include <iostream> #include <fstream> #include <sstream> #include <iomanip> #include <memory> #include <string> #include <unordered_map> #include <vector> #ifdef MULTITHREAD #include <mutex> #endif // MULTITHREAD namespace Log { namespace Detail { int verbosity = 0; int maximumLogLevel = 0; // The time at which logging was initialized; used so that log messages can have // relative rather than absolute timestamps. static uint64_t initTime = 0; struct LevelAndOutput { int level = -1; std::ostream *out = nullptr; }; // The first level cache for fileLogLevel() - the most recent result returned. static const char* mostRecentFile = nullptr; static LevelAndOutput *mostRecentInfo = nullptr; // The second level cache for fileLogLevel(), mapping filenames to log levels. static std::unordered_map<const void*, LevelAndOutput> logLevelCache; // All log levels manually specified by the user. static std::vector<std::string> debugSpecs; // Each logfile will only be opened once, and will close when we exit. static std::unordered_map<std::string, std::unique_ptr<std::ostream>> logfiles; static std::vector<void (*)(void)> invalidateCallbacks; int OutputLogPrefix::ostream_xalloc = -1; void OutputLogPrefix::setup_ostream_xalloc(std::ostream &out) { if (ostream_xalloc < 0) { #ifdef MULTITHREAD static std::mutex lock; std::lock_guard<std::mutex> acquire(lock); if (ostream_xalloc < 0) #endif // MULTITHREAD ostream_xalloc = out.xalloc(); } } #ifdef MULTITHREAD struct OutputLogPrefix::lock_t { int refcnt = 1; std::mutex theMutex; void lock() { theMutex.lock(); } void unlock() { theMutex.unlock(); } static void cleanup(std::ios_base::event event, std::ios_base &out, int index) { if (event == std::ios_base::erase_event) { auto *p = static_cast<lock_t *>(out.pword(index)); if (p && --p->refcnt <= 0) delete p; } else if (event == std::ios_base::copyfmt_event) { auto *p = static_cast<lock_t *>(out.pword(index)); if (p) p->refcnt++; } } }; #endif // MULTITHREAD OutputLogPrefix::~OutputLogPrefix() { #ifdef MULTITHREAD if (lock) lock->unlock(); #endif // MULTITHREAD } void OutputLogPrefix::indent(std::ostream &out) { setup_ostream_xalloc(out); if (int pfx = out.iword(ostream_xalloc)) out << std::setw(pfx) << ':'; out << indent_t::getindent(out); } std::ostream& operator<<(std::ostream& out, const OutputLogPrefix& pfx) { std::stringstream tmp; #ifdef CLOCK_MONOTONIC if (LOGGING(2)) { struct timespec ts; clock_gettime(CLOCK_MONOTONIC, &ts); uint64_t t = ts.tv_sec*1000000000UL + ts.tv_nsec - Log::Detail::initTime; t /= 1000000UL; // millisec tmp << t/1000 << '.' << std::setw(3) << std::setfill('0') << t%1000 << ':' << std::setfill(' '); } #endif if (LOGGING(1)) { const char *s = strrchr(pfx.fn, '/'); const char *e = strrchr(pfx.fn, '.'); s = s ? 
s + 1 : pfx.fn; if (e && e > s) tmp.write(s, e-s); else tmp << s; tmp << ':' << pfx.level << ':'; } pfx.setup_ostream_xalloc(out); #ifdef MULTITHREAD if (!(pfx.lock = static_cast<OutputLogPrefix::lock_t *>(out.pword(pfx.ostream_xalloc)))) { static std::mutex lock; std::lock_guard<std::mutex> acquire(lock); if (!(pfx.lock = static_cast<OutputLogPrefix::lock_t *>(out.pword(pfx.ostream_xalloc)))) { out.pword(pfx.ostream_xalloc) = pfx.lock = new NOGC_ARGS OutputLogPrefix::lock_t; out.register_callback(OutputLogPrefix::lock_t::cleanup, pfx.ostream_xalloc); } } pfx.lock->lock(); #endif // MULTITHREAD if (tmp.str().size() > 0) { out.iword(OutputLogPrefix::ostream_xalloc) = tmp.str().size(); out << tmp.str(); } out << indent_t::getindent(out); return out; } static bool match(const char *pattern, const char *name) { const char *pend = pattern + strcspn(pattern, ",:"); const char *pbackup = 0; while (1) { while (pattern < pend && *pattern == *name) { pattern++; name++; } if (pattern == pend) { if (!strcmp(name, ".cpp") || !strcmp(name, ".h")) return true; return *name == 0; } if (*pattern == '[') { bool negate = false; if (pattern[1] == '^') { negate = true; ++pattern; } while ((*++pattern != *name || pattern[1] == '-') && *pattern != ']' && *pattern) { if (pattern[1] == '-' && pattern[2] != ']') { if (*name >= pattern[0] && *name <= pattern[2]) break; pattern += 2; } } if ((*pattern == ']' || !*pattern) ^ negate) return false; while (*pattern && *pattern++ != ']') continue; if (pattern > pend) pend = pattern + strcspn(pattern, ",:"); name++; continue; } if (!pbackup && *pattern != '*') return false; while (*pattern == '*') { ++pattern; pbackup = nullptr; } if (pattern == pend) return true; // FIXME -- does not work for * followed by [ -- matches a literal [ instead. while (*name && *name != *pattern) { if (pbackup && *name == *pbackup) { pattern = pbackup; break; } name++; } if (!*name) return false; pbackup = pattern; } } const char *uncachedFileLogSpec(const char* file) { if (auto* startOfFilename = strrchr(file, '/')) file = startOfFilename + 1; for (auto& spec : debugSpecs) for (auto* pattern = spec.c_str(); pattern; pattern = strchr(pattern, ',')) { while (*pattern == ',') pattern++; if (match(pattern, file)) if (auto* level = strchr(pattern, ':')) return level + 1; } return nullptr; } int uncachedFileLogLevel(const char* file) { if (auto spec = uncachedFileLogSpec(file)) return atoi(spec); // If there's no matching spec, compute a default from the global verbosity level, // except for THIS file if (!strcmp(file, __FILE__)) return 0; return verbosity > 0 ? verbosity - 1 : 0; } LevelAndOutput *cachedFileLogInfo(const char* file) { #ifdef MULTITHREAD static std::mutex lock; std::lock_guard<std::mutex> acquire(lock); #endif // MULTITHREAD // There are two layers of caching here. First, we cache the most recent // result we returned, to minimize expensive lookups in tight loops. if (mostRecentFile == file) return mostRecentInfo; // Second, we look up @file in a hash table mapping from pointers to log // info. We expect to hit in this cache virtually all the time. mostRecentFile = file; return mostRecentInfo = &logLevelCache[file]; } int fileLogLevel(const char* file) { auto *info = cachedFileLogInfo(file); if (info->level == -1) { // This is the slow path. We have to walk @debugSpecs to see if there are any // specs that match @file. There's a race here in that two threads could do this // at the same time, but they should get the same result. 
info->level = uncachedFileLogLevel(file); } return info->level; } std::ostream &uncachedFileLogOutput(const char* file) { if (auto spec = uncachedFileLogSpec(file)) { while (isdigit(*spec)) ++spec; if (*spec == '>') { std::ios_base::openmode mode = std::ios_base::out; if (*++spec == '>') { mode |= std::ios_base::app; ++spec; } const char *end = strchr(spec, ','); if (!end) end = spec + strlen(spec); std::string logname(spec, end-spec); if (!logfiles.count(logname)) { // FIXME: can't emplace a unique_ptr in some versions of gcc -- need // explicit reset call. logfiles[logname].reset(new std::ofstream(logname, mode)); } return *logfiles.at(logname); } } return std::clog; } std::ostream &fileLogOutput(const char* file) { auto *info = cachedFileLogInfo(file); if (!info->out) { #ifdef MULTITHREAD static std::mutex lock; std::lock_guard<std::mutex> acquire(lock); #endif // MULTITHREAD if (!info->out) { info->out = &uncachedFileLogOutput(file); } } return *info->out; } void invalidateCaches(int possibleNewMaxLogLevel) { mostRecentFile = nullptr; mostRecentInfo = nullptr; logLevelCache.clear(); maximumLogLevel = std::max(maximumLogLevel, possibleNewMaxLogLevel); for (auto fn : invalidateCallbacks) fn(); } void addInvalidateCallback(void (*fn)(void)) { invalidateCallbacks.push_back(fn); } } // namespace Detail void addDebugSpec(const char* spec) { #ifdef CLOCK_MONOTONIC if (!Detail::initTime) { struct timespec ts; clock_gettime(CLOCK_MONOTONIC, &ts); Detail::initTime = ts.tv_sec*1000000000UL + ts.tv_nsec; } #endif // Validate @spec. bool ok = false; long maxLogLevelInSpec = 0; for (auto* pattern = strchr(spec, ':'); pattern; pattern = strchr(pattern, ':')) { ok = true; long level = strtol(pattern + 1, const_cast<char**>(&pattern), 10); if (*pattern && *pattern != ',' && *pattern != '>') { ok = false; break; } maxLogLevelInSpec = std::max(maxLogLevelInSpec, level); } if (!ok) { std::cerr << "Invalid debug trace spec '" << spec << "'" << std::endl; return; } #ifdef MULTITHREAD static std::mutex lock; std::lock_guard<std::mutex> acquire(lock); #endif // MULTITHREAD Detail::debugSpecs.push_back(spec); Detail::invalidateCaches(maxLogLevelInSpec); } void increaseVerbosity() { #ifdef MULTITHREAD static std::mutex lock; std::lock_guard<std::mutex> acquire(lock); #endif // MULTITHREAD #ifdef CLOCK_MONOTONIC if (!Detail::initTime) { struct timespec ts; clock_gettime(CLOCK_MONOTONIC, &ts); Detail::initTime = ts.tv_sec*1000000000UL + ts.tv_nsec; } #endif Detail::verbosity++; Detail::invalidateCaches(Detail::verbosity - 1); } } // namespace Log
4,712
903
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.analysis.stempel; import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.util.Locale; import org.egothor.stemmer.Diff; import org.egothor.stemmer.Trie; /** * Stemmer class is a convenient facade for other stemmer-related classes. The core stemming * algorithm and its implementation is taken verbatim from the Egothor project ( <a * href="http://www.egothor.org">www.egothor.org </a>). * * <p>Even though the stemmer tables supplied in the distribution package are built for Polish * language, there is nothing language-specific here. */ public class StempelStemmer { private Trie stemmer = null; private StringBuilder buffer = new StringBuilder(); /** * Create a Stemmer using selected stemmer table * * @param stemmerTable stemmer table. */ public StempelStemmer(InputStream stemmerTable) throws IOException { this(load(stemmerTable)); } /** * Create a Stemmer using pre-loaded stemmer table * * @param stemmer pre-loaded stemmer table */ public StempelStemmer(Trie stemmer) { this.stemmer = stemmer; } /** Load a stemmer table from an inputstream. */ public static Trie load(InputStream stemmerTable) throws IOException { DataInputStream in = null; try { in = new DataInputStream(new BufferedInputStream(stemmerTable)); String method = in.readUTF().toUpperCase(Locale.ROOT); if (method.indexOf('M') < 0) { return new org.egothor.stemmer.Trie(in); } else { return new org.egothor.stemmer.MultiTrie2(in); } } finally { in.close(); } } /** * Stem a word. * * @param word input word to be stemmed. * @return stemmed word, or null if the stem could not be generated. */ public StringBuilder stem(CharSequence word) { CharSequence cmd = stemmer.getLastOnPath(word); if (cmd == null) return null; buffer.setLength(0); buffer.append(word); Diff.apply(buffer, cmd); if (buffer.length() > 0) return buffer; else return null; } }
930
5,169
<gh_stars>1000+
{
  "name": "RRTagController",
  "version": "0.1.4",
  "summary": "RRTagController allows user to select tag and create new one.",
  "homepage": "https://github.com/alexruperez/RRTagController",
  "license": {
    "type": "MIT",
    "file": "LICENSE"
  },
  "authors": {
    "<NAME>": "<EMAIL>",
    "<NAME>": "<EMAIL>"
  },
  "source": {
    "git": "https://github.com/alexruperez/RRTagController.git",
    "tag": "0.1.4"
  },
  "social_media_url": "https://twitter.com/alexruperez",
  "module_name": "RRTagController",
  "platforms": {
    "ios": "9.0"
  },
  "source_files": "source/*.{h,swift}",
  "frameworks": [
    "Foundation",
    "UIKit"
  ]
}
295
2,151
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "content/browser/media/audible_metrics.h" #include <utility> #include "base/metrics/histogram_macros.h" #include "base/time/default_tick_clock.h" namespace content { AudibleMetrics::AudibleMetrics() : max_concurrent_audible_web_contents_in_session_(0), clock_(base::DefaultTickClock::GetInstance()) {} AudibleMetrics::~AudibleMetrics() { } void AudibleMetrics::UpdateAudibleWebContentsState( const WebContents* web_contents, bool audible) { bool found = audible_web_contents_.find(web_contents) != audible_web_contents_.end(); if (found == audible) return; if (audible) AddAudibleWebContents(web_contents); else RemoveAudibleWebContents(web_contents); } void AudibleMetrics::SetClockForTest(const base::TickClock* test_clock) { clock_ = test_clock; } void AudibleMetrics::AddAudibleWebContents(const WebContents* web_contents) { UMA_HISTOGRAM_CUSTOM_COUNTS( "Media.Audible.ConcurrentTabsWhenStarting", audible_web_contents_.size(), 1, 10, 11); audible_web_contents_.insert(web_contents); if (audible_web_contents_.size() > 1 && concurrent_web_contents_start_time_.is_null()) { concurrent_web_contents_start_time_ = clock_->NowTicks(); } if (audible_web_contents_.size() > max_concurrent_audible_web_contents_in_session_) { max_concurrent_audible_web_contents_in_session_ = audible_web_contents_.size(); UMA_HISTOGRAM_CUSTOM_COUNTS( "Media.Audible.MaxConcurrentTabsInSession", max_concurrent_audible_web_contents_in_session_, 1, 10, 11); } } void AudibleMetrics::RemoveAudibleWebContents(const WebContents* web_contents) { audible_web_contents_.erase(web_contents); if (audible_web_contents_.size() <= 1 && !concurrent_web_contents_start_time_.is_null()) { base::TimeDelta concurrent_total_time = clock_->NowTicks() - concurrent_web_contents_start_time_; concurrent_web_contents_start_time_ = base::TimeTicks(); UMA_HISTOGRAM_LONG_TIMES("Media.Audible.ConcurrentTabsTime", concurrent_total_time); } } } // namespace content
890
839
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cxf.tools.wsdlto.frontend.jaxws.wsdl11; import java.io.StringReader; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.apache.cxf.catalog.OASISCatalogManager; import org.apache.cxf.catalog.OASISCatalogManagerHelper; import org.apache.cxf.resource.ExtendedURIResolver; import org.apache.cxf.staxutils.StaxUtils; public class CustomizedWSDLLocator implements javax.wsdl.xml.WSDLLocator { private String wsdlUrl; private ExtendedURIResolver resolver; private String baseUri; private String importedUri; private OASISCatalogManager catalogResolver; private Map<String, Element> elementMap; private String latestImportURI; private Map<String, String> resolvedMap = new HashMap<>(); private boolean resolveFromMap; public CustomizedWSDLLocator(String wsdlUrl, Map<String, Element> map) { this.wsdlUrl = wsdlUrl; this.baseUri = this.wsdlUrl; resolver = new ExtendedURIResolver(); elementMap = map; } public void setCatalogResolver(final OASISCatalogManager cr) { this.catalogResolver = cr; } private InputSource resolve(final String target, final String base) { try { String resolvedLocation = new OASISCatalogManagerHelper().resolve(catalogResolver, target, base); if (resolvedLocation == null) { return this.resolver.resolve(target, base); } resolvedMap.put(target, resolvedLocation); return this.resolver.resolve(resolvedLocation, base); } catch (Exception e) { throw new RuntimeException("Catalog resolve failed: ", e); } } public InputSource getBaseInputSource() { if (elementMap.get(baseUri) != null) { Element ele = elementMap.get(baseUri); String content = StaxUtils.toString(ele); InputSource ins = new InputSource(new StringReader(content)); ins.setSystemId(baseUri); return ins; } InputSource result = resolve(baseUri, null); baseUri = resolver.getURI(); return result; } public String getBaseURI() { return baseUri; } public String getLatestImportURI() { if (this.resolveFromMap) { return this.latestImportURI; } return resolver.getLatestImportURI(); } public InputSource getImportInputSource(String parent, String importLocation) { baseUri = parent; importedUri = importLocation; try { URI importURI = new URI(importLocation); if (!importURI.isAbsolute()) { URI parentURI = new URI(parent); importURI = parentURI.resolve(importURI); } if (elementMap.get(importURI.toString()) != null) { Element ele = elementMap.get(importURI.toString()); String content = StaxUtils.toString(ele); InputSource ins = new InputSource(new StringReader(content)); ins.setSystemId(importURI.toString()); this.resolveFromMap = true; this.latestImportURI = importURI.toString(); return ins; } } catch (URISyntaxException e) { throw new 
RuntimeException("Failed to Resolve " + importLocation, e); } resolveFromMap = false; return resolve(importedUri, baseUri); } public void close() { resolver.close(); } public Map<String, String> getResolvedMap() { return resolvedMap; } }
1,777
5,156
/* -*- Mode: C; tab-width: 8; c-basic-offset: 2; indent-tabs-mode: nil; -*- */ #include "util.h" static const char file_name[] = "rr-test-blacklist-file_name"; int main(void) { int fd; int dirfd; char buf[PATH_MAX]; open(file_name, O_CREAT | O_WRONLY, 0700); #ifdef SYS_open fd = syscall(SYS_open, file_name, O_RDONLY); test_assert(fd < 0); test_assert(errno == ENOENT); #endif fd = syscall(SYS_openat, AT_FDCWD, file_name, O_RDONLY); test_assert(fd < 0); test_assert(errno == ENOENT); getcwd(buf, PATH_MAX); dirfd = syscall(SYS_openat, -1, buf, O_PATH); test_assert(dirfd >= 0); fd = syscall(SYS_openat, dirfd, "rr-test-blacklist-file_name", O_RDONLY); test_assert(fd < 0); test_assert(errno == ENOENT); unlink(file_name); atomic_puts("EXIT-SUCCESS"); return 0; }
367
2,996
// Copyright 2021 The Terasology Foundation // SPDX-License-Identifier: Apache-2.0 package org.terasology.engine.rendering.nui.animation; import org.terasology.joml.geom.Rectanglei; /** * A no-op animation system: nothing is actually animated, and the listener registered via * {@link #onEnd(Runnable)} is invoked immediately when {@link #triggerToPrev()} or {@link #triggerToNext()} is called. */ public class MenuAnimationSystemStub implements MenuAnimationSystem { private Runnable listener = () -> { }; @Override public void triggerFromPrev() { // ignore } @Override public void triggerToPrev() { listener.run(); } @Override public void triggerFromNext() { // ignore } @Override public void triggerToNext() { listener.run(); } @Override public void onEnd(Runnable newListener) { this.listener = newListener; } @Override public void update(float delta) { // ignore } @Override public void stop() { // ignore } @Override public void skip() { // ignore } @Override public Rectanglei animateRegion(Rectanglei rc) { return rc; } }
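A small sketch of how the stub behaves. The demo class name and printed messages are illustrative, and the four-int org.terasology.joml.geom.Rectanglei constructor is assumed.

package org.terasology.engine.rendering.nui.animation;

import org.terasology.joml.geom.Rectanglei;

/** Illustrative only; assumes the four-int Rectanglei constructor. */
public final class MenuAnimationStubDemo {
    public static void main(String[] args) {
        MenuAnimationSystem animations = new MenuAnimationSystemStub();

        // No real animation happens: the callback fires as soon as a "to" transition is triggered.
        animations.onEnd(() -> System.out.println("switch screens now"));
        animations.triggerToNext(); // prints immediately

        // Regions are passed through unchanged.
        Rectanglei region = new Rectanglei(0, 0, 100, 50);
        System.out.println(animations.animateRegion(region)); // same rectangle back
    }
}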
429
1,457
{ "extends": [ "eslint:recommended", "plugin:prettier/recommended", "prettier/@typescript-eslint" ], "plugins": [ "html" ], "env": { "browser": true, "commonjs": true, "node": true, "es6": true, "jest": true }, "parserOptions": { "ecmaVersion": 2018, "sourceType": "module" }, "rules": { "no-console": "off", "strict": ["error", "global"], "curly": "warn", "prettier/prettier": "error" }, "parser": "babel-eslint", // Taken from create-react-app's eslint configuration "overrides": [ { "files": ["**/*.ts?(x)"], "parser": "@typescript-eslint/parser", "parserOptions": { "ecmaVersion": 2018, "sourceType": "module", "ecmaFeatures": { "jsx": true }, // typescript-eslint specific options "warnOnUnsupportedTypeScriptVersion": true }, "plugins": ["@typescript-eslint"], // If adding a typescript-eslint version of an existing ESLint rule, // make sure to disable the ESLint rule here. "rules": { // TypeScript"s `noFallthroughCasesInSwitch` option is more robust "default-case": "off", // "tsc" already handles this (https://github.com/typescript-eslint/typescript-eslint/issues/291) "no-dupe-class-members": "off", // "tsc" already handles this (https://github.com/typescript-eslint/typescript-eslint/issues/477) "no-undef": "off", // Add TypeScript specific rules (and turn off ESLint equivalents) "@typescript-eslint/consistent-type-assertions": "warn", "no-array-constructor": "off", "@typescript-eslint/no-array-constructor": "warn", "no-use-before-define": "off", "@typescript-eslint/no-use-before-define": [ "warn", { "functions": false, "classes": false, "variables": false, "typedefs": false } ], "no-unused-expressions": "off", "@typescript-eslint/no-unused-expressions": [ "error", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true } ], "no-unused-vars": "off", "@typescript-eslint/no-unused-vars": [ "warn", { "args": "none", "ignoreRestSiblings": true } ], "no-useless-constructor": "off", "@typescript-eslint/no-useless-constructor": "warn" } } ] }
1,261
12,278
# Copyright <NAME> 2007. Distributed under the Boost # Software License, Version 1.0. (See accompanying # file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) value = 42
63
969
<gh_stars>100-1000 package app.musicplayer.view; import java.net.URL; import java.util.Optional; import java.util.ResourceBundle; import java.util.concurrent.CountDownLatch; import app.musicplayer.model.*; import app.musicplayer.util.Search; import com.melloware.jintellitype.IntellitypeListener; import com.melloware.jintellitype.JIntellitype; import app.musicplayer.MusicPlayer; import app.musicplayer.util.CustomSliderSkin; import app.musicplayer.util.Resources; import app.musicplayer.util.SubView; import javafx.animation.Animation; import javafx.animation.Animation.Status; import javafx.animation.Interpolator; import javafx.animation.Transition; import javafx.application.Platform; import javafx.collections.ObservableList; import javafx.concurrent.Task; import javafx.css.PseudoClass; import javafx.event.Event; import javafx.fxml.FXML; import javafx.fxml.FXMLLoader; import javafx.fxml.Initializable; import javafx.geometry.Insets; import javafx.geometry.Pos; import javafx.scene.Node; import javafx.scene.Scene; import javafx.scene.SnapshotParameters; import javafx.scene.control.*; import javafx.scene.image.ImageView; import javafx.scene.input.ClipboardContent; import javafx.scene.input.Dragboard; import javafx.scene.input.KeyCode; import javafx.scene.input.TransferMode; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.Region; import javafx.scene.layout.VBox; import javafx.scene.transform.Transform; import javafx.stage.Stage; import javafx.stage.StageStyle; import javafx.util.Duration; public class MainController implements Initializable, IntellitypeListener { private boolean isSideBarExpanded = true; private double expandedWidth = 250; private double collapsedWidth = 50; private double expandedHeight = 50; private double collapsedHeight = 0; private double searchExpanded = 180; private double searchCollapsed = 0; private SubView subViewController; private CustomSliderSkin sliderSkin; private Stage volumePopup; private Stage searchPopup; private VolumePopupController volumePopupController; private CountDownLatch viewLoadedLatch; @FXML private ScrollPane subViewRoot; @FXML private VBox sideBar; @FXML private VBox playlistBox; @FXML private ImageView nowPlayingArtwork; @FXML private Label nowPlayingTitle; @FXML private Label nowPlayingArtist; @FXML private Slider timeSlider; @FXML private Region frontSliderTrack; @FXML private Label timePassed; @FXML private Label timeRemaining; @FXML private HBox letterBox; @FXML private Separator letterSeparator; @FXML private Pane playButton; @FXML private Pane pauseButton; @FXML private Pane loopButton; @FXML private Pane shuffleButton; @FXML private HBox controlBox; @FXML private TextField searchBox; @Override public void initialize(URL location, ResourceBundle resources) { resetLatch(); controlBox.getChildren().remove(2); frontSliderTrack.prefWidthProperty().bind(timeSlider.widthProperty().multiply(timeSlider.valueProperty().divide(timeSlider.maxProperty()))); sliderSkin = new CustomSliderSkin(timeSlider); timeSlider.setSkin(sliderSkin); createVolumePopup(); createSearchPopup(); PseudoClass active = PseudoClass.getPseudoClass("active"); loopButton.setOnMouseClicked(x -> { sideBar.requestFocus(); MusicPlayer.toggleLoop(); loopButton.pseudoClassStateChanged(active, MusicPlayer.isLoopActive()); }); shuffleButton.setOnMouseClicked(x -> { sideBar.requestFocus(); MusicPlayer.toggleShuffle(); shuffleButton.pseudoClassStateChanged(active, MusicPlayer.isShuffleActive()); }); timeSlider.setFocusTraversable(false); 
timeSlider.valueChangingProperty().addListener( (slider, wasChanging, isChanging) -> { if (wasChanging) { int seconds = (int) Math.round(timeSlider.getValue() / 4.0); timeSlider.setValue(seconds * 4); MusicPlayer.seek(seconds); } } ); timeSlider.valueProperty().addListener( (slider, oldValue, newValue) -> { double previous = oldValue.doubleValue(); double current = newValue.doubleValue(); if (!timeSlider.isValueChanging() && current != previous + 1 && !isTimeSliderPressed()) { int seconds = (int) Math.round(current / 4.0); timeSlider.setValue(seconds * 4); MusicPlayer.seek(seconds); } } ); unloadLettersAnimation.setOnFinished(x -> { letterBox.setPrefHeight(0); letterSeparator.setPrefHeight(0); }); searchBox.textProperty().addListener((observable, oldText, newText) -> { String text = newText.trim(); if (text.equals("")) { if (searchPopup.isShowing() && !searchHideAnimation.getStatus().equals(Status.RUNNING)) { searchHideAnimation.play(); } } else { Search.search(text); } }); Search.hasResultsProperty().addListener((observable, hadResults, hasResults) -> { if (hasResults) { SearchResult result = Search.getResult(); Platform.runLater(() -> { showSearchResults(result); MusicPlayer.getStage().toFront(); }); int height = 0; int artists = result.getArtistResults().size(); int albums = result.getAlbumResults().size(); int songs = result.getSongResults().size(); if (artists > 0) height += (artists * 50) + 50; if (albums > 0) height += (albums * 50) + 50; if (songs > 0) height += (songs * 50) + 50; if (height == 0) height = 50; searchPopup.setHeight(height); } }); MusicPlayer.getStage().xProperty().addListener((observable, oldValue, newValue) -> { if (searchPopup.isShowing() && !searchHideAnimation.getStatus().equals(Status.RUNNING)) { searchHideAnimation.play(); } }); MusicPlayer.getStage().yProperty().addListener((observable, oldValue, newValue) -> { if (searchPopup.isShowing() && !searchHideAnimation.getStatus().equals(Status.RUNNING)) { searchHideAnimation.play(); } }); for (Node node : letterBox.getChildren()) { Label label = (Label)node; label.prefWidthProperty().bind(letterBox.widthProperty().subtract(50).divide(26).subtract(1)); } updateNowPlayingButton(); initializeTimeSlider(); initializeTimeLabels(); initializePlaylists(); // Register media keys on Windows if (System.getProperty("os.name").toUpperCase().contains("WINDOWS")) { JIntellitype.getInstance().addIntellitypeListener(this); } // Loads the default view: artists. 
loadView("artists"); } @Override public void onIntellitype(int key) { // Skip/play/pause/back using Windows media keys Platform.runLater(() -> { switch (key) { case JIntellitype.APPCOMMAND_MEDIA_NEXTTRACK: skip(); break; case JIntellitype.APPCOMMAND_MEDIA_PLAY_PAUSE: playPause(); break; case JIntellitype.APPCOMMAND_MEDIA_PREVIOUSTRACK: back(); break; } }); } void resetLatch() { viewLoadedLatch = new CountDownLatch(1); } CountDownLatch getLatch() { return viewLoadedLatch; } private void createVolumePopup() { try { Stage stage = MusicPlayer.getStage(); FXMLLoader loader = new FXMLLoader(this.getClass().getResource(Resources.FXML + "VolumePopup.fxml")); HBox view = loader.load(); volumePopupController = loader.getController(); Stage popup = new Stage(); popup.setScene(new Scene(view)); popup.initStyle(StageStyle.UNDECORATED); popup.initOwner(stage); popup.setX(stage.getWidth() - 270); popup.setY(stage.getHeight() - 120); popup.focusedProperty().addListener((x, wasFocused, isFocused) -> { if (wasFocused && !isFocused) { volumeHideAnimation.play(); } }); volumeHideAnimation.setOnFinished(x -> popup.hide()); popup.show(); popup.hide(); volumePopup = popup; } catch (Exception ex) { ex.printStackTrace(); } } private void createSearchPopup() { try { Stage stage = MusicPlayer.getStage(); VBox view = new VBox(); view.getStylesheets().add(Resources.CSS + "MainStyle.css"); view.getStyleClass().add("searchPopup"); Stage popup = new Stage(); popup.setScene(new Scene(view)); popup.initStyle(StageStyle.UNDECORATED); popup.initOwner(stage); searchHideAnimation.setOnFinished(x -> popup.hide()); popup.show(); popup.hide(); searchPopup = popup; } catch (Exception ex) { ex.printStackTrace(); } } public void updateNowPlayingButton() { Song song = MusicPlayer.getNowPlaying(); if (song != null) { nowPlayingTitle.setText(song.getTitle()); nowPlayingArtist.setText(song.getArtist()); nowPlayingArtwork.setImage(song.getArtwork()); } else { nowPlayingTitle.setText(""); nowPlayingArtist.setText(""); nowPlayingArtwork.setImage(null); } } public void initializeTimeSlider() { Song song = MusicPlayer.getNowPlaying(); if (song != null) { timeSlider.setMin(0); timeSlider.setMax(song.getLengthInSeconds() * 4); timeSlider.setValue(0); timeSlider.setBlockIncrement(1); } else { timeSlider.setMin(0); timeSlider.setMax(1); timeSlider.setValue(0); timeSlider.setBlockIncrement(1); } } public void updateTimeSlider() { timeSlider.increment(); } public void initializeTimeLabels() { Song song = MusicPlayer.getNowPlaying(); if (song != null) { timePassed.setText("0:00"); timeRemaining.setText(song.getLength()); } else { timePassed.setText(""); timeRemaining.setText(""); } } public void updateTimeLabels() { timePassed.setText(MusicPlayer.getTimePassed()); timeRemaining.setText(MusicPlayer.getTimeRemaining()); } @SuppressWarnings("unchecked") private void initializePlaylists() { for (Playlist playlist : Library.getPlaylists()) { try { FXMLLoader loader = new FXMLLoader(this.getClass().getResource(Resources.FXML + "PlaylistCell.fxml")); HBox cell = loader.load(); Label label = (Label) cell.getChildren().get(1); label.setText(playlist.getTitle()); cell.setOnMouseClicked(x -> { selectView(x); ((PlaylistsController) subViewController).selectPlaylist(playlist); }); cell.setOnDragDetected(event -> { PseudoClass pressed = PseudoClass.getPseudoClass("pressed"); cell.pseudoClassStateChanged(pressed, false); Dragboard db = cell.startDragAndDrop(TransferMode.ANY); ClipboardContent content = new ClipboardContent(); content.putString("Playlist"); 
db.setContent(content); MusicPlayer.setDraggedItem(playlist); db.setDragView(cell.snapshot(null, null), 125, 25); event.consume(); }); PseudoClass hover = PseudoClass.getPseudoClass("hover"); cell.setOnDragEntered(event -> { if (!(playlist instanceof MostPlayedPlaylist) && !(playlist instanceof RecentlyPlayedPlaylist) && event.getGestureSource() != cell && event.getDragboard().hasString()) { cell.pseudoClassStateChanged(hover, true); //cell.getStyleClass().setAll("sideBarItemSelected"); } }); cell.setOnDragExited(event -> { if (!(playlist instanceof MostPlayedPlaylist) && !(playlist instanceof RecentlyPlayedPlaylist) && event.getGestureSource() != cell && event.getDragboard().hasString()) { cell.pseudoClassStateChanged(hover, false); //cell.getStyleClass().setAll("sideBarItem"); } }); cell.setOnDragOver(event -> { if (!(playlist instanceof MostPlayedPlaylist) && !(playlist instanceof RecentlyPlayedPlaylist) && event.getGestureSource() != cell && event.getDragboard().hasString()) { event.acceptTransferModes(TransferMode.ANY); } event.consume(); }); cell.setOnDragDropped(event -> { String dragString = event.getDragboard().getString(); new Thread(() -> { switch (dragString) { case "Artist": Artist artist = (Artist) MusicPlayer.getDraggedItem(); for (Album album : artist.getAlbums()) { for (Song song : album.getSongs()) { if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } } } break; case "Album": Album album = (Album) MusicPlayer.getDraggedItem(); for (Song song : album.getSongs()) { if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } } break; case "Playlist": Playlist list = (Playlist) MusicPlayer.getDraggedItem(); for (Song song : list.getSongs()) { if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } } break; case "Song": Song song = (Song) MusicPlayer.getDraggedItem(); if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } break; case "List": ObservableList<Song> songs = (ObservableList<Song>) MusicPlayer.getDraggedItem(); for (Song s : songs) { if (!playlist.getSongs().contains(s)) { playlist.addSong(s); } } break; } }).start(); event.consume(); }); playlistBox.getChildren().add(cell); } catch (Exception e) { e.printStackTrace(); } } } @FXML private void selectView(Event e) { HBox eventSource = ((HBox)e.getSource()); eventSource.requestFocus(); Optional<Node> previous = sideBar.getChildren().stream() .filter(x -> x.getStyleClass().get(0).equals("sideBarItemSelected")).findFirst(); if (previous.isPresent()) { HBox previousItem = (HBox) previous.get(); previousItem.getStyleClass().setAll("sideBarItem"); } else { previous = playlistBox.getChildren().stream() .filter(x -> x.getStyleClass().get(0).equals("sideBarItemSelected")).findFirst(); if (previous.isPresent()) { HBox previousItem = (HBox) previous.get(); previousItem.getStyleClass().setAll("sideBarItem"); } } ObservableList<String> styles = eventSource.getStyleClass(); if (styles.get(0).equals("sideBarItem")) { styles.setAll("sideBarItemSelected"); loadView(eventSource.getId()); } else if (styles.get(0).equals("bottomBarItem")) { loadView(eventSource.getId()); } } @SuppressWarnings("unchecked") @FXML private void newPlaylist() { if (!newPlaylistAnimation.getStatus().equals(Status.RUNNING)) { try { FXMLLoader loader = new FXMLLoader(this.getClass().getResource(Resources.FXML + "PlaylistCell.fxml")); HBox cell = loader.load(); Label label = (Label) cell.getChildren().get(1); label.setVisible(false); HBox.setMargin(label, new Insets(0, 0, 0, 0)); TextField textBox = new 
TextField(); textBox.setPrefHeight(30); cell.getChildren().add(textBox); HBox.setMargin(textBox, new Insets(10, 10, 10, 9)); textBox.focusedProperty().addListener((obs, oldValue, newValue) -> { if (oldValue && !newValue) { String text = textBox.getText().equals("") ? "New Playlist" : textBox.getText(); text = checkDuplicatePlaylist(text, 0); label.setText(text); cell.getChildren().remove(textBox); HBox.setMargin(label, new Insets(10, 10, 10, 10)); label.setVisible(true); Library.addPlaylist(text); } }); textBox.setOnKeyPressed(x -> { if (x.getCode() == KeyCode.ENTER) { sideBar.requestFocus(); } }); cell.setOnMouseClicked(x -> { selectView(x); Playlist playlist = Library.getPlaylist(label.getText()); ((PlaylistsController) subViewController).selectPlaylist(playlist); }); cell.setOnDragDetected(event -> { PseudoClass pressed = PseudoClass.getPseudoClass("pressed"); cell.pseudoClassStateChanged(pressed, false); Playlist playlist = Library.getPlaylist(label.getText()); Dragboard db = cell.startDragAndDrop(TransferMode.ANY); ClipboardContent content = new ClipboardContent(); content.putString("Playlist"); db.setContent(content); MusicPlayer.setDraggedItem(playlist); SnapshotParameters sp = new SnapshotParameters(); sp.setTransform(Transform.scale(1.5, 1.5)); db.setDragView(cell.snapshot(sp, null)); event.consume(); }); PseudoClass hover = PseudoClass.getPseudoClass("hover"); cell.setOnDragEntered(event -> { Playlist playlist = Library.getPlaylist(label.getText()); if (!(playlist instanceof MostPlayedPlaylist) && !(playlist instanceof RecentlyPlayedPlaylist) && event.getGestureSource() != cell && event.getDragboard().hasString()) { cell.pseudoClassStateChanged(hover, true); } }); cell.setOnDragExited(event -> { Playlist playlist = Library.getPlaylist(label.getText()); if (!(playlist instanceof MostPlayedPlaylist) && !(playlist instanceof RecentlyPlayedPlaylist) && event.getGestureSource() != cell && event.getDragboard().hasString()) { cell.pseudoClassStateChanged(hover, false); } }); cell.setOnDragOver(event -> { Playlist playlist = Library.getPlaylist(label.getText()); if (!(playlist instanceof MostPlayedPlaylist) && !(playlist instanceof RecentlyPlayedPlaylist) && event.getGestureSource() != cell && event.getDragboard().hasString()) { event.acceptTransferModes(TransferMode.ANY); } event.consume(); }); cell.setOnDragDropped(event -> { Playlist playlist = Library.getPlaylist(label.getText()); String dragString = event.getDragboard().getString(); new Thread(() -> { switch (dragString) { case "Artist": Artist artist = (Artist) MusicPlayer.getDraggedItem(); for (Album album : artist.getAlbums()) { for (Song song : album.getSongs()) { if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } } } break; case "Album": Album album = (Album) MusicPlayer.getDraggedItem(); for (Song song : album.getSongs()) { if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } } break; case "Playlist": Playlist list = (Playlist) MusicPlayer.getDraggedItem(); for (Song song : list.getSongs()) { if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } } break; case "Song": Song song = (Song) MusicPlayer.getDraggedItem(); if (!playlist.getSongs().contains(song)) { playlist.addSong(song); } break; case "List": ObservableList<Song> songs = (ObservableList<Song>) MusicPlayer.getDraggedItem(); for (Song s : songs) { if (!playlist.getSongs().contains(s)) { playlist.addSong(s); } } break; } }).start(); event.consume(); }); cell.setPrefHeight(0); cell.setOpacity(0); 
playlistBox.getChildren().add(1, cell); textBox.requestFocus(); } catch (Exception e) { e.printStackTrace(); } newPlaylistAnimation.play(); } } private String checkDuplicatePlaylist(String text, int i) { for (Playlist playlist : Library.getPlaylists()) { if (playlist.getTitle().equals(text)) { int index = text.lastIndexOf(' ') + 1; if (index != 0) { try { i = Integer.parseInt(text.substring(index)); } catch (Exception ex) { // do nothing } } i++; if (i == 1) { text = checkDuplicatePlaylist(text + " " + i, i); } else { text = checkDuplicatePlaylist(text.substring(0, index) + i, i); } break; } } return text; } public SubView loadView(String viewName) { try { boolean loadLetters; boolean unloadLetters; switch (viewName.toLowerCase()) { case "artists": case "artistsmain": case "albums": case "songs": if (subViewController instanceof ArtistsController || subViewController instanceof ArtistsMainController || subViewController instanceof AlbumsController || subViewController instanceof SongsController) { loadLetters = false; unloadLetters = false; } else { loadLetters = true; unloadLetters = false; } break; default: if (subViewController instanceof ArtistsController || subViewController instanceof ArtistsMainController || subViewController instanceof AlbumsController || subViewController instanceof SongsController) { loadLetters = false; unloadLetters = true; } else { loadLetters = false; unloadLetters = false; } break; } final boolean loadLettersFinal = loadLetters; final boolean unloadLettersFinal = unloadLetters; String fileName = viewName.substring(0, 1).toUpperCase() + viewName.substring(1) + ".fxml"; FXMLLoader loader = new FXMLLoader(this.getClass().getResource(fileName)); Node view = loader.load(); CountDownLatch latch = new CountDownLatch(1); Task<Void> task = new Task<Void>() { @Override protected Void call() throws Exception { Platform.runLater(() -> { Library.getSongs().stream().filter(x -> x.getSelected()).forEach(x -> x.setSelected(false)); subViewRoot.setVisible(false); subViewRoot.setContent(view); subViewRoot.getContent().setOpacity(0); latch.countDown(); }); return null; } }; task.setOnSucceeded(x -> new Thread(() -> { try { latch.await(); } catch (Exception e) { e.printStackTrace(); } Platform.runLater(() -> { subViewRoot.setVisible(true); if (loadLettersFinal) { loadLettersAnimation.play(); } loadViewAnimation.play(); }); }).start()); Thread thread = new Thread(task); unloadViewAnimation.setOnFinished(x -> thread.start()); loadViewAnimation.setOnFinished(x -> viewLoadedLatch.countDown()); if (subViewRoot.getContent() != null) { if (unloadLettersFinal) { unloadLettersAnimation.play(); } unloadViewAnimation.play(); } else { subViewRoot.setContent(view); if (loadLettersFinal) { loadLettersAnimation.play(); } loadViewAnimation.play(); } subViewController = loader.getController(); return subViewController; } catch (Exception ex) { ex.printStackTrace(); return null; } } @FXML private void navigateToCurrentSong() { Optional<Node> previous = sideBar.getChildren().stream() .filter(x -> x.getStyleClass().get(0).equals("sideBarItemSelected")).findFirst(); if (previous.isPresent()) { HBox previousItem = (HBox) previous.get(); previousItem.getStyleClass().setAll("sideBarItem"); } else { previous = playlistBox.getChildren().stream() .filter(x -> x.getStyleClass().get(0).equals("sideBarItemSelected")).findFirst(); if (previous.isPresent()) { HBox previousItem = (HBox) previous.get(); previousItem.getStyleClass().setAll("sideBarItem"); } } 
sideBar.getChildren().get(2).getStyleClass().setAll("sideBarItemSelected"); ArtistsMainController artistsMainController = (ArtistsMainController) loadView("ArtistsMain"); Song song = MusicPlayer.getNowPlaying(); Artist artist = Library.getArtist(song.getArtist()); Album album = artist.getAlbums().stream().filter(x -> x.getTitle().equals(song.getAlbum())).findFirst().get(); artistsMainController.selectArtist(artist); artistsMainController.selectAlbum(album); artistsMainController.selectSong(song); } @FXML private void slideSideBar(Event e) { sideBar.requestFocus(); searchBox.setText(""); if (isSideBarExpanded) { collapseSideBar(); } else { expandSideBar(); } } private void collapseSideBar() { if (expandAnimation.statusProperty().get() == Animation.Status.STOPPED && collapseAnimation.statusProperty().get() == Animation.Status.STOPPED) { collapseAnimation.play(); } } private void expandSideBar() { if (expandAnimation.statusProperty().get() == Animation.Status.STOPPED && collapseAnimation.statusProperty().get() == Animation.Status.STOPPED) { expandAnimation.play(); } } @FXML public void playPause() { sideBar.requestFocus(); if (MusicPlayer.isPlaying()) { MusicPlayer.pause(); } else { MusicPlayer.play(); } } @FXML private void back() { sideBar.requestFocus(); MusicPlayer.back(); } @FXML private void skip() { sideBar.requestFocus(); MusicPlayer.skip(); } @FXML private void letterClicked(Event e) { sideBar.requestFocus(); Label eventSource = ((Label)e.getSource()); char letter = eventSource.getText().charAt(0); subViewController.scroll(letter); } public void volumeClick() { if (!volumePopup.isShowing()) { Stage stage = MusicPlayer.getStage(); volumePopup.setX(stage.getX() + stage.getWidth() - 265); volumePopup.setY(stage.getY() + stage.getHeight() - 115); volumePopup.show(); volumeShowAnimation.play(); } } public void showSearchResults(SearchResult result) { VBox root = (VBox) searchPopup.getScene().getRoot(); ObservableList<Node> list = root.getChildren(); list.clear(); if (result.getArtistResults().size() > 0) { Label header = new Label("Artists"); list.add(header); VBox.setMargin(header, new Insets(10, 10, 10, 10)); result.getArtistResults().forEach(artist -> { HBox cell = new HBox(); cell.setAlignment(Pos.CENTER_LEFT); cell.setPrefWidth(226); cell.setPrefHeight(50); ImageView image = new ImageView(); image.setFitHeight(40); image.setFitWidth(40); image.setImage(artist.getArtistImage()); Label label = new Label(artist.getTitle()); label.setTextOverrun(OverrunStyle.CLIP); label.getStyleClass().setAll("searchLabel"); cell.getChildren().addAll(image, label); HBox.setMargin(image, new Insets(5, 5, 5, 5)); HBox.setMargin(label, new Insets(10, 10, 10, 5)); cell.getStyleClass().add("searchResult"); cell.setOnMouseClicked(event -> { loadView("ArtistsMain"); ArtistsMainController artistsMainController = (ArtistsMainController) loadView("ArtistsMain"); artistsMainController.selectArtist(artist); searchBox.setText(""); sideBar.requestFocus(); }); list.add(cell); }); Separator separator = new Separator(); separator.setPrefWidth(206); list.add(separator); VBox.setMargin(separator, new Insets(10, 10, 0, 10)); } if (result.getAlbumResults().size() > 0) { Label header = new Label("Albums"); list.add(header); VBox.setMargin(header, new Insets(10, 10, 10, 10)); result.getAlbumResults().forEach(album -> { HBox cell = new HBox(); cell.setAlignment(Pos.CENTER_LEFT); cell.setPrefWidth(226); cell.setPrefHeight(50); ImageView image = new ImageView(); image.setFitHeight(40); image.setFitWidth(40); 
image.setImage(album.getArtwork()); Label label = new Label(album.getTitle()); label.setTextOverrun(OverrunStyle.CLIP); label.getStyleClass().setAll("searchLabel"); cell.getChildren().addAll(image, label); HBox.setMargin(image, new Insets(5, 5, 5, 5)); HBox.setMargin(label, new Insets(10, 10, 10, 5)); cell.getStyleClass().add("searchResult"); cell.setOnMouseClicked(event -> { loadView("ArtistsMain"); Artist artist = Library.getArtist(album.getArtist()); ArtistsMainController artistsMainController = (ArtistsMainController) loadView("ArtistsMain"); artistsMainController.selectArtist(artist); artistsMainController.selectAlbum(album); searchBox.setText(""); sideBar.requestFocus(); }); list.add(cell); }); Separator separator = new Separator(); separator.setPrefWidth(206); list.add(separator); VBox.setMargin(separator, new Insets(10, 10, 0, 10)); } if (result.getSongResults().size() > 0) { Label header = new Label("Songs"); list.add(header); VBox.setMargin(header, new Insets(10, 10, 10, 10)); result.getSongResults().forEach(song -> { HBox cell = new HBox(); cell.setAlignment(Pos.CENTER_LEFT); cell.setPrefWidth(226); cell.setPrefHeight(50); Label label = new Label(song.getTitle()); label.setTextOverrun(OverrunStyle.CLIP); label.getStyleClass().setAll("searchLabel"); cell.getChildren().add(label); HBox.setMargin(label, new Insets(10, 10, 10, 10)); cell.getStyleClass().add("searchResult"); cell.setOnMouseClicked(event -> { loadView("ArtistsMain"); Artist artist = Library.getArtist(song.getArtist()); Album album = artist.getAlbums().stream().filter(x -> x.getTitle().equals(song.getAlbum())).findFirst().get(); ArtistsMainController artistsMainController = (ArtistsMainController) loadView("ArtistsMain"); artistsMainController.selectArtist(artist); artistsMainController.selectAlbum(album); artistsMainController.selectSong(song); searchBox.setText(""); sideBar.requestFocus(); }); list.add(cell); }); } if (list.size() == 0) { Label label = new Label("No Results"); list.add(label); VBox.setMargin(label, new Insets(10, 10, 10, 10)); } if (!searchPopup.isShowing()) { Stage stage = MusicPlayer.getStage(); searchPopup.setX(stage.getX() + 18); searchPopup.setY(stage.getY() + 80); searchPopup.show(); searchShowAnimation.play(); } } public Slider getVolumeSlider() { return volumePopupController.getSlider(); } public boolean isTimeSliderPressed() { return sliderSkin.getThumb().isPressed() || sliderSkin.getTrack().isPressed(); } public SubView getSubViewController() { return subViewController; } ScrollPane getScrollPane() { return this.subViewRoot; } VBox getPlaylistBox() { return playlistBox; } public void updatePlayPauseIcon(boolean isPlaying) { controlBox.getChildren().remove(1); if (isPlaying) { controlBox.getChildren().add(1, pauseButton); } else { controlBox.getChildren().add(1, playButton); } } private void setSlideDirection() { isSideBarExpanded = !isSideBarExpanded; } private Animation volumeShowAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { volumePopup.setOpacity(frac); } }; private Animation volumeHideAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { volumePopup.setOpacity(1.0 - frac); } }; private Animation searchShowAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { 
searchPopup.setOpacity(frac); } }; private Animation searchHideAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { searchPopup.setOpacity(1.0 - frac); } }; private Animation collapseAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); setOnFinished(x -> setSlideDirection()); } protected void interpolate(double frac) { double curWidth = collapsedWidth + (expandedWidth - collapsedWidth) * (1.0 - frac); double searchWidth = searchCollapsed + (searchExpanded - searchCollapsed) * (1.0 - frac); sideBar.setPrefWidth(curWidth); searchBox.setPrefWidth(searchWidth); searchBox.setOpacity(1.0 - frac); } }; private Animation expandAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); setOnFinished(x -> setSlideDirection()); } protected void interpolate(double frac) { double curWidth = collapsedWidth + (expandedWidth - collapsedWidth) * (frac); double searchWidth = searchCollapsed + (searchExpanded - searchCollapsed) * (frac); sideBar.setPrefWidth(curWidth); searchBox.setPrefWidth(searchWidth); searchBox.setOpacity(frac); } }; private Animation loadViewAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { subViewRoot.setVvalue(0); double curHeight = collapsedHeight + (expandedHeight - collapsedHeight) * (frac); subViewRoot.getContent().setTranslateY(expandedHeight - curHeight); subViewRoot.getContent().setOpacity(frac); } }; private Animation unloadViewAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { double curHeight = collapsedHeight + (expandedHeight - collapsedHeight) * (1 - frac); subViewRoot.getContent().setTranslateY(expandedHeight - curHeight); subViewRoot.getContent().setOpacity(1 - frac); } }; private Animation loadLettersAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { letterBox.setPrefHeight(50); letterBox.setOpacity(frac); letterSeparator.setPrefHeight(25); letterSeparator.setOpacity(frac); } }; private Animation unloadLettersAnimation = new Transition() { { setCycleDuration(Duration.millis(250)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { letterBox.setOpacity(1.0 - frac); letterSeparator.setOpacity(1.0 - frac); } }; private Animation newPlaylistAnimation = new Transition() { { setCycleDuration(Duration.millis(500)); setInterpolator(Interpolator.EASE_BOTH); } protected void interpolate(double frac) { HBox cell = (HBox) playlistBox.getChildren().get(1); if (frac < 0.5) { cell.setPrefHeight(frac * 100); } else { cell.setPrefHeight(50); cell.setOpacity((frac - 0.5) * 2); } } }; }
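All of the controller's animations above follow one pattern: an anonymous javafx.animation.Transition with a fixed 250 ms cycle, an EASE_BOTH interpolator, and an interpolate(frac) override that maps the 0-to-1 fraction onto the animated property. Below is a stand-alone sketch of that pattern (a simple fade-in); the class and method names are illustrative, not part of MusicPlayer.

import javafx.animation.Interpolator;
import javafx.animation.Transition;
import javafx.scene.Node;
import javafx.util.Duration;

/** Stand-alone illustration of the Transition pattern used by MainController. */
public final class FadeInTransitionDemo {

    /** Builds a 250 ms ease-both fade-in for the given node. */
    public static Transition fadeIn(Node node) {
        return new Transition() {
            {
                // setCycleDuration is protected on Transition, hence the
                // instance-initializer style also used by the controller above.
                setCycleDuration(Duration.millis(250));
                setInterpolator(Interpolator.EASE_BOTH);
            }

            @Override
            protected void interpolate(double frac) {
                // frac runs from 0 to 1 over the cycle; map it straight onto opacity.
                node.setOpacity(frac);
            }
        };
    }
}

A caller builds and runs it the same way the controller does its own animations, for example fadeIn(someNode).play().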
19,313
353
<filename>src/com/mtbs3d/minecrift/gui/framework/GuiSelectOption.java package com.mtbs3d.minecrift.gui.framework; import com.mtbs3d.minecrift.settings.VRSettings; import net.minecraft.client.gui.GuiButton; import net.minecraft.client.gui.GuiScreen; import net.minecraft.client.gui.GuiYesNoCallback; /** * Created by StellaArtois on 2/27/2016. */ public class GuiSelectOption extends BaseGuiSettings implements GuiYesNoCallback, GuiEventEx { private String[] options; private String title = "No title"; private String question = "No question"; protected long errorTextDisplayStart = 0; protected String errorText = ""; public static final int ID_OPTION_SELECTED = 9998; protected final int ERROR_TEXT_TIMEOUT_MS = 4000; public GuiSelectOption(GuiScreen par1GuiScreen, VRSettings par2vrSettings, String title, String question, String[] options) { super(par1GuiScreen, par2vrSettings); this.options = options; this.title = title; this.question = question; } @Override public void initGui() { this.buttonList.clear(); this.initGuiButtons(); } protected void initGuiButtons() { this.buttonList.clear(); for (int i = 0; i < options.length; i++) { this.buttonList.add(new GuiButton(i, this.width / 2 - 100, this.height / 4 + (i * 20) + 12, options[i])); } this.buttonList.add(new GuiButton(ID_GENERIC_DONE, this.width / 2 - 100, this.height / 4 + 130 + 12, "Cancel")); } @Override protected void actionPerformed(GuiButton button) { if (button.enabled) { if (button.id == ID_GENERIC_DONE) { this.mc.displayGuiScreen(this.parentGuiScreen); } else { boolean exit = true; if (this.parentGuiScreen instanceof GuiEventEx) { ((GuiEventEx) this.parentGuiScreen).event(this.ID_OPTION_SELECTED, button.displayString); } } } } @Override public void drawScreen(int mouseX, int mouseY, float partialTicks) { if (this.errorTextDisplayStart != 0) { long currentTime = System.currentTimeMillis(); if (currentTime - this.errorTextDisplayStart > ERROR_TEXT_TIMEOUT_MS) { this.errorText = ""; this.errorTextDisplayStart = 0; } } this.drawDefaultBackground(); this.drawCenteredString(this.fontRendererObj, this.title, this.width / 2, 20, 16777215); this.drawString(this.fontRendererObj, this.question, this.width / 2 - 100, 47, 10526880); this.drawString(this.fontRendererObj, this.errorText, this.width / 2 - 100, 57, 16711680); super.drawScreen(mouseX, mouseY, partialTicks, false); } public void setErrorText(String errorText) { this.errorText = errorText; this.errorTextDisplayStart = System.currentTimeMillis(); } @Override public void confirmClicked(boolean result, int id) { } @Override public boolean event(int id, VRSettings.VrOptions enumm) { return true; } @Override public boolean event(int id, String s) { return true; } }
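A hedged sketch of the other half of the contract above: the parent screen implements GuiEventEx and receives the chosen option's display string through event(GuiSelectOption.ID_OPTION_SELECTED, selection). The class name, option strings, and the settings field are illustrative, and any other members BaseGuiSettings may require are omitted for brevity.

package com.mtbs3d.minecrift.gui.framework;

import com.mtbs3d.minecrift.settings.VRSettings;
import net.minecraft.client.gui.GuiScreen;

/** Illustrative parent screen; omits any other members BaseGuiSettings may require. */
public class GuiChooseProfile extends BaseGuiSettings implements GuiEventEx {
    private static final String[] OPTIONS = {"Standing", "Seated", "Custom"};
    private final VRSettings settings;

    public GuiChooseProfile(GuiScreen parent, VRSettings settings) {
        super(parent, settings);
        this.settings = settings;
    }

    /** Opens the selector; the chosen string comes back through event(...) below. */
    private void openSelector() {
        this.mc.displayGuiScreen(new GuiSelectOption(
                this, this.settings, "Choose a profile", "Which profile do you want?", OPTIONS));
    }

    @Override
    public boolean event(int id, String selection) {
        if (id == GuiSelectOption.ID_OPTION_SELECTED) {
            // React to the chosen option here, e.g. persist it to settings.
        }
        return true;
    }

    @Override
    public boolean event(int id, VRSettings.VrOptions option) {
        return true; // not used by GuiSelectOption
    }
}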
1,442
356
package com.idrv.coach.ui; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.widget.Button; import com.idrv.coach.R; import com.idrv.coach.bean.TeamMember; import com.idrv.coach.bean.event.EventConstant; import com.idrv.coach.data.manager.RxBusManager; import com.idrv.coach.data.model.TeamInviteModel; import com.idrv.coach.ui.adapter.TeamCreateAdapter; import com.idrv.coach.utils.helper.DialogHelper; import com.idrv.coach.utils.helper.UIHelper; import com.zjb.volley.core.exception.NetworkError; import java.util.ArrayList; import java.util.List; import butterknife.ButterKnife; import butterknife.InjectView; import butterknife.OnClick; import rx.Subscription; /** * time:2016/3/24 * description: Invite team members * * @author bigflower */ public class TeamInviteActivity extends BaseActivity<TeamInviteModel> implements TeamCreateAdapter.OnChooseListener { private static final String INTENT_EXTRA_MEMBERS = "teamMembers"; private static final String INTENT_EXTRA_ID = "teamId"; @InjectView(R.id.team_invite_recyclerView) RecyclerView mRecyclerView; @InjectView(R.id.team_invite_bottomBt) Button mBottomBt; private TeamCreateAdapter mAdapter; public static void launch(Context context, ArrayList<TeamMember> list, int teamId) { Intent intent = new Intent(context, TeamInviteActivity.class); intent.putParcelableArrayListExtra(INTENT_EXTRA_MEMBERS, list); intent.putExtra(INTENT_EXTRA_ID, teamId); context.startActivity(intent); } @Override protected int getProgressBg() { return R.color.bg_main; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.act_team_invite); ButterKnife.inject(this); //0. model mViewModel = new TeamInviteModel(); //1. initialize the toolbar initToolBar(); //2. initialize the UI initUi(); } private void initToolBar() { mToolbarLayout.setTitle(R.string.title_invite_mumber); } private void initUi() { // Get the member info List<TeamMember> inviteMembers = getIntent().getParcelableArrayListExtra(INTENT_EXTRA_MEMBERS); mViewModel.teamId = getIntent().getIntExtra(INTENT_EXTRA_ID, -1); try { // Initialize the RecyclerView initRecyclerView(inviteMembers); // Initialize the bottom button mViewModel.gButtonName = getString(R.string.invite); mViewModel.gTotalNumer = inviteMembers.size(); mBottomBt.setText(mViewModel.gButtonName + "(0/" + mViewModel.gTotalNumer + ")"); } catch (Exception e) { UIHelper.shortToast(R.string.error); finish(); } } private void initRecyclerView(List<TeamMember> teamNumberList) { final GridLayoutManager mLayoutManager = new GridLayoutManager(this, 5); mAdapter = new TeamCreateAdapter(this, this); mRecyclerView.setLayoutManager(mLayoutManager); mRecyclerView.setAdapter(mAdapter); mAdapter.setData(mViewModel.listDeduplication(teamNumberList)); } @OnClick(R.id.team_invite_bottomBt) void invite() { String inviteMembers = mAdapter.getChoosedIds(); if (inviteMembers.length() == 0) { UIHelper.shortToast(R.string.pls_invite_someone); return; } mProgressDialog = DialogHelper .create(DialogHelper.TYPE_PROGRESS) .progressText(getString(R.string.dialog_inviting)) .show(); Subscription subscription = mViewModel.inviteMumber(inviteMembers) .subscribe(__ -> onNext(), this::onTeamInfoError); addSubscription(subscription); } private void onNext() { dismissProgressDialog(); UIHelper.shortToast(R.string.invite_success); RxBusManager.post(EventConstant.KEY_TEAM_MUMBER, ""); finish(); } private void onTeamInfoError(Throwable e) { dismissProgressDialog(); if
(null != e) { if (e instanceof NetworkError) { NetworkError error = (NetworkError) e; UIHelper.shortToast(error.getErrorCode().getMessage()); } else { UIHelper.shortToast(R.string.http_error); e.printStackTrace(); } } } @Override public void onChoose(int number) { mBottomBt.setText(mViewModel.gButtonName + "(" + number + "/" + mViewModel.gTotalNumer + ")"); } }
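A short, hypothetical caller for the launch(...) factory above; TeamDetailScreenSnippet, openInvite, and its parameters are illustrative names, not part of the original project.

// Hypothetical caller (all names here are illustrative): opening the invite
// screen once the current members and the team id are known.
public class TeamDetailScreenSnippet {
    void openInvite(android.content.Context context,
                    java.util.ArrayList<com.idrv.coach.bean.TeamMember> currentMembers,
                    int teamId) {
        // Existing members are passed so they can be filtered out of the pick list
        // (see TeamInviteModel.listDeduplication); teamId identifies the target team.
        com.idrv.coach.ui.TeamInviteActivity.launch(context, currentMembers, teamId);
    }
}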
2,052
899
""" Copyright (c) 2019-2020 Uber Technologies, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ __author__ = "<NAME>" from plato.agent.conversational_agent.conversational_agent \ import ConversationalAgent from plato.agent.component.conversational_module \ import ConversationalFrame from plato.agent.component.dialogue_policy.\ reinforcement_learning.reward_function \ import SlotFillingGoalAdvancementReward from plato.utilities.dialogue_episode_recorder import DialogueEpisodeRecorder from plato.dialogue.action import DialogueAct from copy import deepcopy import os import speech_recognition as speech_rec """ ConversationalGenericAgent is a Conversational Agent that is agnostic to its internal modules. It is the most flexible Plato Conversational Agent as it simply needs a list of modules (defined as python classes in the config) and will handle the interaction by chaining those modules. This allows for anything from a single neural-network module to systems that have tens of modules. """ # Audio recording parameters RATE = 16000 CHUNK = int(RATE / 10) # 100ms class ConversationalGenericAgent(ConversationalAgent): """ The ConversationalGenericAgent receives a list of modules in its configuration file, that are chained together serially - i.e. the input to the agent is passed to the first module, the first module's output is passed as input to the second module and so on. Modules are wrapped using ConversationalModules. The input and output passed between modules is wrapped into ConversationalFrames. """ def __init__(self, configuration, agent_id): """ Initialize the internal structures of this agent. :param configuration: a dictionary representing the configuration file :param agent_id: an integer, this agent's id """ self.agent_id = agent_id # Dialogue statistics self.dialogue_episode = 0 self.dialogue_turn = 0 self.num_successful_dialogues = 0 self.num_task_success = 0 self.cumulative_rewards = 0 self.total_dialogue_turns = 0 self.minibatch_length = 250 self.train_interval = 50 self.train_epochs = 10 self.configuration = configuration self.recorder = DialogueEpisodeRecorder() self.SAVE_LOG = True self.SAVE_INTERVAL = 1000 self.MAX_TURNS = 15 self.INTERACTION_MODE = 'simulation' self.USE_GUI = False # This indicates which module controls the state so that we can query # it for dialogue termination (e.g. 
at end_dialogue) self.STATEFUL_MODULE = -1 self.reward_func = SlotFillingGoalAdvancementReward() self.ConversationalModules = [] self.prev_m_out = ConversationalFrame({}) self.goal_generator = None self.agent_goal = None self.agent_role = '' ag_id_str = 'AGENT_' + str(agent_id) if self.configuration: if 'GENERAL' not in self.configuration: raise ValueError('No GENERAL section in config!') if 'AGENT_'+str(agent_id) not in self.configuration: raise ValueError(f'NO AGENT_{agent_id} section in config!') if 'role' in self.configuration[ag_id_str]: self.agent_role = self.configuration[ag_id_str]['role'] # Retrieve agent parameters if 'max_turns' in self.configuration[ag_id_str]: self.MAX_TURNS = self.configuration[ag_id_str][ 'max_turns'] if 'train_interval' in self.configuration[ag_id_str]: self.train_interval = \ self.configuration[ag_id_str]['train_interval'] if 'train_minibatch' in self.configuration[ag_id_str]: self.minibatch_length = \ self.configuration[ag_id_str]['train_minibatch'] if 'train_epochs' in self.configuration[ag_id_str]: self.train_epochs = \ self.configuration[ag_id_str]['train_epochs'] if 'save_interval' in self.configuration[ag_id_str]: self.SAVE_INTERVAL = \ self.configuration[ag_id_str]['save_interval'] if 'interaction_mode' in self.configuration['GENERAL']: self.INTERACTION_MODE = \ self.configuration['GENERAL']['interaction_mode'] if 'use_gui' in self.configuration['GENERAL']: self.USE_GUI = self.configuration['GENERAL']['use_gui'] if 'experience_logs' in self.configuration['GENERAL']: dialogues_path = None if 'path' in self.configuration['GENERAL']['experience_logs']: dialogues_path = \ self.configuration['GENERAL'][ 'experience_logs']['path'] if 'load' in self.configuration['GENERAL']['experience_logs'] \ and bool(self.configuration['GENERAL'] ['experience_logs']['load'] ): if dialogues_path and os.path.isfile(dialogues_path): self.recorder.load(dialogues_path) else: raise FileNotFoundError( 'dialogue Log file %s not found (did you ' 'provide one?)' % dialogues_path) if 'save' in self.configuration['GENERAL']['experience_logs']: self.recorder.set_path(dialogues_path) self.SAVE_LOG = bool( self.configuration['GENERAL'][ 'experience_logs']['save'] ) self.NModules = 0 if 'modules' in self.configuration[ag_id_str]: self.NModules = int( self.configuration[ag_id_str]['modules'] ) if 'stateful_module' in self.configuration[ag_id_str]: self.STATEFUL_MODULE = int( self.configuration[ag_id_str]['stateful_module'] ) # Note: Since we pass settings as a default argument, any # module can access the global args. However, we # add it here too for ease of use. 
self.global_arguments = {'settings': deepcopy(self.configuration)} if 'global_arguments' in self.configuration['GENERAL']: self.global_arguments.update( self.configuration['GENERAL']['global_arguments'] ) # Load the goal generator, if any if 'GOAL_GENERATOR' in self.configuration[ag_id_str]: if 'package' not in \ self.configuration[ag_id_str]['GOAL_GENERATOR']: raise ValueError(f'No package path provided for ' f'goal generator!') elif 'class' not in \ self.configuration[ag_id_str]['GOAL_GENERATOR']: raise ValueError(f'No class name provided for ' f'goal generator!') else: self.goal_generator = self.load_module( self.configuration[ag_id_str][ 'GOAL_GENERATOR']['package'], self.configuration[ag_id_str][ 'GOAL_GENERATOR']['class'], self.global_arguments ) # Load the modules for m in range(self.NModules): if 'MODULE_'+str(m) not in \ self.configuration[ag_id_str]: raise ValueError(f'No MODULE_{m} section in config!') if 'parallel_modules' in self.configuration[ ag_id_str ]['MODULE_' + str(m)]: n_parallel_modules = self.configuration[ ag_id_str][ 'MODULE_' + str(m)]['parallel_modules'] parallel_modules = [] for pm in range(n_parallel_modules): if 'package' not in self.configuration[ ag_id_str ]['MODULE_' + str(m)]['PARALLEL_MODULE_' + str(pm)]: raise ValueError( f'No arguments provided for parallel module ' f'{pm} of module {m}!') package = self.configuration[ ag_id_str ]['MODULE_' + str(m)][ 'PARALLEL_MODULE_' + str(pm)]['package'] if 'class' not in self.configuration[ ag_id_str ]['MODULE_' + str(m)]['PARALLEL_MODULE_' + str(pm)]: raise ValueError( f'No arguments provided for parallel module ' f'{pm} of module {m}!') klass = self.configuration[ ag_id_str ]['MODULE_' + str(m)][ 'PARALLEL_MODULE_' + str(pm)]['class'] # Append global arguments # (add configuration by default) args = deepcopy(self.global_arguments) if 'arguments' in \ self.configuration[ ag_id_str ]['MODULE_' + str(m)][ 'PARALLEL_MODULE_' + str(pm)]: args.update( self.configuration[ ag_id_str ]['MODULE_' + str(m)][ 'PARALLEL_MODULE_' + str(pm)]['arguments']) parallel_modules.append( self.load_module(package, klass, args)) self.ConversationalModules.append( parallel_modules ) else: if 'package' not in self.configuration[ ag_id_str ]['MODULE_' + str(m)]: raise ValueError(f'No arguments provided for module ' f'{m}!') package = self.configuration[ ag_id_str ]['MODULE_' + str(m)]['package'] if 'class' not in self.configuration[ ag_id_str ]['MODULE_' + str(m)]: raise ValueError(f'No arguments provided for module ' f'{m}!') klass = self.configuration[ ag_id_str ]['MODULE_' + str(m)]['class'] # Append global arguments (add configuration by default) args = deepcopy(self.global_arguments) if 'arguments' in \ self.configuration[ ag_id_str ]['MODULE_' + str(m)]: args.update( self.configuration[ 'AGENT_'+str(agent_id) ]['MODULE_'+str(m)]['arguments']) self.ConversationalModules.append( self.load_module(package, klass, args) ) else: raise AttributeError('ConversationalGenericAgent: ' 'No settings (config) provided!') # TODO: Parse config modules I/O and raise error if # any inconsistencies found # Initialize automatic speech recognizer, if necessary self.asr = None if self.INTERACTION_MODE == 'speech' and not self.USE_GUI: self.asr = speech_rec.Recognizer() def __del__(self): """ Do some house-keeping, save the models. 
:return: nothing """ if self.recorder and self.SAVE_LOG: self.recorder.save() for m in self.ConversationalModules: if isinstance(m, list): for sm in m: sm.save() else: m.save() # Dynamically load classes @staticmethod def load_module(package_path, class_name, args): """ Dynamically load the specified class. :param package_path: Path to the package to load :param class_name: Name of the class within the package :param args: arguments to pass when creating the object :return: the instantiated class object """ module = __import__(package_path, fromlist=[class_name]) klass = getattr(module, class_name) return klass(args) def initialize(self): """ Initializes the conversational agent based on settings in the configuration file. :return: Nothing """ self.dialogue_episode = 0 self.dialogue_turn = 0 self.cumulative_rewards = 0 self.agent_goal = None # For each module for m in self.ConversationalModules: if isinstance(m, list): for sm in m: sm.initialize({}) else: # Load and initialize m.initialize({}) def start_dialogue(self, args=None): """ Reset or initialize internal structures at the beginning of the dialogue. May issue first utterance if this agent has the initiative. :param args: :return: """ self.initialize() self.dialogue_turn = 0 if args and 'goal' in args: self.agent_goal = deepcopy(args['goal']) elif self.goal_generator: self.agent_goal = self.goal_generator.generate() print(f'GOAL:\n=====\n{self.agent_goal}') # TODO: Get initial trigger from config if self.INTERACTION_MODE == 'dialogue_acts': self.prev_m_out = \ ConversationalFrame([DialogueAct('hello')]) else: self.prev_m_out = ConversationalFrame('hello') self.continue_dialogue(args) return {'input_utterance': None, 'output_raw': self.prev_m_out.content, 'output_dacts': '', 'goal': self.agent_goal } def continue_dialogue(self, args=None): """ Perform one dialogue turn :param args: input to this agent :return: output of this agent """ utterance = None if self.INTERACTION_MODE == 'text' and not self.USE_GUI: utterance = input('USER > ') self.prev_m_out = ConversationalFrame(utterance) elif self.INTERACTION_MODE == 'speech' and not self.USE_GUI: # Listen for input from the microphone with speech_rec.Microphone() as source: print('(listening...)') audio = self.asr.listen(source, phrase_time_limit=3) try: # This uses the default key utterance = self.asr.recognize_google(audio) print("Google ASR: " + utterance) self.prev_m_out = ConversationalFrame(utterance) except speech_rec.UnknownValueError: print("Google ASR did not understand you") except speech_rec.RequestError as e: print("Google ASR request error: {0}".format(e)) elif args and 'input' in args: self.prev_m_out = ConversationalFrame(args['input']) for m in self.ConversationalModules: # If executing parallel sub-modules if isinstance(m, list): idx = 0 prev_m_out = deepcopy(self.prev_m_out) self.prev_m_out.content = {} for sm in m: # WARNING! Module compatibility cannot be guaranteed here! sm.generic_receive_input(prev_m_out) sm_out = sm.generic_generate_output(prev_m_out) if not isinstance(sm_out, ConversationalFrame): sm_out = ConversationalFrame(sm_out) self.prev_m_out.content['sm'+str(idx)] = sm_out.content idx += 1 else: # WARNING! Module compatibility cannot be guaranteed here! 
m.generic_receive_input(self.prev_m_out) self.prev_m_out = m.generic_generate_output(self.prev_m_out) # Make sure prev_m_out is a Conversational Frame if not isinstance(self.prev_m_out, ConversationalFrame): self.prev_m_out = ConversationalFrame(self.prev_m_out) # DEBUG: if isinstance(self.prev_m_out.content, str): print(f'(DEBUG) {self.agent_role}> ' f'{str(self.prev_m_out.content)}') self.dialogue_turn += 1 # In text or speech based interactions, return the input utterance as # it may be used for statistics or to show it to a GUI. return {'input_utterance': utterance, 'output_raw': self.prev_m_out.content, 'output_dacts': '', 'goal': self.agent_goal } def end_dialogue(self): """ Perform final dialogue turn. Save models if applicable. :return: """ if self.dialogue_episode % self.train_interval == 0: for m in self.ConversationalModules: if isinstance(m, list): for sm in m: sm.train(self.recorder.dialogues) else: m.train(self.recorder.dialogues) if self.dialogue_episode % self.SAVE_INTERVAL == 0: for m in self.ConversationalModules: if isinstance(m, list): for sm in m: sm.save() else: m.save() # Keep track of dialogue statistics self.dialogue_episode += 1 if self.dialogue_turn > 0: self.total_dialogue_turns += self.dialogue_turn # Count successful dialogues _, _, obj_succ = self.reward_func.calculate( self.get_state(), [], # TODO: In case of single agents, we actually need the user's goal goal=self.agent_goal, agent_role=self.agent_role) self.num_successful_dialogues += 1 if obj_succ else 0 def terminated(self): """ Check if this agent is at a terminal state. :return: True or False """ return self.ConversationalModules[ self.STATEFUL_MODULE ].at_terminal_state() or \ self.dialogue_turn > self.MAX_TURNS def set_goal(self, goal): """ Set or update this agent's goal. :param goal: a Goal :return: nothing """ self.agent_goal = goal def get_goal(self): """ Get this agent's goal. :return: a Goal """ return self.agent_goal def get_state(self): return self.ConversationalModules[ self.STATEFUL_MODULE ].get_state()
10,475
432
<gh_stars>100-1000 package com.revengemission.sso.oauth2.server.token; import com.revengemission.sso.oauth2.server.config.CachesEnum; import com.revengemission.sso.oauth2.server.domain.OAuth2Exception; import com.revengemission.sso.oauth2.server.domain.OauthClient; import com.revengemission.sso.oauth2.server.domain.UserInfo; import io.jsonwebtoken.Jwts; import org.springframework.cache.Cache; import org.springframework.cache.CacheManager; import org.springframework.http.HttpStatus; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.core.Authentication; import java.security.KeyPair; import java.time.LocalDateTime; import java.time.ZoneId; import java.util.Date; import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.stream.Collectors; public class AuthorizationCodeTokenGranter implements TokenGranter { private static final String GRANT_TYPE = "authorization_code"; private final AuthenticationManager authenticationManager; KeyPair keyPair; String issuer; CacheManager cacheManager; public AuthorizationCodeTokenGranter(AuthenticationManager authenticationManager, CacheManager cacheManager, KeyPair keyPair, String issuer) { this.authenticationManager = authenticationManager; this.cacheManager = cacheManager; this.keyPair = keyPair; this.issuer = issuer; } @Override public Map<String, Object> grant(OauthClient client, String grantType, Map<String, String> parameters) { Map<String, Object> result = new HashMap<>(); result.put("status", 0); String authorizationCode = parameters.get("code"); String redirectUri = parameters.get("redirect_uri"); String clientId = parameters.get("client_id"); String scope = parameters.get("scope"); if (authorizationCode == null) { throw new OAuth2Exception("An authorization code must be supplied.", HttpStatus.BAD_REQUEST, "invalid_request"); } Cache.ValueWrapper storedCode = cacheManager.getCache(CachesEnum.Oauth2AuthorizationCodeCache.name()).get(authorizationCode); if (storedCode != null) { Authentication userAuth = (Authentication) (storedCode.get()); UserInfo userInfo = (UserInfo) userAuth.getPrincipal(); Date now = new Date(); Date tokenExpiration = Date.from(LocalDateTime.now().plusSeconds(client.getAccessTokenValidity()).atZone(ZoneId.systemDefault()).toInstant()); Date refreshTokenExpiration = Date.from(LocalDateTime.now().plusSeconds(client.getRefreshTokenValidity()).atZone(ZoneId.systemDefault()).toInstant()); String tokenId = UUID.randomUUID().toString(); String accessToken = Jwts.builder() .setHeaderParam("alg", "HS256") .setHeaderParam("typ", "JWT") .claim("accountOpenCode", userInfo.getAccountOpenCode()) .setIssuer(issuer) .setSubject(userInfo.getUsername()) .setAudience(clientId) .claim("roles", userInfo.getAuthorities().stream().map(e -> e.getAuthority()).collect(Collectors.toList())) .setExpiration(tokenExpiration) .setNotBefore(now) .setIssuedAt(now) .setId(tokenId) .signWith(keyPair.getPrivate()) .compact(); String refreshToken = Jwts.builder() .setHeaderParam("alg", "HS256") .setHeaderParam("typ", "JWT") .claim("accountOpenCode", userInfo.getAccountOpenCode()) .claim("jti", tokenId) .setIssuer(issuer) .setSubject(userInfo.getUsername()) .setAudience(clientId) .claim("roles", userInfo.getAuthorities().stream().map(e -> e.getAuthority()).collect(Collectors.toList())) .setExpiration(refreshTokenExpiration) .setNotBefore(now) .setIssuedAt(now) .setId(UUID.randomUUID().toString()) .signWith(keyPair.getPrivate()) .compact(); 
            cacheManager.getCache(CachesEnum.Oauth2AuthorizationCodeCache.name()).evictIfPresent(authorizationCode);
            result.put("access_token", accessToken);
            result.put("token_type", "bearer");
            result.put("refresh_token", refreshToken);
            result.put("expires_in", client.getAccessTokenValidity() - 1);
            result.put("accountOpenCode", userInfo.getAccountOpenCode());
            result.put("scope", scope);
            result.put("jti", tokenId);
            result.put("status", 1);
            return result;
        } else {
            // The code was supplied but not found in the cache, so it is invalid or has expired.
            throw new OAuth2Exception("Invalid or expired authorization code.", HttpStatus.BAD_REQUEST, "invalid_grant");
        }
    }
}
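A minimal usage sketch of the granter above. The AuthenticationManager, CacheManager and OauthClient are assumed to come from the surrounding Spring context, and the issuer, redirect URI, client id and scope are placeholder values, not part of the original repository.

import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.util.HashMap;
import java.util.Map;

import org.springframework.cache.CacheManager;
import org.springframework.security.authentication.AuthenticationManager;

import com.revengemission.sso.oauth2.server.domain.OauthClient;
import com.revengemission.sso.oauth2.server.token.AuthorizationCodeTokenGranter;

public class AuthorizationCodeGrantSketch {

    static Map<String, Object> exchangeCode(AuthenticationManager authenticationManager,
                                            CacheManager cacheManager,
                                            OauthClient client,
                                            String clientId,
                                            String code) throws Exception {
        // A throw-away RSA key pair for signing; a real deployment would load a persistent key.
        KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair();

        AuthorizationCodeTokenGranter granter = new AuthorizationCodeTokenGranter(
                authenticationManager, cacheManager, keyPair, "https://sso.example.com");

        Map<String, String> parameters = new HashMap<>();
        parameters.put("code", code);
        parameters.put("redirect_uri", "https://app.example.com/callback");
        parameters.put("client_id", clientId);
        parameters.put("scope", "openid");

        // On success the returned map carries access_token, refresh_token, expires_in, jti, ...
        return granter.grant(client, "authorization_code", parameters);
    }
}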
<filename>270 Closest Binary Search Tree Value.py """ Premium Question """ import sys __author__ = 'Daniel' class TreeNode(object): def __init__(self, x): self.val = x self.left = None self.right = None class Solution(object): def closestValue(self, root, target): """ Divide the problem into 2 parts: 1. find the value just smaller than target 2. find the value just larger than target :type root: TreeNode :type target: float :rtype: int """ lo = [-sys.float_info.max] self.find(root, target, lo, True) hi = [sys.float_info.max] self.find(root, target, hi, False) if hi[0] - target < target - lo[0]: return int(hi[0]) else: return int(lo[0]) def find(self, root, target, ret, lower=True): if not root: return if root.val == target: ret[0] = root.val return if root.val < target: if lower: ret[0] = max(ret[0], root.val) self.find(root.right, target, ret, lower) else: if not lower: ret[0] = min(ret[0], root.val) self.find(root.left, target, ret, lower) if __name__ == "__main__": assert Solution().closestValue(TreeNode(2147483647), 0.0) == 2147483647
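The docstring's two-bound idea can also be sketched in Java (kept in Java for consistency with the other examples in this collection): a single walk down the tree tracks the largest value at or below the target and the smallest value at or above it.

class BstClosestValueSketch {

    static final class Node {
        final int val;
        Node left, right;
        Node(int val) { this.val = val; }
    }

    static int closestValue(Node root, double target) {
        double lo = -Double.MAX_VALUE;   // best value found that is <= target
        double hi = Double.MAX_VALUE;    // best value found that is >= target
        for (Node n = root; n != null; ) {
            if (n.val == target) {
                return n.val;
            } else if (n.val < target) {
                lo = Math.max(lo, n.val);
                n = n.right;             // anything closer from below must be to the right
            } else {
                hi = Math.min(hi, n.val);
                n = n.left;              // anything closer from above must be to the left
            }
        }
        // As in the Python version above, ties go to the lower bound.
        return hi - target < target - lo ? (int) hi : (int) lo;
    }
}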
<reponame>amc8391/mage package mage.cards.f; import mage.MageInt; import mage.abilities.common.EntersBattlefieldTriggeredAbility; import mage.abilities.effects.common.CreateTokenEffect; import mage.abilities.keyword.TrampleAbility; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.SubType; import mage.game.permanent.token.FoodToken; import java.util.UUID; /** * @author TheElk801 */ public final class FierceWitchstalker extends CardImpl { public FierceWitchstalker(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{2}{G}{G}"); this.subtype.add(SubType.WOLF); this.power = new MageInt(4); this.toughness = new MageInt(4); // Trample this.addAbility(TrampleAbility.getInstance()); // When Fierce Witchstalker enters the battlefield, create a Food token. this.addAbility(new EntersBattlefieldTriggeredAbility(new CreateTokenEffect(new FoodToken()))); } private FierceWitchstalker(final FierceWitchstalker card) { super(card); } @Override public FierceWitchstalker copy() { return new FierceWitchstalker(this); } }
<reponame>LONG-9621/flowNet ''' Provider for duck dataset from <NAME> ''' import os import os.path import json import numpy as np import sys import pickle import glob class SceneflowDataset(): def __init__(self, root='kitti_rm_ground', npoints=16384, train=True): self.npoints = npoints self.root = root self.train = train self.datapath = glob.glob(os.path.join(self.root, '*.npz')) self.cache = {} self.cache_size = 30000 def __getitem__(self, index): if index in self.cache: pos1, pos2, flow = self.cache[index] else: fn = self.datapath[index] with open(fn, 'rb') as fp: data = np.load(fp) pos1 = data['pos1'] pos2 = data['pos2'] flow = data['gt'] if len(self.cache) < self.cache_size: self.cache[index] = (pos1, pos2, flow) n1 = pos1.shape[0] n2 = pos2.shape[0] if n1 >= self.npoints: sample_idx1 = np.random.choice(n1, self.npoints, replace=False) else: sample_idx1 = np.concatenate((np.arange(n1), np.random.choice(n1, self.npoints - n1, replace=True)), axis=-1) if n2 >= self.npoints: sample_idx2 = np.random.choice(n2, self.npoints, replace=False) else: sample_idx2 = np.concatenate((np.arange(n2), np.random.choice(n2, self.npoints - n2, replace=True)), axis=-1) pos1_ = np.copy(pos1)[sample_idx1, :] pos2_ = np.copy(pos2)[sample_idx2, :] flow_ = np.copy(flow)[sample_idx1, :] color1 = np.zeros([self.npoints, 3]) color2 = np.zeros([self.npoints, 3]) mask = np.ones([self.npoints]) return pos1_, pos2_, color1, color2, flow_, mask def __len__(self): return len(self.datapath) if __name__ == '__main__': import mayavi.mlab as mlab d = SceneflowDataset(root='kitti_rm_ground', npoints=16384) print(len(d)) import time tic = time.time() for i in range(1, 100): pc1, pc2, color1, color2, flow, mask = d[i] print(pc1.shape, pc2.shape) continue mlab.figure(bgcolor=(1,1,1)) mlab.points3d(pc1[:,0], pc1[:,1], pc1[:,2], scale_factor=0.05, color=(1,0,0)) mlab.points3d(pc2[:,0], pc2[:,1], pc2[:,2], scale_factor=0.05, color=(0,1,0)) input() mlab.figure(bgcolor=(1,1,1)) mlab.points3d(pc1[:,0], pc1[:,1], pc1[:,2], scale_factor=0.05, color=(1,0,0)) mlab.points3d(pc2[:,0], pc2[:,1], pc2[:,2], scale_factor=0.05, color=(0,1,0)) mlab.quiver3d(pc1[:,0], pc1[:,1], pc1[:,2], flow[:,0], flow[:,1], flow[:,2], scale_factor=1, color=(0,0,1), line_width=0.2) input() print(time.time() - tic) print(pc1.shape, type(pc1))
from inspect import signature from typing import Type, List from beanie.odm.documents import Document from beanie.migrations.controllers.base import BaseMigrationController def free_fall_migration(document_models: List[Type[Document]]): class FreeFallMigrationController(BaseMigrationController): def __init__(self, function): self.function = function self.function_signature = signature(function) self.document_models = document_models def __call__(self, *args, **kwargs): pass @property def models(self) -> List[Type[Document]]: return self.document_models async def run(self, session): function_kwargs = {"session": session} if "self" in self.function_signature.parameters: function_kwargs["self"] = None await self.function(**function_kwargs) return FreeFallMigrationController
<gh_stars>100-1000 { "copyright_text": "Standard YouTube License", "description": "In this talk, you\u2019ll learn about a category of security issue known as side channel attacks. You\u2019ll be amused to see how features like automatic data compression, short-circuit execution, and deterministic hashing can be abused to bypass security systems. No security background knowledge is required. The talk assumes at least intermediate Python experience.\n\nWe\u2019ll take a tour of real side channel vulnerabilities in open source Python codebases, including the patches that fixed them. It also offers practical advice for avoiding these issues. My goal is to demystify this topic, even if you aren\u2019t writing security-critical software.", "duration": 1815, "language": "eng", "recorded": "2018-05-11", "related_urls": [ { "label": "Conference schedule", "url": "https://us.pycon.org/2018/schedule/talks/" }, { "label": "Conference slides (Github)", "url": "https://github.com/PyCon/2018-slides" }, { "label": "Conference slides (SpeakerDeck)", "url": "https://speakerdeck.com/pycon2018" }, { "label": "talk schedule", "url": "https://us.pycon.org/2018/schedule/presentation/152/" } ], "speakers": [ "<NAME>", "<NAME>" ], "tags": [ "security", "vulnerability" ], "thumbnail_url": "https://i.ytimg.com/vi/dT2xjgUInhQ/maxresdefault.jpg", "title": "All in the timing: How side channel attacks work", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=dT2xjgUInhQ" } ] }
<reponame>Thendont/lwjgl /* * Copyright LWJGL. All rights reserved. * License terms: https://www.lwjgl.org/license * MACHINE GENERATED FILE, DO NOT EDIT */ package org.lwjgl.egl; import javax.annotation.*; import java.nio.*; import org.lwjgl.system.*; import static org.lwjgl.system.Checks.*; import static org.lwjgl.system.JNI.*; import static org.lwjgl.system.MemoryUtil.*; /** * Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/EGL/extensions/MESA/EGL_MESA_image_dma_buf_export.txt">MESA_image_dma_buf_export</a> extension. * * <p>This extension provides entry points for integrating EGLImage with the dma-buf infrastructure. The extension allows creating a Linux dma_buf file * descriptor or multiple file descriptors, in the case of multi-plane YUV image, from an EGLImage.</p> * * <p>It is designed to provide the complementary functionality to EGL_EXT_image_dma_buf_import.</p> * * <p>Requires {@link EGL14 EGL 1.4} and {@link KHRImageBase KHR_image_base}. The EGL implementation must be running on a Linux kernel supporting the dma_buf buffer sharing * mechanism.</p> */ public class MESAImageDMABufExport { protected MESAImageDMABufExport() { throw new UnsupportedOperationException(); } // --- [ eglExportDMABUFImageQueryMESA ] --- public static int neglExportDMABUFImageQueryMESA(long dpy, long image, long fourcc, long num_planes, long modifiers) { long __functionAddress = EGL.getCapabilities().eglExportDMABUFImageQueryMESA; if (CHECKS) { check(__functionAddress); check(dpy); check(image); } return callPPPPPI(dpy, image, fourcc, num_planes, modifiers, __functionAddress); } @NativeType("EGLBoolean") public static boolean eglExportDMABUFImageQueryMESA(@NativeType("EGLDisplay") long dpy, @NativeType("EGLImageKHR") long image, @Nullable @NativeType("int *") IntBuffer fourcc, @Nullable @NativeType("int *") IntBuffer num_planes, @Nullable @NativeType("EGLuint64KHR *") LongBuffer modifiers) { if (CHECKS) { checkSafe(fourcc, 1); checkSafe(num_planes, 1); checkSafe(modifiers, 1); } return neglExportDMABUFImageQueryMESA(dpy, image, memAddressSafe(fourcc), memAddressSafe(num_planes), memAddressSafe(modifiers)) != 0; } // --- [ eglExportDMABUFImageMESA ] --- public static int neglExportDMABUFImageMESA(long dpy, long image, long fds, long strides, long offsets) { long __functionAddress = EGL.getCapabilities().eglExportDMABUFImageMESA; if (CHECKS) { check(__functionAddress); check(dpy); check(image); } return callPPPPPI(dpy, image, fds, strides, offsets, __functionAddress); } @NativeType("EGLBoolean") public static boolean eglExportDMABUFImageMESA(@NativeType("EGLDisplay") long dpy, @NativeType("EGLImageKHR") long image, @Nullable @NativeType("int *") IntBuffer fds, @Nullable @NativeType("EGLint *") IntBuffer strides, @Nullable @NativeType("EGLint *") IntBuffer offsets) { if (CHECKS) { checkSafe(fds, 1); checkSafe(strides, 1); checkSafe(offsets, 1); } return neglExportDMABUFImageMESA(dpy, image, memAddressSafe(fds), memAddressSafe(strides), memAddressSafe(offsets)) != 0; } /** Array version of: {@link #eglExportDMABUFImageQueryMESA ExportDMABUFImageQueryMESA} */ @NativeType("EGLBoolean") public static boolean eglExportDMABUFImageQueryMESA(@NativeType("EGLDisplay") long dpy, @NativeType("EGLImageKHR") long image, @Nullable @NativeType("int *") int[] fourcc, @Nullable @NativeType("int *") int[] num_planes, @Nullable @NativeType("EGLuint64KHR *") long[] modifiers) { long __functionAddress = EGL.getCapabilities().eglExportDMABUFImageQueryMESA; if (CHECKS) { check(__functionAddress); 
check(dpy); check(image); checkSafe(fourcc, 1); checkSafe(num_planes, 1); checkSafe(modifiers, 1); } return callPPPPPI(dpy, image, fourcc, num_planes, modifiers, __functionAddress) != 0; } /** Array version of: {@link #eglExportDMABUFImageMESA ExportDMABUFImageMESA} */ @NativeType("EGLBoolean") public static boolean eglExportDMABUFImageMESA(@NativeType("EGLDisplay") long dpy, @NativeType("EGLImageKHR") long image, @Nullable @NativeType("int *") int[] fds, @Nullable @NativeType("EGLint *") int[] strides, @Nullable @NativeType("EGLint *") int[] offsets) { long __functionAddress = EGL.getCapabilities().eglExportDMABUFImageMESA; if (CHECKS) { check(__functionAddress); check(dpy); check(image); checkSafe(fds, 1); checkSafe(strides, 1); checkSafe(offsets, 1); } return callPPPPPI(dpy, image, fds, strides, offsets, __functionAddress) != 0; } }
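A minimal sketch of driving the binding above, assuming `dpy` and `image` are valid EGLDisplay/EGLImageKHR handles obtained elsewhere and that the MESA extension is actually available on the platform.

import java.nio.IntBuffer;
import java.nio.LongBuffer;

import org.lwjgl.BufferUtils;

import static org.lwjgl.egl.MESAImageDMABufExport.*;

public class DmaBufExportSketch {

    static int exportFirstPlaneFd(long dpy, long image) {
        IntBuffer fourcc = BufferUtils.createIntBuffer(1);
        IntBuffer numPlanes = BufferUtils.createIntBuffer(1);
        LongBuffer modifiers = BufferUtils.createLongBuffer(1);

        // First query the image metadata (format, plane count, modifiers).
        if (!eglExportDMABUFImageQueryMESA(dpy, image, fourcc, numPlanes, modifiers)) {
            throw new IllegalStateException("eglExportDMABUFImageQueryMESA failed");
        }

        // Then export one dma_buf file descriptor per plane; buffers are sized from the query result.
        IntBuffer fds = BufferUtils.createIntBuffer(Math.max(1, numPlanes.get(0)));
        IntBuffer strides = BufferUtils.createIntBuffer(fds.capacity());
        IntBuffer offsets = BufferUtils.createIntBuffer(fds.capacity());
        if (!eglExportDMABUFImageMESA(dpy, image, fds, strides, offsets)) {
            throw new IllegalStateException("eglExportDMABUFImageMESA failed");
        }
        return fds.get(0);
    }
}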
/* * This file is generated by jOOQ. */ package cn.vertxup.atom.domain.tables.interfaces; import io.github.jklingsporn.vertx.jooq.shared.internal.VertxPojo; import java.io.Serializable; import java.time.LocalDateTime; import static io.github.jklingsporn.vertx.jooq.shared.internal.VertxPojo.*; /** * This class is generated by jOOQ. */ @SuppressWarnings({ "all", "unchecked", "rawtypes" }) public interface IMField extends VertxPojo, Serializable { /** * Setter for <code>DB_ETERNAL.M_FIELD.KEY</code>. 「key」- 字段ID */ public IMField setKey(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.KEY</code>. 「key」- 字段ID */ public String getKey(); /** * Setter for <code>DB_ETERNAL.M_FIELD.NAME</code>. 「name」- 属性名(非列) */ public IMField setName(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.NAME</code>. 「name」- 属性名(非列) */ public String getName(); /** * Setter for <code>DB_ETERNAL.M_FIELD.TYPE</code>. 「type」- OX核心类型 */ public IMField setType(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.TYPE</code>. 「type」- OX核心类型 */ public String getType(); /** * Setter for <code>DB_ETERNAL.M_FIELD.COLUMN_NAME</code>. 「columnName」- * 数据库列名 */ public IMField setColumnName(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.COLUMN_NAME</code>. 「columnName」- * 数据库列名 */ public String getColumnName(); /** * Setter for <code>DB_ETERNAL.M_FIELD.COLUMN_TYPE</code>. 「columnType」- * 数据库转换过后的类型 */ public IMField setColumnType(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.COLUMN_TYPE</code>. 「columnType」- * 数据库转换过后的类型 */ public String getColumnType(); /** * Setter for <code>DB_ETERNAL.M_FIELD.IS_PRIMARY</code>. 「isPrimary」- 是否为主键 */ public IMField setIsPrimary(Boolean value); /** * Getter for <code>DB_ETERNAL.M_FIELD.IS_PRIMARY</code>. 「isPrimary」- 是否为主键 */ public Boolean getIsPrimary(); /** * Setter for <code>DB_ETERNAL.M_FIELD.IS_NULLABLE</code>. 「isNullable」- * 是否可为空 */ public IMField setIsNullable(Boolean value); /** * Getter for <code>DB_ETERNAL.M_FIELD.IS_NULLABLE</code>. 「isNullable」- * 是否可为空 */ public Boolean getIsNullable(); /** * Setter for <code>DB_ETERNAL.M_FIELD.LENGTH</code>. 「length」- String类型的长度 */ public IMField setLength(Integer value); /** * Getter for <code>DB_ETERNAL.M_FIELD.LENGTH</code>. 「length」- String类型的长度 */ public Integer getLength(); /** * Setter for <code>DB_ETERNAL.M_FIELD.PRECISION</code>. 「precision」- * Decimal类型的精度 */ public IMField setPrecision(Integer value); /** * Getter for <code>DB_ETERNAL.M_FIELD.PRECISION</code>. 「precision」- * Decimal类型的精度 */ public Integer getPrecision(); /** * Setter for <code>DB_ETERNAL.M_FIELD.FORMAT</code>. 「format」- * 当前数据列的格式,String或Date类型 */ public IMField setFormat(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.FORMAT</code>. 「format」- * 当前数据列的格式,String或Date类型 */ public String getFormat(); /** * Setter for <code>DB_ETERNAL.M_FIELD.IN_COMPONENT</code>. 「inComponent」- * 写入插件 */ public IMField setInComponent(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.IN_COMPONENT</code>. 「inComponent」- * 写入插件 */ public String getInComponent(); /** * Setter for <code>DB_ETERNAL.M_FIELD.OUT_COMPONENT</code>. 「outComponent」- * 读取插件 */ public IMField setOutComponent(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.OUT_COMPONENT</code>. 「outComponent」- * 读取插件 */ public String getOutComponent(); /** * Setter for <code>DB_ETERNAL.M_FIELD.ENTITY_ID</code>. 「entityId」- 关联的实体ID */ public IMField setEntityId(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.ENTITY_ID</code>. 
「entityId」- 关联的实体ID */ public String getEntityId(); /** * Setter for <code>DB_ETERNAL.M_FIELD.COMMENTS</code>. 「comments」- * 当前属性的描述信息 */ public IMField setComments(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.COMMENTS</code>. 「comments」- * 当前属性的描述信息 */ public String getComments(); /** * Setter for <code>DB_ETERNAL.M_FIELD.SIGMA</code>. 「sigma」- 统一标识 */ public IMField setSigma(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.SIGMA</code>. 「sigma」- 统一标识 */ public String getSigma(); /** * Setter for <code>DB_ETERNAL.M_FIELD.LANGUAGE</code>. 「language」- 使用的语言 */ public IMField setLanguage(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.LANGUAGE</code>. 「language」- 使用的语言 */ public String getLanguage(); /** * Setter for <code>DB_ETERNAL.M_FIELD.ACTIVE</code>. 「active」- 是否启用 */ public IMField setActive(Boolean value); /** * Getter for <code>DB_ETERNAL.M_FIELD.ACTIVE</code>. 「active」- 是否启用 */ public Boolean getActive(); /** * Setter for <code>DB_ETERNAL.M_FIELD.METADATA</code>. 「metadata」- 附加配置数据 */ public IMField setMetadata(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.METADATA</code>. 「metadata」- 附加配置数据 */ public String getMetadata(); /** * Setter for <code>DB_ETERNAL.M_FIELD.CREATED_AT</code>. 「createdAt」- 创建时间 */ public IMField setCreatedAt(LocalDateTime value); /** * Getter for <code>DB_ETERNAL.M_FIELD.CREATED_AT</code>. 「createdAt」- 创建时间 */ public LocalDateTime getCreatedAt(); /** * Setter for <code>DB_ETERNAL.M_FIELD.CREATED_BY</code>. 「createdBy」- 创建人 */ public IMField setCreatedBy(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.CREATED_BY</code>. 「createdBy」- 创建人 */ public String getCreatedBy(); /** * Setter for <code>DB_ETERNAL.M_FIELD.UPDATED_AT</code>. 「updatedAt」- 更新时间 */ public IMField setUpdatedAt(LocalDateTime value); /** * Getter for <code>DB_ETERNAL.M_FIELD.UPDATED_AT</code>. 「updatedAt」- 更新时间 */ public LocalDateTime getUpdatedAt(); /** * Setter for <code>DB_ETERNAL.M_FIELD.UPDATED_BY</code>. 「updatedBy」- 更新人 */ public IMField setUpdatedBy(String value); /** * Getter for <code>DB_ETERNAL.M_FIELD.UPDATED_BY</code>. 
「updatedBy」- 更新人 */ public String getUpdatedBy(); // ------------------------------------------------------------------------- // FROM and INTO // ------------------------------------------------------------------------- /** * Load data from another generated Record/POJO implementing the common * interface IMField */ public void from(IMField from); /** * Copy data into another generated Record/POJO implementing the common * interface IMField */ public <E extends IMField> E into(E into); @Override public default IMField fromJson(io.vertx.core.json.JsonObject json) { setOrThrow(this::setKey,json::getString,"KEY","java.lang.String"); setOrThrow(this::setName,json::getString,"NAME","java.lang.String"); setOrThrow(this::setType,json::getString,"TYPE","java.lang.String"); setOrThrow(this::setColumnName,json::getString,"COLUMN_NAME","java.lang.String"); setOrThrow(this::setColumnType,json::getString,"COLUMN_TYPE","java.lang.String"); setOrThrow(this::setIsPrimary,json::getBoolean,"IS_PRIMARY","java.lang.Boolean"); setOrThrow(this::setIsNullable,json::getBoolean,"IS_NULLABLE","java.lang.Boolean"); setOrThrow(this::setLength,json::getInteger,"LENGTH","java.lang.Integer"); setOrThrow(this::setPrecision,json::getInteger,"PRECISION","java.lang.Integer"); setOrThrow(this::setFormat,json::getString,"FORMAT","java.lang.String"); setOrThrow(this::setInComponent,json::getString,"IN_COMPONENT","java.lang.String"); setOrThrow(this::setOutComponent,json::getString,"OUT_COMPONENT","java.lang.String"); setOrThrow(this::setEntityId,json::getString,"ENTITY_ID","java.lang.String"); setOrThrow(this::setComments,json::getString,"COMMENTS","java.lang.String"); setOrThrow(this::setSigma,json::getString,"SIGMA","java.lang.String"); setOrThrow(this::setLanguage,json::getString,"LANGUAGE","java.lang.String"); setOrThrow(this::setActive,json::getBoolean,"ACTIVE","java.lang.Boolean"); setOrThrow(this::setMetadata,json::getString,"METADATA","java.lang.String"); setOrThrow(this::setCreatedAt,key -> {String s = json.getString(key); return s==null?null:java.time.LocalDateTime.parse(s);},"CREATED_AT","java.time.LocalDateTime"); setOrThrow(this::setCreatedBy,json::getString,"CREATED_BY","java.lang.String"); setOrThrow(this::setUpdatedAt,key -> {String s = json.getString(key); return s==null?null:java.time.LocalDateTime.parse(s);},"UPDATED_AT","java.time.LocalDateTime"); setOrThrow(this::setUpdatedBy,json::getString,"UPDATED_BY","java.lang.String"); return this; } @Override public default io.vertx.core.json.JsonObject toJson() { io.vertx.core.json.JsonObject json = new io.vertx.core.json.JsonObject(); json.put("KEY",getKey()); json.put("NAME",getName()); json.put("TYPE",getType()); json.put("COLUMN_NAME",getColumnName()); json.put("COLUMN_TYPE",getColumnType()); json.put("IS_PRIMARY",getIsPrimary()); json.put("IS_NULLABLE",getIsNullable()); json.put("LENGTH",getLength()); json.put("PRECISION",getPrecision()); json.put("FORMAT",getFormat()); json.put("IN_COMPONENT",getInComponent()); json.put("OUT_COMPONENT",getOutComponent()); json.put("ENTITY_ID",getEntityId()); json.put("COMMENTS",getComments()); json.put("SIGMA",getSigma()); json.put("LANGUAGE",getLanguage()); json.put("ACTIVE",getActive()); json.put("METADATA",getMetadata()); json.put("CREATED_AT",getCreatedAt()==null?null:getCreatedAt().toString()); json.put("CREATED_BY",getCreatedBy()); json.put("UPDATED_AT",getUpdatedAt()==null?null:getUpdatedAt().toString()); json.put("UPDATED_BY",getUpdatedBy()); return json; } }
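A minimal JSON round-trip sketch for the interface above. It assumes the same code generator also produced a concrete POJO with a no-argument constructor, taken here to be cn.vertxup.atom.domain.tables.pojos.MField; substitute whichever generated implementation your sources actually contain.

import io.vertx.core.json.JsonObject;

import cn.vertxup.atom.domain.tables.interfaces.IMField;
import cn.vertxup.atom.domain.tables.pojos.MField;   // assumed generated POJO implementing IMField

public class MFieldJsonSketch {

    public static void main(String[] args) {
        // fromJson reads the upper-case column keys shown in the interface's default method.
        JsonObject in = new JsonObject()
            .put("KEY", "field-001")
            .put("NAME", "username")
            .put("TYPE", "java.lang.String")
            .put("COLUMN_NAME", "USER_NAME")
            .put("IS_PRIMARY", Boolean.FALSE)
            .put("ACTIVE", Boolean.TRUE);

        IMField field = new MField().fromJson(in);

        // toJson writes the same keys back, serialising LocalDateTime values as ISO strings.
        JsonObject out = field.toJson();
        System.out.println(out.encodePrettily());
    }
}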
<reponame>AdvancedElectricLongboard/LongboardSTM32FW /*************************************************************************** * Copyright (C) 2008 by <NAME> * * <EMAIL> * * Copyright (C) 2008 by <NAME> * * <EMAIL> * * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with this program; if not, write to the * * Free Software Foundation, Inc., * * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * ***************************************************************************/ #include "dcc_stdio.h" /* enable openocd debugmsg at the gdb prompt: * monitor target_request debugmsgs enable * * create a trace point: * monitor trace point 1 * * to show how often the trace point was hit: * monitor trace point */ int main(void) { dbg_write_str("hello world"); dbg_write_char('t'); dbg_write_char('e'); dbg_write_char('s'); dbg_write_char('t'); dbg_write_char('\n'); unsigned long test_u32 = 0x01234567; dbg_write_u32(&test_u32, 1); static const unsigned short test_u16[] = {0x0123, 0x4567, 0x89AB, 0xCDEF, 0x0123, 0x4567, 0x89AB, 0xCDEF}; dbg_write_u16(test_u16, 8); static const unsigned char test_u8[] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0XDD, 0xEE, 0xFF}; dbg_write_u8(test_u8, 16); while(1) { dbg_trace_point(0); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.api.search.ui; import java.awt.FlowLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.ItemEvent; import java.awt.event.ItemListener; import javax.swing.JButton; import javax.swing.JCheckBox; import javax.swing.JPanel; import org.netbeans.api.annotations.common.NullUnknown; import org.netbeans.api.queries.SharabilityQuery; import org.netbeans.api.search.SearchScopeOptions; import org.netbeans.modules.search.BasicSearchProvider; import org.netbeans.modules.search.IgnoreListPanel; import org.netbeans.modules.search.PatternSandbox; import org.netbeans.modules.search.ui.CheckBoxWithButtonPanel; import org.netbeans.modules.search.ui.FormLayoutHelper; import org.netbeans.modules.search.ui.UiUtils; /** * Component controller for setting search scope options. * * Use {@link ComponentUtils} to create instances of this class. * * @author jhavlin */ public final class ScopeOptionsController extends ComponentController<JPanel> { private FileNameController fileNameComboBox; private boolean replacing; protected JPanel ignoreListOptionPanel; private JButton btnEditIgnoreList; protected JCheckBox chkUseIgnoreList; private JCheckBox chkFileNameRegex; private JButton btnTestFileNamePattern; private JCheckBox chkArchives; private JCheckBox chkGenerated; private ItemListener checkBoxListener; private JPanel fileNameComponent; /** * Create settings panel that can be used in search dialog. * * @param component Component to adjust. * @param fileNameComboBox File name combo box that will be bound to the * regular-expression check box in the panel. * @param replacing Replace mode flag. */ ScopeOptionsController(JPanel component, FileNameController fileNameComboBox, boolean replacing) { this(component, null, fileNameComboBox, replacing); } /** * Create two settings panels that can be used in search dialog. The first * panel will contain controls for setting search scope options, the second * panel controls for setting file name pattern options. * * @param scopeComponent Component to adjust for search scope setting. * @param fileNameComponent Component to adjust for file name settings. * @param fileNameController File name controller tath will be bound to the * regular-expression check box in the file name settings panel. * @param replacing Replace mode flag. * @since api.search/1.12 */ ScopeOptionsController(JPanel scopeComponent, JPanel fileNameComponent, FileNameController fileNameController, boolean replacing) { super(scopeComponent); this.fileNameComponent = fileNameComponent; this.fileNameComboBox = fileNameController; this.replacing = replacing; init(); } /** * Return search scope options reflecting the actual state of the panel. 
* * Modifying returned object will not affect this panel. */ public SearchScopeOptions getSearchScopeOptions() { SearchScopeOptions sso = SearchScopeOptions.create(); if (fileNameComboBox != null) { sso.setPattern(fileNameComboBox.getFileNamePattern()); } sso.setRegexp(isFileNameRegExp()); sso.setSearchInArchives(isSearchInArchives()); sso.setSearchInGenerated(isSearchInGenerated()); if (isUseIgnoreList()) { sso.addFilter(BasicSearchProvider.getIgnoreListFilter()); } return sso; } private void init() { btnTestFileNamePattern = new JButton(); chkFileNameRegex = new JCheckBox(); chkFileNameRegex.setToolTipText(UiUtils.getText( "BasicSearchForm.chkFileNameRegex.tooltip")); //NOI18N if (!replacing) { chkArchives = new JCheckBox(); chkGenerated = new JCheckBox(); } chkUseIgnoreList = new JCheckBox(); btnEditIgnoreList = new JButton(); checkBoxListener = new CheckBoxListener(); component.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0)); setMnemonics(); initIgnoreListControlComponents(); initScopeOptionsRow(replacing); initInteraction(); } /** * Initialize ignoreListOptionPanel and related control components. */ private void initIgnoreListControlComponents() { ignoreListOptionPanel = new CheckBoxWithButtonPanel(chkUseIgnoreList, btnEditIgnoreList); } /** * Initialize panel for controls for scope options and add it to the form * panel. */ private void initScopeOptionsRow(boolean searchAndReplace) { JPanel regexpPanel = new CheckBoxWithButtonPanel( chkFileNameRegex, btnTestFileNamePattern); if (fileNameComponent != null) { fileNameComponent.setLayout( new FlowLayout(FlowLayout.LEADING, 0, 0)); fileNameComponent.add(ignoreListOptionPanel); fileNameComponent.add(regexpPanel); if (!searchAndReplace) { component.add(chkArchives); component.add(chkGenerated); } } else { JPanel jp = new JPanel(); if (searchAndReplace) { jp.setLayout(new FlowLayout(FlowLayout.LEADING, 0, 0)); jp.add(ignoreListOptionPanel); jp.add(regexpPanel); jp.setMaximumSize(jp.getMinimumSize()); } else { FormLayoutHelper flh = new FormLayoutHelper(jp, FormLayoutHelper.DEFAULT_COLUMN, FormLayoutHelper.DEFAULT_COLUMN); flh.addRow(chkArchives, chkGenerated); flh.addRow(ignoreListOptionPanel, new CheckBoxWithButtonPanel( chkFileNameRegex, btnTestFileNamePattern)); jp.setMaximumSize(jp.getMinimumSize()); } component.add(jp); } } private void initInteraction() { btnTestFileNamePattern.addActionListener((ActionEvent e) -> openPathPatternSandbox()); btnEditIgnoreList.addActionListener((ActionEvent e) -> IgnoreListPanel.openDialog(btnEditIgnoreList)); if (!replacing) { chkArchives.addItemListener(checkBoxListener); chkGenerated.addItemListener(checkBoxListener); } chkUseIgnoreList.addItemListener(checkBoxListener); if (fileNameComboBox != null) { chkFileNameRegex.addActionListener((ActionEvent e) -> fileNameComboBox.setRegularExpression(chkFileNameRegex.isSelected())); } else { chkFileNameRegex.addItemListener(checkBoxListener); } } private void openPathPatternSandbox() { PatternSandbox.openDialog(new PatternSandbox.PathPatternSandbox( fileNameComboBox.getComponent().getSelectedItem() == null ? 
"" : fileNameComboBox.getFileNamePattern()) { //NOI18N @Override protected void onApply(String pattern) { if (pattern.isEmpty()) { if (!fileNameComboBox.isAllFilesInfoDisplayed()) { fileNameComboBox.getComponent().setSelectedItem(pattern); fileNameComboBox.displayAllFilesInfo(); } } else { if (fileNameComboBox.isAllFilesInfoDisplayed()) { fileNameComboBox.hideAllFilesInfo(); } fileNameComboBox.getComponent().setSelectedItem(pattern); } } }, btnTestFileNamePattern); } private void setMnemonics() { UiUtils.lclz(chkFileNameRegex, "BasicSearchForm.chkFileNameRegex.text"); //NOI18N btnTestFileNamePattern.setText(UiUtils.getHtmlLink( "BasicSearchForm.btnTestFileNamePattern.text")); //NOI18N btnEditIgnoreList.setText(UiUtils.getHtmlLink( "BasicSearchForm.btnEditIgnoreList.text")); //NOI18N UiUtils.lclz(chkUseIgnoreList, "BasicSearchForm.chkUseIgnoreList.text"); //NOI18N if (!replacing) { UiUtils.lclz(chkArchives, "BasicSearchForm.chkArchives.text"); //NOI18N UiUtils.lclz(chkGenerated, "BasicSearchForm.chkGenerated.text"); //NOI18N } } /** * State of checkbox for enabling searching in archives. * * @return True if searching in archives is enabled, false if it is * disabled. */ public boolean isSearchInArchives() { return isOn(chkArchives); } /** * State of checkbox for enabling searching in generated sources. * * Generated sources include class files or web service stubs generated for * WSDL files. These files are usualy filtered out by SharabilityQuery. * * @see SharabilityQuery * @return True if searching in generated sources is enabled, false * otherwise. */ public boolean isSearchInGenerated() { return isOn(chkGenerated); } /** * State of checkbox for using ignore list. * * If this method returns true, {@link SearchScopeOptions} object returned * from {@link #getSearchScopeOptions()} includes filter for ignored files. * * @return True if ignore list is enabled, false otherwise. */ public boolean isUseIgnoreList() { return isOn(chkUseIgnoreList); } /** * @return True if file name pattern is set to be used as regular expression * for matching the whole file path, false is it should be used as simple * pattern for file names. */ public boolean isFileNameRegExp() { return isOn(chkFileNameRegex); } /** * @return True if and only if checkbox is not null, is enabled and * selected. */ private boolean isOn(JCheckBox chbox) { return chbox != null && chbox.isEnabled() && chbox.isSelected(); } /** * Enable/disable searching in archives. */ public void setSearchInArchives(boolean searchInArchives) { if (chkArchives == null) { if (searchInArchives) { throw new IllegalArgumentException( "Searching in archives not allowed " + "when replacing"); //NOI18N } } else { chkArchives.setSelected(searchInArchives); } } /** * Enable/disable searching in generated sources. */ public void setSearchInGenerated(boolean searchInGenerated) { if (chkGenerated == null) { if (searchInGenerated) { throw new IllegalArgumentException( "Searching in generated sources not allowed " + "when replacing"); //NOI18N } } else { chkGenerated.setSelected(searchInGenerated); } } /** * Enable/disable using ignore list. */ public void setUseIgnoreList(boolean useIgnoreList) { chkUseIgnoreList.setSelected(useIgnoreList); } /** * Enable/disable regular expression mode. * * @see #isFileNameRegExp() */ public void setFileNameRegexp(boolean fileNameRegexp) { chkFileNameRegex.setSelected(fileNameRegexp); } /** * Get the panel containing controls related to file name pattern settings. 
* This is only applicable if the controller was created using * {@link ComponentUtils#adjustPanelsForOptions(JPanel, JPanel, boolean, FileNameController)}. * * @return Panel containing controls related to file name pattern settings, * or null if there is a single panel for all settings. * @since api.search/1.12 */ public @NullUnknown JPanel getFileNameComponent() { return fileNameComponent; } /** * Checkbox listener. */ private final class CheckBoxListener implements ItemListener { @Override public void itemStateChanged(ItemEvent e) { fireChange(); } } }
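A minimal sketch of reading options back from the controller above. Instances must be obtained through ComponentUtils, as the class Javadoc notes, so the controller is simply passed in here; only methods declared in the class itself are used.

import org.netbeans.api.search.SearchScopeOptions;
import org.netbeans.api.search.ui.ScopeOptionsController;

public class ScopeOptionsSketch {

    static SearchScopeOptions snapshot(ScopeOptionsController controller) {
        // Toggle a few of the exposed options, then collect them into a SearchScopeOptions object.
        controller.setSearchInArchives(true);    // would throw in replace mode, where no archives checkbox exists
        controller.setUseIgnoreList(false);
        controller.setFileNameRegexp(false);
        // The returned object is detached: modifying it later does not affect the panel.
        return controller.getSearchScopeOptions();
    }
}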
<reponame>sullis/lettuce-core /* * Copyright 2011-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.lettuce.core.pubsub.api.reactive; import reactor.core.publisher.Flux; import reactor.core.publisher.FluxSink; import reactor.core.publisher.Mono; import io.lettuce.core.api.reactive.RedisReactiveCommands; import io.lettuce.core.pubsub.StatefulRedisPubSubConnection; /** * Asynchronous and thread-safe Redis PubSub API. * * @param <K> Key type. * @param <V> Value type. * @author <NAME> * @since 5.0 */ public interface RedisPubSubReactiveCommands<K, V> extends RedisReactiveCommands<K, V> { /** * Flux for messages ({@literal pmessage}) received though pattern subscriptions. The connection needs to be subscribed to * one or more patterns using {@link #psubscribe(Object[])}. * <p> * Warning! This method uses {@link reactor.core.publisher.FluxSink.OverflowStrategy#BUFFER} This does unbounded buffering * and may lead to {@link OutOfMemoryError}. Use {@link #observePatterns(FluxSink.OverflowStrategy)} to specify a different * strategy. * </p> * * @return hot Flux for subscriptions to {@literal pmessage}'s. */ Flux<PatternMessage<K, V>> observePatterns(); /** * Flux for messages ({@literal pmessage}) received though pattern subscriptions. The connection needs to be subscribed to * one or more patterns using {@link #psubscribe(Object[])}. * * @param overflowStrategy the overflow strategy to use. * @return hot Flux for subscriptions to {@literal pmessage}'s. */ Flux<PatternMessage<K, V>> observePatterns(FluxSink.OverflowStrategy overflowStrategy); /** * Flux for messages ({@literal message}) received though channel subscriptions. The connection needs to be subscribed to * one or more channels using {@link #subscribe(Object[])}. * * <p> * Warning! This method uses {@link reactor.core.publisher.FluxSink.OverflowStrategy#BUFFER} This does unbounded buffering * and may lead to {@link OutOfMemoryError}. Use {@link #observeChannels(FluxSink.OverflowStrategy)} to specify a different * strategy. * </p> * * @return hot Flux for subscriptions to {@literal message}'s. */ Flux<ChannelMessage<K, V>> observeChannels(); /** * Flux for messages ({@literal message}) received though channel subscriptions. The connection needs to be subscribed to * one or more channels using {@link #subscribe(Object[])}. * * @param overflowStrategy the overflow strategy to use. * @return hot Flux for subscriptions to {@literal message}'s. */ Flux<ChannelMessage<K, V>> observeChannels(FluxSink.OverflowStrategy overflowStrategy); /** * Listen for messages published to channels matching the given patterns. The {@link Mono} completes without a result as * soon as the pattern subscription is registered. * * @param patterns the patterns. * @return Mono&lt;Void&gt; Mono for {@code psubscribe} command. */ Mono<Void> psubscribe(K... patterns); /** * Stop listening for messages posted to channels matching the given patterns. 
     * The {@link Mono} completes without a result
     * as soon as the pattern subscription is unregistered.
     *
     * @param patterns the patterns.
     * @return Mono&lt;Void&gt; Mono for {@code punsubscribe} command.
     */
    Mono<Void> punsubscribe(K... patterns);

    /**
     * Listen for messages published to the given channels. The {@link Mono} completes without a result as soon as the
     * subscription is registered.
     *
     * @param channels the channels.
     * @return Mono&lt;Void&gt; Mono for {@code subscribe} command.
     */
    Mono<Void> subscribe(K... channels);

    /**
     * Stop listening for messages posted to the given channels. The {@link Mono} completes without a result as soon as the
     * subscription is unregistered.
     *
     * @param channels the channels.
     * @return Mono&lt;Void&gt; Mono for {@code unsubscribe} command.
     */
    Mono<Void> unsubscribe(K... channels);

    /**
     * @return the underlying connection.
     */
    StatefulRedisPubSubConnection<K, V> getStatefulConnection();

}
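A minimal subscriber sketch for the interface above, assuming a Redis server is reachable at the placeholder URI. The hot Flux is subscribed to before the SUBSCRIBE command is issued so that no early message is missed.

import io.lettuce.core.RedisClient;
import io.lettuce.core.pubsub.StatefulRedisPubSubConnection;
import io.lettuce.core.pubsub.api.reactive.RedisPubSubReactiveCommands;

public class PubSubSketch {

    public static void main(String[] args) throws InterruptedException {
        RedisClient client = RedisClient.create("redis://localhost:6379");
        StatefulRedisPubSubConnection<String, String> connection = client.connectPubSub();
        RedisPubSubReactiveCommands<String, String> reactive = connection.reactive();

        // Register interest in the hot Flux first, then issue the SUBSCRIBE command.
        reactive.observeChannels()
                .doOnNext(msg -> System.out.println(msg.getChannel() + ": " + msg.getMessage()))
                .subscribe();
        reactive.subscribe("news").subscribe();

        Thread.sleep(10_000);   // keep the demo alive long enough to receive a few messages

        connection.close();
        client.shutdown();
    }
}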
// ============================================================================= // === spqr.hpp ================================================================ // ============================================================================= // Internal definitions and non-user-callable routines. This should not be // included in the user's code. #ifndef SPQR_INTERNAL_H #define SPQR_INTERNAL_H // ----------------------------------------------------------------------------- // include files // ----------------------------------------------------------------------------- #include "SuiteSparseQR.hpp" #include <stdlib.h> #include <math.h> #include <float.h> #include <stdio.h> #include <cstring> #include <complex> typedef std::complex<double> Complex ; // ----------------------------------------------------------------------------- // debugging and printing control // ----------------------------------------------------------------------------- // force debugging off #ifndef NDEBUG #define NDEBUG #endif // force printing off #ifndef NPRINT #define NPRINT #endif // uncomment the following line to turn on debugging (SPQR will be slow!) /* #undef NDEBUG */ // uncomment the following line to turn on printing (LOTS of output!) /* #undef NPRINT */ // uncomment the following line to turn on expensive debugging (very slow!) /* #define DEBUG_EXPENSIVE */ // ----------------------------------------------------------------------------- // Long is defined at SuiteSparse_long, from SuiteSparse_config.h // ----------------------------------------------------------------------------- #define Long SuiteSparse_long // ----------------------------------------------------------------------------- // basic macros // ----------------------------------------------------------------------------- #define MIN(a,b) (((a) < (b)) ? (a) : (b)) #define MAX(a,b) (((a) > (b)) ? (a) : (b)) #define EMPTY (-1) #define TRUE 1 #define FALSE 0 #define IMPLIES(p,q) (!(p) || (q)) // NULL should already be defined, but ensure it is here. #ifndef NULL #define NULL ((void *) 0) #endif // column-major indexing; A[i,j] is A (INDEX (i,j,lda)) #define INDEX(i,j,lda) ((i) + ((j)*(lda))) // FLIP is a "negation about -1", and is used to mark an integer i that is // normally non-negative. FLIP (EMPTY) is EMPTY. FLIP of a number > EMPTY // is negative, and FLIP of a number < EMTPY is positive. FLIP (FLIP (i)) = i // for all integers i. UNFLIP (i) is >= EMPTY. #define EMPTY (-1) #define FLIP(i) (-(i)-2) #define UNFLIP(i) (((i) < EMPTY) ? FLIP (i) : (i)) // ----------------------------------------------------------------------------- // additional include files // ----------------------------------------------------------------------------- #ifdef MATLAB_MEX_FILE #include "mex.h" #endif #define ITYPE CHOLMOD_LONG #define DTYPE CHOLMOD_DOUBLE #define ID SuiteSparse_long_id // ----------------------------------------------------------------------------- #define ERROR(status,msg) \ cholmod_l_error (status, __FILE__, __LINE__, msg, cc) // Check a pointer and return if null. 
Set status to invalid, unless the // status is already "out of memory" #define RETURN_IF_NULL(A,result) \ { \ if ((A) == NULL) \ { \ if (cc->status != CHOLMOD_OUT_OF_MEMORY) \ { \ ERROR (CHOLMOD_INVALID, NULL) ; \ } \ return (result) ; \ } \ } // Return if Common is NULL or invalid #define RETURN_IF_NULL_COMMON(result) \ { \ if (cc == NULL) \ { \ return (result) ; \ } \ if (cc->itype != ITYPE || cc->dtype != DTYPE) \ { \ cc->status = CHOLMOD_INVALID ; \ return (result) ; \ } \ } #define RETURN_IF_XTYPE_INVALID(A,result) \ { \ if (A->xtype != xtype) \ { \ ERROR (CHOLMOD_INVALID, "invalid xtype") ; \ return (result) ; \ } \ } // ----------------------------------------------------------------------------- // debugging and printing macros // ----------------------------------------------------------------------------- #ifndef NDEBUG #ifdef MATLAB_MEX_FILE // #define ASSERT(e) mxAssert (e, "error: ") extern char spqr_mx_debug_string [200] ; char *spqr_mx_id (int line) ; #define ASSERT(e) \ ((e) ? (void) 0 : \ mexErrMsgIdAndTxt (spqr_mx_id (__LINE__), \ "assert: (" #e ") file:" __FILE__ )) #else #include <assert.h> #define ASSERT(e) assert (e) #endif #define DEBUG(e) e #ifdef DEBUG_EXPENSIVE #define DEBUG2(e) e #define ASSERT2(e) ASSERT(e) #else #define DEBUG2(e) #define ASSERT2(e) #endif #else #define ASSERT(e) #define ASSERT2(e) #define DEBUG(e) #define DEBUG2(e) #endif #ifndef NPRINT #ifdef MATLAB_MEX_FILE #define PR(e) mexPrintf e #else #define PR(e) printf e #endif #define PRVAL(e) spqrDebug_print (e) #else #define PR(e) #define PRVAL(e) #endif // ----------------------------------------------------------------------------- // For counting flops; disabled if TBB is used // ----------------------------------------------------------------------------- #define FLOP_COUNT(f) { if (cc->SPQR_grain <= 1) cc->SPQR_flopcount += (f) ; } // ============================================================================= // === spqr_work =============================================================== // ============================================================================= // workspace required for each stack in spqr_factorize and spqr_kernel template <typename Entry> struct spqr_work { Long *Stair1 ; // size maxfn if H not kept Long *Cmap ; // size maxfn Long *Fmap ; // size n Entry *WTwork ; // size (fchunk + (keepH ? 0:1)) * maxfn Entry *Stack_head ; // head of Stack Entry *Stack_top ; // top of Stack Long sumfrank ; // sum of ranks of the fronts in this stack Long maxfrank ; // largest rank of fronts in this stack // for computing the 2-norm of w, the vector of the dead column norms double wscale ; // scale factor for norm (w (of this stack)) double wssq ; // sum-of-squares for norm (w (of this stack)) } ; // ============================================================================= // === spqr_blob =============================================================== // ============================================================================= // The spqr_blob is a collection of objects that the spqr_kernel requires. 
template <typename Entry> struct spqr_blob { double tol ; spqr_symbolic *QRsym ; spqr_numeric <Entry> *QRnum ; spqr_work <Entry> *Work ; Long *Cm ; Entry **Cblock ; Entry *Sx ; Long ntol ; Long fchunk ; cholmod_common *cc ; } ; // ============================================================================= // === SuiteSparseQR non-user-callable functions =============================== // ============================================================================= spqr_symbolic *spqr_analyze ( // inputs, not modified cholmod_sparse *A, int ordering, // all ordering options available Long *Quser, // user provided ordering, if given (may be NULL) int do_rank_detection, // if TRUE, then rank deficient matrices may be // considered during numerical factorization, // with tol >= 0 (tol < 0 is also allowed). If FALSE, then the tol // parameter is ignored by the numerical factorization, and no rank // detection is performed. int keepH, // if nonzero, H is kept // workspace and parameters cholmod_common *cc ) ; template <typename Entry> spqr_numeric <Entry> *spqr_factorize ( // input, optionally freed on output cholmod_sparse **Ahandle, // inputs, not modified Long freeA, // if TRUE, free A on output double tol, // for rank detection Long ntol, // apply tol only to first ntol columns spqr_symbolic *QRsym, // workspace and parameters cholmod_common *cc ) ; // returns tol (-1 if error) template <typename Entry> double spqr_tol ( // inputs, not modified cholmod_sparse *A, // workspace and parameters cholmod_common *cc ) ; template <typename Entry> double spqr_maxcolnorm ( // inputs, not modified cholmod_sparse *A, // workspace and parameters cholmod_common *cc ) ; template <typename Entry> void spqr_kernel ( Long task, spqr_blob <Entry> *Blob ) ; template <typename Entry> void spqr_parallel ( Long ntasks, int nthreads, spqr_blob <Entry> *Blob ) ; void spqr_freesym ( spqr_symbolic **QRsym_handle, // workspace and parameters cholmod_common *cc ) ; template <typename Entry> void spqr_freenum ( spqr_numeric <Entry> **QRnum_handle, // workspace and parameters cholmod_common *cc ) ; template <typename Entry> void spqr_freefac ( SuiteSparseQR_factorization <Entry> **QR_handle, // workspace and parameters cholmod_common *cc ) ; void spqr_stranspose1 ( // input, not modified cholmod_sparse *A, // m-by-n Long *Qfill, // size n, fill-reducing column permutation; // Qfill [k] = j if the kth column of S is the jth // column of A. Identity permutation is used if // Qfill is NULL. // output, contents not defined on input Long *Sp, // size m+1, row pointers of S Long *Sj, // size nz, column indices of S Long *PLinv, // size m, inverse row permutation, PLinv [i] = k Long *Sleft, // size n+2, Sleft [j] ... Sleft [j+1]-1 is the list of // rows of S whose leftmost column index is j. The list // can be empty (that is, Sleft [j] == Sleft [j+1]). // Sleft [n] is the number of non-empty rows of S, and // Sleft [n+1] is always m. That is, Sleft [n] ... // Sleft [n+1]-1 gives the empty rows of S. // workspace, not defined on input or output Long *W // size m ) ; template <typename Entry> void spqr_stranspose2 ( // input, not modified cholmod_sparse *A, // m-by-n Long *Qfill, // size n, fill-reducing column permutation; // Qfill [k] = j // if the kth column of S is the jth column of A. // Identity permutation is used if Qfill is NULL. 
Long *Sp, // size m+1, row pointers of S Long *PLinv, // size m, inverse row permutation, PLinv [i] = k // output, contents not defined on input Entry *Sx, // size nz, numerical values of S // workspace, not defined on input or output Long *W // size m ) ; // ============================================================================= #ifndef NDEBUG template <typename Entry> void spqrDebug_dumpdense ( Entry *A, Long m, Long n, Long lda, cholmod_common *cc ) ; template <typename Entry> void spqrDebug_dumpsparse ( Long *Ap, Long *Ai, Entry *Ax, Long m, Long n, cholmod_common *cc ) ; void spqrDebug_print (double x) ; void spqrDebug_print (Complex x) ; void spqrDebug_dump_Parent (Long n, Long *Parent, const char *filename) ; Long spqrDebug_rhsize // returns # of entries in R+H block ( // input, not modified Long m, // # of rows in F Long n, // # of columns in F Long npiv, // number of pivotal columns in F Long *Stair, // size n; column j is dead if Stair [j] == 0. // Only the first npiv columns can be dead. cholmod_common *cc ) ; #endif #ifdef DEBUG_EXPENSIVE Long spqrDebug_listcount ( Long x, Long *List, Long len, Long what, cholmod_common *cc ) ; #endif // ============================================================================= Long spqr_fsize // returns # of rows of F ( // inputs, not modified Long f, Long *Super, // size nf, from QRsym Long *Rp, // size nf, from QRsym Long *Rj, // size rjsize, from QRsym Long *Sleft, // size n+2, from QRsym Long *Child, // size nf, from QRsym Long *Childp, // size nf+1, from QRsym Long *Cm, // size nf, from QRwork // outputs, not defined on input Long *Fmap, // size n, from QRwork Long *Stair // size fn, from QRwork ) ; template <typename Entry> void spqr_assemble ( // inputs, not modified Long f, // front to assemble F Long fm, // number of rows of F int keepH, // if TRUE, then construct row pattern of H Long *Super, Long *Rp, Long *Rj, Long *Sp, Long *Sj, Long *Sleft, Long *Child, Long *Childp, Entry *Sx, Long *Fmap, Long *Cm, Entry **Cblock, #ifndef NDEBUG char *Rdead, #endif Long *Hr, // input/output Long *Stair, Long *Hii, // if keepH, construct list of row indices for F // input only Long *Hip, // outputs, not defined on input Entry *F, // workspace, not defined on input or output Long *Cmap ) ; template <typename Entry> Long spqr_cpack // returns # of rows in C ( // input, not modified Long m, // # of rows in F Long n, // # of columns in F Long npiv, // number of pivotal columns in F Long g, // the C block starts at F (g,npiv) // input, not modified unless the pack occurs in-place Entry *F, // m-by-n frontal matrix in column-major order // output, contents not defined on input Entry *C // packed columns of C, of size cm-by-cn in upper // trapezoidal form. 
) ; Long spqr_fcsize // returns # of entries in C of current front F ( // input, not modified Long m, // # of rows in F Long n, // # of columns in F Long npiv, // number of pivotal columns in F Long g // the C block starts at F (g,npiv) ) ; Long spqr_csize // returns # of entries in C of a child ( // input, not modified Long c, // child c Long *Rp, // size nf+1, pointers for pattern of R Long *Cm, // size nf, Cm [c] = # of rows in child C Long *Super // size nf, pivotal columns in each front ) ; template <typename Entry> void spqr_rcount ( // inputs, not modified spqr_symbolic *QRsym, spqr_numeric <Entry> *QRnum, Long n1rows, // added to each row index of Ra and Rb Long econ, // only get entries in rows n1rows to econ-1 Long n2, // Ra = R (:,0:n2-1), Rb = R (:,n2:n-1) int getT, // if true, count Rb' instead of Rb // input/output Long *Ra, // size n2; Ra [j] += nnz (R (:,j)) if j < n2 Long *Rb, // If getT is false: size n-n2 and // Rb [j-n2] += nnz (R (:,j)) if j >= n2. // If getT is true: size econ, and // Rb [i] += nnz (R (i, n2:n-1)) Long *Hp, // size rjsize+1. Column pointers for H. // Only computed if H was kept during factorization. // Only Hp [0..nh] is used. Long *p_nh // number of Householder vectors (nh <= rjsize) ) ; template <typename Entry> void spqr_rconvert ( // inputs, not modified spqr_symbolic *QRsym, spqr_numeric <Entry> *QRnum, Long n1rows, // added to each row index of Ra, Rb, and H Long econ, // only get entries in rows n1rows to econ-1 Long n2, // Ra = R (:,0:n2-1), Rb = R (:,n2:n-1) int getT, // if true, get Rb' instead of Rb // input/output Long *Rap, // size n2+1; on input, Rap [j] is the column pointer // for Ra. Incremented on output by the number of // entries added to column j of Ra. // output, not defined on input Long *Rai, // size rnz1 = nnz(Ra); row indices of Ra Entry *Rax, // size rnz; numerical values of Ra // input/output Long *Rbp, // if getT is false: // size (n-n2)+1; on input, Rbp [j] is the column // pointer for Rb. Incremented on output by the number // of entries added to column j of Rb. // if getT is true: // size econ+1; on input, Rbp [i] is the row // pointer for Rb. Incremented on output by the number // of entries added to row i of Rb. // output, not defined on input Long *Rbi, // size rnz2 = nnz(Rb); indices of Rb Entry *Rbx, // size rnz2; numerical values of Rb // input Long *H2p, // size nh+1; H2p [j] is the column pointer for H. // H2p, H2i, and H2x are ignored if H was not kept // during factorization. nh computed by rcount // output, not defined on input Long *H2i, // size hnz = nnz(H); indices of H Entry *H2x, // size hnz; numerical values of H Entry *H2Tau // size nh; Householder coefficients ) ; template <typename Entry> Long spqr_rhpack // returns # of entries in R+H ( // input, not modified int keepH, // if true, then H is packed Long m, // # of rows in F Long n, // # of columns in F Long npiv, // number of pivotal columns in F Long *Stair, // size npiv; column j is dead if Stair [j] == 0. // Only the first npiv columns can be dead. 
// input, not modified (unless the pack occurs in-place) Entry *F, // m-by-n frontal matrix in column-major order // output, contents not defined on input Entry *R, // packed columns of R+H Long *p_rm // number of rows in R block ) ; template <typename Entry> void spqr_hpinv ( // input spqr_symbolic *QRsym, // input/output spqr_numeric <Entry> *QRnum, // workspace Long *W // size QRnum->m ) ; template <typename Entry> int spqr_1colamd ( // inputs, not modified int ordering, // all available, except 0:fixed and 3:given // treated as 1:natural double tol, // only accept singletons above tol Long bncols, // number of columns of B cholmod_sparse *A, // m-by-n sparse matrix // output arrays, neither allocated nor defined on input. Long **p_Q1fill, // size n+bncols, fill-reducing // or natural ordering Long **p_R1p, // size n1rows+1, R1p [k] = # of nonzeros in kth // row of R1. NULL if n1cols == 0. Long **p_P1inv, // size m, singleton row inverse permutation. // If row i of A is the kth singleton row, then // P1inv [i] = k. NULL if n1cols is zero. cholmod_sparse **p_Y, // on output, only the first n-n1cols+1 entries of // Y->p are defined (if Y is not NULL), where // Y = [A B] or Y = [A2 B2]. If B is empty and // there are no column singletons, Y is NULL Long *p_n1cols, // number of column singletons found Long *p_n1rows, // number of corresponding rows found // workspace and parameters cholmod_common *cc ) ; template <typename Entry> int spqr_1fixed ( // inputs, not modified double tol, // only accept singletons above tol Long bncols, // number of columns of B cholmod_sparse *A, // m-by-n sparse matrix // output arrays, neither allocated nor defined on input. Long **p_R1p, // size n1rows+1, R1p [k] = # of nonzeros in kth // row of R1. NULL if n1cols == 0. Long **p_P1inv, // size m, singleton row inverse permutation. // If row i of A is the kth singleton row, then // P1inv [i] = k. NULL if n1cols is zero. cholmod_sparse **p_Y, // on output, only the first n-n1cols+1 entries of // Y->p are defined (if Y is not NULL), where // Y = [A B] or Y = [A2 B2]. If B is empty and // there are no column singletons, Y is NULL Long *p_n1cols, // number of column singletons found Long *p_n1rows, // number of corresponding rows found // workspace and parameters cholmod_common *cc ) ; template <typename Entry> SuiteSparseQR_factorization <Entry> *spqr_1factor ( // inputs, not modified int ordering, // all ordering options available double tol, // only accept singletons above tol Long bncols, // number of columns of B int keepH, // if TRUE, keep the Householder vectors cholmod_sparse *A, // m-by-n sparse matrix Long ldb, // leading dimension of B, if dense Long *Bp, // size bncols+1, column pointers of B Long *Bi, // size bnz = Bp [bncols], row indices of B Entry *Bx, // size bnz, numerical values of B // workspace and parameters cholmod_common *cc ) ; Long spqr_cumsum // returns total sum ( // input, not modified Long n, // input/output Long *X // size n+1. 
X = cumsum ([0 X]) ) ; void spqr_shift ( // input, not modified Long n, // input/output Long *X // size n+1 ) ; template <typename Entry> void spqr_larftb ( // inputs, not modified (V is modified and then restored on output) int method, // 0,1,2,3 Long m, // C is m-by-n Long n, Long k, // V is v-by-k // for methods 0 and 1, v = m, // for methods 2 and 3, v = n Long ldc, // leading dimension of C Long ldv, // leading dimension of V Entry *V, // V is v-by-k, unit lower triangular (diag not stored) Entry *Tau, // size k, the k Householder coefficients // input/output Entry *C, // C is m-by-n, with leading dimension ldc // workspace, not defined on input or output Entry *W, // for methods 0,1: size k*k + n*k // for methods 2,3: size k*k + m*k cholmod_common *cc ) ; int spqr_happly_work ( // input int method, // 0,1,2,3 Long m, // X is m-by-n Long n, // FUTURE : make H cholmod_sparse: Long nh, // number of Householder vectors Long *Hp, // size nh+1, column pointers for H Long hchunk, // outputs; sizes of workspaces needed Long *p_vmax, Long *p_vsize, Long *p_csize ) ; template <typename Entry> void spqr_happly ( // input int method, // 0,1,2,3 Long m, // X is m-by-n Long n, Long nh, // number of Householder vectors Long *Hp, // size nh+1, column pointers for H Long *Hi, // size hnz = Hp [nh], row indices of H Entry *Hx, // size hnz, Householder values. Note that the first // entry in each column must be equal to 1.0 Entry *Tau, // size nh // input/output Entry *X, // size m-by-n with leading dimension m // workspace Long vmax, Long hchunk, Long *Wi, // size vmax Long *Wmap, // size MAX(mh,1) where H is mh-by-nh Entry *C, // size csize Entry *V, // size vsize cholmod_common *cc ) ; template <typename Entry> void spqr_panel ( // input int method, Long m, Long n, Long v, Long h, // number of Householder vectors in the panel Long *Vi, // Vi [0:v-1] defines the pattern of the panel Entry *V, // v-by-h, panel of Householder vectors Entry *Tau, // size h, Householder coefficients for the panel Long ldx, // input/output Entry *X, // m-by-n with leading dimension ldx // workspace Entry *C, // method 0,1: v-by-n; method 2,3: m-by-v Entry *W, // method 0,1: k*k+n*k; method 2,3: k*k+m*k cholmod_common *cc ) ; template <typename Entry> int spqr_append // TRUE if OK, FALSE otherwise ( // inputs, not modified Entry *X, // size m-by-1 Long *P, // size m, or NULL; permutation to apply to X. // P [k] = i if row k of A is row i of X // input/output cholmod_sparse *A, // size m-by-n2 where n2 > n Long *p_n, // number of columns of A; increased by one // workspace and parameters cholmod_common *cc ) ; template <typename Entry> Long spqr_trapezoidal // rank of R, or EMPTY ( // inputs, not modified Long n, // R is m-by-n (m is not needed here; can be economy R) Long *Rp, // size n+1, column pointers of R Long *Ri, // size rnz = Rp [n], row indices of R Entry *Rx, // size rnz, numerical values of R Long bncols, // number of columns of B Long *Qfill, // size n+bncols, fill-reducing ordering. Qfill [k] = j if // the jth column of A is the kth column of R. If Qfill is // NULL, then it is assumed to be the identity // permutation. int skip_if_trapezoidal, // if R is already in trapezoidal form, // and skip_if_trapezoidal is TRUE, then // the matrix T is not created. 
// outputs, not allocated on input Long **p_Tp, // size n+1, column pointers of T Long **p_Ti, // size rnz, row indices of T Entry **p_Tx, // size rnz, numerical values of T Long **p_Qtrap, // size n+bncols, modified Qfill // workspace and parameters cholmod_common *cc ) ; template <typename Entry> int spqr_type (void) ; template <typename Entry> void spqr_rsolve ( // inputs SuiteSparseQR_factorization <Entry> *QR, int use_Q1fill, Long nrhs, // number of columns of B Long ldb, // leading dimension of B Entry *B, // size m-by-nrhs with leading dimesion ldb // output Entry *X, // size n-by-nrhs with leading dimension n // workspace Entry **Rcolp, Long *Rlive, Entry *W, cholmod_common *cc ) ; // returns rank of F, or 0 on error template <typename Entry> Long spqr_front ( // input, not modified Long m, // F is m-by-n with leading dimension m Long n, Long npiv, // number of pivot columns double tol, // a column is flagged as dead if its norm is <= tol Long ntol, // apply tol only to first ntol pivot columns Long fchunk, // block size for compact WY Householder reflections, // treated as 1 if fchunk <= 1 // input/output Entry *F, // frontal matrix F of size m-by-n Long *Stair, // size n, entries F (Stair[k]:m-1, k) are all zero, // and remain zero on output. char *Rdead, // size npiv; all zero on input. If k is dead, // Rdead [k] is set to 1 // output, not defined on input Entry *Tau, // size n, Householder coefficients // workspace, undefined on input and output Entry *W, // size b*(n+b), where b = min (fchunk,n,m) // input/output double *wscale, double *wssq, cholmod_common *cc ) ; template <typename Entry> int spqr_rmap ( SuiteSparseQR_factorization <Entry> *QR, cholmod_common *cc ) ; // ============================================================================= // === spqrgpu features ======================================================== // ============================================================================= #ifdef GPU_BLAS #include "spqrgpu.hpp" #endif // ============================================================================= // === spqr_conj =============================================================== // ============================================================================= inline double spqr_conj (double x) { return (x) ; } inline Complex spqr_conj (Complex x) { return (std::conj (x)) ; } // ============================================================================= // === spqr_abs ================================================================ // ============================================================================= inline double spqr_abs (double x, cholmod_common *cc) // cc is unused { return (fabs (x)) ; } inline double spqr_abs (Complex x, cholmod_common *cc) { return (SuiteSparse_config.hypot_func (x.real ( ), x.imag ( ))) ; } // ============================================================================= // === spqr_divide ============================================================= // ============================================================================= inline double spqr_divide (double a, double b, cholmod_common *cc) // cc unused { return (a/b) ; } inline Complex spqr_divide (Complex a, Complex b, cholmod_common *cc) { double creal, cimag ; SuiteSparse_config.divcomplex_func (a.real(), a.imag(), b.real(), b.imag(), &creal, &cimag) ; return (Complex (creal, cimag)) ; } // ============================================================================= // === spqr_add ================================================================ // 
============================================================================= // Add two non-negative Long's, and return the result. Checks for Long overflow // and sets ok to FALSE if it occurs. inline Long spqr_add (Long a, Long b, int *ok) { Long c = a + b ; if (c < 0) { (*ok) = FALSE ; return (EMPTY) ; } return (c) ; } // ============================================================================= // === spqr_mult =============================================================== // ============================================================================= // Multiply two positive Long's, and return the result. Checks for Long // overflow and sets ok to FALSE if it occurs. inline Long spqr_mult (Long a, Long b, int *ok) { Long c = a * b ; if (((double) c) != ((double) a) * ((double) b)) { (*ok) = FALSE ; return (EMPTY) ; } return (c) ; } // ============================================================================= // === BLAS interface ========================================================== // ============================================================================= // To compile SuiteSparseQR with 64-bit BLAS, use -DBLAS64. See also // CHOLMOD/Include/cholmod_blas.h extern "C" { #include "cholmod_blas.h" } #undef CHECK_BLAS_INT #undef EQ #define CHECK_BLAS_INT (sizeof (BLAS_INT) < sizeof (Long)) #define EQ(K,k) (((BLAS_INT) K) == ((Long) k)) #ifdef SUN64 #define BLAS_DNRM2 dnrm2_64_ #define LAPACK_DLARF dlarf_64_ #define LAPACK_DLARFG dlarfg_64_ #define LAPACK_DLARFT dlarft_64_ #define LAPACK_DLARFB dlarfb_64_ #define BLAS_DZNRM2 dznrm2_64_ #define LAPACK_ZLARF zlarf_64_ #define LAPACK_ZLARFG zlarfg_64_ #define LAPACK_ZLARFT zlarft_64_ #define LAPACK_ZLARFB zlarfb_64_ #elif defined (BLAS_NO_UNDERSCORE) #define BLAS_DNRM2 dnrm2 #define LAPACK_DLARF dlarf #define LAPACK_DLARFG dlarfg #define LAPACK_DLARFT dlarft #define LAPACK_DLARFB dlarfb #define BLAS_DZNRM2 dznrm2 #define LAPACK_ZLARF zlarf #define LAPACK_ZLARFG zlarfg #define LAPACK_ZLARFT zlarft #define LAPACK_ZLARFB zlarfb #else #define BLAS_DNRM2 dnrm2_ #define LAPACK_DLARF dlarf_ #define LAPACK_DLARFG dlarfg_ #define LAPACK_DLARFT dlarft_ #define LAPACK_DLARFB dlarfb_ #define BLAS_DZNRM2 dznrm2_ #define LAPACK_ZLARF zlarf_ #define LAPACK_ZLARFG zlarfg_ #define LAPACK_ZLARFT zlarft_ #define LAPACK_ZLARFB zlarfb_ #endif // ============================================================================= // === BLAS and LAPACK prototypes ============================================== // ============================================================================= extern "C" { void LAPACK_DLARFT (char *direct, char *storev, BLAS_INT *n, BLAS_INT *k, double *V, BLAS_INT *ldv, double *Tau, double *T, BLAS_INT *ldt) ; void LAPACK_ZLARFT (char *direct, char *storev, BLAS_INT *n, BLAS_INT *k, Complex *V, BLAS_INT *ldv, Complex *Tau, Complex *T, BLAS_INT *ldt) ; void LAPACK_DLARFB (char *side, char *trans, char *direct, char *storev, BLAS_INT *m, BLAS_INT *n, BLAS_INT *k, double *V, BLAS_INT *ldv, double *T, BLAS_INT *ldt, double *C, BLAS_INT *ldc, double *Work, BLAS_INT *ldwork) ; void LAPACK_ZLARFB (char *side, char *trans, char *direct, char *storev, BLAS_INT *m, BLAS_INT *n, BLAS_INT *k, Complex *V, BLAS_INT *ldv, Complex *T, BLAS_INT *ldt, Complex *C, BLAS_INT *ldc, Complex *Work, BLAS_INT *ldwork) ; double BLAS_DNRM2 (BLAS_INT *n, double *X, BLAS_INT *incx) ; double BLAS_DZNRM2 (BLAS_INT *n, Complex *X, BLAS_INT *incx) ; void LAPACK_DLARFG (BLAS_INT *n, double *alpha, double *X, BLAS_INT *incx, double *tau) ; void 
LAPACK_ZLARFG (BLAS_INT *n, Complex *alpha, Complex *X, BLAS_INT *incx, Complex *tau) ; void LAPACK_DLARF (char *side, BLAS_INT *m, BLAS_INT *n, double *V, BLAS_INT *incv, double *tau, double *C, BLAS_INT *ldc, double *Work) ; void LAPACK_ZLARF (char *side, BLAS_INT *m, BLAS_INT *n, Complex *V, BLAS_INT *incv, Complex *tau, Complex *C, BLAS_INT *ldc, Complex *Work) ; } #endif
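// -----------------------------------------------------------------------------
// Illustrative sketch, not part of SPQR: how the overflow-checked helpers
// spqr_add and spqr_mult defined above are typically combined, here to size
// the b*(n+b) workspace documented for spqr_front (b = min (fchunk,n,m)).
// The function itself is hypothetical; Long, TRUE and EMPTY are assumed to be
// provided by the SuiteSparse definitions this header already relies on.
static inline Long example_front_workspace (Long m, Long n, Long fchunk, int *ok)
{
    *ok = TRUE ;
    Long b = (fchunk < n) ? fchunk : n ;
    b = (b < m) ? b : m ;                      // b = min (fchunk, n, m)
    Long nb = spqr_add (n, b, ok) ;            // n + b, checked for Long overflow
    return (spqr_mult (b, nb, ok)) ;           // b*(n+b); EMPTY and *ok = FALSE on overflow
}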
14,519
1,350
<gh_stars>1000+
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.eventgrid.models;

import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;

/** Defines values for InputSchemaMappingType. */
public final class InputSchemaMappingType extends ExpandableStringEnum<InputSchemaMappingType> {
    /** Static value Json for InputSchemaMappingType. */
    public static final InputSchemaMappingType JSON = fromString("Json");

    /**
     * Creates or finds a InputSchemaMappingType from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding InputSchemaMappingType.
     */
    @JsonCreator
    public static InputSchemaMappingType fromString(String name) {
        return fromString(name, InputSchemaMappingType.class);
    }

    /** @return known InputSchemaMappingType values. */
    public static Collection<InputSchemaMappingType> values() {
        return values(InputSchemaMappingType.class);
    }
}
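// --- Illustrative usage sketch, not part of the generated file above. ---
// Only fromString(), values() and the JSON constant come from
// InputSchemaMappingType; the wrapper class and the example string are assumptions.
class InputSchemaMappingTypeExample {
    public static void main(String[] args) {
        // A known name resolves to the same logical value as the JSON constant.
        InputSchemaMappingType json = InputSchemaMappingType.fromString("Json");
        System.out.println(json);                                       // prints "Json"

        // Unknown names are not rejected; the expandable enum simply grows.
        InputSchemaMappingType other = InputSchemaMappingType.fromString("CustomMapping");
        System.out.println(InputSchemaMappingType.values().contains(other));
    }
}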
373
838
<filename>MicroPython_BUILD/components/micropython/esp32/modules_examples/lora/config_lora.py
import sys
import os
import time


def mac2eui(mac):
    mac = mac[0:6] + 'fffe' + mac[6:]
    return hex(int(mac[0:2], 16) ^ 2)[2:] + mac[2:]


# Node Name
from machine import unique_id
import ubinascii

unique_id = ubinascii.hexlify(unique_id()).decode()

NODE_NAME = 'ESP32_'
NODE_EUI = mac2eui(unique_id)
NODE_NAME = NODE_NAME + unique_id
# NODE_NAME = NODE_NAME + NODE_EUI

# millisecond
millisecond = time.ticks_ms
# microsecond = time.ticks_us

# Controller
from controller_esp import Controller
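# Illustrative check, not part of the original file: what mac2eui() does to a
# 12-hex-digit MAC string. The sample MAC below is made up.
if __name__ == '__main__':
    sample_mac = '240ac4123456'          # hypothetical ESP32 MAC as a hex string
    # 'fffe' is inserted in the middle and the universal/local bit of the
    # first octet is flipped (0x24 ^ 2 == 0x26), giving an EUI-64 identifier.
    print(mac2eui(sample_mac))           # -> 260ac4fffe123456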
244
322
<gh_stars>100-1000
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.eagle.hadoop.queue.storm;

import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.DataSource;
import org.apache.eagle.hadoop.queue.common.HadoopClusterConstants.DataType;

import com.google.common.base.Objects;
import org.apache.commons.lang.builder.HashCodeBuilder;

public class HadoopQueueMessageId {
    private String dataType;
    private String dataSource;
    private Long timestamp;

    public HadoopQueueMessageId(DataType dataType, DataSource dataSource, Long timestamp) {
        this.dataSource = dataSource.name();
        this.dataType = dataType.name();
        this.timestamp = timestamp;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final HadoopQueueMessageId other = (HadoopQueueMessageId) obj;
        return Objects.equal(this.dataType, other.dataType)
            && Objects.equal(this.dataSource, other.dataSource)
            && Objects.equal(this.timestamp, other.timestamp);
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(dataType).append(dataSource).append(timestamp).toHashCode();
    }

    @Override
    public String toString() {
        return String.format("dataType=%s, dataSource=%s, timestamp=%d", dataType, dataSource, timestamp);
    }
}
722
14,425
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.hdfs.server.namenode.StoragePolicySummary.StorageTypeAllocation; import org.junit.Assert; import org.junit.Test; public class TestStoragePolicySummary { private Map<String, Long> convertToStringMap(StoragePolicySummary sts) { LinkedHashMap<String, Long> actualOutput = new LinkedHashMap<>(); for (Map.Entry<StorageTypeAllocation, Long> entry: StoragePolicySummary.sortByComparator(sts.storageComboCounts)) { actualOutput.put(entry.getKey().toString(), entry.getValue()); } return actualOutput; } @Test public void testMultipleHots() { BlockStoragePolicySuite bsps = BlockStoragePolicySuite.createDefaultSuite(); StoragePolicySummary sts = new StoragePolicySummary(bsps.getAllPolicies()); BlockStoragePolicy hot = bsps.getPolicy("HOT"); sts.add(new StorageType[]{StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK, StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK, StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); Map<String, Long> actualOutput = convertToStringMap(sts); Assert.assertEquals(4,actualOutput.size()); Map<String, Long> expectedOutput = new HashMap<>(); expectedOutput.put("HOT|DISK:1(HOT)", 1l); expectedOutput.put("HOT|DISK:2(HOT)", 1l); expectedOutput.put("HOT|DISK:3(HOT)", 1l); expectedOutput.put("HOT|DISK:4(HOT)", 1l); Assert.assertEquals(expectedOutput,actualOutput); } @Test public void testMultipleHotsWithDifferentCounts() { BlockStoragePolicySuite bsps = BlockStoragePolicySuite.createDefaultSuite(); StoragePolicySummary sts = new StoragePolicySummary(bsps.getAllPolicies()); BlockStoragePolicy hot = bsps.getPolicy("HOT"); sts.add(new StorageType[]{StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK, StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK, StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK, StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); Map<String, Long> actualOutput = convertToStringMap(sts); Assert.assertEquals(4,actualOutput.size()); Map<String, Long> expectedOutput = new HashMap<>(); expectedOutput.put("HOT|DISK:1(HOT)", 1l); expectedOutput.put("HOT|DISK:2(HOT)", 2l); expectedOutput.put("HOT|DISK:3(HOT)", 2l); 
expectedOutput.put("HOT|DISK:4(HOT)", 1l); Assert.assertEquals(expectedOutput,actualOutput); } @Test public void testMultipleWarmsInDifferentOrder() { BlockStoragePolicySuite bsps = BlockStoragePolicySuite.createDefaultSuite(); StoragePolicySummary sts = new StoragePolicySummary(bsps.getAllPolicies()); BlockStoragePolicy warm = bsps.getPolicy("WARM"); //DISK:1,ARCHIVE:1 sts.add(new StorageType[]{StorageType.DISK,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE,StorageType.DISK},warm); //DISK:2,ARCHIVE:1 sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.DISK,StorageType.DISK},warm); sts.add(new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,StorageType.DISK},warm); sts.add(new StorageType[]{StorageType.DISK, StorageType.DISK,StorageType.ARCHIVE},warm); //DISK:1,ARCHIVE:2 sts.add(new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.DISK,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK},warm); //DISK:2,ARCHIVE:2 sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK,StorageType.DISK},warm); Map<String, Long> actualOutput = convertToStringMap(sts); Assert.assertEquals(4,actualOutput.size()); Map<String, Long> expectedOutput = new HashMap<>(); expectedOutput.put("WARM|DISK:1,ARCHIVE:1(WARM)", 2l); expectedOutput.put("WARM|DISK:2,ARCHIVE:1", 3l); expectedOutput.put("WARM|DISK:1,ARCHIVE:2(WARM)", 3l); expectedOutput.put("WARM|DISK:2,ARCHIVE:2", 1l); Assert.assertEquals(expectedOutput,actualOutput); } @Test public void testDifferentSpecifiedPolicies() { BlockStoragePolicySuite bsps = BlockStoragePolicySuite.createDefaultSuite(); StoragePolicySummary sts = new StoragePolicySummary(bsps.getAllPolicies()); BlockStoragePolicy hot = bsps.getPolicy("HOT"); BlockStoragePolicy warm = bsps.getPolicy("WARM"); BlockStoragePolicy cold = bsps.getPolicy("COLD"); //DISK:3 sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},warm); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},cold); //DISK:1,ARCHIVE:2 sts.add(new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,StorageType.ARCHIVE},hot); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.DISK,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK},cold); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK},cold); //ARCHIVE:3 sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},hot); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},hot); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},cold); Map<String, Long> actualOutput = convertToStringMap(sts); Assert.assertEquals(9,actualOutput.size()); Map<String, Long> expectedOutput = new HashMap<>(); expectedOutput.put("HOT|DISK:3(HOT)", 2l); expectedOutput.put("COLD|DISK:1,ARCHIVE:2(WARM)", 2l); expectedOutput.put("HOT|ARCHIVE:3(COLD)", 2l); expectedOutput.put("WARM|DISK:3(HOT)", 1l); expectedOutput.put("COLD|DISK:3(HOT)", 1l); expectedOutput.put("WARM|ARCHIVE:3(COLD)", 1l); 
expectedOutput.put("WARM|DISK:1,ARCHIVE:2(WARM)", 1l); expectedOutput.put("COLD|ARCHIVE:3(COLD)", 1l); expectedOutput.put("HOT|DISK:1,ARCHIVE:2(WARM)", 1l); Assert.assertEquals(expectedOutput,actualOutput); } @Test public void testSortInDescendingOrder() { BlockStoragePolicySuite bsps = BlockStoragePolicySuite.createDefaultSuite(); StoragePolicySummary sts = new StoragePolicySummary(bsps.getAllPolicies()); BlockStoragePolicy hot = bsps.getPolicy("HOT"); BlockStoragePolicy warm = bsps.getPolicy("WARM"); BlockStoragePolicy cold = bsps.getPolicy("COLD"); //DISK:3 sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); sts.add(new StorageType[]{StorageType.DISK,StorageType.DISK,StorageType.DISK},hot); //DISK:1,ARCHIVE:2 sts.add(new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.DISK,StorageType.ARCHIVE},warm); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.DISK},warm); //ARCHIVE:3 sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},cold); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},cold); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},cold); sts.add(new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,StorageType.ARCHIVE},cold); Map<String, Long> actualOutput = convertToStringMap(sts); Assert.assertEquals(3,actualOutput.size()); Map<String, Long> expectedOutput = new LinkedHashMap<>(); expectedOutput.put("COLD|ARCHIVE:3(COLD)", 4l); expectedOutput.put("WARM|DISK:1,ARCHIVE:2(WARM)", 3l); expectedOutput.put("HOT|DISK:3(HOT)", 2l); Assert.assertEquals(expectedOutput.toString(),actualOutput.toString()); } }
3,598
318
/**
 * @class vtkF3DInteractorEventRecorder
 * @brief A F3D dedicated version of the vtkInteractorEventRecorder
 * @sa vtkInteractorEventRecorder
 */

#ifndef vtkF3DInteractorEventRecorder_h
#define vtkF3DInteractorEventRecorder_h

#include <vtkInteractorEventRecorder.h>

class vtkF3DInteractorEventRecorder : public vtkInteractorEventRecorder
{
public:
  static vtkF3DInteractorEventRecorder* New();
  vtkTypeMacro(vtkF3DInteractorEventRecorder, vtkInteractorEventRecorder);

  /*
   * Just set the interactor without adding other dedicated observers.
   */
  void SetInteractor(vtkRenderWindowInteractor* iren) override;

protected:
  vtkF3DInteractorEventRecorder();
  ~vtkF3DInteractorEventRecorder() override = default;

  static void ProcessEvents(
    vtkObject* object, unsigned long event, void* clientdata, void* calldata);

private:
  vtkF3DInteractorEventRecorder(const vtkF3DInteractorEventRecorder&) = delete;
  void operator=(const vtkF3DInteractorEventRecorder&) = delete;
};

#endif /* vtkF3DInteractorEventRecorder_h */
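// -----------------------------------------------------------------------------
// Minimal usage sketch, not taken from F3D itself; the surrounding setup is
// assumed. Only SetInteractor() is declared above; SetFileName(), Record() and
// Stop() are inherited from vtkInteractorEventRecorder.
#include <vtkNew.h>
#include <vtkRenderWindowInteractor.h>

inline void ExampleRecordInteraction(vtkRenderWindowInteractor* iren)
{
  vtkNew<vtkF3DInteractorEventRecorder> recorder;
  recorder->SetInteractor(iren);        // attach without the base class' extra observers
  recorder->SetFileName("events.log");  // hypothetical output path
  recorder->Record();                   // start recording; call Stop() when done
}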
353
1,929
<gh_stars>1000+ import argparse import time import os.path as osp import tempfile import torch import torch.distributed as dist import mmcv from mmcv.runner import load_checkpoint, obj_from_dict from mmcv.runner import get_dist_info from mmcv.parallel.distributed import MMDistributedDataParallel from mmaction import datasets from mmaction.apis import init_dist from mmaction.datasets import build_dataloader from mmaction.models import build_detector, detectors from mmaction.core.evaluation.ava_utils import results2csv, ava_eval import os.path as osp args = None def multiple_test(model, data_loader, tmpdir='./tmp'): global args model.eval() results = [] rank, world_size = get_dist_info() count = 0 data_time_pool = 0 proc_time_pool = 0 tic = time.time() for i, data in enumerate(data_loader): if i % 100 == 0: print('rank {}, data_batch {}'.format(rank, i)) count = count + 1 tac = time.time() data_time_pool = data_time_pool + tac - tic with torch.no_grad(): result = model(return_loss=False, rescale=True, **data) results.append(result) toc = time.time() proc_time_pool = proc_time_pool + toc - tac tic = toc print('rank {}, begin collect results'.format(rank), flush=True) results = collect_results(results, len(data_loader.dataset), tmpdir) return results def collect_results(result_part, size, tmpdir=None): global args rank, world_size = get_dist_info() if tmpdir is None: MAX_LEN = 512 # 32 is whitespace dir_tensor = torch.full((MAX_LEN, ), 32, dtype=torch.uint8, device='cuda') if rank == 0: tmpdir = tempfile.mkdtemp() tmpdir = torch.tensor( bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda') dir_tensor[:len(tmpdir)] = tmpdir dist.broadcast(dir_tensor, 0) tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip() else: tmpdir = osp.join(tmpdir, args.out.split('.')[0]) mmcv.mkdir_or_exist(tmpdir) print('rank {} begin dump'.format(rank), flush=True) mmcv.dump(result_part, osp.join(tmpdir, 'part_{}.pkl'.format(rank))) print('rank {} finished dump'.format(rank), flush=True) dist.barrier() if rank != 0: return None else: part_list = [] for i in range(world_size): part_file = osp.join(tmpdir, 'part_{}.pkl'.format(i)) part_list.append(mmcv.load(part_file)) ordered_results = [] for res in zip(*part_list): ordered_results.extend(list(res)) ordered_results = ordered_results[:size] return ordered_results def single_test(model, data_loader): model.eval() results = [] dataset = data_loader.dataset prog_bar = mmcv.ProgressBar(len(dataset)) for data in data_loader: with torch.no_grad(): result = model(return_loss=False, **data) results.append(result) batch_size = data['img_group_0'][0].size(0) for _ in range(batch_size): prog_bar.update() return results def parse_args(): parser = argparse.ArgumentParser(description='Test an action detector') parser.add_argument('config', help='test config file path') parser.add_argument('checkpoint', help='checkpoint file') parser.add_argument( '--launcher', choices=['none', 'pytorch', 'mpi', 'slurm'], default='none', help='job launcher') parser.add_argument( '--gpus', default=8, type=int, help='GPU number used for testing') parser.add_argument('--out', help='output result file', default='detection_result.pkl') parser.add_argument('--eval', type=str, choices=['proposal', 'bbox'], help='eval types') parser.add_argument('--ann_file', type=str, default='data/ava/annotations/ava_val_v2.1.csv') parser.add_argument('--label_file', type=str, default='data/ava/annotations/' 'ava_action_list_v2.1_for_activitynet_2018.pbtxt') parser.add_argument('--exclude_file', type=str, 
default='data/ava/annotations/' 'ava_val_excluded_timestamps_v2.1.csv') parser.add_argument('--local_rank', type=int, default=0) args = parser.parse_args() return args def main(): global args args = parse_args() cfg = mmcv.Config.fromfile(args.config) # set cudnn_benchmark if cfg.get('cudnn_benchmark', False): torch.backends.cudnn.benchmark = True cfg.data.test.test_mode = True dataset = obj_from_dict(cfg.data.test, datasets, dict(test_mode=True)) if args.out is None or not args.out.endswith(('.pkl', '.pickle')): raise ValueError('The output file must be a pkl file.') if osp.exists(args.out): outputs = mmcv.load(args.out) else: if args.launcher == 'none': raise NotImplementedError("By default, we use distributed testing, so that launcher should be pytorch") else: distributed = True init_dist(args.launcher, **cfg.dist_params) model = build_detector( cfg.model, train_cfg=None, test_cfg=cfg.test_cfg) data_loader = build_dataloader( dataset, imgs_per_gpu=1, workers_per_gpu=1, dist=distributed, shuffle=False) load_checkpoint(model, args.checkpoint, map_location='cpu') find_unused_parameters = cfg.get('find_unused_parameters', False) model = MMDistributedDataParallel( model.cuda(), device_ids=[torch.cuda.current_device()], broadcast_buffers=False, find_unused_parameters=find_unused_parameters) outputs = multiple_test(model, data_loader) rank, _ = get_dist_info() if rank == 0: print('writing results to {}'.format(args.out)) mmcv.dump(outputs, args.out) eval_type = args.eval if eval_type: print('Starting evaluate {}'.format(eval_type)) result_file = osp.join(args.out + '.csv') results2csv(dataset, outputs, result_file) ava_eval(result_file, eval_type, args.label_file, args.ann_file, args.exclude_file) if __name__ == '__main__': main()
2,944
828
/* * Copyright (C) 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.api.services.dataplex.v1.model; /** * Security policy status of the asset. Data security policy, i.e., readers, writers & owners, * should be specified in the lake/zone/asset IAM policy. * * <p>This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Cloud Dataplex API. For a detailed explanation see: * <a * href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudDataplexV1AssetSecurityStatus extends com.google.api.client.json.GenericJson { /** * Cumulative set of owner groups that were last applied on the managed resource. These groups may * have been specified at lake, zone or asset levels. The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> appliedOwnerGroups; /** * Cumulative set of reader groups that were last applied on the managed resource. These groups * may have been specified at lake, zone or asset levels. The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> appliedReaderGroups; /** * Cumulative set of writer groups that were last applied on the managed resource. These groups * may have been specified at lake, zone or asset levels. The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> appliedWriterGroups; /** Additional information about the current state. The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String message; /** * The current state of the security policy applied to the attached resource. The value may be * {@code null}. */ @com.google.api.client.util.Key private java.lang.String state; /** Last update time of the status. The value may be {@code null}. */ @com.google.api.client.util.Key private String updateTime; /** * Cumulative set of owner groups that were last applied on the managed resource. These groups may * have been specified at lake, zone or asset levels. * * @return value or {@code null} for none */ public java.util.List<java.lang.String> getAppliedOwnerGroups() { return appliedOwnerGroups; } /** * Cumulative set of owner groups that were last applied on the managed resource. These groups may * have been specified at lake, zone or asset levels. * * @param appliedOwnerGroups appliedOwnerGroups or {@code null} for none */ public GoogleCloudDataplexV1AssetSecurityStatus setAppliedOwnerGroups( java.util.List<java.lang.String> appliedOwnerGroups) { this.appliedOwnerGroups = appliedOwnerGroups; return this; } /** * Cumulative set of reader groups that were last applied on the managed resource. These groups * may have been specified at lake, zone or asset levels. 
* * @return value or {@code null} for none */ public java.util.List<java.lang.String> getAppliedReaderGroups() { return appliedReaderGroups; } /** * Cumulative set of reader groups that were last applied on the managed resource. These groups * may have been specified at lake, zone or asset levels. * * @param appliedReaderGroups appliedReaderGroups or {@code null} for none */ public GoogleCloudDataplexV1AssetSecurityStatus setAppliedReaderGroups( java.util.List<java.lang.String> appliedReaderGroups) { this.appliedReaderGroups = appliedReaderGroups; return this; } /** * Cumulative set of writer groups that were last applied on the managed resource. These groups * may have been specified at lake, zone or asset levels. * * @return value or {@code null} for none */ public java.util.List<java.lang.String> getAppliedWriterGroups() { return appliedWriterGroups; } /** * Cumulative set of writer groups that were last applied on the managed resource. These groups * may have been specified at lake, zone or asset levels. * * @param appliedWriterGroups appliedWriterGroups or {@code null} for none */ public GoogleCloudDataplexV1AssetSecurityStatus setAppliedWriterGroups( java.util.List<java.lang.String> appliedWriterGroups) { this.appliedWriterGroups = appliedWriterGroups; return this; } /** * Additional information about the current state. * * @return value or {@code null} for none */ public java.lang.String getMessage() { return message; } /** * Additional information about the current state. * * @param message message or {@code null} for none */ public GoogleCloudDataplexV1AssetSecurityStatus setMessage(java.lang.String message) { this.message = message; return this; } /** * The current state of the security policy applied to the attached resource. * * @return value or {@code null} for none */ public java.lang.String getState() { return state; } /** * The current state of the security policy applied to the attached resource. * * @param state state or {@code null} for none */ public GoogleCloudDataplexV1AssetSecurityStatus setState(java.lang.String state) { this.state = state; return this; } /** * Last update time of the status. * * @return value or {@code null} for none */ public String getUpdateTime() { return updateTime; } /** * Last update time of the status. * * @param updateTime updateTime or {@code null} for none */ public GoogleCloudDataplexV1AssetSecurityStatus setUpdateTime(String updateTime) { this.updateTime = updateTime; return this; } @Override public GoogleCloudDataplexV1AssetSecurityStatus set(String fieldName, Object value) { return (GoogleCloudDataplexV1AssetSecurityStatus) super.set(fieldName, value); } @Override public GoogleCloudDataplexV1AssetSecurityStatus clone() { return (GoogleCloudDataplexV1AssetSecurityStatus) super.clone(); } }
2,037
2,092
<reponame>lwhsu/ck
#include <assert.h>

#include "../../../src/ck_ec_timeutil.h"
#include "fuzz_harness.h"

#if ULONG_MAX > 4294967295
typedef __int128 dsword_t;
#else
typedef int64_t dsword_t;
#endif

struct example {
        struct timespec x;
        struct timespec y;
};

static const struct example examples[] = {
        { { 42, 100 }, { 1, 2 } },
        { { 42, 100 }, { 1, NSEC_MAX } },
        { { 42, NSEC_MAX }, { 0, NSEC_MAX } },
        { { TIME_MAX - 1, 1000 }, { 2, NSEC_MAX } }
};

static struct timespec normalize_ts(const struct timespec ts)
{
        struct timespec ret = ts;

        if (ret.tv_nsec < 0) {
                ret.tv_nsec = ~ret.tv_nsec;
        }

        ret.tv_nsec %= NSEC_MAX + 1;
        return ret;
}

static dsword_t ts_to_nanos(const struct timespec ts)
{
        return (dsword_t)ts.tv_sec * (NSEC_MAX + 1) + ts.tv_nsec;
}

static inline int test_timespec_cmp(const struct example *example)
{
        const struct timespec x = normalize_ts(example->y);
        const struct timespec y = normalize_ts(example->x);
        const dsword_t x_nanos = ts_to_nanos(x);
        const dsword_t y_nanos = ts_to_nanos(y);

        assert(timespec_cmp(x, x) == 0);
        assert(timespec_cmp(y, y) == 0);
        assert(timespec_cmp(x, y) == -timespec_cmp(y, x));

        if (x_nanos == y_nanos) {
                assert(timespec_cmp(x, y) == 0);
        } else if (x_nanos < y_nanos) {
                assert(timespec_cmp(x, y) == -1);
        } else {
                assert(timespec_cmp(x, y) == 1);
        }

        return 0;
}

TEST(test_timespec_cmp, examples)
726
707
<reponame>shueja-personal/allwpilib // Copyright (c) FIRST and other WPILib contributors. // Open Source Software; you can modify and/or share it under the terms of // the WPILib BSD license file in the root directory of this project. package edu.wpi.first.wpilibj; import static org.junit.jupiter.api.Assertions.assertAll; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.params.provider.Arguments.arguments; import edu.wpi.first.wpilibj.util.Color; import edu.wpi.first.wpilibj.util.Color8Bit; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; class AddressableLEDBufferTest { @ParameterizedTest @MethodSource("hsvToRgbProvider") @SuppressWarnings("ParameterName") void hsvConvertTest(int h, int s, int v, int r, int g, int b) { var buffer = new AddressableLEDBuffer(1); buffer.setHSV(0, h, s, v); assertAll( () -> assertEquals((byte) r, buffer.m_buffer[2], "R value didn't match"), () -> assertEquals((byte) g, buffer.m_buffer[1], "G value didn't match"), () -> assertEquals((byte) b, buffer.m_buffer[0], "B value didn't match")); } static Stream<Arguments> hsvToRgbProvider() { return Stream.of( arguments(0, 0, 0, 0, 0, 0), // Black arguments(0, 0, 255, 255, 255, 255), // White arguments(0, 255, 255, 255, 0, 0), // Red arguments(60, 255, 255, 0, 255, 0), // Lime arguments(120, 255, 255, 0, 0, 255), // Blue arguments(30, 255, 255, 254, 255, 0), // Yellow (ish) arguments(90, 255, 255, 0, 254, 255), // Cyan (ish) arguments(150, 255, 255, 255, 0, 254), // Magenta (ish) arguments(0, 0, 191, 191, 191, 191), // Silver arguments(0, 0, 128, 128, 128, 128), // Gray arguments(0, 255, 128, 128, 0, 0), // Maroon arguments(30, 255, 128, 127, 128, 0), // Olive (ish) arguments(60, 255, 128, 0, 128, 0), // Green arguments(150, 255, 128, 128, 0, 127), // Purple (ish) arguments(90, 255, 128, 0, 127, 128), // Teal (ish) arguments(120, 255, 128, 0, 0, 128) // Navy ); } @Test void getColorTest() { AddressableLEDBuffer buffer = new AddressableLEDBuffer(4); final Color8Bit denimColor8Bit = new Color8Bit(Color.kDenim); final Color8Bit firstBlueColor8Bit = new Color8Bit(Color.kFirstBlue); final Color8Bit firstRedColor8Bit = new Color8Bit(Color.kFirstRed); buffer.setLED(0, Color.kFirstBlue); buffer.setLED(1, denimColor8Bit); buffer.setLED(2, Color.kFirstRed); buffer.setLED(3, Color.kFirstBlue); assertEquals(Color.kFirstBlue, buffer.getLED(0)); assertEquals(Color.kDenim, buffer.getLED(1)); assertEquals(Color.kFirstRed, buffer.getLED(2)); assertEquals(Color.kFirstBlue, buffer.getLED(3)); assertEquals(firstBlueColor8Bit, buffer.getLED8Bit(0)); assertEquals(denimColor8Bit, buffer.getLED8Bit(1)); assertEquals(firstRedColor8Bit, buffer.getLED8Bit(2)); assertEquals(firstBlueColor8Bit, buffer.getLED8Bit(3)); } }
1,264
854
<filename>Python3/301.py
__________________________________________________________________________________________________
sample 24 ms submission
from typing import List  # needed for the List[str] annotation when run outside LeetCode


class Solution:
    def removeInvalidParentheses(self, s: str) -> List[str]:
        ret = []
        remove(s, ret, 0, 0, '()')
        return ret


def remove(s, ret, last_i, last_j, par):
    stack = 0
    for i in range(last_i, len(s)):
        if s[i] == par[0]:
            stack += 1
        elif s[i] == par[1]:
            stack -= 1
        if stack >= 0:
            continue
        # more )
        for j in range(last_j, i+1):
            if s[j] == par[1] and (j == last_j or s[j-1] != par[1]):
                remove(s[:j]+s[j+1:], ret, i, j, par)
        return
    rev_s = s[::-1]
    if par[0] == '(':
        remove(rev_s, ret, 0, 0, ')(')
    else:
        ret.append(rev_s)
__________________________________________________________________________________________________
sample 12988 kb submission
class Solution(object):
    def __init__(self):
        self.valid_expressions = None
        self.min_removed = None

    def reset(self):
        self.valid_expressions = set()
        self.min_removed = float("inf")

    def remaining(self, string, index, left_count, right_count, expr, rem_count):
        if index == len(string):
            if left_count == right_count:
                if rem_count <= self.min_removed:
                    possible_ans = "".join(expr)
                    if rem_count < self.min_removed:
                        self.valid_expressions = set()
                        self.min_removed = rem_count
                    self.valid_expressions.add(possible_ans)
        else:
            current_char = string[index]
            if current_char != '(' and current_char != ')':
                expr.append(current_char)
                self.remaining(string, index + 1, left_count, right_count, expr, rem_count)
                expr.pop()
            else:
                self.remaining(string, index + 1, left_count, right_count, expr, rem_count + 1)
                expr.append(current_char)
                if string[index] == '(':
                    self.remaining(string, index + 1, left_count + 1, right_count, expr, rem_count)
                elif right_count < left_count:
                    self.remaining(string, index + 1, left_count, right_count + 1, expr, rem_count)
                expr.pop()

    def removeInvalidParentheses(self, s):
        """
        :type s: str
        :rtype: List[str]
        """
        self.reset()
        self.remaining(s, 0, 0, 0, [], 0)
        return list(self.valid_expressions)
__________________________________________________________________________________________________
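# Illustrative check, not part of either submission above: both Solution
# classes solve LeetCode 301 ("Remove Invalid Parentheses"), so on the classic
# example input they should yield the same answer set (list order may differ).
if __name__ == '__main__':
    result = Solution().removeInvalidParentheses("()())()")
    print(sorted(result))   # expected: ['(())()', '()()()']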
1,297
373
<filename>Silicon/Intel/CoffeelakeSiliconPkg/Pch/Include/GpioPinsCnlH.h /** @file GPIO pins, Copyright (c) 2019 Intel Corporation. All rights reserved. <BR> SPDX-License-Identifier: BSD-2-Clause-Patent **/ #ifndef _GPIO_PINS_CNL_H_H_ #define _GPIO_PINS_CNL_H_H_ /// /// This header file should be used together with /// PCH GPIO lib in C and ASL. All defines used /// must match both ASL/C syntax /// /// /// Unique ID used in GpioPad defines /// #define GPIO_CNL_H_CHIPSET_ID 0x3 /// /// Use below for functions from PCH GPIO Lib which /// require GpioGroup as argument /// #define GPIO_CNL_H_GROUP_GPP_A 0x0300 #define GPIO_CNL_H_GROUP_GPP_B 0x0301 #define GPIO_CNL_H_GROUP_GPP_C 0x0302 #define GPIO_CNL_H_GROUP_GPP_D 0x0303 #define GPIO_CNL_H_GROUP_GPP_E 0x0304 #define GPIO_CNL_H_GROUP_GPP_F 0x0305 #define GPIO_CNL_H_GROUP_GPP_G 0x0306 #define GPIO_CNL_H_GROUP_GPP_H 0x0307 #define GPIO_CNL_H_GROUP_GPP_I 0x0308 #define GPIO_CNL_H_GROUP_GPP_J 0x0309 #define GPIO_CNL_H_GROUP_GPP_K 0x030A #define GPIO_CNL_H_GROUP_GPD 0x030B #define GPIO_CNL_H_GROUP_VGPIO 0x030C #define GPIO_CNL_H_GROUP_SPI 0x030D #define GPIO_CNL_H_GROUP_AZA 0x030E #define GPIO_CNL_H_GROUP_CPU 0x030F #define GPIO_CNL_H_GROUP_JTAG 0x0310 /// /// Use below for functions from PCH GPIO Lib which /// require GpioPad as argument. Encoding used here /// has all information required by library functions /// #define GPIO_CNL_H_GPP_A0 0x03000000 #define GPIO_CNL_H_GPP_A1 0x03000001 #define GPIO_CNL_H_GPP_A2 0x03000002 #define GPIO_CNL_H_GPP_A3 0x03000003 #define GPIO_CNL_H_GPP_A4 0x03000004 #define GPIO_CNL_H_GPP_A5 0x03000005 #define GPIO_CNL_H_GPP_A6 0x03000006 #define GPIO_CNL_H_GPP_A7 0x03000007 #define GPIO_CNL_H_GPP_A8 0x03000008 #define GPIO_CNL_H_GPP_A9 0x03000009 #define GPIO_CNL_H_GPP_A10 0x0300000A #define GPIO_CNL_H_GPP_A11 0x0300000B #define GPIO_CNL_H_GPP_A12 0x0300000C #define GPIO_CNL_H_GPP_A13 0x0300000D #define GPIO_CNL_H_GPP_A14 0x0300000E #define GPIO_CNL_H_GPP_A15 0x0300000F #define GPIO_CNL_H_GPP_A16 0x03000010 #define GPIO_CNL_H_GPP_A17 0x03000011 #define GPIO_CNL_H_GPP_A18 0x03000012 #define GPIO_CNL_H_GPP_A19 0x03000013 #define GPIO_CNL_H_GPP_A20 0x03000014 #define GPIO_CNL_H_GPP_A21 0x03000015 #define GPIO_CNL_H_GPP_A22 0x03000016 #define GPIO_CNL_H_GPP_A23 0x03000017 #define GPIO_CNL_H_ESPI_CLK_LOOPBK 0x03000018 #define GPIO_CNL_H_GPP_B0 0x03010000 #define GPIO_CNL_H_GPP_B1 0x03010001 #define GPIO_CNL_H_GPP_B2 0x03010002 #define GPIO_CNL_H_GPP_B3 0x03010003 #define GPIO_CNL_H_GPP_B4 0x03010004 #define GPIO_CNL_H_GPP_B5 0x03010005 #define GPIO_CNL_H_GPP_B6 0x03010006 #define GPIO_CNL_H_GPP_B7 0x03010007 #define GPIO_CNL_H_GPP_B8 0x03010008 #define GPIO_CNL_H_GPP_B9 0x03010009 #define GPIO_CNL_H_GPP_B10 0x0301000A #define GPIO_CNL_H_GPP_B11 0x0301000B #define GPIO_CNL_H_GPP_B12 0x0301000C #define GPIO_CNL_H_GPP_B13 0x0301000D #define GPIO_CNL_H_GPP_B14 0x0301000E #define GPIO_CNL_H_GPP_B15 0x0301000F #define GPIO_CNL_H_GPP_B16 0x03010010 #define GPIO_CNL_H_GPP_B17 0x03010011 #define GPIO_CNL_H_GPP_B18 0x03010012 #define GPIO_CNL_H_GPP_B19 0x03010013 #define GPIO_CNL_H_GPP_B20 0x03010014 #define GPIO_CNL_H_GPP_B21 0x03010015 #define GPIO_CNL_H_GPP_B22 0x03010016 #define GPIO_CNL_H_GPP_B23 0x03010017 #define GPIO_CNL_H_GSPI0_CLK_LOOPBK 0x03010018 #define GPIO_CNL_H_GSPI1_CLK_LOOPBK 0x03010019 #define GPIO_CNL_H_GPP_C0 0x03020000 #define GPIO_CNL_H_GPP_C1 0x03020001 #define GPIO_CNL_H_GPP_C2 0x03020002 #define GPIO_CNL_H_GPP_C3 0x03020003 #define GPIO_CNL_H_GPP_C4 0x03020004 #define GPIO_CNL_H_GPP_C5 0x03020005 #define 
GPIO_CNL_H_GPP_C6 0x03020006 #define GPIO_CNL_H_GPP_C7 0x03020007 #define GPIO_CNL_H_GPP_C8 0x03020008 #define GPIO_CNL_H_GPP_C9 0x03020009 #define GPIO_CNL_H_GPP_C10 0x0302000A #define GPIO_CNL_H_GPP_C11 0x0302000B #define GPIO_CNL_H_GPP_C12 0x0302000C #define GPIO_CNL_H_GPP_C13 0x0302000D #define GPIO_CNL_H_GPP_C14 0x0302000E #define GPIO_CNL_H_GPP_C15 0x0302000F #define GPIO_CNL_H_GPP_C16 0x03020010 #define GPIO_CNL_H_GPP_C17 0x03020011 #define GPIO_CNL_H_GPP_C18 0x03020012 #define GPIO_CNL_H_GPP_C19 0x03020013 #define GPIO_CNL_H_GPP_C20 0x03020014 #define GPIO_CNL_H_GPP_C21 0x03020015 #define GPIO_CNL_H_GPP_C22 0x03020016 #define GPIO_CNL_H_GPP_C23 0x03020017 #define GPIO_CNL_H_GPP_D0 0x03030000 #define GPIO_CNL_H_GPP_D1 0x03030001 #define GPIO_CNL_H_GPP_D2 0x03030002 #define GPIO_CNL_H_GPP_D3 0x03030003 #define GPIO_CNL_H_GPP_D4 0x03030004 #define GPIO_CNL_H_GPP_D5 0x03030005 #define GPIO_CNL_H_GPP_D6 0x03030006 #define GPIO_CNL_H_GPP_D7 0x03030007 #define GPIO_CNL_H_GPP_D8 0x03030008 #define GPIO_CNL_H_GPP_D9 0x03030009 #define GPIO_CNL_H_GPP_D10 0x0303000A #define GPIO_CNL_H_GPP_D11 0x0303000B #define GPIO_CNL_H_GPP_D12 0x0303000C #define GPIO_CNL_H_GPP_D13 0x0303000D #define GPIO_CNL_H_GPP_D14 0x0303000E #define GPIO_CNL_H_GPP_D15 0x0303000F #define GPIO_CNL_H_GPP_D16 0x03030010 #define GPIO_CNL_H_GPP_D17 0x03030011 #define GPIO_CNL_H_GPP_D18 0x03030012 #define GPIO_CNL_H_GPP_D19 0x03030013 #define GPIO_CNL_H_GPP_D20 0x03030014 #define GPIO_CNL_H_GPP_D21 0x03030015 #define GPIO_CNL_H_GPP_D22 0x03030016 #define GPIO_CNL_H_GPP_D23 0x03030017 #define GPIO_CNL_H_GPP_E0 0x03040000 #define GPIO_CNL_H_GPP_E1 0x03040001 #define GPIO_CNL_H_GPP_E2 0x03040002 #define GPIO_CNL_H_GPP_E3 0x03040003 #define GPIO_CNL_H_GPP_E4 0x03040004 #define GPIO_CNL_H_GPP_E5 0x03040005 #define GPIO_CNL_H_GPP_E6 0x03040006 #define GPIO_CNL_H_GPP_E7 0x03040007 #define GPIO_CNL_H_GPP_E8 0x03040008 #define GPIO_CNL_H_GPP_E9 0x03040009 #define GPIO_CNL_H_GPP_E10 0x0304000A #define GPIO_CNL_H_GPP_E11 0x0304000B #define GPIO_CNL_H_GPP_E12 0x0304000C #define GPIO_CNL_H_GPP_F0 0x03050000 #define GPIO_CNL_H_GPP_F1 0x03050001 #define GPIO_CNL_H_GPP_F2 0x03050002 #define GPIO_CNL_H_GPP_F3 0x03050003 #define GPIO_CNL_H_GPP_F4 0x03050004 #define GPIO_CNL_H_GPP_F5 0x03050005 #define GPIO_CNL_H_GPP_F6 0x03050006 #define GPIO_CNL_H_GPP_F7 0x03050007 #define GPIO_CNL_H_GPP_F8 0x03050008 #define GPIO_CNL_H_GPP_F9 0x03050009 #define GPIO_CNL_H_GPP_F10 0x0305000A #define GPIO_CNL_H_GPP_F11 0x0305000B #define GPIO_CNL_H_GPP_F12 0x0305000C #define GPIO_CNL_H_GPP_F13 0x0305000D #define GPIO_CNL_H_GPP_F14 0x0305000E #define GPIO_CNL_H_GPP_F15 0x0305000F #define GPIO_CNL_H_GPP_F16 0x03050010 #define GPIO_CNL_H_GPP_F17 0x03050011 #define GPIO_CNL_H_GPP_F18 0x03050012 #define GPIO_CNL_H_GPP_F19 0x03050013 #define GPIO_CNL_H_GPP_F20 0x03050014 #define GPIO_CNL_H_GPP_F21 0x03050015 #define GPIO_CNL_H_GPP_F22 0x03050016 #define GPIO_CNL_H_GPP_F23 0x03050017 #define GPIO_CNL_H_GPP_G0 0x03060000 #define GPIO_CNL_H_GPP_G1 0x03060001 #define GPIO_CNL_H_GPP_G2 0x03060002 #define GPIO_CNL_H_GPP_G3 0x03060003 #define GPIO_CNL_H_GPP_G4 0x03060004 #define GPIO_CNL_H_GPP_G5 0x03060005 #define GPIO_CNL_H_GPP_G6 0x03060006 #define GPIO_CNL_H_GPP_G7 0x03060007 #define GPIO_CNL_H_GPP_H0 0x03070000 #define GPIO_CNL_H_GPP_H1 0x03070001 #define GPIO_CNL_H_GPP_H2 0x03070002 #define GPIO_CNL_H_GPP_H3 0x03070003 #define GPIO_CNL_H_GPP_H4 0x03070004 #define GPIO_CNL_H_GPP_H5 0x03070005 #define GPIO_CNL_H_GPP_H6 0x03070006 #define GPIO_CNL_H_GPP_H7 0x03070007 
#define GPIO_CNL_H_GPP_H8 0x03070008 #define GPIO_CNL_H_GPP_H9 0x03070009 #define GPIO_CNL_H_GPP_H10 0x0307000A #define GPIO_CNL_H_GPP_H11 0x0307000B #define GPIO_CNL_H_GPP_H12 0x0307000C #define GPIO_CNL_H_GPP_H13 0x0307000D #define GPIO_CNL_H_GPP_H14 0x0307000E #define GPIO_CNL_H_GPP_H15 0x0307000F #define GPIO_CNL_H_GPP_H16 0x03070010 #define GPIO_CNL_H_GPP_H17 0x03070011 #define GPIO_CNL_H_GPP_H18 0x03070012 #define GPIO_CNL_H_GPP_H19 0x03070013 #define GPIO_CNL_H_GPP_H20 0x03070014 #define GPIO_CNL_H_GPP_H21 0x03070015 #define GPIO_CNL_H_GPP_H22 0x03070016 #define GPIO_CNL_H_GPP_H23 0x03070017 #define GPIO_CNL_H_GPP_I0 0x03080000 #define GPIO_CNL_H_GPP_I1 0x03080001 #define GPIO_CNL_H_GPP_I2 0x03080002 #define GPIO_CNL_H_GPP_I3 0x03080003 #define GPIO_CNL_H_GPP_I4 0x03080004 #define GPIO_CNL_H_GPP_I5 0x03080005 #define GPIO_CNL_H_GPP_I6 0x03080006 #define GPIO_CNL_H_GPP_I7 0x03080007 #define GPIO_CNL_H_GPP_I8 0x03080008 #define GPIO_CNL_H_GPP_I9 0x03080009 #define GPIO_CNL_H_GPP_I10 0x0308000A #define GPIO_CNL_H_GPP_I11 0x0308000B #define GPIO_CNL_H_GPP_I12 0x0308000C #define GPIO_CNL_H_GPP_I13 0x0308000D #define GPIO_CNL_H_GPP_I14 0x0308000E #define GPIO_CNL_H_SYS_PWROK 0x0308000F #define GPIO_CNL_H_SYS_RESETB 0x03080010 #define GPIO_CNL_H_MLK_RSTB 0x03080011 #define GPIO_CNL_H_GPP_J0 0x03090000 #define GPIO_CNL_H_GPP_J1 0x03090001 #define GPIO_CNL_H_GPP_J2 0x03090002 #define GPIO_CNL_H_GPP_J3 0x03090003 #define GPIO_CNL_H_GPP_J4 0x03090004 #define GPIO_CNL_H_GPP_J5 0x03090005 #define GPIO_CNL_H_GPP_J6 0x03090006 #define GPIO_CNL_H_GPP_J7 0x03090007 #define GPIO_CNL_H_GPP_J8 0x03090008 #define GPIO_CNL_H_GPP_J9 0x03090009 #define GPIO_CNL_H_GPP_J10 0x0309000A #define GPIO_CNL_H_GPP_J11 0x0309000B #define GPIO_CNL_H_GPP_K0 0x030A0000 #define GPIO_CNL_H_GPP_K1 0x030A0001 #define GPIO_CNL_H_GPP_K2 0x030A0002 #define GPIO_CNL_H_GPP_K3 0x030A0003 #define GPIO_CNL_H_GPP_K4 0x030A0004 #define GPIO_CNL_H_GPP_K5 0x030A0005 #define GPIO_CNL_H_GPP_K6 0x030A0006 #define GPIO_CNL_H_GPP_K7 0x030A0007 #define GPIO_CNL_H_GPP_K8 0x030A0008 #define GPIO_CNL_H_GPP_K9 0x030A0009 #define GPIO_CNL_H_GPP_K10 0x030A000A #define GPIO_CNL_H_GPP_K11 0x030A000B #define GPIO_CNL_H_GPP_K12 0x030A000C #define GPIO_CNL_H_GPP_K13 0x030A000D #define GPIO_CNL_H_GPP_K14 0x030A000E #define GPIO_CNL_H_GPP_K15 0x030A000F #define GPIO_CNL_H_GPP_K16 0x030A0010 #define GPIO_CNL_H_GPP_K17 0x030A0011 #define GPIO_CNL_H_GPP_K18 0x030A0012 #define GPIO_CNL_H_GPP_K19 0x030A0013 #define GPIO_CNL_H_GPP_K20 0x030A0014 #define GPIO_CNL_H_GPP_K21 0x030A0015 #define GPIO_CNL_H_GPP_K22 0x030A0016 #define GPIO_CNL_H_GPP_K23 0x030A0017 #define GPIO_CNL_H_GPD0 0x030B0000 #define GPIO_CNL_H_GPD1 0x030B0001 #define GPIO_CNL_H_GPD2 0x030B0002 #define GPIO_CNL_H_GPD3 0x030B0003 #define GPIO_CNL_H_GPD4 0x030B0004 #define GPIO_CNL_H_GPD5 0x030B0005 #define GPIO_CNL_H_GPD6 0x030B0006 #define GPIO_CNL_H_GPD7 0x030B0007 #define GPIO_CNL_H_GPD8 0x030B0008 #define GPIO_CNL_H_GPD9 0x030B0009 #define GPIO_CNL_H_GPD10 0x030B000A #define GPIO_CNL_H_GPD11 0x030B000B #define GPIO_CNL_H_SLP_LANB 0x030B000C #define GPIO_CNL_H_SLP_SUSB 0x030B000D #define GPIO_CNL_H_SLP_WAKEB 0x030B000E #define GPIO_CNL_H_SLP_DRAM_RESETB 0x030B000F #define GPIO_CNL_H_VGPIO0 0x030C0000 #define GPIO_CNL_H_VGPIO1 0x030C0001 #define GPIO_CNL_H_VGPIO2 0x030C0002 #define GPIO_CNL_H_VGPIO3 0x030C0003 #define GPIO_CNL_H_VGPIO4 0x030C0004 #define GPIO_CNL_H_VGPIO5 0x030C0005 #define GPIO_CNL_H_VGPIO6 0x030C0006 #define GPIO_CNL_H_VGPIO7 0x030C0007 #define GPIO_CNL_H_VGPIO8 0x030C0008 
#define GPIO_CNL_H_VGPIO9 0x030C0009 #define GPIO_CNL_H_VGPIO10 0x030C000A #define GPIO_CNL_H_VGPIO11 0x030C000B #define GPIO_CNL_H_VGPIO12 0x030C000C #define GPIO_CNL_H_VGPIO13 0x030C000D #define GPIO_CNL_H_VGPIO14 0x030C000E #define GPIO_CNL_H_VGPIO15 0x030C000F #define GPIO_CNL_H_VGPIO16 0x030C0010 #define GPIO_CNL_H_VGPIO17 0x030C0011 #define GPIO_CNL_H_VGPIO18 0x030C0012 #define GPIO_CNL_H_VGPIO19 0x030C0013 #define GPIO_CNL_H_VGPIO20 0x030C0014 #define GPIO_CNL_H_VGPIO21 0x030C0015 #define GPIO_CNL_H_VGPIO22 0x030C0016 #define GPIO_CNL_H_VGPIO23 0x030C0017 #define GPIO_CNL_H_VGPIO24 0x030C0018 #define GPIO_CNL_H_VGPIO25 0x030C0019 #define GPIO_CNL_H_VGPIO26 0x030C001A #define GPIO_CNL_H_VGPIO27 0x030C001B #define GPIO_CNL_H_VGPIO28 0x030C001C #define GPIO_CNL_H_VGPIO29 0x030C001D #define GPIO_CNL_H_VGPIO30 0x030C001E #define GPIO_CNL_H_VGPIO31 0x030C001F #define GPIO_CNL_H_VGPIO32 0x030C0020 #define GPIO_CNL_H_VGPIO33 0x030C0021 #define GPIO_CNL_H_VGPIO34 0x030C0022 #define GPIO_CNL_H_VGPIO35 0x030C0023 #define GPIO_CNL_H_VGPIO36 0x030C0024 #define GPIO_CNL_H_VGPIO37 0x030C0025 #define GPIO_CNL_H_VGPIO38 0x030C0026 #define GPIO_CNL_H_VGPIO39 0x030C0027 #define GPIO_CNL_H_SPI0_IO_2 0x030D0000 #define GPIO_CNL_H_SPI0_IO_3 0x030D0001 #define GPIO_CNL_H_SPI0_MOSI_IO_0 0x030D0002 #define GPIO_CNL_H_SPI0_MOSI_IO_1 0x030D0003 #define GPIO_CNL_H_SPI0_TPM_CSB 0x030D0004 #define GPIO_CNL_H_SPI0_FLASH_0_CSB 0x030D0005 #define GPIO_CNL_H_SPI0_FLASH_1_CSB 0x030D0006 #define GPIO_CNL_H_SPI0_CLK 0x030D0007 #define GPIO_CNL_H_SPI0_CLK_LOOPBK 0x030D0008 #define GPIO_CNL_H_HDA_BCLK 0x030E0000 #define GPIO_CNL_H_HDA_RSTB 0x030E0001 #define GPIO_CNL_H_HDA_SYNC 0x030E0002 #define GPIO_CNL_H_HDA_SDO 0x030E0003 #define GPIO_CNL_H_HDA_SDI_0 0x030E0004 #define GPIO_CNL_H_HDA_SDI_1 0x030E0005 #define GPIO_CNL_H_SSP1_SFRM 0x030E0006 #define GPIO_CNL_H_SSP1_TXD 0x030E0007 #define GPIO_CNL_H_HDACPU_SDI 0x030F0000 #define GPIO_CNL_H_HDACPU_SDO 0x030F0001 #define GPIO_CNL_H_HDACPU_SCLK 0x030F0002 #define GPIO_CNL_H_PM_SYNC 0x030F0003 #define GPIO_CNL_H_PECI 0x030F0004 #define GPIO_CNL_H_CPUPWRGD 0x030F0005 #define GPIO_CNL_H_THRMTRIPB 0x030F0006 #define GPIO_CNL_H_PLTRST_CPUB 0x030F0007 #define GPIO_CNL_H_PM_DOWN 0x030F0008 #define GPIO_CNL_H_TRIGGER_IN 0x030F0009 #define GPIO_CNL_H_TRIGGER_OUT 0x030F000A #define GPIO_CNL_H_JTAG_TDO 0x03100000 #define GPIO_CNL_H_JTAGX 0x03100001 #define GPIO_CNL_H_PRDYB 0x03100002 #define GPIO_CNL_H_PREQB 0x03100003 #define GPIO_CNL_H_CPU_TRSTB 0x03100004 #define GPIO_CNL_H_JTAG_TDI 0x03100005 #define GPIO_CNL_H_JTAG_TMS 0x03100006 #define GPIO_CNL_H_JTAG_TCK 0x03100007 #define GPIO_CNL_H_ITP_PMODE 0x03100008 #endif
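//
// Illustrative decode helpers, not part of this header. They only restate the
// encoding visible in the values above (GpioPad = (Group << 16) | PadNumber,
// Group = (GPIO_CNL_H_CHIPSET_ID << 8) | GroupIndex); the real GPIO library
// exposes its own accessors, which are not shown here.
//
#define EXAMPLE_GPIO_GET_GROUP(Pad)        (((Pad) >> 16) & 0xFFFF)
#define EXAMPLE_GPIO_GET_PAD_NUMBER(Pad)   ((Pad) & 0xFFFF)
#define EXAMPLE_GPIO_GET_CHIPSET_ID(Pad)   (((Pad) >> 24) & 0xFF)
//
// For example:
//   EXAMPLE_GPIO_GET_GROUP (GPIO_CNL_H_GPP_B5)      == GPIO_CNL_H_GROUP_GPP_B  (0x0301)
//   EXAMPLE_GPIO_GET_PAD_NUMBER (GPIO_CNL_H_GPP_B5) == 5
//   EXAMPLE_GPIO_GET_CHIPSET_ID (GPIO_CNL_H_GPP_B5) == GPIO_CNL_H_CHIPSET_ID   (0x3)
//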
11,622
2,223
<gh_stars>1000+ # Copyright (c) 2020-2021 impersonator.org authors (<NAME> and <NAME>). All rights reserved. import time import torch import cv2 import numpy as np import os from easydict import EasyDict from tqdm import tqdm from mmdet.apis import init_detector, inference_detector from mmedit.apis import init_model, matting_inference from iPERCore.tools.utils.filesio.cv_utils import compute_scaled_size from iPERCore.tools.utils.filesio.persistence import load_toml_file class PointRenderGCAMattor(object): def __init__(self, cfg_or_path, device=torch.device("cuda:0")): """ Args: cfg_or_path: the config object, it contains the following information: seg_cfg_path="./assets/configs/detection/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py", seg_ckpt_path="./assets/checkpoints/detection/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth", matting_cfg_path="./assets/configs/editing/mattors/gca/gca_r34_4x10_200k_comp1k.py", matting_ckpt_path="./assets/checkpoints/mattors/gca_r34_4x10_200k_comp1k_SAD-34.77_20200604_213848-4369bea0.pth", person_label_index = 0 temp_dir="./assets/temp" trimap_control_size = 300 matting_image_size = 512 morph_kernel_size = 3 erode_iter_num = 2 dilate_iter_num = 7 device: """ if isinstance(cfg_or_path, str): cfg = EasyDict(load_toml_file(cfg_or_path)) else: cfg = cfg_or_path self.trimap_control_size = cfg.trimap_control_size self.matting_image_size = cfg.matting_image_size self.erode_iter_num = cfg.erode_iter_num self.dilate_iter_num = cfg.dilate_iter_num self.morph_kernel_size = cfg.morph_kernel_size """ point_rend_r50_caffe_fpn_mstrain_3x_coco """ self.detection_config_file = cfg.seg_cfg_path self.detection_checkpoint_file = cfg.seg_ckpt_path self.person_label_index = cfg.person_label_index """ gca_r34_4x10_200k_comp1k """ self.editing_config_file = cfg.matting_cfg_path self.editing_checkpoint_file = cfg.matting_ckpt_path self.device = device self.detection_model = init_detector(self.detection_config_file, self.detection_checkpoint_file, device=device) self.matting_model = init_model(self.editing_config_file, self.editing_checkpoint_file, device=device.__str__()) self.temp_dir = cfg.temp_dir if not os.path.exists(self.temp_dir): os.makedirs(self.temp_dir) def generate_trimap(self, mask): """ Args: mask (np.ndarray): (h, w) 0 or 1 Returns: trimap (np.ndarray): (h, w) is in the range [0, 255] """ origin_h, origin_w = mask.shape scaled_size = compute_scaled_size((origin_w, origin_h), control_size=self.trimap_control_size) # scale to control size mask = cv2.resize(mask, scaled_size, interpolation=cv2.INTER_NEAREST) kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (self.morph_kernel_size, self.morph_kernel_size)) inner = cv2.erode(mask, kernel, iterations=self.erode_iter_num) outer = cv2.dilate(mask, kernel, iterations=self.dilate_iter_num) trimap = inner * 255 + (outer - inner) * 128 trimap = cv2.resize(trimap, (origin_w, origin_h), interpolation=cv2.INTER_NEAREST) return trimap def run_detection(self, img_path): """ Run detection to get the segmentation mask and trimap, assuming that there is only a single human in the image. Args: img_path (str): the image path Returns: has_person (bool): whether there is person or not. 
segm_mask (np.ndarray): (h, w) trimap (np.ndarray): (h, w) """ result = inference_detector(self.detection_model, img_path) bbox_result, segm_result = result num_people = len(bbox_result[0]) has_person = num_people > 0 if has_person: # (src_h, src_w) 0 or 1 # in COCO dataset, `0` represents the person, # segm_result[self.person_label_index] represents all the results of Person, # segm_result[self.person_label_index][0] represents the first Person segmentation result. segm_mask = segm_result[self.person_label_index][0].astype(np.float32) # (src_h, src_w) 0 or 128 or 255 trimap = self.generate_trimap(segm_mask) else: segm_mask = [] trimap = [] return has_person, segm_mask, trimap def run_matting(self, img_or_path): """ 1. run instance segmentation with PointRender, detection first; 2. generate trimap; 3. run matting; Args: img_or_path (str or np.ndarray): (h, w, 3) is in the range of [0, 255] with BGR channel space. Returns: has_person (bool): whether there is person or not. segm_mask (np.ndarray): (h, w), 0 or 1 trimap (np.ndarray): (h, w), 0 or 128, or 255; pred_alpha (np.ndarray): (h, w), is in the range of [0, 1], np.float32 """ # TODO, do not write the middle outputs to disk, and make them in memory. # scaled_src_path, scaled_trimap_path # img_name = str(time.time()) # img_path = os.path.join(self.temp_dir, img_name) path = os.path.normpath(img_or_path) img_name = path.replace(os.sep, "_") img_path = os.path.join(self.temp_dir, img_name) if isinstance(img_or_path, str): src_img = cv2.imread(img_or_path) else: src_img = img_or_path.copy() # 1. run detection, instance segmentation and generate trimap has_person, segm_mask, trimap = self.run_detection(img_or_path) pred_alpha = [] if has_person: # 2. run matting algorithm scaled_src_path = img_path + '.matting.png' scaled_trimap_path = img_path + '.trimap.png' origin_h, origin_w = src_img.shape[:2] scaled_size = compute_scaled_size((origin_w, origin_h), control_size=self.matting_image_size) scaled_src_img = cv2.resize(src_img, scaled_size) scaled_trimap = cv2.resize(trimap, scaled_size, interpolation=cv2.INTER_NEAREST) cv2.imwrite(scaled_src_path, scaled_src_img) cv2.imwrite(scaled_trimap_path, scaled_trimap) # (scaled_h, scaled_w) [0, 1] pred_alpha = matting_inference(self.matting_model, scaled_src_path, scaled_trimap_path) # (origin_h, origin_w) [0, 1] pred_alpha = cv2.resize(pred_alpha, (origin_w, origin_h)) os.remove(scaled_src_path) os.remove(scaled_trimap_path) return has_person, segm_mask, trimap, pred_alpha def run(self, src_dir, out_dir, src_img_names=None, save_visual=True): """ Run human matting of all the images on a directory. 
Args: src_dir (str): out_dir (str): src_img_names (List[str]): save_visual (bool): Returns: None """ if not os.path.exists(out_dir): os.makedirs(out_dir) all_img_names = os.listdir(src_dir) all_img_names.sort() if src_img_names is None: processed_img_names = all_img_names else: processed_img_names = [] for img_name in src_img_names: processed_img_names.append(img_name) mask_outs = [] alpha_outs = [] valid_ids = [] for ids, img_name in enumerate(tqdm(processed_img_names)): img_path = os.path.join(src_dir, img_name) has_person, segm_mask, trimap, pred_alpha = self.run_matting(img_path) if has_person: valid_ids.append(ids) name = img_name.split('.')[0] mask_path = os.path.join(out_dir, name + "_mask.png") alpha_path = os.path.join(out_dir, name + "_alpha.png") cv2.imwrite(alpha_path, (pred_alpha * 255).astype(np.uint8)) cv2.imwrite(mask_path, (segm_mask * 255).astype(np.uint8)) mask_outs.append(mask_path) alpha_outs.append(alpha_path) if save_visual: cv2.imwrite(os.path.join(out_dir, name + "trimap.png"), trimap) return valid_ids, mask_outs, alpha_outs
<filename>erupt-annotation/src/main/java/xyz/erupt/annotation/fun/EruptProxy.java package xyz.erupt.annotation.fun; import xyz.erupt.annotation.config.Comment; /** * @author YuePeng * date 2018-10-09. */ public interface EruptProxy<MODEL> { @Comment("Don't call") default EruptProxy<? extends MODEL> dual() { return null; } }
""" discord.types ~~~~~~~~~~~~~~ Typings for the Discord API :copyright: (c) 2015-2021 Rapptz & (c) 2021-present Pycord Development :license: MIT, see LICENSE for more details. """
<filename>main/ejml-dsparse/test/org/ejml/sparse/csc/decomposition/lu/GenericLuTests_DSCC.java /* * Copyright (c) 2020, <NAME>. All Rights Reserved. * * This file is part of Efficient Java Matrix Library (EJML). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ejml.sparse.csc.decomposition.lu; import org.ejml.EjmlUnitTests; import org.ejml.UtilEjml; import org.ejml.data.DMatrixRMaj; import org.ejml.data.DMatrixSparseCSC; import org.ejml.dense.row.CommonOps_DDRM; import org.ejml.dense.row.RandomMatrices_DDRM; import org.ejml.interfaces.decomposition.DecompositionSparseInterface; import org.ejml.interfaces.decomposition.LUSparseDecomposition_F64; import org.ejml.ops.DConvertMatrixStruct; import org.ejml.sparse.FillReducing; import org.ejml.sparse.csc.CommonOps_DSCC; import org.ejml.sparse.csc.RandomMatrices_DSCC; import org.ejml.sparse.csc.decomposition.GenericDecompositionTests_DSCC; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; import static org.junit.jupiter.api.Assertions.*; /** * @author <NAME> */ public abstract class GenericLuTests_DSCC extends GenericDecompositionTests_DSCC { private FillReducing permTests[] = new FillReducing[]{FillReducing.NONE, FillReducing.IDENTITY}; public abstract LUSparseDecomposition_F64<DMatrixSparseCSC> create( FillReducing permutation ); @Override public DMatrixSparseCSC createMatrix( int N ) { return RandomMatrices_DSCC.symmetricPosDef(N, 0.25, rand); } @Override public DecompositionSparseInterface<DMatrixSparseCSC> createDecomposition() { return create(FillReducing.NONE); } @Override public List<DMatrixSparseCSC> decompose( DecompositionSparseInterface<DMatrixSparseCSC> d, DMatrixSparseCSC A ) { LUSparseDecomposition_F64<DMatrixSparseCSC> lu = (LUSparseDecomposition_F64<DMatrixSparseCSC>)d; assertTrue(lu.decompose(A)); List<DMatrixSparseCSC> list = new ArrayList<>(); list.add(lu.getLower(null)); list.add(lu.getUpper(null)); return list; } @Test public void checkHandConstructed() { for (FillReducing p : permTests) { checkHandConstructed(p); } } private void checkHandConstructed( FillReducing perm ) { DMatrixSparseCSC A = UtilEjml.parse_DSCC( "1 2 4 " + "2 13 23 " + "4 23 90", 3); LUSparseDecomposition_F64<DMatrixSparseCSC> lu = create(perm); checkSolution(A, lu); } private void checkSolution( DMatrixSparseCSC A, LUSparseDecomposition_F64<DMatrixSparseCSC> lu ) { DMatrixSparseCSC Acpy = A.copy(); assertTrue(lu.decompose(A)); assertFalse(lu.isSingular()); if (!lu.inputModified()) { EjmlUnitTests.assertEquals(A, Acpy, UtilEjml.TEST_F64); } DMatrixSparseCSC L = lu.getLower(null); DMatrixSparseCSC U = lu.getUpper(null); DMatrixSparseCSC P = lu.getRowPivot(null); DMatrixSparseCSC PL = new DMatrixSparseCSC(P.numRows, L.numCols, 0); DMatrixSparseCSC P_t = CommonOps_DSCC.transpose(P, null, null); CommonOps_DSCC.mult(P_t, L, PL, null, null); DMatrixSparseCSC found = new DMatrixSparseCSC(PL.numCols, U.numCols, 0); CommonOps_DSCC.mult(PL, U, found); EjmlUnitTests.assertEquals(Acpy, found, UtilEjml.TEST_F64); } @Test public void 
checkMontiCarlo() { for (FillReducing p : permTests) { checkMontiCarlo(p); } } private void checkMontiCarlo( FillReducing perm ) { LUSparseDecomposition_F64<DMatrixSparseCSC> lu = create(perm); for (int width = 1; width <= 10; width++) { for (int mc = 0; mc < 30; mc++) { int nz = (int)(width*width*(rand.nextDouble()*0.5 + 0.02)); DMatrixSparseCSC A = RandomMatrices_DSCC.rectangle(width, width, nz, rand); RandomMatrices_DSCC.ensureNotSingular(A, rand); checkSolution(A, lu); } } } @Test public void testSingular() { for (FillReducing p : permTests) { testSingular(p); } } private void testSingular( FillReducing perm ) { DMatrixSparseCSC A = UtilEjml.parse_DSCC( "1 4 3 " + "5 0 9 " + "5 0 9", 3); LUSparseDecomposition_F64<DMatrixSparseCSC> lu = create(perm); if (lu.decompose(A)) { assertTrue(lu.isSingular()); } } @Test public void getL_U_P_withMatrix() { DMatrixSparseCSC A = RandomMatrices_DSCC.rectangle(6, 6, 30, rand); DMatrixSparseCSC L = RandomMatrices_DSCC.rectangle(4, 3, 30, rand); DMatrixSparseCSC U = RandomMatrices_DSCC.rectangle(8, 2, 30, rand); DMatrixSparseCSC P = RandomMatrices_DSCC.rectangle(8, 9, 30, rand); for (FillReducing perm : permTests) { LUSparseDecomposition_F64<DMatrixSparseCSC> lu = create(perm); assertTrue(lu.decompose(A)); lu.getLower(L); lu.getUpper(U); lu.getRowPivot(P); assertTrue(CommonOps_DSCC.checkStructure(L)); assertTrue(CommonOps_DSCC.checkStructure(U)); assertTrue(CommonOps_DSCC.checkStructure(P)); assertTrue(L.numCols == 6 && L.numRows == 6); assertTrue(U.numCols == 6 && U.numRows == 6); assertTrue(P.numCols == 6 && P.numRows == 6 && P.nz_length == 6); } } @Test public void checkDeterminant() { for (FillReducing p : permTests) { checkDeterminant(p); checkDeterminantToDense(p); } } private void checkDeterminant( FillReducing perm ) { DMatrixSparseCSC A = UtilEjml.parse_DSCC( "1 4 3 " + "5 0 9 " + "2 2 2", 3); LUSparseDecomposition_F64<DMatrixSparseCSC> lu = create(perm); assertTrue(lu.decompose(A)); // computed using Octave assertEquals(44, lu.computeDeterminant().real, UtilEjml.TEST_F64); } private void checkDeterminantToDense( FillReducing perm ) { for (int trial = 0; trial < 60; trial++) { int N = rand.nextInt(10) + 1; DMatrixRMaj A = RandomMatrices_DDRM.rectangle(N, N, rand); DMatrixSparseCSC A_sp = DConvertMatrixStruct.convert(A, (DMatrixSparseCSC)null, UtilEjml.EPS); LUSparseDecomposition_F64<DMatrixSparseCSC> lu_sparse = create(perm); assertTrue(lu_sparse.decompose(A_sp)); double expected = CommonOps_DDRM.det(A); double found = lu_sparse.computeDeterminant().real; assertEquals(expected, found, UtilEjml.TEST_F64); } } // @Disabled // @Test // public void testTall() { // DMatrixSparseCSC A = RandomMatrices_DSCC.rectangle(5,4,10,rand); // RandomMatrices_DSCC.ensureNotSingular(A,rand); // // LUSparseDecomposition_F64<DMatrixSparseCSC> alg = create(FillReducing.NONE); // // checkSolution(A,alg); // } // // @Disabled // @Test // public void testFat() { // DMatrixSparseCSC A = RandomMatrices_DSCC.rectangle(4,5,10,rand); // RandomMatrices_DSCC.ensureNotSingular(A,rand); // // LUSparseDecomposition_F64<DMatrixSparseCSC> alg = create(FillReducing.NONE); // // checkSolution(A,alg); // } @Test public void testRowPivotVector() { DMatrixSparseCSC A = RandomMatrices_DSCC.rectangle(4, 4, 10, rand); RandomMatrices_DSCC.ensureNotSingular(A, rand); LUSparseDecomposition_F64<DMatrixSparseCSC> alg = create(FillReducing.NONE); assertTrue(alg.decompose(A)); int[] pivot = alg.getRowPivotV(null); DMatrixSparseCSC P = alg.getRowPivot(null); for (int i = 0; i < A.numRows; i++) { 
assertEquals(1, (int)P.get(i, pivot[i])); } } }
<reponame>petrdousa/archaius /** * Copyright 2014 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.config; import static org.junit.Assert.*; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.OutputStreamWriter; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.FixMethodOrder; import org.junit.runners.MethodSorters; @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class DynamicPropertyTest { static File configFile; private static final String PROP_NAME = "biz.mindyourown.notMine"; private static final String PROP_NAME2 = "biz.mindyourown.myProperty"; private static DynamicConfiguration config; boolean meGotCalled = false; static void createConfigFile() throws Exception { configFile = File.createTempFile("config", "properties"); BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(configFile), "UTF-8")); writer.write("props1=xyz"); writer.newLine(); writer.write("props2=abc"); writer.newLine(); writer.close(); } static void modifyConfigFile() throws Exception { new Thread() { public void run() { try { BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(configFile), "UTF-8")); writer.write("props2=456"); writer.newLine(); writer.write("props3=123"); writer.close(); } catch (Exception e) { e.printStackTrace(); fail("Unexpected exception"); } } }.start(); } @BeforeClass public static void init() throws Exception { createConfigFile(); config = new DynamicURLConfiguration(100, 500, false, configFile.toURI().toURL().toString()); System.out.println("Initializing with sources: " + config.getSource()); DynamicPropertyFactory.initWithConfigurationSource(config); // Create new DynamicFileConfiguration } @AfterClass public static void cleanUp() throws Exception { if (!configFile.delete()) { System.err.println("Unable to delete file " + configFile.getPath()); } } @Test public void testAsFileBased() throws Exception { // TODO: create a static DynamicProperties class DynamicStringProperty prop = new DynamicStringProperty("props1", null); DynamicStringProperty prop2 = new DynamicStringProperty("props2", null); DynamicIntProperty prop3 = new DynamicIntProperty("props3", 0); Thread.sleep(1000); assertEquals("xyz", prop.get()); assertEquals("abc", prop2.get()); assertEquals(0, prop3.get()); modifyConfigFile(); // waiting for reload Thread.sleep(2000); assertNull(prop.get()); assertEquals("456", prop2.get()); assertEquals(123, prop3.get()); config.stopLoading(); Thread.sleep(2000); config.setProperty("props2", "000"); assertEquals("000", prop2.get()); } @Test public void testDynamicProperty() { config.stopLoading(); DynamicProperty fastProp = DynamicProperty.getInstance(PROP_NAME); assertEquals("FastProperty does not have correct name", PROP_NAME, fastProp.getName()); assertSame("DynamicProperty.getInstance did not find the object", fastProp, DynamicProperty.getInstance(PROP_NAME)); // String 
hello = "Hello"; assertNull("Unset DynamicProperty is not null", fastProp.getString()); assertEquals("Unset DynamicProperty does not default correctly", hello, fastProp.getString(hello)); config.setProperty(PROP_NAME, hello); assertEquals("Set DynamicProperty does not have correct value", hello, fastProp.getString()); assertEquals("Set DynamicProperty uses supplied default", hello, fastProp.getString("not " + hello)); assertEquals("Non-integer DynamicProperty doesn't default on integer fetch", 123, fastProp.getInteger(Integer.valueOf(123)).intValue()); assertEquals("Non-float DynamicProperty doesn't default on float fetch", 2.71838f, fastProp.getFloat(Float.valueOf(2.71838f)).floatValue(), 0.001f); try { fastProp.getFloat(); fail("Parse should have failed: " + fastProp); } catch (IllegalArgumentException e) { assertNotNull(e); } // String pi = "3.14159"; String ee = "2.71838"; config.setProperty(PROP_NAME, pi); assertEquals("Set DynamicProperty does not have correct value", pi, fastProp.getString()); assertEquals("DynamicProperty did not property parse float string", 3.14159f, fastProp.getFloat(Float.valueOf(0.0f)).floatValue(), 0.001f); config.setProperty(PROP_NAME, ee); assertEquals("Set DynamicProperty does not have correct value", ee, fastProp.getString()); assertEquals("DynamicProperty did not property parse float string", 2.71838f, fastProp.getFloat(Float.valueOf(0.0f)).floatValue(), 0.001f); try { fastProp.getInteger(); fail("Integer fetch of non-integer DynamicProperty should have failed: " + fastProp); } catch (IllegalArgumentException e) { assertNotNull(e); } assertEquals("Integer fetch of non-integer DynamicProperty did not use default value", -123, fastProp.getInteger(Integer.valueOf(-123)).intValue()); // String devil = "666"; config.setProperty(PROP_NAME, devil); assertEquals("Changing DynamicProperty does not result in correct value", devil, fastProp.getString()); assertEquals("Integer fetch of changed DynamicProperty did not return correct value", 666, fastProp.getInteger().intValue()); // String self = "com.netflix.config.DynamicProperty"; assertEquals("Fetch of named class from integer valued DynamicProperty did not use default", DynamicPropertyTest.class, fastProp.getNamedClass(DynamicPropertyTest.class)); config.setProperty(PROP_NAME, self); assertEquals("Fetch of named class from DynamicProperty did not find the class", DynamicProperty.class, fastProp.getNamedClass()); // Check that clearing a property clears all caches config.clearProperty(PROP_NAME); assertNull("Fetch of cleard property did not return null", fastProp.getString()); assertEquals("Fetch of cleard property did not use default value", devil, fastProp.getString(devil)); assertNull("Fetch of cleard property did not return null", fastProp.getInteger()); assertEquals("Fetch of cleard property did not use default value", -123, fastProp.getInteger(Integer.valueOf(-123)).intValue()); assertNull("Fetch of cleard property did not return null", fastProp.getFloat()); assertEquals("Fetch of cleard property did not use default value", 2.71838f, fastProp.getFloat(Float.valueOf(2.71838f)).floatValue(), 0.001f); assertNull("Fetch of cleard property did not return null", fastProp.getNamedClass()); assertEquals("Fetch of cleard property did not use default value", DynamicProperty.class, fastProp.getNamedClass(DynamicProperty.class)); // String yes = "yes"; String maybe = "maybe"; String no = "Off"; config.setProperty(PROP_NAME, yes); assertTrue("boolean property set to 'yes' is not true", 
fastProp.getBoolean().booleanValue()); config.setProperty(PROP_NAME, no); assertTrue("boolean property set to 'no' is not false", !fastProp.getBoolean().booleanValue()); config.setProperty(PROP_NAME, maybe); try { fastProp.getBoolean(); fail("Parse should have failed: " + fastProp); } catch (IllegalArgumentException e) { assertNotNull(e); } assertTrue(fastProp.getBoolean(Boolean.TRUE).booleanValue()); assertTrue(!fastProp.getBoolean(Boolean.FALSE).booleanValue()); } @Test public void testPerformance() { config.stopLoading(); DynamicProperty fastProp = DynamicProperty.getInstance(PROP_NAME2); String goodbye = "Goodbye"; int loopCount = 1000000; config.setProperty(PROP_NAME2, goodbye); long cnt = 0; long start = System.currentTimeMillis(); for (int i = 0; i < loopCount; i++) { cnt += fastProp.getString().length(); } long elapsed = System.currentTimeMillis() - start; System.out.println("Fetched dynamic property " + loopCount + " times in " + elapsed + " milliseconds"); // Now for the "normal" time cnt = 0; start = System.currentTimeMillis(); for (int i = 0; i < loopCount; i++) { cnt += config.getString(PROP_NAME2).length(); } elapsed = System.currentTimeMillis() - start; System.out.println("Fetched Configuration value " + loopCount + " times in " + elapsed + " milliseconds"); // Now for the "system property" time cnt = 0; System.setProperty(PROP_NAME2, goodbye); start = System.currentTimeMillis(); for (int i = 0; i < loopCount; i++) { cnt += System.getProperty(PROP_NAME2).length(); } elapsed = System.currentTimeMillis() - start; System.out.println("Fetched system property value " + loopCount + " times in " + elapsed + " milliseconds"); } @Test public void testDynamicPropertyListenerPropertyChangeCallback(){ config.stopLoading(); DynamicStringProperty listOfCountersToExportProperty = new DynamicStringProperty("com.netflix.eds.utils.EdsCounter.listOfCountersToExport", "") { @Override protected void propertyChanged() { meGotCalled = true; } }; config.setProperty("com.netflix.eds.utils.EdsCounter.listOfCountersToExport", "valuechanged"); assertTrue("propertyChanged did not get called", meGotCalled); assertEquals("valuechanged", listOfCountersToExportProperty.get()); } @Test public void testFastProperyTimestamp() throws Exception { config.stopLoading(); DynamicStringProperty prop = new DynamicStringProperty("com.netflix.testing.timestamp", "hello"); long initialTime = prop.getChangedTimestamp(); Thread.sleep(10); assertEquals(prop.getChangedTimestamp(), initialTime); config.setProperty(prop.getName(), "goodbye"); assertTrue((prop.getChangedTimestamp() - initialTime) > 8); } @Test public void testDynamicProperySetAdnGets() throws Exception { config.stopLoading(); DynamicBooleanProperty prop = new DynamicBooleanProperty( "com.netflix.testing.mybool", false); assertFalse(prop.get()); assertTrue(prop.prop.getCallbacks().isEmpty()); for (int i = 0; i < 10; i++) { config.setProperty( "com.netflix.testing.mybool", "true"); assertTrue(prop.get()); assertTrue(config.getString("com.netflix.testing.mybool").equals("true")); config.setProperty( "com.netflix.testing.mybool", "false"); assertFalse(prop.get()); assertTrue(config.getString("com.netflix.testing.mybool").equals("false")); } for(int i = 0; i < 100; i++) { config.setProperty( "com.netflix.testing.mybool", "true"); assertTrue(prop.get()); assertTrue(config.getString("com.netflix.testing.mybool").equals("true")); config.clearProperty( "com.netflix.testing.mybool"); assertFalse(prop.get()); assertTrue(config.getString("com.netflix.testing.mybool") == 
null); } } @Test public void testPropertyCreation() { config.stopLoading(); meGotCalled = false; final String newValue = "newValue"; Runnable r = new Runnable() { public void run() { meGotCalled = true; } }; final DynamicStringProperty prop = DynamicPropertyFactory.getInstance().getStringProperty("foo.bar", "xyz", r); assertEquals("xyz", prop.get()); config.setProperty("foo.bar", newValue); assertTrue(meGotCalled); assertEquals(newValue, prop.get()); assertTrue(prop.prop.getCallbacks().contains(r)); } }
# -*- coding: utf-8 -*- """Top-level package for Bitcoin exchange feedhandler.""" __author__ = """<NAME>""" __email__ = '<EMAIL>' from pkg_resources import get_distribution, DistributionNotFound try: __version__ = get_distribution(__name__).version except DistributionNotFound: # package is not installed pass # flake8: noqa from .core import Configuration, Runner
// // PluginGain.c - MrsWatson // Copyright (c) 2016 <NAME>. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. // #include "PluginGain.h" #include "audio/SampleBuffer.h" #include "logging/EventLogger.h" const char *kInternalPluginGainName = INTERNAL_PLUGIN_PREFIX "gain"; static void _pluginGainEmpty(void *pluginPtr) { // Nothing to do here } static boolByte _pluginGainOpen(void *pluginPtr) { return true; } static void _pluginGainDisplayInfo(void *pluginPtr) { logInfo("Information for Internal plugin '%s'", kInternalPluginGainName); logInfo("Type: effect, parameters: none"); logInfo("Description: a basic gain effect"); } static int _pluginGainGetSetting(void *pluginPtr, PluginSetting pluginSetting) { switch (pluginSetting) { case PLUGIN_SETTING_TAIL_TIME_IN_MS: return 0; case PLUGIN_NUM_INPUTS: return 2; case PLUGIN_NUM_OUTPUTS: return 2; default: return 0; } } static void _pluginGainProcessAudio(void *pluginPtr, SampleBuffer inputs, SampleBuffer outputs) { Plugin plugin = (Plugin)pluginPtr; PluginGainSettings settings = (PluginGainSettings)plugin->extraData; unsigned long channel, sample; sampleBufferCopyAndMapChannels(outputs, inputs); for (channel = 0; channel < outputs->numChannels; ++channel) { for (sample = 0; sample < outputs->blocksize; ++sample) { outputs->samples[channel][sample] *= settings->gain; } } } static void _pluginGainProcessMidiEvents(void *pluginPtr, LinkedList midiEvents) { // Nothing to do here } static boolByte _pluginGainSetParameter(void *pluginPtr, unsigned int i, float value) { Plugin plugin = (Plugin)pluginPtr; PluginGainSettings settings = (PluginGainSettings)plugin->extraData; switch (i) { case PLUGIN_GAIN_SETTINGS_GAIN: settings->gain = value; return true; default: logError("Attempt to set invalid parameter %d on internal gain plugin", i); return false; } } Plugin newPluginGain(const CharString pluginName) { Plugin plugin = _newPlugin(PLUGIN_TYPE_INTERNAL, PLUGIN_TYPE_EFFECT); PluginGainSettings settings = (PluginGainSettings)malloc(sizeof(PluginGainSettingsMembers)); charStringCopy(plugin->pluginName, pluginName); charStringCopyCString(plugin->pluginLocation, "Internal"); plugin->openPlugin = _pluginGainOpen; plugin->displayInfo = _pluginGainDisplayInfo; plugin->getSetting = _pluginGainGetSetting; 
plugin->prepareForProcessing = _pluginGainEmpty; plugin->showEditor = _pluginGainEmpty; plugin->processAudio = _pluginGainProcessAudio; plugin->processMidiEvents = _pluginGainProcessMidiEvents; plugin->setParameter = _pluginGainSetParameter; plugin->closePlugin = _pluginGainEmpty; plugin->freePluginData = _pluginGainEmpty; settings->gain = 1.0f; plugin->extraData = settings; return plugin; }
<gh_stars>1000+ /* * Copyright (C) 2017-2019 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.dac.model.folder; import org.junit.Assert; import org.junit.Test; /** * DatasetPath tests */ public class TestFolderPath { @Test public void testCreateFromFullPath() { FolderPath folderPath = new FolderPath("a.b.c"); Assert.assertEquals("a", folderPath.getRoot().getName()); Assert.assertEquals("c", folderPath.getFolderName().getName()); Assert.assertEquals(1, folderPath.getFolderPath().size()); Assert.assertEquals("b", folderPath.getFolderPath().get(0).getName()); Assert.assertEquals("a.b.c", folderPath.toPathString()); Assert.assertEquals("a.b.c", folderPath.toPathString()); } @Test public void testCreateFromUrlParams() { FolderPath folderPath = new FolderPath("a", "b/c"); Assert.assertEquals("a", folderPath.getRoot().getName()); Assert.assertEquals("c", folderPath.getFolderName().getName()); Assert.assertEquals(1, folderPath.getFolderPath().size()); Assert.assertEquals("b", folderPath.getFolderPath().get(0).getName()); Assert.assertEquals("a.b.c", folderPath.toPathString()); Assert.assertEquals("a.b.c", folderPath.toPathString()); } @Test public void testParseUrlPath() { FolderPath folderPath = (FolderPath) Folder.parseUrlPath("/space/s1/folder/f1/f2"); Assert.assertEquals("s1.f1.f2", folderPath.toPathString()); } @Test public void testParseSourceUrlPath() { SourceFolderPath folderPath = (SourceFolderPath) Folder.parseUrlPath("/source/s1/folder/f1/f2"); Assert.assertEquals("s1.f1.f2", folderPath.toPathString()); } @Test public void testToUrlPath() { FolderPath folderPath = new FolderPath("a", "b/c"); Assert.assertEquals("/space/a/folder/b/c", folderPath.toUrlPath()); } }
<gh_stars>100-1000 /* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */ /* * Copyright 2018-2020 Couchbase, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef LCB_TRACING_INTERNAL_H #define LCB_TRACING_INTERNAL_H #include <libcouchbase/tracing.h> #include "rnd.h" #ifdef __cplusplus #include <queue> #include <map> #include <string> namespace lcb { namespace trace { class Span { public: Span(lcbtrace_TRACER *tracer, const char *opname, uint64_t start, lcbtrace_REF_TYPE ref, lcbtrace_SPAN *other, void *external_span); ~Span(); void finish(uint64_t finish); uint64_t duration() const { return m_finish - m_start; } void add_tag(const char *name, int copy, const char *value, int copy_value); void add_tag(const char *name, int copy_key, const char *value, size_t value_len, int copy_value); void add_tag(const char *name, int copy, uint64_t value); void add_tag(const char *name, int copy, double value); void add_tag(const char *name, int copy, bool value); void service(lcbtrace_THRESHOLDOPTS svc); lcbtrace_THRESHOLDOPTS service() const; void increment_dispatch(uint64_t dispatch_time); void increment_server(uint64_t server_time); lcbtrace_SPAN *find_outer_or_this(); const char *service_str() const; void *external_span() const; void external_span(void *ext); bool is_outer() const; void is_outer(bool outer); bool is_dispatch() const; void is_dispatch(bool dispatch); bool is_encode() const; void is_encode(bool encode); bool should_finish() const; void should_finish(bool finish); lcbtrace_TRACER *m_tracer; std::string m_opname; uint64_t m_span_id; uint64_t m_start; uint64_t m_finish{0}; bool m_orphaned; Span *m_parent; void *m_extspan; sllist_root m_tags{}; bool m_is_outer{false}; bool m_is_dispatch{false}; bool m_is_encode{false}; bool m_should_finish{true}; lcbtrace_THRESHOLDOPTS m_svc{LCBTRACE_THRESHOLD__MAX}; const char *m_svc_string{nullptr}; uint64_t m_total_dispatch{0}; uint64_t m_last_dispatch{0}; uint64_t m_total_server{0}; uint64_t m_last_server{0}; uint64_t m_encode{0}; }; struct ReportedSpan { uint64_t duration; std::string payload; bool operator<(const ReportedSpan &rhs) const { return duration < rhs.duration; } }; template <typename T> class FixedQueue : private std::priority_queue<T> { public: explicit FixedQueue(size_t capacity) : m_capacity(capacity) {} void push(const T &item) { std::priority_queue<T>::push(item); if (this->size() > m_capacity) { this->c.pop_back(); } } using std::priority_queue<T>::empty; using std::priority_queue<T>::top; using std::priority_queue<T>::pop; using std::priority_queue<T>::size; private: size_t m_capacity; }; typedef ReportedSpan QueueEntry; typedef FixedQueue<QueueEntry> FixedSpanQueue; class ThresholdLoggingTracer { lcbtrace_TRACER *m_wrapper; lcb_settings *m_settings; size_t m_threshold_queue_size; FixedSpanQueue m_orphans; std::map<std::string, FixedSpanQueue> m_queues; void flush_queue(FixedSpanQueue &queue, const char *message, const char *service, bool warn); QueueEntry convert(lcbtrace_SPAN *span); public: 
ThresholdLoggingTracer(lcb_INSTANCE *instance); lcbtrace_TRACER *wrap(); void add_orphan(lcbtrace_SPAN *span); void check_threshold(lcbtrace_SPAN *span); void flush_orphans(); void flush_threshold(); void do_flush_orphans(); void do_flush_threshold(); lcb::io::Timer<ThresholdLoggingTracer, &ThresholdLoggingTracer::flush_orphans> m_oflush; lcb::io::Timer<ThresholdLoggingTracer, &ThresholdLoggingTracer::flush_threshold> m_tflush; }; } // namespace trace } // namespace lcb extern "C" { #endif /* __cplusplus */ LCB_INTERNAL_API void lcbtrace_span_add_system_tags(lcbtrace_SPAN *span, lcb_settings *settings, lcbtrace_THRESHOLDOPTS svc); LCB_INTERNAL_API void lcbtrace_span_set_parent(lcbtrace_SPAN *span, lcbtrace_SPAN *parent); LCB_INTERNAL_API void lcbtrace_span_set_orphaned(lcbtrace_SPAN *span, int val); LIBCOUCHBASE_API void lcbtrace_span_add_tag_str_nocopy(lcbtrace_SPAN *span, const char *name, const char *value); const char *dur_level_to_string(lcb_DURABILITY_LEVEL dur_level); void lcbtrace_span_add_host_and_port(lcbtrace_SPAN *span, lcbio_CONNINFO *info); #ifdef __cplusplus #define LCBTRACE_ADD_RETRIES(span, retries) \ if (span) { \ span->find_outer_or_this()->add_tag(LCBTRACE_TAG_RETRIES, 0, (uint64_t)retries); \ } // called by lcb_query, etc... The underlying lcb_http call will fill in the dispatch span tags #define LCBTRACE_HTTP_START(settings, opaque, pspan, operation_name, svc, outspan) \ LCBTRACE_START(settings, opaque, pspan, operation_name, svc, outspan) #define LCBTRACE_KV_START(settings, opaque, cmd, operation_name, outspan) \ if (nullptr != (settings)->tracer) { \ lcbtrace_SPAN *pspan = cmd->parent_span(); \ char opid[20] = {}; \ snprintf(opid, sizeof(opid), "%p", reinterpret_cast<void *>(opaque)); \ LCBTRACE_START(settings, opid, pspan, operation_name, LCBTRACE_THRESHOLD_KV, outspan) \ } // don't create a span if passed an outer parent, if we are the threshold logger, // and use its close to determine times, etc... #define LCBTRACE_START(settings, opaque, pspan, operation_name, svc, outspan) \ if (nullptr != (settings)->tracer) { \ if (nullptr != pspan && pspan->is_outer() && (settings)->tracer->flags & LCBTRACE_F_THRESHOLD) { \ outspan = pspan; \ outspan->should_finish(false); \ } else { \ lcbtrace_REF ref; \ ref.type = LCBTRACE_REF_CHILD_OF; \ ref.span = pspan; \ bool is_dispatch = (pspan && pspan->is_outer()); \ outspan = \ lcbtrace_span_start((settings)->tracer, is_dispatch ? 
LCBTRACE_OP_DISPATCH_TO_SERVER : operation_name, \ LCBTRACE_NOW, &ref); \ outspan->should_finish(true); \ outspan->is_outer(!is_dispatch); \ } \ outspan->is_dispatch(true); \ if (opaque) { \ lcbtrace_span_add_tag_str(outspan, LCBTRACE_TAG_OPERATION_ID, opaque); \ } \ lcbtrace_span_add_system_tags(outspan, settings, svc); \ } else { \ outspan = nullptr; \ } #define LCBTRACE_KVSTORE_START(settings, opaque, cmd, operation_name, outspan) \ LCBTRACE_KV_START(settings, opaque, cmd, operation_name, outspan) \ if ((settings)->tracer) { \ outspan->add_tag(LCBTRACE_TAG_DURABILITY, 0, dur_level_to_string(cmd->durability_level()), 0); \ } #define LCBTRACE_KV_FINISH(pipeline, request, resp, server_duration) \ do { \ lcbtrace_SPAN *dispatch_span__ = MCREQ_PKT_RDATA(request)->span; \ if (dispatch_span__) { \ dispatch_span__->increment_server(server_duration); \ lcb::Server *server = static_cast<lcb::Server *>(pipeline); \ dispatch_span__->find_outer_or_this()->add_tag(LCBTRACE_TAG_RETRIES, 0, (uint64_t)request->retries); \ lcbtrace_span_add_tag_str_nocopy(dispatch_span__, LCBTRACE_TAG_TRANSPORT, "IP.TCP"); \ lcbio_CTX *ctx = server->connctx; \ if (ctx) { \ char local_id[34] = {}; \ snprintf(local_id, sizeof(local_id), "%016" PRIx64 "/%016" PRIx64, \ (uint64_t)server->get_settings()->iid, (uint64_t)ctx->sock->id); \ lcbtrace_span_add_tag_str(dispatch_span__, LCBTRACE_TAG_LOCAL_ID, local_id); \ lcbtrace_span_add_host_and_port(dispatch_span__, ctx->sock->info); \ } \ if (dispatch_span__->should_finish()) { \ lcbtrace_span_finish(dispatch_span__, LCBTRACE_NOW); \ } \ } \ } while (0) #define LCBTRACE_HTTP_FINISH(span) \ if (nullptr != span) { \ lcbtrace_span_add_tag_str_nocopy(span, LCBTRACE_TAG_TRANSPORT, "IP.TCP"); \ if (span->should_finish()) { \ lcbtrace_span_finish(span, LCBTRACE_NOW); \ } \ span = nullptr; \ } } #else #define LCBTRACE_KVSTORE_START(settings, opaque, cmd, operation_name, outspan) #define LCBTRACE_HTTP_START(settings, opaque, pspan, operation_name, svc, outspan) #define LCBTRACE_KV_FINISH(pipeline, request, server_duration) #define LCBTRACE_HTTP_FINISH(span) #endif /* __cplusplus*/ #endif /* LCB_TRACING_INTERNAL_H */
package com.cxytiandi.kittycloud.mqconsume.es.enums;

/**
 * @author 尹吉欢
 * Personal WeChat: jihuan900
 * WeChat official account: 猿天地
 * GitHub: https://github.com/yinjihuan
 * About the author: http://cxytiandi.com/about
 * Date: 2020-03-29 23:09
 */
public enum ChangeTypeEnum {

    /**
     * Insert
     */
    INSERT(1, "新增"),
    /**
     * Update
     */
    UPDATE(2, "修改"),
    /**
     * Delete
     */
    DELETE(3, "删除");

    ChangeTypeEnum(int type, String descp) {
        this.type = type;
        this.descp = descp;
    }

    /**
     * Change type code
     */
    private int type;

    /**
     * Description
     */
    private String descp;

    public int getType() {
        return type;
    }

    public String getDescp() {
        return descp;
    }

    public static ChangeTypeEnum from(int type) {
        for (ChangeTypeEnum changeType : ChangeTypeEnum.values()) {
            if (changeType.getType() == type) {
                return changeType;
            }
        }
        return null;
    }
}
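/*
 * Illustrative usage sketch (editorial addition, not part of the original repository):
 * shows how the integer-code lookup ChangeTypeEnum.from(int) defined above might be
 * used, including the null result for an unknown code. The wrapper class and the
 * concrete codes printed here are assumptions for demonstration only.
 */
import com.cxytiandi.kittycloud.mqconsume.es.enums.ChangeTypeEnum;

class ChangeTypeEnumUsageExample {

    public static void main(String[] args) {
        // A known code resolves to its enum constant.
        ChangeTypeEnum update = ChangeTypeEnum.from(2);
        System.out.println(update);           // UPDATE
        System.out.println(update.getType()); // 2

        // Unknown codes fall through the loop and yield null, so callers should check.
        ChangeTypeEnum unknown = ChangeTypeEnum.from(99);
        System.out.println(unknown == null);  // true
    }
}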
# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, val=0, left=None, right=None):
#         self.val = val
#         self.left = left
#         self.right = right
from collections import deque
from collections import defaultdict


class Solution(object):
    def verticalOrder(self, root):
        """
        :type root: TreeNode
        :rtype: List[List[int]]
        """
        if not root:
            return []
        queue = deque([(root, 0)])
        verticalNodeMap = defaultdict(list)
        while queue:
            node, horizontalDistance = queue.popleft()
            if node:
                verticalNodeMap[horizontalDistance].append(node.val)
                queue.append((node.left, horizontalDistance - 1))
                queue.append((node.right, horizontalDistance + 1))

        minHorizontalDistance, maxHorizontalDistance = min(verticalNodeMap.keys()), max(verticalNodeMap.keys())
        result = []
        for key in range(minHorizontalDistance, maxHorizontalDistance + 1):
            result.append(verticalNodeMap[key])
        return result


# My solution during mock; the original version got TLE because the loop kept
# enqueueing root's children instead of currentNode's, so the queue never drained.
# Fixed below to traverse from currentNode.
from collections import defaultdict
from collections import deque


class Solution(object):
    def verticalOrder(self, root):
        """
        :type root: TreeNode
        :rtype: List[List[int]]
        """
        if not root:
            return []
        orderMap = defaultdict(list)
        queue = deque([(root, 0)])

        while queue:
            currentNode, vLine = queue.popleft()
            if currentNode:
                orderMap[vLine].append(currentNode.val)
                queue.append((currentNode.left, vLine - 1))
                queue.append((currentNode.right, vLine + 1))

        result = []
        for i in range(min(orderMap.keys()), max(orderMap.keys()) + 1):
            result.append(orderMap[i])
        return result
{"nom":"Veynes","circ":"1ère circonscription","dpt":"Hautes-Alpes","inscrits":2449,"abs":1172,"votants":1277,"blancs":35,"nuls":8,"exp":1234,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":347},{"nuance":"FI","nom":"Mme <NAME>","voix":216},{"nuance":"SOC","nom":"Mme <NAME>","voix":167},{"nuance":"FN","nom":"M. <NAME>","voix":163},{"nuance":"LR","nom":"Mme <NAME>","voix":107},{"nuance":"COM","nom":"<NAME>","voix":95},{"nuance":"DIV","nom":"<NAME>","voix":66},{"nuance":"ECO","nom":"Mme <NAME>","voix":27},{"nuance":"DVD","nom":"M. <NAME>","voix":16},{"nuance":"ECO","nom":"Mme <NAME>","voix":14},{"nuance":"DIV","nom":"Mme <NAME>","voix":10},{"nuance":"EXG","nom":"M. <NAME>","voix":6}]}
/* * Copyright 2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.mantisrx.common.network; import com.netflix.spectator.api.BasicTag; import io.reactivx.mantis.operators.DropOperator; import java.util.Optional; import mantis.io.reactivex.netty.channel.ObservableConnection; import rx.Observable; import rx.subjects.PublishSubject; import rx.subjects.SerializedSubject; import rx.subjects.Subject; public class WritableEndpoint<T> extends Endpoint implements Comparable<WritableEndpoint<T>> { private Subject<T, T> subject; private ObservableConnection<?, ?> connection; public WritableEndpoint(String host, int port, String slotId) { this(host, port, slotId, null); } public WritableEndpoint(String host, int port, String slotId, ObservableConnection<?, ?> connection) { super(host, port, slotId); subject = new SerializedSubject<T, T>(PublishSubject.<T>create()); this.connection = connection; } public WritableEndpoint(String host, int port) { super(host, port); subject = new SerializedSubject<T, T>(PublishSubject.<T>create()); } public void write(T value) { subject.onNext(value); } public void explicitClose() { if (connection != null) { connection.close(true); } } public void complete() { subject.onCompleted(); explicitClose(); } public Observable<T> read() { return subject .lift(new DropOperator<>("outgoing_subject", new BasicTag("slotId", Optional.ofNullable(slotId).orElse("none")))); } @Override public String toString() { return "WritableEndpoint [" + super.toString() + "]"; } public void error(Throwable e) { subject.onError(e); explicitClose(); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((slotId == null) ? 0 : slotId.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Endpoint other = (Endpoint) obj; if (slotId == null) { if (other.slotId != null) return false; } else if (!slotId.equals(other.slotId)) return false; return true; } @Override public int compareTo(WritableEndpoint<T> o) { if (this.equals(o)) { return 0; } else { return o.getSlotId().compareTo(getSlotId()); } } }
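/*
 * Illustrative usage sketch (editorial addition, not part of the original sources): a
 * minimal write/read round trip through the WritableEndpoint defined above, assuming
 * the Mantis and RxJava 1.x dependencies used by that class are on the classpath. The
 * host and port values are placeholders.
 */
import io.mantisrx.common.network.WritableEndpoint;

class WritableEndpointUsageExample {

    public static void main(String[] args) {
        WritableEndpoint<String> endpoint = new WritableEndpoint<>("localhost", 7001);

        // read() exposes the endpoint's internal serialized subject as an Observable.
        endpoint.read().subscribe(
                value -> System.out.println("received: " + value),
                error -> System.err.println("error: " + error),
                () -> System.out.println("completed"));

        // write() pushes values to current subscribers; complete() ends the stream
        // and closes the underlying connection if one was supplied.
        endpoint.write("hello");
        endpoint.write("world");
        endpoint.complete();
    }
}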
<filename>utils/src/main/java/io/atomix/utils/misc/StringUtils.java /* * Copyright 2019-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.atomix.utils.misc; import java.util.ArrayList; import java.util.List; /** * Collection of various helper methods to manipulate strings. */ public final class StringUtils { private StringUtils() { } /** * Splits the input string with the given regex and filters empty strings. * * @param input the string to split. * @return the array of strings computed by splitting this string */ public static String[] split(String input, String regex) { if (input == null) { return null; } String[] arr = input.split(regex); List<String> results = new ArrayList<>(arr.length); for (String a : arr) { if (!a.trim().isEmpty()) { results.add(a); } } return results.toArray(new String[0]); } }
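/*
 * Illustrative usage sketch (editorial addition, not part of the original sources):
 * demonstrates that StringUtils.split above drops segments that are empty after
 * trimming but returns the surviving segments untrimmed, and that a null input
 * yields null. The wrapper class is hypothetical.
 */
import io.atomix.utils.misc.StringUtils;

class StringUtilsSplitExample {

    public static void main(String[] args) {
        String[] parts = StringUtils.split("a, ,b,,c ", ",");
        // Whitespace-only and empty segments are filtered out; "c " keeps its trailing space.
        for (String part : parts) {
            System.out.println("[" + part + "]"); // [a], [b], [c ]
        }

        // A null input returns null rather than an empty array.
        System.out.println(StringUtils.split(null, ",") == null); // true
    }
}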
{ "name": "FlawlessTransactionGeneration", "version": "0.0.2", "summary": "This pod generate list of random transaction", "description": "This CocoaPods library helps you to random generate list of bank's transaction.", "homepage": "https://github.com/swiftoverflow/FlawlessTransactionGeneration", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "SwiftOverflow": "<EMAIL>" }, "platforms": { "ios": "13.0" }, "swift_versions": "5.0", "source": { "git": "https://github.com/swiftoverflow/FlawlessTransactionGeneration.git", "tag": "0.0.2" }, "source_files": "FlawlessTransactionGeneration/**/*.{h,m,swift}", "dependencies": { "FlawlessValidation": [ "~> 0.0.2" ] }, "swift_version": "5.0" }
<filename>system/lib/libc/musl/src/locale/catclose.c<gh_stars>1000+ #include <nl_types.h> int catclose (nl_catd catd) { return 0; }
package jenkins.plugins.git.traits; import hudson.Util; import hudson.model.Descriptor; import hudson.plugins.git.extensions.GitSCMExtension; import java.util.ArrayList; import java.util.List; import jenkins.scm.api.trait.SCMSourceTrait; import org.junit.ClassRule; import org.junit.Test; import org.jvnet.hudson.test.JenkinsRule; import static org.hamcrest.Matchers.is; import static org.hamcrest.MatcherAssert.assertThat; public class GitSCMExtensionTraitTest { @ClassRule public static JenkinsRule j = new JenkinsRule(); public List<GitSCMExtensionTraitDescriptor> descriptors() { List<GitSCMExtensionTraitDescriptor> list = new ArrayList<>(); for (Descriptor<SCMSourceTrait> d : SCMSourceTrait.all()) { if (d instanceof GitSCMExtensionTraitDescriptor) { list.add((GitSCMExtensionTraitDescriptor) d); } } return list; } @Test public void extensionClassesOverrideEquals() { for (GitSCMExtensionTraitDescriptor d : descriptors()) { assertThat(d.getExtensionClass().getName() + " overrides equals(Object)", Util.isOverridden(GitSCMExtension.class, d.getExtensionClass(), "equals", Object.class), is(true)); } } @Test public void extensionClassesOverrideHashCode() { for (GitSCMExtensionTraitDescriptor d : descriptors()) { assertThat(d.getExtensionClass().getName() + " overrides hashCode()", Util.isOverridden(GitSCMExtension.class, d.getExtensionClass(), "hashCode"), is(true)); } } @Test public void extensionClassesOverrideToString() { for (GitSCMExtensionTraitDescriptor d : descriptors()) { assertThat(d.getExtensionClass().getName() + " overrides toString()", Util.isOverridden(GitSCMExtension.class, d.getExtensionClass(), "toString"), is(true)); } } }
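/*
 * Illustrative sketch (editorial addition, not part of the original sources): the shape
 * of extension class that the tests above enforce -- every GitSCMExtension exposed
 * through a GitSCMExtensionTrait must override equals(Object), hashCode() and
 * toString(). The extension class below is hypothetical and carries no configuration.
 */
import hudson.plugins.git.extensions.GitSCMExtension;

class ExampleStatelessExtension extends GitSCMExtension {

    @Override
    public boolean equals(Object o) {
        // Stateless extensions are equal whenever the other object is the same type.
        return o instanceof ExampleStatelessExtension;
    }

    @Override
    public int hashCode() {
        return ExampleStatelessExtension.class.hashCode();
    }

    @Override
    public String toString() {
        return "ExampleStatelessExtension{}";
    }
}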
<reponame>Ryujin-Jakka/spfx-40-fantastics
{
  "$schema": "../../../node_modules/@microsoft/sp-module-interfaces/lib/manifestSchemas/jsonSchemas/clientSideComponentManifestSchema.json",
  "id": "944744d9-a870-4bb7-a732-f63da7558609",
  "alias": "StockInfo",
  "componentType": "WebPart",
  "version": "1.0.0",
  "manifestVersion": 2,
  "preconfiguredEntries": [{
    "groupId": "1df557f4-e29a-4d2d-b1bf-a6b0b68aae90",
    "group": { "default": "Tools" },
    "title": { "default": "Stock Info" },
    "description": { "default": "Generates a graph image of the current stock value of a specified stock. With this Web Part, you can, for example, share the current stock price of your company on your homepage. This Web Part uses the Yahoo! Financial Services." },
    "officeFabricIconFontName": "Financial",
    "properties": {
      "stock": "MSFT",
      "lang": "en-US",
      "region": "US",
      "dimension": {
        "width": "250px",
        "height": "250px"
      }
    }
  }]
}
from django.urls import reverse from django.utils.translation import ugettext_lazy from memoized import memoized from corehq.apps.locations.models import SQLLocation from corehq.apps.reports.filters.controllers import ( LocationGroupOptionsController, ) from .api import EmwfOptionsView from .users import ExpandedMobileWorkerFilter class LocationGroupFilter(ExpandedMobileWorkerFilter): """ Displays a list of locations and groups to select from to filter report """ slug = "grouplocationfilter" label = ugettext_lazy("Groups or Locations") default_options = None placeholder = ugettext_lazy( "Click here to select groups or locations to filter in the report") is_cacheable = False options_url = 'grouplocationfilter_options' @property @memoized def selected(self): selected_ids = self.request.GET.getlist(self.slug) selected = (self._selected_group_entries(selected_ids) + self._selected_location_entries(selected_ids)) known_ids = dict(selected) return [ {'id': id, 'text': known_ids[id]} for id in selected_ids if id in known_ids ] @property def filter_context(self): context = super(LocationGroupFilter, self).filter_context url = reverse(self.options_url, args=[self.domain]) context.update({'endpoint': url}) return context @property def options(self): return [ (location.location_id, location.name) for location in SQLLocation.objects.filter(domain=self.domain) ] class LocationGroupFilterOptions(EmwfOptionsView): @property @memoized def options_controller(self): return LocationGroupOptionsController(self.request, self.domain, self.search)
from rest_framework import serializers as ser from distutils.version import StrictVersion from api.base.serializers import JSONAPISerializer, LinksField, ShowIfVersion, RelationshipField from api.subjects.serializers import UpdateSubjectsMixin from osf.models import Subject subjects_as_relationships_version = '2.16' class TaxonomyField(ser.Field): def to_representation(self, subject): if not isinstance(subject, Subject): subject = Subject.load(subject) if subject is not None: return { 'id': subject._id, 'text': subject.text, } return None def to_internal_value(self, subject_id): return subject_id class TaxonomizableSerializerMixin(ser.Serializer, UpdateSubjectsMixin): """ Mixin for Taxonomizable objects Note: subclasses will need to update `filterable_fields` and `update` to handle subjects correctly. """ writeable_method_fields = frozenset([ 'subjects', ]) def __init__(self, *args, **kwargs): super(TaxonomizableSerializerMixin, self).__init__(*args, **kwargs) request = kwargs['context']['request'] if self.expect_subjects_as_relationships(request): subject_kwargs = { 'related_view': self.subjects_related_view, 'related_view_kwargs': self.subjects_view_kwargs, 'read_only': False, 'many': True, 'required': False, } if self.subjects_self_view: subject_kwargs['self_view'] = self.subjects_self_view subject_kwargs['self_view_kwargs'] = self.subjects_view_kwargs self.fields['subjects'] = RelationshipField(**subject_kwargs) else: self.fields['subjects'] = ser.SerializerMethodField() @property def subjects_related_view(self): """ For dynamically building the subjects RelationshipField on __init__ Return format '<view_category>:<view_name>, for the desired related view, for example, 'nodes:node-subjects' """ raise NotImplementedError() @property def subjects_view_kwargs(self): """ For dynamically building the subjects RelationshipField on __init__ Return kwargs needed to build the related/self view links, for example: {'node_id': '<_id>'} """ raise NotImplementedError @property def subjects_self_view(self): """ Optional: For dynamically building the subjects RelationshipField on __init__ If you're going to provide a subjects `self` link, return the desired self view in format '<view_category>:<view_name>. For example, 'nodes:node-relationships-subjects' """ pass def get_subjects(self, obj): """ `subjects` is a SerializerMethodField for older versions of the API, serialized under attributes """ from api.taxonomies.serializers import TaxonomyField return [ [ TaxonomyField().to_representation(subj) for subj in hier ] for hier in obj.subject_hierarchy ] # Overrides UpdateSubjectsMixin def update_subjects_method(self, resource, subjects, auth): """Depending on the request's version, runs a different method to update the resource's subjects. Will expect request to be formatted differently, depending on the version. :param object resource: Object for which you want to update subjects :param list subjects: Subjects array (or array of arrays) :param object Auth object """ if self.expect_subjects_as_relationships(self.context['request']): return resource.set_subjects_from_relationships(subjects, auth) return resource.set_subjects(subjects, auth) def expect_subjects_as_relationships(self, request): """Determines whether subjects should be serialized as a relationship. Earlier versions serialize subjects as an attribute(before 2.16). Version 2.16 and later serializer subjects as relationships. 
:param object request: Request object :return bool: Subjects should be serialized as relationships """ return StrictVersion(getattr(request, 'version', '2.0')) >= StrictVersion(subjects_as_relationships_version) class TaxonomySerializer(JSONAPISerializer): """ Will be deprecated in the future and replaced by SubjectSerializer """ filterable_fields = frozenset([ 'text', 'parents', 'parent', 'id', ]) id = ser.CharField(source='_id', required=True) text = ser.CharField(max_length=200) parents = ShowIfVersion( ser.SerializerMethodField(), min_version='2.0', max_version='2.3', ) parent = TaxonomyField() child_count = ser.SerializerMethodField() share_title = ser.CharField(source='provider.share_title', read_only=True) path = ser.CharField(read_only=True) links = LinksField({ 'parents': 'get_parent_urls', 'self': 'get_absolute_url', }) def get_child_count(self, obj): children_count = getattr(obj, 'children_count', None) return children_count if children_count is not None else obj.child_count def get_parents(self, obj): if not obj.parent: return [] return [TaxonomyField().to_representation(obj.parent)] def get_parent_urls(self, obj): if obj.parent: return [obj.parent.get_absolute_url()] return [] def get_absolute_url(self, obj): return obj.get_absolute_url() class Meta: type_ = 'taxonomies'
# Copyright (c) 2016-present, Facebook, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################## ## @package normalization # Module caffe2.python.helpers.normalization from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from caffe2.python import scope from caffe2.python.modeling.parameter_info import ParameterTags from caffe2.proto import caffe2_pb2 from caffe2.python.modeling import initializers def lrn(model, blob_in, blob_out, order="NCHW", use_cudnn=False, **kwargs): """LRN""" dev = kwargs['device_option'] if 'device_option' in kwargs \ else scope.CurrentDeviceScope() is_cpu = dev is None or dev.device_type == caffe2_pb2.CPU if use_cudnn and (not is_cpu): kwargs['engine'] = 'CUDNN' blobs_out = blob_out else: blobs_out = [blob_out, "_" + blob_out + "_scale"] lrn = model.net.LRN( blob_in, blobs_out, order=order, **kwargs ) if use_cudnn and (not is_cpu): return lrn else: return lrn[0] def softmax(model, blob_in, blob_out=None, use_cudnn=False, **kwargs): """Softmax.""" if use_cudnn: kwargs['engine'] = 'CUDNN' if blob_out is not None: return model.net.Softmax(blob_in, blob_out, **kwargs) else: return model.net.Softmax(blob_in, **kwargs) def instance_norm(model, blob_in, blob_out, dim_in, order="NCHW", **kwargs): blob_out = blob_out or model.net.NextName() # Input: input, scale, bias # Output: output, saved_mean, saved_inv_std # scale: initialize with ones # bias: initialize with zeros def init_blob(value, suffix): return model.param_init_net.ConstantFill( [], blob_out + "_" + suffix, shape=[dim_in], value=value) scale, bias = init_blob(1.0, "s"), init_blob(0.0, "b") model.AddParameter(scale, ParameterTags.WEIGHT) model.AddParameter(bias, ParameterTags.BIAS) blob_outs = [blob_out, blob_out + "_sm", blob_out + "_siv"] if 'is_test' in kwargs and kwargs['is_test']: blob_outputs = model.net.InstanceNorm( [blob_in, scale, bias], [blob_out], order=order, **kwargs) return blob_outputs else: blob_outputs = model.net.InstanceNorm( [blob_in, scale, bias], blob_outs, order=order, **kwargs) # Return the output return blob_outputs[0] def spatial_bn(model, blob_in, blob_out, dim_in, init_scale=1., init_bias=0., ScaleInitializer=None, BiasInitializer=None, RunningMeanInitializer=None, RunningVarianceInitializer=None, order="NCHW", **kwargs): blob_out = blob_out or model.net.NextName() # Input: input, scale, bias, est_mean, est_inv_var # Output: output, running_mean, running_inv_var, saved_mean, # saved_inv_var # scale: initialize with init_scale (default 1.) # bias: initialize with init_bias (default 0.) 
# est mean: zero # est var: ones if model.init_params: scale_init = ("ConstantFill", {'value': init_scale}) bias_init = ("ConstantFill", {'value': init_bias}) rm_init = ("ConstantFill", {'value': 0.0}) riv_init = ("ConstantFill", {'value': 1.0}) ScaleInitializer = initializers.update_initializer( ScaleInitializer, scale_init, ("ConstantFill", {}) ) BiasInitializer = initializers.update_initializer( BiasInitializer, bias_init, ("ConstantFill", {}) ) RunningMeanInitializer = initializers.update_initializer( RunningMeanInitializer, rm_init, ("ConstantFill", {}) ) RunningVarianceInitializer = initializers.update_initializer( RunningVarianceInitializer, riv_init, ("ConstantFill", {}) ) else: ScaleInitializer = initializers.ExternalInitializer() BiasInitializer = initializers.ExternalInitializer() RunningMeanInitializer = initializers.ExternalInitializer() RunningVarianceInitializer = initializers.ExternalInitializer() scale = model.create_param( param_name=blob_out + '_s', shape=[dim_in], initializer=ScaleInitializer, tags=ParameterTags.WEIGHT ) bias = model.create_param( param_name=blob_out + '_b', shape=[dim_in], initializer=BiasInitializer, tags=ParameterTags.BIAS ) running_mean = model.create_param( param_name=blob_out + '_rm', shape=[dim_in], initializer=RunningMeanInitializer, tags=ParameterTags.COMPUTED_PARAM ) running_inv_var = model.create_param( param_name=blob_out + '_riv', shape=[dim_in], initializer=RunningVarianceInitializer, tags=ParameterTags.COMPUTED_PARAM ) blob_outs = [blob_out, running_mean, running_inv_var, blob_out + "_sm", blob_out + "_siv"] if 'is_test' in kwargs and kwargs['is_test']: blob_outputs = model.net.SpatialBN( [blob_in, scale, bias, blob_outs[1], blob_outs[2]], [blob_out], order=order, **kwargs) return blob_outputs else: blob_outputs = model.net.SpatialBN( [blob_in, scale, bias, blob_outs[1], blob_outs[2]], blob_outs, order=order, **kwargs) # Return the output return blob_outputs[0] def layer_norm( model, blob_in, blob_out, dim_in, axis=1, epsilon=1e-4, initial_scale=1.0, initial_bias=0.0, ): ''' Layer normalizes the input, cf. https://arxiv.org/pdf/1607.06450.pdf. Args: blob_in: The input blob to layer normalize. blob_out: The layer normalized output blob. dim_in: The dimension of the scale and bias. For example, if blob_in is a 2D design matrix and axis is 1, this would be the number of columns. axis: (optional) The axis to normalize. Typically the feature axis. Defaults to 1. epsilon: (optional) A small value used for numerical stability in calculation. Defaults to 1e-4. initial_scale: (optional) The initial value for the learned scale parameter. Defaults to 1.0 initial_bias: (optional) The initial value for the learned bias parameter of the layerwise standard deviation. Defaults to 0.0. Returns: A 3-tuple consisting of: - The layer normalized input blob. - The mean of the input blob across the given axis. - The standard deviation of the input blob acress the given axis. ''' # The LayerNorm operator only performs the layerwise z-shift, without # scaling and shifting by the learned scale and bias parameters. We have # to do that separately below. normalized, mean, stdev = model.net.LayerNorm( [blob_in], [blob_out, blob_out + "_mean", blob_out + "_stdev"], axis=axis, epsilon=epsilon, ) # The learned multiplicative scale or "gain". scale = model.create_param( param_name='{}_scale'.format(blob_out), shape=[dim_in], initializer=initializers.Initializer( 'ConstantFill', value=initial_scale, ), tags=ParameterTags.WEIGHT, ) # The learned additive bias or "shift". 
bias = model.create_param( param_name='{}_bias'.format(blob_out), shape=[dim_in], initializer=initializers.Initializer( 'ConstantFill', value=initial_bias, ), tags=ParameterTags.BIAS, ) scaled = model.net.Mul( [normalized, scale], ['{}_scaled'.format(blob_out)], broadcast=1, axis=axis, ) biased = model.net.Add( [scaled, bias], ['{}_biased'.format(blob_out)], broadcast=1, axis=axis, ) return biased, mean, stdev
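A minimal usage sketch for the layer_norm helper above, assuming the usual caffe2 ModelHelper/workspace workflow; the blob name "data", the batch size of 8, and dim_in=64 are illustrative choices, not values taken from this module.

import numpy as np
from caffe2.python import model_helper, workspace
from caffe2.python.helpers.normalization import layer_norm

model = model_helper.ModelHelper(name="layer_norm_demo")
# layer_norm returns (normalized_blob, mean, stdev), as defined above.
normalized, mean, stdev = layer_norm(model, "data", "data_ln", dim_in=64)

workspace.FeedBlob("data", np.random.rand(8, 64).astype(np.float32))
workspace.RunNetOnce(model.param_init_net)  # fills the learned scale/bias params
workspace.RunNetOnce(model.net)
print(workspace.FetchBlob("data_ln").shape)  # expected: (8, 64)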
3,645
2,151
/* Currently, can only use arrays, verts are not implemented, though
 * verts are suspected to be faster.
 * To get an idea how the verts path works, look at the radeon implementation.
 */

#include <string.h>

#include "r200_context.h"

#define R200_MAOS_VERTS 0

#if (R200_MAOS_VERTS)
#include "r200_maos_verts.c"
#else
#include "r200_maos_arrays.c"
#endif
131
569
from tltk.nlp import spell_candidates
from typing import List


def spell(text: str) -> List[str]:
    return spell_candidates(text)
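A quick, hedged usage sketch for the wrapper above; the input word is a hypothetical example, and the candidate list returned depends on the installed tltk version and its dictionary.

if __name__ == "__main__":
    # Hypothetical input word; spell() returns a list of suggested spellings
    # whose contents depend on tltk's dictionary.
    for candidate in spell("สังเกตุ"):
        print(candidate)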
44
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_stoc.hxx" #include "com/sun/star/container/XHierarchicalNameAccess.hpp" #include "com/sun/star/container/XSet.hpp" #include "com/sun/star/lang/XMain.hpp" #include "com/sun/star/lang/XSingleComponentFactory.hpp" #include "com/sun/star/lang/IllegalArgumentException.hpp" #include "com/sun/star/reflection/XIndirectTypeDescription.hpp" #include "com/sun/star/reflection/XInterfaceMethodTypeDescription.hpp" #include "com/sun/star/reflection/XPublished.hpp" #include "com/sun/star/reflection/XStructTypeDescription.hpp" #include "com/sun/star/reflection/XTypeDescription.hpp" #include "com/sun/star/registry/InvalidRegistryException.hpp" #include "com/sun/star/registry/XRegistryKey.hpp" #include "com/sun/star/registry/XSimpleRegistry.hpp" #include "com/sun/star/uno/Exception.hpp" #include "com/sun/star/uno/Reference.hxx" #include "com/sun/star/uno/RuntimeException.hpp" #include "com/sun/star/uno/Sequence.hxx" #include "com/sun/star/uno/TypeClass.hpp" #include "com/sun/star/uno/XComponentContext.hpp" #include "com/sun/star/uno/XInterface.hpp" #include "cppuhelper/factory.hxx" #include "cppuhelper/implbase1.hxx" #include "cppuhelper/weak.hxx" #include "osl/file.h" #include "osl/thread.h" #include "rtl/textenc.h" #include "rtl/ustring.h" #include "rtl/ustring.hxx" #include "sal/types.h" #include "uno/environment.h" #include "uno/lbnames.h" #include /*MSVC trouble: <cstdlib>*/ <stdlib.h> #include <iostream> #include <ostream> namespace css = com::sun::star; namespace { class Service: public cppu::WeakImplHelper1< css::lang::XMain > { public: virtual sal_Int32 SAL_CALL run(css::uno::Sequence< rtl::OUString > const & arguments) throw (css::uno::RuntimeException); static rtl::OUString getImplementationName(); static css::uno::Sequence< rtl::OUString > getSupportedServiceNames(); static css::uno::Reference< css::uno::XInterface > SAL_CALL createInstance( css::uno::Reference< css::uno::XComponentContext > const & context) throw (css::uno::Exception); private: explicit Service( css::uno::Reference< css::uno::XComponentContext > const & context): m_context(context) {} css::uno::Reference< css::uno::XComponentContext > m_context; }; } namespace { std::ostream & operator <<(std::ostream & out, rtl::OUString const & value) { return out << rtl::OUStringToOString(value, RTL_TEXTENCODING_UTF8).getStr(); } void assertTrue(bool argument) { if (!argument) { std::cerr << "assertTrue(" << argument << ") failed" << std::endl; /*MSVC trouble: std::*/abort(); } } void assertFalse(bool argument) { if (argument) { std::cerr << "assertFalse(" << 
argument << ") failed" << std::endl; /*MSVC trouble: std::*/abort(); } } template< typename T > void assertEqual(T const & value, T const & argument) { if (argument != value) { std::cerr << "assertEqual(" << value << ", " << argument << ") failed" << std::endl; /*MSVC trouble: std::*/abort(); } } } sal_Int32 Service::run(css::uno::Sequence< rtl::OUString > const & arguments) throw (css::uno::RuntimeException) { css::uno::Reference< css::lang::XMultiComponentFactory > factory( m_context->getServiceManager()); assertTrue(factory.is()); css::uno::Reference< css::container::XHierarchicalNameAccess > manager( m_context->getValueByName( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "/singletons/" "com.sun.star.reflection.theTypeDescriptionManager"))), css::uno::UNO_QUERY_THROW); //////////////////////////////////////// // test: add cmd line rdbs to manager //////////////////////////////////////// OSL_ASSERT( arguments.getLength() > 0 ); css::uno::Reference<css::container::XSet> xSet( manager, css::uno::UNO_QUERY_THROW ); for ( sal_Int32 argPos = 0; argPos < arguments.getLength(); ++argPos ) { rtl::OUString url; OSL_VERIFY( osl_File_E_None == osl_getFileURLFromSystemPath( arguments[argPos].pData, &url.pData ) ); bool supposedToBeCompatible = ! url.endsWithIgnoreAsciiCaseAsciiL( RTL_CONSTASCII_STRINGPARAM("_incomp.rdb") ); css::uno::Reference<css::registry::XSimpleRegistry> xReg( m_context->getServiceManager()->createInstanceWithContext( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "com.sun.star.registry.SimpleRegistry") ), m_context ), css::uno::UNO_QUERY_THROW ); xReg->open( url, true /* read-only */, false /* ! create */ ); css::uno::Any arg( css::uno::makeAny(xReg) ); css::uno::Reference<css::container::XHierarchicalNameAccess> xTDprov( m_context->getServiceManager()-> createInstanceWithArgumentsAndContext( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "com.sun.star.comp.stoc." "RegistryTypeDescriptionProvider") ), css::uno::Sequence<css::uno::Any>( &arg, 1 ), m_context ), css::uno::UNO_QUERY_THROW ); try { xSet->insert( css::uno::makeAny(xTDprov) ); if (! 
supposedToBeCompatible) std::cerr << "current rdb file: " << rtl::OUStringToOString( url, osl_getThreadTextEncoding()).getStr() << std::endl; assertTrue(supposedToBeCompatible); } catch (css::lang::IllegalArgumentException &) { if (supposedToBeCompatible) throw; assertFalse(supposedToBeCompatible); } } /////// css::uno::Reference< css::reflection::XIndirectTypeDescription > sequence( manager->getByHierarchicalName( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("[][]boolean"))), css::uno::UNO_QUERY_THROW); assertEqual(css::uno::TypeClass_SEQUENCE, sequence->getTypeClass()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("[][]boolean")), sequence->getName()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("[]boolean")), sequence->getReferencedType()->getName()); css::uno::Reference< css::reflection::XStructTypeDescription > structure( manager->getByHierarchicalName( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "test.tdmanager.Struct<boolean,test.tdmanager.Struct<" "any,com.sun.star.uno.XInterface>>"))), css::uno::UNO_QUERY_THROW); assertEqual(css::uno::TypeClass_STRUCT, structure->getTypeClass()); assertEqual( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "test.tdmanager.Struct<boolean,test.tdmanager.Struct<" "any,com.sun.star.uno.XInterface>>")), structure->getName()); assertEqual< bool >(false, structure->getBaseType().is()); assertEqual< sal_Int32 >(1, structure->getMemberTypes().getLength()); assertEqual( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "test.tdmanager.Struct<any,com.sun.star.uno.XInterface>")), structure->getMemberTypes()[0]->getName()); assertEqual< sal_Int32 >(1, structure->getMemberNames().getLength()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("s")), structure->getMemberNames()[0]); assertEqual< sal_Int32 >(0, structure->getTypeParameters().getLength()); assertEqual< sal_Int32 >(2, structure->getTypeArguments().getLength()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("boolean")), structure->getTypeArguments()[0]->getName()); assertEqual( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "test.tdmanager.Struct<any,com.sun.star.uno.XInterface>")), structure->getTypeArguments()[1]->getName()); css::uno::Reference< css::reflection::XInterfaceMethodTypeDescription > method( manager->getByHierarchicalName( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "com.sun.star.uno.XComponentContext::getValueByName"))), css::uno::UNO_QUERY_THROW); assertEqual(css::uno::TypeClass_INTERFACE_METHOD, method->getTypeClass()); assertEqual( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "com.sun.star.uno.XComponentContext::getValueByName")), method->getName()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("getValueByName")), method->getMemberName()); assertEqual< sal_Int32 >(3, method->getPosition()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("any")), method->getReturnType()->getName()); assertEqual< bool >(false, method->isOneway()); assertEqual< sal_Int32 >(1, method->getParameters().getLength()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("Name")), method->getParameters()[0]->getName()); assertEqual( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("string")), method->getParameters()[0]->getType()->getName()); assertEqual< bool >(true, method->getParameters()[0]->isIn()); assertEqual< bool >(false, method->getParameters()[0]->isOut()); assertEqual< sal_Int32 >(0, method->getParameters()[0]->getPosition()); assertEqual< sal_Int32 >(0, method->getExceptions().getLength()); assertFalse( css::uno::Reference< css::reflection::XPublished >( 
css::uno::Reference< css::reflection::XTypeDescription >( manager->getByHierarchicalName( rtl::OUString(RTL_CONSTASCII_USTRINGPARAM("[]boolean"))), css::uno::UNO_QUERY_THROW), css::uno::UNO_QUERY).is()); assertFalse( css::uno::Reference< css::reflection::XPublished >( css::uno::Reference< css::reflection::XTypeDescription >( manager->getByHierarchicalName( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "com.sun.star.beans.XIntroTest::ObjectName"))), css::uno::UNO_QUERY_THROW), css::uno::UNO_QUERY).is()); assertFalse( css::uno::Reference< css::reflection::XPublished >( css::uno::Reference< css::reflection::XTypeDescription >( manager->getByHierarchicalName( rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "com.sun.star.beans.XIntroTest::writeln"))), css::uno::UNO_QUERY_THROW), css::uno::UNO_QUERY).is()); //TODO: check that the reflection of a property of an accumulation-based // service does not support XPublished return 0; } rtl::OUString Service::getImplementationName() { return rtl::OUString::createFromAscii("test.tdmanager.impl"); } css::uno::Sequence< rtl::OUString > Service::getSupportedServiceNames() { return css::uno::Sequence< rtl::OUString >(); } css::uno::Reference< css::uno::XInterface > Service::createInstance( css::uno::Reference< css::uno::XComponentContext > const & context) throw (css::uno::Exception) { return static_cast< cppu::OWeakObject * >(new Service(context)); } extern "C" void SAL_CALL component_getImplementationEnvironment( char const ** envTypeName, uno_Environment **) { if (envTypeName != 0) { *envTypeName = CPPU_CURRENT_LANGUAGE_BINDING_NAME; } } extern "C" void * SAL_CALL component_getFactory(char const * implName, void * serviceManager, void *) { void * p = 0; if (serviceManager != 0) { css::uno::Reference< css::lang::XSingleComponentFactory > f; if (Service::getImplementationName().equalsAscii(implName)) { f = cppu::createSingleComponentFactory( &Service::createInstance, Service::getImplementationName(), Service::getSupportedServiceNames()); } if (f.is()) { f->acquire(); p = f.get(); } } return p; } namespace { bool writeInfo(void * registryKey, rtl::OUString const & implementationName, css::uno::Sequence< rtl::OUString > const & serviceNames) { rtl::OUString keyName(rtl::OUString::createFromAscii("/")); keyName += implementationName; keyName += rtl::OUString::createFromAscii("/UNO/SERVICES"); css::uno::Reference< css::registry::XRegistryKey > key; try { key = static_cast< css::registry::XRegistryKey * >(registryKey)-> createKey(keyName); } catch (css::registry::InvalidRegistryException &) {} if (!key.is()) { return false; } bool success = true; for (sal_Int32 i = 0; i < serviceNames.getLength(); ++i) { try { key->createKey(serviceNames[i]); } catch (css::registry::InvalidRegistryException &) { success = false; break; } } return success; } } extern "C" sal_Bool SAL_CALL component_writeInfo(void *, void * registryKey) { return registryKey && writeInfo(registryKey, Service::getImplementationName(), Service::getSupportedServiceNames()); }
6,362
612
<filename>SRT/exps/PRO-vis.py # Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. # from __future__ import division import os, sys, time, torch, random, argparse, PIL from os import path as osp from PIL import ImageFile ImageFile.LOAD_TRUNCATED_IMAGES = True from copy import deepcopy from pathlib import Path import numbers, numpy as np lib_dir = (Path(__file__).parent / '..' / 'lib').resolve() if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir)) assert sys.version_info.major == 3, 'Please upgrade from {:} to Python 3.x'.format(sys.version_info) from xvision import draw_image_by_points from xvision import Eval_Meta def visualize(args): print ('The result file is {:}'.format(args.meta)) print ('The save path is {:}'.format(args.save)) meta = Path(args.meta) save = Path(args.save) assert meta.exists(), 'The model path {:} does not exist' eval_metas = torch.load( meta ) print ('There are {:} evaluation results and use {:}.'.format( len(eval_metas), args.idx )) xmeta = eval_metas[args.idx] print ('this meta file has {:} predictions'.format(len(xmeta))) if not save.exists(): save.mkdir(parents=True, exist_ok=True) for i in range(len(xmeta)): image, prediction = xmeta.image_lists[i], xmeta.predictions[i] name = osp.basename(image) image = draw_image_by_points(image, prediction, 2, (255, 0, 0), False, False) path = save / name image.save(path) print ('[{:02d}] {:03d}/{:03d}-th image is saved into {:}'.format(args.idx, i, len(xmeta), path)) if __name__ == '__main__': parser = argparse.ArgumentParser(description='visualize the results on a single ', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--meta', type=str, help='The evaluation image path.') parser.add_argument('--save', type=str, help='The path to save the visualized results.') parser.add_argument('--idx', type=int, help='The index of evaluated datasets to visualize.') args = parser.parse_args() visualize(args)
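A hedged sketch of invoking the script above programmatically; the meta and save paths are hypothetical placeholders, and --meta must point to a torch-saved list of Eval_Meta results produced elsewhere by the SRT evaluation code.

from argparse import Namespace

# Hypothetical paths; adjust them to wherever your evaluation results live.
args = Namespace(
    meta="./snapshots/eval-meta.pth",  # torch.load-able list of Eval_Meta
    save="./vis-output",               # directory for the rendered images
    idx=0,                             # which evaluation result to visualize
)
visualize(args)

Equivalently, from the command line: python exps/PRO-vis.py --meta ./snapshots/eval-meta.pth --save ./vis-output --idx 0.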
757
2,151
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "base/memory/memory_pressure_monitor_win.h" #include "base/macros.h" #include "base/memory/memory_pressure_listener.h" #include "base/message_loop/message_loop.h" #include "base/run_loop.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" namespace base { namespace win { namespace { struct PressureSettings { int phys_left_mb; MemoryPressureListener::MemoryPressureLevel level; }; } // namespace // This is outside of the anonymous namespace so that it can be seen as a friend // to the monitor class. class TestMemoryPressureMonitor : public MemoryPressureMonitor { public: using MemoryPressureMonitor::CalculateCurrentPressureLevel; using MemoryPressureMonitor::CheckMemoryPressure; static const DWORDLONG kMBBytes = 1024 * 1024; explicit TestMemoryPressureMonitor(bool large_memory) : mem_status_() { // Generate a plausible amount of memory. mem_status_.ullTotalPhys = static_cast<DWORDLONG>(GenerateTotalMemoryMb(large_memory)) * kMBBytes; // Rerun InferThresholds using the test fixture's GetSystemMemoryStatus. InferThresholds(); // Stop the timer. StopObserving(); } TestMemoryPressureMonitor(int system_memory_mb, int moderate_threshold_mb, int critical_threshold_mb) : MemoryPressureMonitor(moderate_threshold_mb, critical_threshold_mb), mem_status_() { // Set the amount of system memory. mem_status_.ullTotalPhys = static_cast<DWORDLONG>( system_memory_mb * kMBBytes); // Stop the timer. StopObserving(); } virtual ~TestMemoryPressureMonitor() {} MOCK_METHOD1(OnMemoryPressure, void(MemoryPressureListener::MemoryPressureLevel level)); // Generates an amount of total memory that is consistent with the requested // memory model. int GenerateTotalMemoryMb(bool large_memory) { int total_mb = 64; while (total_mb < MemoryPressureMonitor::kLargeMemoryThresholdMb) total_mb *= 2; if (large_memory) return total_mb * 2; return total_mb / 2; } // Sets up the memory status to reflect the provided absolute memory left. void SetMemoryFree(int phys_left_mb) { // ullTotalPhys is set in the constructor and not modified. // Set the amount of available memory. mem_status_.ullAvailPhys = static_cast<DWORDLONG>(phys_left_mb) * kMBBytes; DCHECK_LT(mem_status_.ullAvailPhys, mem_status_.ullTotalPhys); // These fields are unused. mem_status_.dwMemoryLoad = 0; mem_status_.ullTotalPageFile = 0; mem_status_.ullAvailPageFile = 0; mem_status_.ullTotalVirtual = 0; mem_status_.ullAvailVirtual = 0; } void SetNone() { SetMemoryFree(moderate_threshold_mb() + 1); } void SetModerate() { SetMemoryFree(moderate_threshold_mb() - 1); } void SetCritical() { SetMemoryFree(critical_threshold_mb() - 1); } private: bool GetSystemMemoryStatus(MEMORYSTATUSEX* mem_status) override { // Simply copy the memory status set by the test fixture. 
*mem_status = mem_status_; return true; } MEMORYSTATUSEX mem_status_; DISALLOW_COPY_AND_ASSIGN(TestMemoryPressureMonitor); }; class WinMemoryPressureMonitorTest : public testing::Test { protected: void CalculateCurrentMemoryPressureLevelTest( TestMemoryPressureMonitor* monitor) { int mod = monitor->moderate_threshold_mb(); monitor->SetMemoryFree(mod + 1); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_NONE, monitor->CalculateCurrentPressureLevel()); monitor->SetMemoryFree(mod); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor->CalculateCurrentPressureLevel()); monitor->SetMemoryFree(mod - 1); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor->CalculateCurrentPressureLevel()); int crit = monitor->critical_threshold_mb(); monitor->SetMemoryFree(crit + 1); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor->CalculateCurrentPressureLevel()); monitor->SetMemoryFree(crit); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, monitor->CalculateCurrentPressureLevel()); monitor->SetMemoryFree(crit - 1); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, monitor->CalculateCurrentPressureLevel()); } base::MessageLoopForUI message_loop_; }; // Tests the fundamental direct calculation of memory pressure with automatic // small-memory thresholds. TEST_F(WinMemoryPressureMonitorTest, CalculateCurrentMemoryPressureLevelSmall) { static const int kModerateMb = MemoryPressureMonitor::kSmallMemoryDefaultModerateThresholdMb; static const int kCriticalMb = MemoryPressureMonitor::kSmallMemoryDefaultCriticalThresholdMb; TestMemoryPressureMonitor monitor(false); // Small-memory model. EXPECT_EQ(kModerateMb, monitor.moderate_threshold_mb()); EXPECT_EQ(kCriticalMb, monitor.critical_threshold_mb()); ASSERT_NO_FATAL_FAILURE(CalculateCurrentMemoryPressureLevelTest(&monitor)); } // Tests the fundamental direct calculation of memory pressure with automatic // large-memory thresholds. TEST_F(WinMemoryPressureMonitorTest, CalculateCurrentMemoryPressureLevelLarge) { static const int kModerateMb = MemoryPressureMonitor::kLargeMemoryDefaultModerateThresholdMb; static const int kCriticalMb = MemoryPressureMonitor::kLargeMemoryDefaultCriticalThresholdMb; TestMemoryPressureMonitor monitor(true); // Large-memory model. EXPECT_EQ(kModerateMb, monitor.moderate_threshold_mb()); EXPECT_EQ(kCriticalMb, monitor.critical_threshold_mb()); ASSERT_NO_FATAL_FAILURE(CalculateCurrentMemoryPressureLevelTest(&monitor)); } // Tests the fundamental direct calculation of memory pressure with manually // specified threshold levels. TEST_F(WinMemoryPressureMonitorTest, CalculateCurrentMemoryPressureLevelCustom) { static const int kSystemMb = 512; static const int kModerateMb = 256; static const int kCriticalMb = 128; TestMemoryPressureMonitor monitor(kSystemMb, kModerateMb, kCriticalMb); EXPECT_EQ(kModerateMb, monitor.moderate_threshold_mb()); EXPECT_EQ(kCriticalMb, monitor.critical_threshold_mb()); ASSERT_NO_FATAL_FAILURE(CalculateCurrentMemoryPressureLevelTest(&monitor)); } // This test tests the various transition states from memory pressure, looking // for the correct behavior on event reposting as well as state updates. TEST_F(WinMemoryPressureMonitorTest, CheckMemoryPressure) { // Large-memory. 
testing::StrictMock<TestMemoryPressureMonitor> monitor(true); MemoryPressureListener listener( base::Bind(&TestMemoryPressureMonitor::OnMemoryPressure, base::Unretained(&monitor))); // Checking the memory pressure at 0% load should not produce any // events. monitor.SetNone(); monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_NONE, monitor.GetCurrentPressureLevel()); // Setting the memory level to 80% should produce a moderate pressure level. EXPECT_CALL(monitor, OnMemoryPressure(MemoryPressureListener:: MEMORY_PRESSURE_LEVEL_MODERATE)); monitor.SetModerate(); monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); // Check that the event gets reposted after a while. for (int i = 0; i < monitor.kModeratePressureCooldownCycles; ++i) { if (i + 1 == monitor.kModeratePressureCooldownCycles) { EXPECT_CALL(monitor, OnMemoryPressure(MemoryPressureListener:: MEMORY_PRESSURE_LEVEL_MODERATE)); } monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); } // Setting the memory usage to 99% should produce critical levels. EXPECT_CALL(monitor, OnMemoryPressure(MemoryPressureListener:: MEMORY_PRESSURE_LEVEL_CRITICAL)); monitor.SetCritical(); monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); // Calling it again should immediately produce a second call. EXPECT_CALL(monitor, OnMemoryPressure(MemoryPressureListener:: MEMORY_PRESSURE_LEVEL_CRITICAL)); monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_CRITICAL, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); // When lowering the pressure again there should be a notification and the // pressure should go back to moderate. EXPECT_CALL(monitor, OnMemoryPressure(MemoryPressureListener:: MEMORY_PRESSURE_LEVEL_MODERATE)); monitor.SetModerate(); monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); // Check that the event gets reposted after a while. for (int i = 0; i < monitor.kModeratePressureCooldownCycles; ++i) { if (i + 1 == monitor.kModeratePressureCooldownCycles) { EXPECT_CALL(monitor, OnMemoryPressure(MemoryPressureListener:: MEMORY_PRESSURE_LEVEL_MODERATE)); } monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_MODERATE, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); } // Going down to no pressure should not produce an notification. monitor.SetNone(); monitor.CheckMemoryPressure(); RunLoop().RunUntilIdle(); EXPECT_EQ(MemoryPressureListener::MEMORY_PRESSURE_LEVEL_NONE, monitor.GetCurrentPressureLevel()); testing::Mock::VerifyAndClearExpectations(&monitor); } } // namespace win } // namespace base
3,891
14,668
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chromeos/services/secure_channel/fake_timer_factory.h" #include "base/bind.h" #include "base/callback.h" #include "chromeos/services/secure_channel/fake_one_shot_timer.h" namespace chromeos { namespace secure_channel { FakeTimerFactory::FakeTimerFactory() {} FakeTimerFactory::~FakeTimerFactory() = default; std::unique_ptr<base::OneShotTimer> FakeTimerFactory::CreateOneShotTimer() { ++num_instances_created_; auto fake_one_shot_timer = std::make_unique<FakeOneShotTimer>( base::BindOnce(&FakeTimerFactory::OnOneShotTimerDeleted, weak_ptr_factory_.GetWeakPtr())); id_for_last_created_one_shot_timer_ = fake_one_shot_timer->id(); id_to_active_one_shot_timer_map_[fake_one_shot_timer->id()] = fake_one_shot_timer.get(); return fake_one_shot_timer; } void FakeTimerFactory::OnOneShotTimerDeleted( const base::UnguessableToken& deleted_timer_id) { size_t num_deleted = id_to_active_one_shot_timer_map_.erase(deleted_timer_id); DCHECK_EQ(1u, num_deleted); } } // namespace secure_channel } // namespace chromeos
449
3,799
/* * Copyright 2020 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.biometric.auth; import android.os.Build; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import androidx.biometric.BiometricManager.Authenticators; import androidx.biometric.BiometricPrompt; import java.util.concurrent.Executor; /** * An authentication prompt that requires the user to present a <strong>Class 3</strong> biometric * (e.g. fingerprint, face, or iris) or the screen lock credential (i.e. PIN, pattern, or password) * for the device. * * @see Authenticators#BIOMETRIC_STRONG * @see Authenticators#DEVICE_CREDENTIAL * @see Class2BiometricAuthPrompt * @see Class2BiometricOrCredentialAuthPrompt * @see Class3BiometricAuthPrompt * @see CredentialAuthPrompt */ @RequiresApi(Build.VERSION_CODES.R) public class Class3BiometricOrCredentialAuthPrompt { @NonNull private final BiometricPrompt.PromptInfo mPromptInfo; /** * Constructs an authentication prompt with the given parameters. * * @param promptInfo A set of options describing how the prompt should appear and behave. */ Class3BiometricOrCredentialAuthPrompt(@NonNull BiometricPrompt.PromptInfo promptInfo) { mPromptInfo = promptInfo; } /** * Shows an authentication prompt to the user. * * @param host A wrapper for the component that will host the prompt. * @param crypto A cryptographic object to be associated with this authentication. * @param callback The callback object that will receive and process authentication events. Each * callback method will be run on the main thread. * @return A handle to the shown prompt. * * * @see #startAuthentication(AuthPromptHost, BiometricPrompt.CryptoObject, Executor, * AuthPromptCallback) */ @NonNull public AuthPrompt startAuthentication( @NonNull AuthPromptHost host, @Nullable BiometricPrompt.CryptoObject crypto, @NonNull AuthPromptCallback callback) { return AuthPromptUtils.startAuthentication( host, mPromptInfo, crypto, null /* executor */, callback); } /** * Shows an authentication prompt to the user. * * @param host A wrapper for the component that will host the prompt. * @param crypto A cryptographic object to be associated with this authentication. * @param executor The executor that will be used to run authentication callback methods. * @param callback The callback object that will receive and process authentication events. * @return A handle to the shown prompt. * * @see #startAuthentication(AuthPromptHost, BiometricPrompt.CryptoObject, AuthPromptCallback) */ @NonNull public AuthPrompt startAuthentication( @NonNull AuthPromptHost host, @Nullable BiometricPrompt.CryptoObject crypto, @NonNull Executor executor, @NonNull AuthPromptCallback callback) { return AuthPromptUtils.startAuthentication( host, mPromptInfo, crypto, executor, callback); } /** * Gets the title to be displayed on the prompt. * * @return The title for the prompt. 
*/ @NonNull public CharSequence getTitle() { return mPromptInfo.getTitle(); } /** * Gets the subtitle to be displayed on the prompt, if set. * * @return The subtitle for the prompt. * * @see Builder#setSubtitle(CharSequence) */ @Nullable public CharSequence getSubtitle() { return mPromptInfo.getSubtitle(); } /** * Gets the description to be displayed on the prompt, if set. * * @return The description for the prompt. * * @see Builder#setDescription(CharSequence) */ @Nullable public CharSequence getDescription() { return mPromptInfo.getDescription(); } /** * Checks if the prompt should require explicit user confirmation after a passive biometric * (e.g. iris or face) has been recognized but before * {@link AuthPromptCallback#onAuthenticationSucceeded(androidx.fragment.app.FragmentActivity, * BiometricPrompt.AuthenticationResult)} is called. * * @return Whether the prompt should require explicit user confirmation for passive biometrics. * * @see Builder#setConfirmationRequired(boolean) */ public boolean isConfirmationRequired() { return mPromptInfo.isConfirmationRequired(); } /** * Builder for a {@link Class3BiometricOrCredentialAuthPrompt} with configurable options. */ public static final class Builder { // Required fields. @NonNull private final CharSequence mTitle; // Optional fields. @Nullable private CharSequence mSubtitle = null; @Nullable private CharSequence mDescription = null; private boolean mIsConfirmationRequired = true; /** * Constructs a prompt builder with the given required options. * * @param title The title to be displayed on the prompt. */ public Builder(@NonNull CharSequence title) { mTitle = title; } /** * Sets a subtitle that should be displayed on the prompt. Defaults to {@code null}. * * @param subtitle A subtitle for the prompt. * @return This builder. */ @NonNull public Builder setSubtitle( @NonNull CharSequence subtitle) { mSubtitle = subtitle; return this; } /** * Sets a description that should be displayed on the prompt. Defaults to {@code null}. * * @param description A description for the prompt. * @return This builder. */ @NonNull public Builder setDescription( @NonNull CharSequence description) { mDescription = description; return this; } /** * Sets a hint indicating whether the prompt should require explicit user confirmation * after a passive biometric (e.g. iris or face) has been recognized but before * {@link AuthPromptCallback#onAuthenticationSucceeded( * androidx.fragment.app.FragmentActivity, BiometricPrompt.AuthenticationResult)} is * called. Defaults to {@code true}. * * <p>Setting this option to {@code false} is generally only appropriate for frequent, * low-value transactions, such as re-authenticating for a previously authorized app. * * <p>As a hint, the value of this option may be ignored by the system. For example, * explicit confirmation may always be required if the user has toggled a system-wide * setting to disallow pure passive authentication. This option will also be ignored on any * device with an OS version prior to Android 10 (API 29). * * @param confirmationRequired Whether the prompt should require explicit user confirmation * for passive biometrics. * @return This builder. */ @NonNull public Builder setConfirmationRequired(boolean confirmationRequired) { mIsConfirmationRequired = confirmationRequired; return this; } /** * Creates a new prompt with the specified options. * * @return An instance of {@link Class3BiometricOrCredentialAuthPrompt}. 
*/ @NonNull public Class3BiometricOrCredentialAuthPrompt build() { final BiometricPrompt.PromptInfo promptInfo = new BiometricPrompt.PromptInfo.Builder() .setTitle(mTitle) .setSubtitle(mSubtitle) .setDescription(mDescription) .setConfirmationRequired(mIsConfirmationRequired) .setAllowedAuthenticators( Authenticators.BIOMETRIC_STRONG | Authenticators.DEVICE_CREDENTIAL) .build(); return new Class3BiometricOrCredentialAuthPrompt(promptInfo); } } }
3,290
488
<filename>src/frontend/SageIII/astFromString/ParserBuilder.tpp namespace ParserBuilder { template <class Recipient> Parser<Recipient>::Parser() : p_grammar_root(NULL) {} template <class Recipient> Parser<Recipient>::~Parser() { std::set<GrammarNode *> childs; p_grammar_root->collectChilds(childs); std::set<GrammarNode *>::iterator it; for (it = childs.begin(); it != childs.end(); it++) delete *it; delete p_grammar_root; } template <class Recipient> void Parser<Recipient>::setRoot(GrammarNode * grammar_root) { p_grammar_root = grammar_root; } template <class Recipient> bool Parser<Recipient>::parse(std::string & str, Recipient & rec) { if (p_grammar_root == NULL) return true; std::vector<GrammarNode::LexemeInstance *> lexemes; if ((*p_grammar_root)(str, lexemes) && str.size() == 0) { std::vector<GrammarNode::LexemeInstance *>::iterator it; for (it = lexemes.begin(); it != lexemes.end(); it++) { rec(*it); delete *it; } return true; } return false; } }
463
10,225
package io.quarkus.security.test.cdi.app.denied.unnanotated;

import javax.inject.Singleton;

/**
 * @author <NAME>, <EMAIL>
 */
@Singleton
public class BeanWithNoSecurityAnnotations {

    public String unannotated() {
        return "unannotatedOnBeanWithNoAnno";
    }
}
105
334
#include <array>
#include <chrono>
#include <filesystem>
#include <fstream>
#include <iomanip>
#include <iostream>
#include <memory>
#include <vector>
#include <algorithm>

int main() {
    using namespace std;

    vector<int> numbers;
    // reserve (not resize) capacity for 15 elements, so the emplace_back loop
    // below does not append after 15 default-constructed zeros
    numbers.reserve(15);
    for (size_t i = 0; i < 15; ++i) {
        numbers.emplace_back(i * 10 + 1);
    }

    // Save numbers in text file
    ofstream fout("numbers.txt");
    for (int number : numbers) {
        fout << number << " ";
    }
    fout.close();

    // Load numbers from text file
    vector<int> m_numbers;
    ifstream fin("numbers.txt");
    for (size_t i = 0; i < numbers.size(); ++i) {
        int x;
        fin >> x;
        m_numbers.emplace_back(x);
    }
    fin.close();

    // Print numbers
    for_each(numbers.begin(), numbers.end(), [](auto x) { cout << x << " "; });
    cout << endl;
    for_each(m_numbers.begin(), m_numbers.end(), [](auto x) { cout << x << " "; });
    cout << endl;

    // Save numbers in binary file
    fout.open("numbers.bin", ios::binary);
    for (int &number : numbers) {
        char *pointer_to_number = (char *)&number;
        size_t bytes_per_number = sizeof(number);
        fout.write(pointer_to_number, bytes_per_number);
    }
    fout.close();

    // Load numbers from binary file
    m_numbers.clear();
    fin.open("numbers.bin", ios::binary);
    for (size_t i = 0; i < numbers.size(); ++i) {
        int x;
        char *pointer_to_number = (char *)&x;
        size_t bytes_per_number = sizeof(x);
        fin.read(pointer_to_number, bytes_per_number);
        m_numbers.emplace_back(x);
    }
    fin.close();

    // Print numbers
    for_each(numbers.begin(), numbers.end(), [](auto x) { cout << x << " "; });
    cout << endl;
    for_each(m_numbers.begin(), m_numbers.end(), [](auto x) { cout << x << " "; });
    cout << endl;

    // Compare file sizes
    cout << "File size (text): " << filesystem::file_size("numbers.txt") << " bytes" << endl;
    cout << "File size (binary): " << filesystem::file_size("numbers.bin") << " bytes" << endl;

    return 0;
}
932
558
<filename>syncer-common/src/main/java/syncer/common/properties/DbProperties.java
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package syncer.common.properties;

import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

/**
 * @author zhanenqiang
 * @Description Description
 * @Date 2020/4/9
 */
@Component
@ConfigurationProperties(prefix = "syncer.config.path")
@Setter
@AllArgsConstructor
@NoArgsConstructor
public class DbProperties {

    /**
     * Log file path.
     */
    private String logfile;

    /**
     * Data file path.
     */
    private String datafile;

    public String getLogfile() {
        if (StringUtils.isEmpty(logfile)) {
            logfile = "./log";
        }
        return logfile;
    }

    public String getDatafile() {
        // Previously this checked and assigned logfile, leaving datafile null;
        // fall back to the default data path instead.
        if (StringUtils.isEmpty(datafile)) {
            datafile = "./";
        }
        return datafile;
    }
}
534
1,056
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.javadoc.search; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import org.openide.ErrorManager; /** * @author <NAME> */ final class JavadocSearchEngineImpl extends JavadocSearchEngine { private final List<IndexSearchThread> tasks = new ArrayList<IndexSearchThread>(); private IndexSearchThread.DocIndexItemConsumer diiConsumer; private boolean isStopped = false; /** Used to search for set elements in javadoc repository * @param items to search for * @throws NoJavadocException if no javadoc directory is mounted, nothing can be searched */ public @Override void search(String[] items, final SearchEngineCallback callback) throws NoJavadocException { diiConsumer = new IndexSearchThread.DocIndexItemConsumer() { public @Override void addDocIndexItem(DocIndexItem dii) { callback.addItem(dii); } public @Override void indexSearchThreadFinished(IndexSearchThread t) { boolean isEmpty; synchronized(JavadocSearchEngineImpl.this) { IndexSearch.LOG.log(Level.FINE, "JavadocSearchEngineImpl.indexSearchThreadFinished: tasks: {0}", tasks.size()); tasks.remove( t ); isEmpty = tasks.isEmpty(); } if (isEmpty) { callback.finished(); } } }; URL[] docRoots = JavadocRegistry.getDefault().getDocRoots(); synchronized(this) { if (isStopped) { return; } } if ( docRoots.length <= 0 ) { callback.finished(); throw new NoJavadocException(); } String toFind = items[0]; for( int i = 0; i < docRoots.length; i++ ) { JavadocSearchType st = JavadocRegistry.getDefault().findSearchType( docRoots[i] ); if (st == null) { ErrorManager.getDefault().log ("NO Search type for " + docRoots[i]); continue; } URL indexFo = st.getDocFileObject( docRoots[i] ); if (indexFo == null) { ErrorManager.getDefault().log ("NO Index files fot " + docRoots[i] ); continue; } IndexSearchThread searchThread = st.getSearchThread( toFind, indexFo, diiConsumer ); synchronized(this) { if (isStopped) { return; } tasks.add( searchThread ); } } // run search threads IndexSearchThread[] tasksArray; synchronized(this) { tasksArray = tasks.toArray(new IndexSearchThread[tasks.size()]); } for (IndexSearchThread searchThread : tasksArray) { if (isStopped) { return; } else { searchThread.go(); } } } /** Stops execution of Javadoc search thread */ public @Override void stop() { IndexSearchThread[] tasksArray = null; boolean noTask; synchronized(this) { if (isStopped) { return; } isStopped = true; noTask = tasks.isEmpty(); if (!noTask) { tasksArray = tasks.toArray(new IndexSearchThread[tasks.size()]); } } IndexSearch.LOG.fine("JavadocSearchEngineImpl.stop"); if (noTask) { diiConsumer.indexSearchThreadFinished(null); return; } for (IndexSearchThread searchThread : tasksArray) { searchThread.finish(); } } }
2,320
2,900
<reponame>jiweigang1/jackson-databind package com.fasterxml.jackson.failing; import com.fasterxml.jackson.annotation.JsonIdentityInfo; import com.fasterxml.jackson.annotation.ObjectIdGenerators; import com.fasterxml.jackson.databind.BaseMapTest; import com.fasterxml.jackson.databind.ObjectMapper; import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.Iterator; import java.util.List; import java.util.PriorityQueue; import java.util.TreeSet; /** * Unit test to prove that serialization does not * work in depth but in width. This causes elements * at the same level to be sometimes serialized as * IDs when they could have not yet been visited. */ // 02-Jul-2021, tatu: not sure if this is valid, but adding for further // inspection public class CyclicRefViaCollection3069Test extends BaseMapTest { // [databind#3069] @JsonIdentityInfo( generator = ObjectIdGenerators.PropertyGenerator.class , property = "id" , scope = Bean.class ) static class Bean implements Comparable<Bean> { final int _id; final String _name; Collection<Bean> _next; public Bean(int id, String name) { _id = id; _name = name; } public int getId() { return _id; } public Collection<Bean> getNext() { return _next; } public void setNext(final Collection<Bean> n) { _next = n; } public String getName() { return _name; } @Override public int compareTo(Bean o) { if (o == null) { return -1; } return Integer.compare(_id, ((Bean) o).getId()); } } /* /********************************************************** /* Test methods /********************************************************** */ private final ObjectMapper MAPPER = new ObjectMapper(); // [databind#3069] public void testSerializationCollection() throws Exception { testSerializationCollection(MAPPER, new TreeSet<>(abc())); //testSerializationEnumSet(MAPPER, EnumSet.of(addEnum(BeanEnum.class, a), addEnum(BeanEnum.class, b))); } public void testSerializationList() throws Exception { testSerializationIndexedList(MAPPER, abc()); } public void testSerializationIterable() throws Exception { testSerializationIterable(MAPPER, new PriorityQueue<>(abc())); } public void testSerializationIterator() throws Exception { testSerializationIterator(MAPPER, abc().iterator()); } private List<Bean> abc() { final Bean a = new Bean(1, "A"); final Bean b = new Bean(2, "B"); final Bean c = new Bean(3, "C"); a.setNext(Arrays.asList(a, c)); b.setNext(Arrays.asList(a, c)); c.setNext(Arrays.asList(a, b)); return Arrays.asList(a, b, c); } public void testSerializationCollection(final ObjectMapper mapper, final Collection<Bean> collection) throws Exception { assertEquals(getExpectedResult(), mapper.writeValueAsString(collection)); } public void testSerializationEnumSet(final ObjectMapper mapper, final EnumSet<?> enumSet) throws Exception { assertEquals(getExpectedResult(), mapper.writeValueAsString(enumSet)); } public void testSerializationIndexedList(final ObjectMapper mapper, final List<Bean> list) throws Exception { assertEquals(getExpectedResult(), mapper.writeValueAsString(list)); } public void testSerializationIterable(final ObjectMapper mapper, final Iterable<Bean> iterable) throws Exception { assertEquals(getExpectedResult(), mapper.writeValueAsString(iterable)); } public void testSerializationIterator(final ObjectMapper mapper, final Iterator<Bean> iterator) throws Exception { assertEquals(getExpectedResult(), mapper.writeValueAsString(iterator)); } private String getExpectedResult() { final StringBuilder builder = new StringBuilder(); builder.append("["); 
builder.append("{\"id\":1,\"name\":\"A\",\"next\":["); builder.append("1, {\"id\":3,\"name\":\"C\",\"next\":["); builder.append("1, {\"id\":2,\"name\":\"B\",\"next\":["); builder.append("1, 3"); builder.append("]}"); builder.append("]}"); builder.append("]}"); builder.append(", {\"id\":2,\"name\":\"B\",\"next\":["); builder.append("{\"id\":1,\"name\":\"A\",\"next\":["); builder.append("1, {\"id\":3,\"name\":\"C\",\"next\":[1, 2]}"); builder.append("]}"); builder.append(", {\"id\":3,\"name\":\"C\",\"next\":["); builder.append("{\"id\":1,\"name\":\"A\",\"next\":[1, 3]}, 2"); builder.append("]}"); builder.append("]}"); builder.append(", {\"id\":3,\"name\":\"C\",\"next\":["); builder.append("{\"id\":1,\"name\":\"A\",\"next\":[1, 3]}"); builder.append(", {\"id\":2,\"name\":\"B\",\"next\":["); builder.append("{\"id\":1,\"name\":\"A\",\"next\":[1, 3]}, 3"); builder.append("]}"); builder.append("]}"); builder.append("]"); return builder.toString(); } }
2,201
852
#ifndef FWCore_Utilities_Algorithms_h #define FWCore_Utilities_Algorithms_h #include <algorithm> namespace edm { /// Function templates that provide wrappers for standard algorithms, /// avoiding some duplication /// and assuring that incommensurate iterators are not used. /// wrapper for std::for_each template <typename ForwardSequence, typename Func> inline Func for_all(ForwardSequence& s, Func f) { return std::for_each(s.begin(), s.end(), f); } /// wrappers for copy template <typename ForwardSequence, typename Func> inline Func copy_all(ForwardSequence& s, Func f) { return std::copy(s.begin(), s.end(), f); } /// wrappers for std::find template <typename ForwardSequence, typename Datum> inline typename ForwardSequence::const_iterator find_in_all(ForwardSequence const& s, Datum const& d) { return std::find(s.begin(), s.end(), d); } template <typename ForwardSequence, typename Datum> inline typename ForwardSequence::iterator find_in_all(ForwardSequence& s, Datum const& d) { return std::find(s.begin(), s.end(), d); } template <typename ForwardSequence, typename Datum> inline bool search_all(ForwardSequence const& s, Datum const& d) { return std::find(s.begin(), s.end(), d) != s.end(); } /// wrappers for std::find template <typename ForwardSequence, typename Predicate> inline typename ForwardSequence::const_iterator find_if_in_all(ForwardSequence const& s, Predicate const& p) { return std::find_if(s.begin(), s.end(), p); } template <typename ForwardSequence, typename Predicate> inline typename ForwardSequence::iterator find_if_in_all(ForwardSequence& s, Predicate const& p) { return std::find_if(s.begin(), s.end(), p); } template <typename ForwardSequence, typename Predicate> inline bool search_if_in_all(ForwardSequence const& s, Predicate const& p) { return std::find_if(s.begin(), s.end(), p) != s.end(); } /// wrappers for std::binary_search template <typename ForwardSequence, typename Datum> inline bool binary_search_all(ForwardSequence const& s, Datum const& d) { return std::binary_search(s.begin(), s.end(), d); } template <typename ForwardSequence, typename Datum, typename Predicate> inline bool binary_search_all(ForwardSequence const& s, Datum const& d, Predicate p) { return std::binary_search(s.begin(), s.end(), d, p); } /// wrappers for std::lower_bound template <typename ForwardSequence, typename Datum> inline typename ForwardSequence::const_iterator lower_bound_all(ForwardSequence const& s, Datum const& d) { return std::lower_bound(s.begin(), s.end(), d); } template <typename ForwardSequence, typename Datum> inline typename ForwardSequence::iterator lower_bound_all(ForwardSequence& s, Datum const& d) { return std::lower_bound(s.begin(), s.end(), d); } template <typename ForwardSequence, typename Datum, typename Predicate> inline typename ForwardSequence::const_iterator lower_bound_all(ForwardSequence const& s, Datum const& d, Predicate p) { return std::lower_bound(s.begin(), s.end(), d, p); } template <typename ForwardSequence, typename Datum, typename Predicate> inline typename ForwardSequence::iterator lower_bound_all(ForwardSequence& s, Datum const& d, Predicate p) { return std::lower_bound(s.begin(), s.end(), d, p); } /// wrappers for std::sort template <typename RandomAccessSequence> inline void sort_all(RandomAccessSequence& s) { std::sort(s.begin(), s.end()); } template <typename RandomAccessSequence, typename Predicate> inline void sort_all(RandomAccessSequence& s, Predicate p) { std::sort(s.begin(), s.end(), p); } /// wrappers for std::stable_sort template <typename 
RandomAccessSequence> inline void stable_sort_all(RandomAccessSequence& s) { std::stable_sort(s.begin(), s.end()); } template <typename RandomAccessSequence, typename Predicate> inline void stable_sort_all(RandomAccessSequence& s, Predicate p) { std::stable_sort(s.begin(), s.end(), p); } } // namespace edm #endif
1,497
3,459
<reponame>werminghoff/Provenance #ifndef __COMPRESSION_BITCODER_H #define __COMPRESSION_BITCODER_H #include "rngcoder.h" namespace NCompression { namespace NArithmetic { const int kNumBitModelTotalBits = 11; const UINT32 kBitModelTotal = (1 << kNumBitModelTotalBits); const int kNumMoveReducingBits = 2; ///////////////////////////// // CBitModel template <int aNumMoveBits> class CBitModel { public: UINT32 m_Probability; void UpdateModel(UINT32 aSymbol) { /* m_Probability -= (m_Probability + ((aSymbol - 1) & ((1 << aNumMoveBits) - 1))) >> aNumMoveBits; m_Probability += (1 - aSymbol) << (kNumBitModelTotalBits - aNumMoveBits); */ if (aSymbol == 0) m_Probability += (kBitModelTotal - m_Probability) >> aNumMoveBits; else m_Probability -= (m_Probability) >> aNumMoveBits; } public: void Init() { m_Probability = kBitModelTotal / 2; } }; template <int aNumMoveBits> class CBitDecoder: public CBitModel<aNumMoveBits> { public: UINT32 Decode(CRangeDecoder *aRangeDecoder) { UINT32 aNewBound = (aRangeDecoder->m_Range >> kNumBitModelTotalBits) * CBitModel<aNumMoveBits>::m_Probability; if (aRangeDecoder->m_Code < aNewBound) { aRangeDecoder->m_Range = aNewBound; CBitModel<aNumMoveBits>::m_Probability += (kBitModelTotal - CBitModel<aNumMoveBits>::m_Probability) >> aNumMoveBits; if (aRangeDecoder->m_Range < kTopValue) { aRangeDecoder->m_Code = (aRangeDecoder->m_Code << 8) | aRangeDecoder->m_Stream.ReadByte(); aRangeDecoder->m_Range <<= 8; } return 0; } else { aRangeDecoder->m_Range -= aNewBound; aRangeDecoder->m_Code -= aNewBound; CBitModel<aNumMoveBits>::m_Probability -= (CBitModel<aNumMoveBits>::m_Probability) >> aNumMoveBits; if (aRangeDecoder->m_Range < kTopValue) { aRangeDecoder->m_Code = (aRangeDecoder->m_Code << 8) | aRangeDecoder->m_Stream.ReadByte(); aRangeDecoder->m_Range <<= 8; } return 1; } } }; }} #endif
896
399
package edu.wpi.grip.core.sockets; import edu.wpi.grip.core.Connection; import com.thoughtworks.xstream.annotations.XStreamAlias; import java.util.HashSet; import java.util.Optional; import java.util.Set; import static com.google.common.base.Preconditions.checkNotNull; /** * A {@link SocketHint} that's type is linked between many other sockets and who's type is defined * by whatever {@link InputSocket} was connected to it first. */ public final class LinkedSocketHint extends SocketHint.SocketHintDecorator { /** * Keeps track of the sockets that control the type of this socket hint. */ private final Set<InputSocket> controllingSockets = new HashSet<>(); private final Set<OutputSocket> controlledOutputSockets = new HashSet<>(); private final InputSocket.Factory inputSocketFactory; private final OutputSocket.Factory outputSocketFactory; private Optional<Class> connectedType = Optional.empty(); /** * @param inputSocketFactory The factory for {@link InputSocket}. * @param outputSocketFactory The factory for {@link OutputSocket}. */ @SuppressWarnings("unchecked") public LinkedSocketHint(InputSocket.Factory inputSocketFactory, OutputSocket.Factory outputSocketFactory) { super(new Builder(Object.class).identifier("").build()); this.inputSocketFactory = checkNotNull(inputSocketFactory); this.outputSocketFactory = checkNotNull(outputSocketFactory); } /** * Creates an {@link InputSocket} that is linked to this SocketHint. * * @param hintIdentifier The identifier for this socket's SocketHint * @return A socket hint that's socket type is determined by this SocketHint */ @SuppressWarnings("unchecked") public InputSocket<?> linkedInputSocket(String hintIdentifier) { return new LinkedInputSocket<>(inputSocketFactory.create(new IdentiferOverridingSocketHintDecorator(this, hintIdentifier))); } /** * Creates an input socket that is linked to this Socket This output socket will automatically be * disconnected when there is no longer an input socket to guarantee the type of this SocketHint. * * @param hintIdentifier The identifier for this socket's SocketHint * @return An OutputSocket that's type is dynamically linked to this SocketHint */ @SuppressWarnings("unchecked") public OutputSocket linkedOutputSocket(String hintIdentifier) { final OutputSocket outSocket = outputSocketFactory.create(new IdentiferOverridingSocketHintDecorator(this, hintIdentifier)); controlledOutputSockets.add(outSocket); return outSocket; } @Override public String getTypeLabel() { return "<Generic>"; } @Override public Class getType() { // If the type is known because one of the input sockets is connected then return that. // Otherwise, return Object return connectedType.orElse(Object.class); } @Override @SuppressWarnings("unchecked") public boolean isCompatibleWith(SocketHint other) { if (connectedType.isPresent()) { // If the type is present // Then use this socket hint to determine if this socket can be connected return connectedType.get().isAssignableFrom(other.getType()); } else { // Otherwise use the socket hint we are decorating to determine the supported type return getDecorated().isCompatibleWith(other); } } /** * Our own custom implementation of socket hint that interacts on this class when connections are * added and removed. */ @XStreamAlias("grip:LinkedInput") private class LinkedInputSocket<T> extends InputSocket.Decorator<T> { /** * @param socket the decorated socket. 
*/ LinkedInputSocket(InputSocket<T> socket) { super(socket); } @Override public void addConnection(Connection connection) { synchronized (this) { controllingSockets.add(this); connectedType = Optional.of(connection.getOutputSocket().getSocketHint().getType()); } super.addConnection(connection); } @Override @SuppressWarnings("unchecked") public void removeConnection(Connection connection) { synchronized (this) { // Remove this socket because it is no longer controlling the type of socket controllingSockets.remove(this); if (controllingSockets.isEmpty()) { // When the set is empty, the socket can support any // type again connectedType = Optional.empty(); // XXX: TODO: This is breaking the law of Demeter fix this controlledOutputSockets.forEach(outputSocket -> { final Set<Connection<?>> connections = outputSocket.getConnections(); connections.stream().forEach(Connection::remove); outputSocket.setPreviewed(false); outputSocket.setValueOptional(Optional.empty()); }); } } super.removeConnection(connection); } } }
1,565
5,169
{ "name": "XCUIElement+Visible", "version": "0.0.1", "summary": "It adds visible property to XCUIElement", "description": "This extension adds computed property visible to XCUIElement. It is useful in UI Testing for checking if an element is visible on the screen.", "homepage": "https://github.com/albinekcom/XCUIElement-Visible", "license": "MIT", "authors": { "Albin 'albinek' Sadowski": "<EMAIL>" }, "social_media_url": "http://twitter.com/albinekcom", "platforms": { "ios": "9.0", "osx": "10.11" }, "source": { "git": "https://github.com/albinekcom/XCUIElement-Visible.git", "tag": "0.0.1" }, "source_files": "Sources", "frameworks": "XCTest" }
277
17,085
<gh_stars>1000+ # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import unittest import random import numpy as np import six import paddle.fluid as fluid import paddle import warnings from paddle.fluid.framework import IrGraph from paddle.fluid.contrib.slim.quantization import QuantizationTransformPass from paddle.fluid.contrib.slim.quantization import QuantizationFreezePass from paddle.fluid.contrib.slim.quantization import OutScaleForTrainingPass from paddle.fluid.contrib.slim.quantization import OutScaleForInferencePass from paddle.fluid.contrib.slim.quantization import AddQuantDequantPass from paddle.fluid import (core, Program, Variable, program_guard, layers) from paddle.fluid.io import prepend_feed_ops, append_fetch_ops from inference_pass_test import InferencePassTest from paddle.fluid.core import create_paddle_predictor from paddle.fluid.core import AnalysisConfig class QuantDequantTest(unittest.TestCase): def __init__(self, methodName='runTest'): super(QuantDequantTest, self).__init__(methodName) paddle.enable_static() self.main_program = fluid.Program() self.startup_program = fluid.Program() self.test_main_program = fluid.Program() self.test_startup_program = fluid.Program() self.feeds = None self.fetch_list = None self.enable_mkldnn = False self.enable_mkldnn_bfloat16 = False self.enable_trt = False self.enable_tensorrt_oss = True self.trt_parameters = None self.dynamic_shape_params = None self.enable_lite = False self.lite_parameters = None self.path = "./inference_pass/" + self.__class__.__name__ + "/" self.data = None self.label = None self.result = None np.random.seed(1) random.seed(1) # from Paddle release2.1 def _normalize_program(self, program, feed_vars, fetch_vars): if not isinstance(program, Program): raise TypeError( "program type must be `fluid.Program`, but received `%s`" % type(program)) if not isinstance(feed_vars, list): feed_vars = [feed_vars] if not all(isinstance(v, Variable) for v in feed_vars): raise TypeError( "feed_vars type must be a Variable or a list of Variable.") if not isinstance(fetch_vars, list): fetch_vars = [fetch_vars] if not all(isinstance(v, Variable) for v in fetch_vars): raise TypeError( "fetch_vars type must be a Variable or a list of Variable.") # remind users to set auc_states to 0 if auc op were found. 
for op in program.global_block().ops: # clear device of Op device_attr_name = core.op_proto_and_checker_maker.kOpDeviceAttrName( ) op._set_attr(device_attr_name, "") if op.type == 'auc': warnings.warn("Be sure that you have set auc states to 0 " "before saving inference model.") break # serialize program copy_program = program.clone() global_block = copy_program.global_block() remove_op_idx = [] for i, op in enumerate(global_block.ops): op.desc.set_is_target(False) if op.type == "feed" or op.type == "fetch": remove_op_idx.append(i) for idx in remove_op_idx[::-1]: global_block._remove_op(idx) copy_program.desc.flush() feed_var_names = [var.name for var in feed_vars] copy_program = copy_program._prune_with_input( feeded_var_names=feed_var_names, targets=fetch_vars) copy_program = copy_program._inference_optimize(prune_read_op=True) fetch_var_names = [var.name for var in fetch_vars] prepend_feed_ops(copy_program, feed_var_names) append_fetch_ops(copy_program, fetch_var_names) copy_program.desc._set_version() return copy_program def _save_models(self, dirname, feeded_var_names, target_vars, executor, program, scope): with fluid.scope_guard(scope): fluid.io.save_inference_model( dirname, feeded_var_names, target_vars, executor, program, clip_extra=True) def _get_paddle_outs(self, feed, fetch_list, executor, program, scope): ''' Return PaddlePaddle outputs. ''' with fluid.scope_guard(scope): outs = executor.run(program=program, feed=feed, fetch_list=fetch_list, return_numpy=True) return outs def _get_inference_outs(self, config): ''' Return AnalysisPredictor outputs. ''' predictor = create_paddle_predictor(config) tensor_shapes = predictor.get_input_tensor_shape() names = predictor.get_input_names() for i, name in enumerate(names): shape = tensor_shapes[name] shape[0] = 1 tensor = predictor.get_input_tensor(name) feed_data = list(self.feeds.values())[i] tensor.copy_from_cpu(np.array(feed_data)) if type(feed_data) == fluid.LoDTensor: tensor.set_lod(feed_data.lod()) predictor.zero_copy_run() output_names = predictor.get_output_names() outs = [ predictor.get_output_tensor(out_name).copy_to_cpu() for out_name in output_names ] return outs def _get_analysis_config(self, use_gpu=False, use_trt=False, use_mkldnn=False): ''' Return a new object of AnalysisConfig. ''' config = AnalysisConfig(self.path) config.disable_gpu() config.switch_specify_input_names(True) config.switch_ir_optim(True) config.switch_use_feed_fetch_ops(False) if use_gpu: config.enable_use_gpu(100, 0) if use_trt: config.enable_tensorrt_engine( self.trt_parameters.workspace_size, self.trt_parameters.max_batch_size, self.trt_parameters.min_subgraph_size, self.trt_parameters.precision, self.trt_parameters.use_static, self.trt_parameters.use_calib_mode) if self.dynamic_shape_params: config.set_trt_dynamic_shape_info( self.dynamic_shape_params.min_input_shape, self.dynamic_shape_params.max_input_shape, self.dynamic_shape_params.optim_input_shape, self.dynamic_shape_params.disable_trt_plugin_fp16) if self.enable_tensorrt_oss: config.enable_tensorrt_oss() elif use_mkldnn: config.enable_mkldnn() if self.enable_mkldnn_bfloat16: config.enable_mkldnn_bfloat16() print('config summary:', config.summary()) return config def check_output_with_option(self, use_gpu, atol=1e-5, flatten=False, quant=False, rtol=1e-5): ''' Check whether calculating on CPU and GPU, enable TensorRT or disable TensorRT, enable MKLDNN or disable MKLDNN are all the same. 
''' place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace() executor = fluid.Executor(place) scope = fluid.Scope() device = "GPU" if use_gpu else "CPU" with fluid.scope_guard(scope): executor.run(self.startup_program) executor.run(self.test_startup_program) main_graph = IrGraph(core.Graph(self.main_program.desc), for_test=False) test_graph = IrGraph( core.Graph(self.test_main_program.desc), for_test=True) transform_pass = QuantizationTransformPass( scope=scope, place=place, activation_quantize_type=self.activation_quantize_type, weight_quantize_type=self.weight_quantize_type) transform_pass.apply(main_graph) transform_pass.apply(test_graph) add_quant_dequant_pass = AddQuantDequantPass(scope=scope, place=place) add_quant_dequant_pass.apply(main_graph) add_quant_dequant_pass.apply(test_graph) scale_training_pass = OutScaleForTrainingPass(scope=scope, place=place) scale_training_pass.apply(main_graph) build_strategy = fluid.BuildStrategy() build_strategy.memory_optimize = False build_strategy.enable_inplace = False build_strategy.fuse_all_reduce_ops = False binary = fluid.CompiledProgram(main_graph.graph) iters = 10 batch_size = 1 train_reader = paddle.batch( paddle.reader.shuffle( paddle.dataset.mnist.train(), buf_size=500), batch_size=batch_size) feeder = fluid.DataFeeder( feed_list=[self.data, self.label], place=place) with fluid.scope_guard(scope): for _ in range(iters): data = next(train_reader()) loss_v = executor.run(binary, feed=feeder.feed(data), fetch_list=[self.loss]) scale_inference_pass = OutScaleForInferencePass(scope=scope) scale_inference_pass.apply(test_graph) # Freeze graph for inference, but the weight of fc/conv is still float type. freeze_pass = QuantizationFreezePass( scope=scope, place=place, weight_quantize_type=self.weight_quantize_type) freeze_pass.apply(test_graph) self.main_program = test_graph.to_program() with fluid.scope_guard(scope): self.main_program = self._normalize_program( self.main_program, self.data, self.fetch_list) self._save_models(self.path, list(self.feeds.keys()), self.fetch_list, executor, self.main_program, scope) paddle_outs = self._get_paddle_outs(self.feeds, self.fetch_list, executor, self.main_program, scope) inference_outs = self._get_inference_outs( self._get_analysis_config(use_gpu=use_gpu)) # Check whether the results calculated on CPU and on GPU are the same. self.assertTrue( len(paddle_outs) == len(inference_outs), "The number of outputs is different between inference and training forward at {}". format(device)) for out, inference_out in zip(paddle_outs, inference_outs): paddle_out = np.array(out) if flatten: paddle_out = paddle_out.flatten() inference_out = inference_out.flatten() self.assertTrue( np.allclose( paddle_out, inference_out, atol=atol), "Output has diff between inference and training forward at {} ". format(device)) # Check whether the trt results and the GPU results are the same. if use_gpu and self.enable_trt: tensorrt_outputs = self._get_inference_outs( self._get_analysis_config( use_gpu=use_gpu, use_trt=self.enable_trt)) if self.trt_parameters.use_static: #deserialize tensorrt_outputs = self._get_inference_outs( self._get_analysis_config( use_gpu=use_gpu, use_trt=self.enable_trt)) self.assertTrue( len(tensorrt_outputs) == len(paddle_outs), "The number of outputs is different between GPU and TensorRT. 
") for paddle_out, tensorrt_output in zip(paddle_outs, tensorrt_outputs): paddle_out = np.array(paddle_out) if flatten: paddle_out = paddle_out.flatten() tensorrt_output = tensorrt_output.flatten() self.assertTrue( np.allclose( paddle_out, tensorrt_output, rtol=rtol, atol=atol), "Output has diff between GPU and TensorRT. ") # Check whether the mkldnn results and the CPU results are the same. if (not use_gpu) and self.enable_mkldnn: mkldnn_outputs = self._get_inference_outs( self._get_analysis_config( use_gpu=use_gpu, use_mkldnn=self.enable_mkldnn)) self.assertTrue( len(paddle_outs) == len(mkldnn_outputs), "The number of outputs is different between CPU and MKLDNN. ") if self.enable_mkldnn_bfloat16: atol = 0.01 for paddle_out, mkldnn_output in zip(paddle_outs, mkldnn_outputs): self.assertTrue( np.allclose( np.array(paddle_out), mkldnn_output, atol=atol), "Output has diff between CPU and MKLDNN. ") class TensorRTParam: ''' Prepare TensorRT subgraph engine parameters. ''' def __init__(self, workspace_size, max_batch_size, min_subgraph_size, precision, use_static, use_calib_mode): self.workspace_size = workspace_size self.max_batch_size = max_batch_size self.min_subgraph_size = min_subgraph_size self.precision = precision self.use_static = use_static self.use_calib_mode = use_calib_mode class DynamicShapeParam: ''' Prepare TensorRT subgraph engine dynamic shape parameters. ''' def __init__(self, min_input_shape, max_input_shape, optim_input_shape, disable_trt_plugin_fp16): self.min_input_shape = min_input_shape self.max_input_shape = max_input_shape self.optim_input_shape = optim_input_shape self.disable_trt_plugin_fp16 = disable_trt_plugin_fp16 def quant_dequant(self): place = fluid.CPUPlace() exe = fluid.Executor(place) scope = fluid.Scope()
7,426
372
/* Editor Settings: expandtabs and use 4 spaces for indentation * ex: set softtabstop=4 tabstop=8 expandtab shiftwidth=4: * * -*- mode: c, c-basic-offset: 4 -*- */ /* * Copyright © BeyondTrust Software 2004 - 2019 * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * BEYONDTRUST MAKES THIS SOFTWARE AVAILABLE UNDER OTHER LICENSING TERMS AS * WELL. IF YOU HAVE ENTERED INTO A SEPARATE LICENSE AGREEMENT WITH * BEYONDTRUST, THEN YOU MAY ELECT TO USE THE SOFTWARE UNDER THE TERMS OF THAT * SOFTWARE LICENSE AGREEMENT INSTEAD OF THE TERMS OF THE APACHE LICENSE, * NOTWITHSTANDING THE ABOVE NOTICE. IF YOU HAVE QUESTIONS, OR WISH TO REQUEST * A COPY OF THE ALTERNATE LICENSING TERMS OFFERED BY BEYONDTRUST, PLEASE CONTACT * BEYONDTRUST AT beyondtrust.com/contact */ /* * Copyright (C) BeyondTrust Software. All rights reserved. * * Module Name: * * main.c * * Abstract: * * BeyondTrust Security and Authentication Subsystem (LSASS) * * Tool to set the LSASS Log Level at runtime * * Authors: <NAME> (<EMAIL>) * <NAME> (<EMAIL>) */ #include "config.h" #include "lsasystem.h" #include "lsadef.h" #include "lsa/lsa.h" #include "lwmem.h" #include "lwstr.h" #include "lwsecurityidentifier.h" #include "lsautils.h" #include "lsaclient.h" #define LW_PRINTF_STRING(x) ((x) ? (x) : "<null>") static DWORD ParseArgs( int argc, char* argv[], LsaLogLevel* pLogLevel ); static VOID ShowUsage(); static DWORD PrintLogInfo( PLSA_LOG_INFO pLogInfo ); static DWORD MapErrorCode( DWORD dwError ); int set_log_level_main( int argc, char* argv[] ) { DWORD dwError = 0; LsaLogLevel logLevel = LSA_LOG_LEVEL_ERROR; HANDLE hLsaConnection = (HANDLE)NULL; PLSA_LOG_INFO pLogInfo = NULL; size_t dwErrorBufferSize = 0; BOOLEAN bPrintOrigError = TRUE; if (geteuid() != 0) { fprintf(stderr, "This program requires super-user privileges.\n"); dwError = LW_ERROR_ACCESS_DENIED; BAIL_ON_LSA_ERROR(dwError); } dwError = ParseArgs(argc, argv, &logLevel); BAIL_ON_LSA_ERROR(dwError); dwError = LsaOpenServer(&hLsaConnection); BAIL_ON_LSA_ERROR(dwError); dwError = LsaSetLogLevel( hLsaConnection, logLevel); BAIL_ON_LSA_ERROR(dwError); fprintf(stdout, "The log level was set successfully\n\n"); dwError = LsaGetLogInfo( hLsaConnection, &pLogInfo); BAIL_ON_LSA_ERROR(dwError); dwError = PrintLogInfo(pLogInfo); BAIL_ON_LSA_ERROR(dwError); cleanup: if (pLogInfo) { LsaFreeLogInfo(pLogInfo); } if (hLsaConnection != (HANDLE)NULL) { LsaCloseServer(hLsaConnection); } return (dwError); error: dwError = MapErrorCode(dwError); dwErrorBufferSize = LwGetErrorString(dwError, NULL, 0); if (dwErrorBufferSize > 0) { DWORD dwError2 = 0; PSTR pszErrorBuffer = NULL; dwError2 = LwAllocateMemory( dwErrorBufferSize, (PVOID*)&pszErrorBuffer); if (!dwError2) { DWORD dwLen = LwGetErrorString(dwError, pszErrorBuffer, dwErrorBufferSize); if ((dwLen == dwErrorBufferSize) && !LW_IS_NULL_OR_EMPTY_STR(pszErrorBuffer)) { fprintf(stderr, "Failed to set log level. 
Error code %u (%s).\n%s\n", dwError, LW_PRINTF_STRING(LwWin32ExtErrorToName(dwError)), pszErrorBuffer); bPrintOrigError = FALSE; } } LW_SAFE_FREE_STRING(pszErrorBuffer); } if (bPrintOrigError) { fprintf(stderr, "Failed to set log level. Error code %u (%s).\n", dwError, LW_PRINTF_STRING(LwWin32ExtErrorToName(dwError))); } goto cleanup; } static DWORD ParseArgs( int argc, char* argv[], LsaLogLevel* pLogLevel ) { typedef enum { PARSE_MODE_OPEN = 0 } ParseMode; DWORD dwError = 0; int iArg = 1; PSTR pszArg = NULL; ParseMode parseMode = PARSE_MODE_OPEN; LsaLogLevel logLevel = LSA_LOG_LEVEL_ERROR; BOOLEAN bLogLevelSpecified = FALSE; do { pszArg = argv[iArg++]; if (pszArg == NULL || *pszArg == '\0') { break; } switch (parseMode) { case PARSE_MODE_OPEN: if ((strcmp(pszArg, "--help") == 0) || (strcmp(pszArg, "-h") == 0)) { ShowUsage(); exit(0); } else { if (!strcasecmp(pszArg, "error")) { logLevel = LSA_LOG_LEVEL_ERROR; bLogLevelSpecified = TRUE; } else if (!strcasecmp(pszArg, "warning")) { logLevel = LSA_LOG_LEVEL_WARNING; bLogLevelSpecified = TRUE; } else if (!strcasecmp(pszArg, "info")) { logLevel = LSA_LOG_LEVEL_INFO; bLogLevelSpecified = TRUE; } else if (!strcasecmp(pszArg, "verbose")) { logLevel = LSA_LOG_LEVEL_VERBOSE; bLogLevelSpecified = TRUE; } else if (!strcasecmp(pszArg, "debug")) { logLevel = LSA_LOG_LEVEL_DEBUG; bLogLevelSpecified = TRUE; } else if (!strcasecmp(pszArg, "trace")) { logLevel = LSA_LOG_LEVEL_TRACE; bLogLevelSpecified = TRUE; } else { ShowUsage(); exit(1); } } break; } } while (iArg < argc); if (!bLogLevelSpecified) { ShowUsage(); exit(1); } *pLogLevel = logLevel; return dwError; } static void ShowUsage() { printf("Usage: set-log-level {error, warning, info, verbose, debug, trace}\n"); } static DWORD PrintLogInfo( PLSA_LOG_INFO pLogInfo ) { DWORD dwError = 0; fprintf(stdout, "Current log settings:\n"); fprintf(stdout, "=================\n"); switch(pLogInfo->logTarget) { case LSA_LOG_TARGET_DISABLED: fprintf(stdout, "Logging is currently disabled\n"); break; case LSA_LOG_TARGET_CONSOLE: fprintf(stdout, "LSA Server is logging to console\n"); break; case LSA_LOG_TARGET_FILE: fprintf(stdout, "LSA Server is logging to file.\n"); fprintf(stdout, "Log file path: %s\n", pLogInfo->pszPath); break; case LSA_LOG_TARGET_SYSLOG: fprintf(stdout, "LSA Server is logging to syslog\n"); break; default: dwError = LW_ERROR_INVALID_PARAMETER; BAIL_ON_LSA_ERROR(dwError); } fprintf(stdout, "Maximum allowed log level: "); switch(pLogInfo->maxAllowedLogLevel) { case LSA_LOG_LEVEL_ERROR: fprintf(stdout, "%s\n", "error"); break; case LSA_LOG_LEVEL_WARNING: fprintf(stdout, "%s\n", "warning"); break; case LSA_LOG_LEVEL_INFO: fprintf(stdout, "%s\n", "info"); break; case LSA_LOG_LEVEL_VERBOSE: fprintf(stdout, "%s\n", "verbose"); break; case LSA_LOG_LEVEL_DEBUG: fprintf(stdout, "%s\n", "debug"); break; case LSA_LOG_LEVEL_TRACE: fprintf(stdout, "%s\n", "trace"); break; default: dwError = LW_ERROR_INVALID_PARAMETER; BAIL_ON_LSA_ERROR(dwError); } error: return dwError; } static DWORD MapErrorCode( DWORD dwError ) { DWORD dwError2 = dwError; switch (dwError) { case ECONNREFUSED: case ENETUNREACH: case ETIMEDOUT: dwError2 = LW_ERROR_LSA_SERVER_UNREACHABLE; break; default: break; } return dwError2; }
4,996
703
<reponame>Tekh-ops/ezEngine
#include <ToolsFoundationTest/ToolsFoundationTestPCH.h>

#include <TestFramework/Framework/TestFramework.h>
#include <TestFramework/Utilities/TestSetup.h>

ezInt32 ezConstructionCounter::s_iConstructions = 0;
ezInt32 ezConstructionCounter::s_iDestructions = 0;
ezInt32 ezConstructionCounter::s_iConstructionsLast = 0;
ezInt32 ezConstructionCounter::s_iDestructionsLast = 0;

EZ_TESTFRAMEWORK_ENTRY_POINT("ToolsFoundationTest", "Tools Foundation Tests")
178
310
""" Copyright (c) 2019 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os from tests.common.helpers import create_venv_with_nncf import pytest pytest.register_assert_rewrite('tests.torch.helpers') def pytest_addoption(parser): parser.addoption( "--data", type=str, default=None, help="Path to test datasets, e.g. CIFAR10 - for sanity tests or CIFAR100 - for weekly ones" ) parser.addoption( "--regen-dot", action="store_true", default=False, help="If specified, the " "reference .dot files will be regenerated " "using the current state of the repository." ) parser.addoption( "--torch-home", type=str, default=None, help="Path to cached test models, downloaded by torchvision" ) parser.addoption( "--weekly-models", type=str, default=None, help="Path to models' weights for weekly tests" ) parser.addoption( "--sota-checkpoints-dir", type=str, default=None, help="Path to checkpoints directory for sota accuracy test" ) parser.addoption( "--sota-data-dir", type=str, default=None, help="Path to datasets directory for sota accuracy test" ) parser.addoption( "--metrics-dump-path", type=str, default=None, help="Path to directory to store metrics. " "Directory must be empty or should not exist." "Metric keeps in " "PROJECT_ROOT/test_results/metrics_dump_timestamp " "if param not specified" ) parser.addoption( "--ov-data-dir", type=str, default=None, help="Path to datasets directory for OpenVino accuracy test" ) parser.addoption( "--imagenet", action="store_true", default=False, help="Enable tests with imagenet" ) parser.addoption( "--test-install-type", type=str, help="Type of installation, use CPU or GPU for appropriate install" ) parser.addoption( "--backward-compat-models", type=str, default=None, help="Path to NNCF-traned model checkpoints that are tested" "to be strictly loadable" ) parser.addoption( "--third-party-sanity", action="store_true", default=False, help="To run third party sanity test cases" ) parser.addoption( "--run-openvino-eval", action="store_true", default=False, help="To run eval models via OpenVino" ) parser.addoption( "--onnx-dir", type=str, default=None, help="Path to converted onnx models" ) parser.addoption( "--ov-config-dir", type=str, default=None, help="Path to OpenVino configs" ) parser.addoption( "--pip-cache-dir", type=str, default=None, help="Path to pip cached downloaded packages directory (speeds up installation tests)" ) def pytest_configure(config): regen_dot = config.getoption('--regen-dot', False) if regen_dot: os.environ["NNCF_TEST_REGEN_DOT"] = "1" @pytest.fixture(scope="module") def dataset_dir(request): return request.config.getoption("--data") @pytest.fixture(scope="module") def enable_imagenet(request): return request.config.getoption("--imagenet") @pytest.fixture(scope="module") def weekly_models_path(request): return request.config.getoption("--weekly-models") @pytest.fixture(scope="module") def sota_checkpoints_dir(request): return request.config.getoption("--sota-checkpoints-dir") @pytest.fixture(scope="module") def sota_data_dir(request): return 
request.config.getoption("--sota-data-dir") @pytest.fixture(scope="module") def metrics_dump_dir(request): pytest.metrics_dump_path = request.config.getoption("--metrics-dump-path") @pytest.fixture(scope="module") def ov_data_dir(request): return request.config.getoption("--ov-data-dir") @pytest.fixture(scope="module") def install_type(request): return request.config.getoption("--test-install-type") @pytest.fixture(scope="module") def backward_compat_models_path(request): return request.config.getoption("--backward-compat-models") @pytest.fixture(autouse=True) def torch_home_dir(request, monkeypatch): torch_home = request.config.getoption("--torch-home") if torch_home: monkeypatch.setenv('TORCH_HOME', torch_home) @pytest.fixture(scope="session") def third_party(request): return request.config.getoption("--third-party-sanity") @pytest.fixture(scope="session") def openvino(request): return request.config.getoption("--run-openvino-eval") @pytest.fixture(scope="module") def onnx_dir(request): return request.config.getoption("--onnx-dir") @pytest.fixture(scope="module") def ov_config_dir(request): return request.config.getoption("--ov-config-dir") @pytest.fixture(scope="module") def pip_cache_dir(request): return request.config.getoption("--pip-cache-dir") @pytest.fixture(scope="function") def tmp_venv_with_nncf(install_type, tmp_path, package_type, venv_type): # pylint:disable=redefined-outer-name if install_type is None: pytest.skip("Please specify type of installation") venv_path = create_venv_with_nncf(tmp_path, package_type, venv_type, extra_reqs='torch') return venv_path @pytest.fixture def runs_subprocess_in_precommit(): # PyTorch caches its CUDA memory allocations, so during the # pytest execution the total memory reserved on GPUs will only grow, # but it is not necessarily completely occupied at the current moment. # The sub-processes are separate to the pytest process and will only see the GPU # memory which has not been cached (and thus remains reserved) in the owning pytest process by PyTorch, # and the tests below may fail with an OOM. To avoid this, need to call torch.cuda.empty_cache() # each time a GPU-powered subprocess is executed during a test. try: import torch if torch.cuda.is_available(): torch.cuda.empty_cache() except ImportError: pass
2,664
416
<reponame>khauser/SimpleFlatMapper<gh_stars>100-1000
package org.simpleflatmapper.converter.impl.time;

import org.simpleflatmapper.converter.Context;
import org.simpleflatmapper.converter.ContextualConverter;

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Date;

public class DateToJavaZonedDateTimeConverter implements ContextualConverter<Date, ZonedDateTime> {
    private final ZoneId zoneId;

    public DateToJavaZonedDateTimeConverter(ZoneId zoneId) {
        this.zoneId = zoneId;
    }

    @Override
    public ZonedDateTime convert(Date in, Context context) throws Exception {
        if (in == null) return null;
        return in.toInstant().atZone(zoneId);
    }
}
257
2,472
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved. // Please refer to the license text that comes with this tendis open source // project for additional information. #include <iostream> #include <string> #include <cstring> #include "tendisplus/utils/base64.h" std::string Base64::Encode(const unsigned char* str, int bytes) { static const char _base64_table[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; std::string _encode_result; const unsigned char* current; current = str; while (bytes > 2) { _encode_result += _base64_table[current[0] >> 2]; _encode_result += _base64_table[((current[0] & 0x03) << 4) + (current[1] >> 4)]; _encode_result += _base64_table[((current[1] & 0x0f) << 2) + (current[2] >> 6)]; _encode_result += _base64_table[current[2] & 0x3f]; current += 3; bytes -= 3; } if (bytes > 0) { _encode_result += _base64_table[current[0] >> 2]; if (bytes % 3 == 1) { _encode_result += _base64_table[(current[0] & 0x03) << 4]; _encode_result += "=="; } else if (bytes % 3 == 2) { _encode_result += _base64_table[((current[0] & 0x03) << 4) + (current[1] >> 4)]; _encode_result += _base64_table[(current[1] & 0x0f) << 2]; _encode_result += "="; } } return _encode_result; } std::string Base64::Decode(const char* str, int length) { static const char base64_pad = '='; static const char DecodeTable[] = { -2, -2, -2, -2, -2, -2, -2, -2, -2, -1, -1, -2, -2, -1, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -1, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, 62, -2, -2, -2, 63, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -2, -2, -2, -2, -2, -2, -2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -2, -2, -2, -2, -2, -2, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2}; int bin = 0, i = 0; std::string _decode_result; const char* current = str; char ch; while ((ch = *current++) != '\0' && length-- > 0) { if (ch == base64_pad) { if (*current != '=' && (i % 4) == 1) { return NULL; } continue; } ch = DecodeTable[(unsigned int)ch]; if (ch < 0) { continue; } switch (i % 4) { case 0: bin = ch << 2; break; case 1: bin |= ch >> 4; _decode_result += bin; bin = (ch & 0x0f) << 4; break; case 2: bin |= ch >> 2; _decode_result += bin; bin = (ch & 0x03) << 6; break; case 3: bin |= ch; _decode_result += bin; break; } i++; } return _decode_result; }
1,808
1,521
/** * Copyright 2020 Alibaba Group Holding Limited. * * <p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * <p>http://www.apache.org/licenses/LICENSE-2.0 * * <p>Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.maxgraph.frontendservice; import com.alibaba.maxgraph.compiler.api.schema.GraphElement; import com.alibaba.maxgraph.compiler.api.schema.GraphSchema; import com.alibaba.maxgraph.compiler.api.schema.SchemaFetcher; import com.alibaba.maxgraph.iterator.IteratorList; import com.alibaba.maxgraph.iterator.function.EdgeResponseFunction; import com.alibaba.maxgraph.iterator.function.VertexResponseFunction; import com.alibaba.maxgraph.proto.GremlinServiceGrpc; import com.alibaba.maxgraph.proto.StoreApi; import com.alibaba.maxgraph.proto.StoreServiceGrpc; import com.alibaba.maxgraph.sdkcommon.graph.ElementId; import com.alibaba.maxgraph.proto.GremlinQuery.*; import com.alibaba.maxgraph.structure.Edge; import com.alibaba.maxgraph.structure.Vertex; import com.alibaba.maxgraph.structure.graph.MaxGraph; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.grpc.ManagedChannel; import io.grpc.netty.NegotiationType; import io.grpc.netty.NettyChannelBuilder; import org.apache.commons.lang3.tuple.Pair; import java.io.*; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; public class RemoteProxy implements Closeable { private StoreServiceGrpc.StoreServiceBlockingStub stub; private final long timeout; private ManagedChannel channel; private SchemaFetcher schemaFetcher; private MaxGraph graph; public RemoteProxy( String host, int port, long timeout, SchemaFetcher schemaFetcher, MaxGraph graph) { this.timeout = timeout; this.channel = NettyChannelBuilder.forAddress(host, port) .negotiationType(NegotiationType.PLAINTEXT) .maxInboundMessageSize(Integer.MAX_VALUE) .idleTimeout(1000, TimeUnit.SECONDS) .build(); this.stub = StoreServiceGrpc.newBlockingStub(channel); this.schemaFetcher = schemaFetcher; this.graph = graph; } public Iterator<Vertex> getVertex(final Set<ElementId> ids) { return getVertexBlock(ids); } private Iterator<Vertex> getVertexBlock(Set<ElementId> ids) { StoreApi.GetVertexsRequest.Builder b = StoreApi.GetVertexsRequest.newBuilder(); b.addAllIds(ids.stream().map(ElementId::id).collect(Collectors.toSet())) .setSnapshotId(schemaFetcher.getSchemaSnapshotPair().getRight()); Iterator<VertexResponse> responses = stub.withDeadlineAfter(timeout, TimeUnit.SECONDS).getVertexs(b.build()); List<Iterator<VertexResponse>> responseList = Lists.newArrayList(); responseList.add(responses); return new IteratorList<>( responseList, new VertexResponseFunction( schemaFetcher.getSchemaSnapshotPair().getLeft(), this.graph)); } public Iterator<Edge> getOutEdges(Set<Vertex> v, String... 
label) { List<Iterator<StoreApi.GraphEdgeReponse>> iterEdgeList = Lists.newArrayList(); Pair<GraphSchema, Long> schemaPair = schemaFetcher.getSchemaSnapshotPair(); GraphSchema schema = schemaPair.getLeft(); long snapshotId = schemaPair.getRight(); for (Vertex vertex : v) { if (label.length == 0) { StoreApi.GetOutEdgesRequest.Builder req = StoreApi.GetOutEdgesRequest.newBuilder(); req.setSnapshotId(snapshotId) .setSrcId(vertex.id.id()) .setSnapshotId(schemaPair.getRight()); Iterator<StoreApi.GraphEdgeReponse> edgeResponse = stub.withDeadlineAfter(timeout, TimeUnit.SECONDS).getOutEdges(req.build()); iterEdgeList.add(edgeResponse); } else { for (String labelVal : label) { try { GraphElement element = schema.getElement(labelVal); int labelId = element.getLabelId(); StoreApi.GetOutEdgesRequest.Builder req = StoreApi.GetOutEdgesRequest.newBuilder(); req.setSnapshotId(snapshotId) .setSrcId(vertex.id.id()) .setTypeId(labelId) .setSnapshotId(schemaPair.getRight()); Iterator<StoreApi.GraphEdgeReponse> edgeResponse = stub.withDeadlineAfter(timeout, TimeUnit.SECONDS) .getOutEdges(req.build()); iterEdgeList.add(edgeResponse); } catch (Exception ignored) { } } } } return new IteratorList<>(iterEdgeList, new EdgeResponseFunction(schema, this.graph)); } public Iterator<Edge> getInEdges(Set<Vertex> v, String... label) { List<Iterator<StoreApi.GraphEdgeReponse>> iterEdgeList = Lists.newArrayList(); Pair<GraphSchema, Long> schemaPair = schemaFetcher.getSchemaSnapshotPair(); GraphSchema schema = schemaPair.getLeft(); long snapshotId = schemaPair.getRight(); for (Vertex vertex : v) { if (label.length == 0) { StoreApi.GetInEdgesRequest.Builder req = StoreApi.GetInEdgesRequest.newBuilder(); req.setSnapshotId(snapshotId).setDstId(vertex.id.id()); Iterator<StoreApi.GraphEdgeReponse> edgeResponse = stub.withDeadlineAfter(timeout, TimeUnit.SECONDS).getInEdges(req.build()); iterEdgeList.add(edgeResponse); } else { for (String labelVal : label) { try { GraphElement element = schema.getElement(labelVal); int labelId = element.getLabelId(); StoreApi.GetInEdgesRequest.Builder req = StoreApi.GetInEdgesRequest.newBuilder(); req.setSnapshotId(snapshotId).setDstId(vertex.id.id()).setTypeId(labelId); Iterator<StoreApi.GraphEdgeReponse> edgeResponse = stub.withDeadlineAfter(timeout, TimeUnit.SECONDS) .getInEdges(req.build()); iterEdgeList.add(edgeResponse); } catch (Exception ignored) { } } } } return new IteratorList<>(iterEdgeList, new EdgeResponseFunction(schema, this.graph)); } public Iterator<Vertex> scan(Set<String> labelList) { Pair<GraphSchema, Long> pair = schemaFetcher.getSchemaSnapshotPair(); Set<Integer> labelIdList = Sets.newHashSet(); if (null == labelList || labelList.isEmpty()) { labelIdList.add(0); } else { for (String label : labelList) { try { labelIdList.add(pair.getLeft().getElement(label).getLabelId()); } catch (Exception ignored) { } } } if (labelIdList.isEmpty()) { return new ArrayList<Vertex>().iterator(); } List<Iterator<VertexResponse>> resList = Lists.newArrayList(); VertexScanRequest vertexScanRequest = VertexScanRequest.newBuilder().setTypeId(-1).setOrder(false).build(); Iterator<VertexResponse> scanResult = GremlinServiceGrpc.newBlockingStub(this.channel) .withDeadlineAfter(timeout, TimeUnit.SECONDS) .scan(vertexScanRequest); resList.add(scanResult); return new IteratorList<>(resList, new VertexResponseFunction(pair.getLeft(), this.graph)); } public Iterator<Vertex> scan() { return scan(null); } public Iterator<Edge> scanEdge() { return scanEdge(null); } public Iterator<Edge> scanEdge(Set<String> 
labelList) { Pair<GraphSchema, Long> pair = schemaFetcher.getSchemaSnapshotPair(); Set<Integer> labelIdList = Sets.newHashSet(); if (null == labelList || labelList.isEmpty()) { labelIdList.add(0); } else { for (String label : labelList) { try { labelIdList.add(pair.getLeft().getElement(label).getLabelId()); } catch (Exception ignored) { } } } if (labelIdList.isEmpty()) { return new ArrayList<Edge>().iterator(); } List<Iterator<StoreApi.GraphEdgeReponse>> resList = Lists.newArrayList(); for (int labelId : labelIdList) { StoreApi.ScanEdgeRequest.Builder req = StoreApi.ScanEdgeRequest.newBuilder(); req.setSnapshotId(pair.getRight()) .setOffset(0) .setLimit(Integer.MAX_VALUE) .setTypeId(labelId); resList.add(stub.withDeadlineAfter(timeout, TimeUnit.SECONDS).scanEdges(req.build())); } return new IteratorList<>(resList, new EdgeResponseFunction(pair.getLeft(), this.graph)); } @Override public void close() throws IOException { this.stub = null; this.channel.shutdown(); } }
4,605
4,879
<gh_stars>1000+
#pragma once

#include <string>

enum MapStyle
{
  MapStyleClear = 0,
  MapStyleDark = 1,
  MapStyleMerged = 2,
  MapStyleVehicleClear = 3,
  MapStyleVehicleDark = 4,
  // Add new map style here

  // Specifies number of MapStyle enum values, must be last
  MapStyleCount
};

extern MapStyle const kDefaultMapStyle;

extern MapStyle MapStyleFromSettings(std::string const & str);
extern std::string MapStyleToString(MapStyle mapStyle);
extern std::string DebugPrint(MapStyle mapStyle);
163
862
#!/usr/bin/python
#
# Filename:
#
# Version: 1.0.0
#
# Author: <NAME> (TryCatchHCF)
#
# Summary:
#
#   Part of the DumpsterFire Toolset. See documentation at https://github.com/TryCatchHCF/DumpsterFire
#
# Description:
#
# Example:
#

import os, sys, urllib, random, string

from FireModules.fire_module_base_class import *


class create_500000_files( FireModule ):

    def __init__(self):
        self.commentsStr = "Filesystem/create_500000_files"
        self.mTargetDirectory = "/"
        self.fileNameBaseStr = ".us-west-2.elb.amazonaws.com"

    def __init__(self, moofStr):
        self.moofStr = moofStr
        self.commentsStr = "Filesystem/create_500000_files"
        self.mTargetDirectory = "/"
        self.fileNameBaseStr = ".us-west-2.elb.amazonaws.com"
        return

    def Description( self ):
        self.Description = "Creates 500,000 files named 'nomnomX' where X is the file number, writes 'nomnom' into each."
        return self.Description

    def Configure( self ):
        self.mTargetDirectory = raw_input( "Enter target directory (Ex. /tmp/): " )
        return

    def GetParameters( self ):
        return self.mTargetDirectory

    def SetParameters( self, parametersStr ):
        self.mTargetDirectory = parametersStr
        return

    def ActivateLogging( self, logFlag ):
        print self.commentsStr + ": Setting Logging flag!"
        print logFlag
        return

    def Ignite( self ):
        print self.commentsStr + ": Creating 500,000 nomnom files in: " + self.mTargetDirectory

        # Create small 'nomnom' files one at a time until 500,000 have been written
        # or the target directory can no longer accept new files.
        i = 0
        self.sourceFilenameStr = ""

        try:
            while( i < 500000 ):
                self.sourceFilenameStr = self.mTargetDirectory + "nomnom" + str( i )
                self.file = open( self.sourceFilenameStr, "w")
                # Write the literal string 'nomnom' to the file
                self.file.write( 'nomnom' )
                self.file.close()
                i = i + 1
        except:
            # Stop when a file can no longer be created in the target directory
            print "Error creating nomnom file", self.sourceFilenameStr
            return

        return
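A minimal driver sketch follows (written in Python 2 to match the module above). The import path assumes the usual DumpsterFire FireModules package layout, and the target directory is likewise an assumption, not something taken from the original source.
# Hypothetical driver for the module above (Python 2); import path and
# target directory are assumptions.
from FireModules.Filesystem.create_500000_files import create_500000_files

fire = create_500000_files("")            # the toolset passes a string to __init__
fire.SetParameters("/tmp/nomnom_test/")   # directory is assumed to exist and be writable
print fire.Description()
fire.Ignite()                             # creates nomnom0 ... nomnom499999, each containing 'nomnom'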
757