max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
397
<filename>robosuite/models/objects/composite_body/__init__.py from .hinged_box import HingedBoxObject
36
684
<filename>app/src/main/java/com/emmaguy/cleanstatusbar/ToggleReceiver.java<gh_stars>100-1000 package com.emmaguy.cleanstatusbar; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class ToggleReceiver extends BroadcastReceiver { private static final String EXTRA_ENABLED = "enabled"; @Override public void onReceive(Context context, Intent intent) { Intent service = new Intent(context, CleanStatusBarService.class); if (intent.getBooleanExtra(EXTRA_ENABLED, true)) { context.startService(service); } else { context.stopService(service); } } }
245
12,366
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // /////////////////////////////////////////////////////////////////////////////// #include "tink/hybrid/internal/hpke_key_boringssl.h" #include <utility> #include "openssl/base.h" #include "openssl/err.h" #include "openssl/hpke.h" #include "tink/hybrid/internal/hpke_util_boringssl.h" #include "tink/util/status.h" #include "tink/util/statusor.h" #include "proto/hpke.pb.h" namespace crypto { namespace tink { namespace internal { using ::google::crypto::tink::HpkeKem; util::StatusOr<std::unique_ptr<HpkeKeyBoringSsl>> HpkeKeyBoringSsl::New( const HpkeKem& kem, absl::string_view recipient_private_key) { std::unique_ptr<HpkeKeyBoringSsl> hpke_key = absl::WrapUnique(new HpkeKeyBoringSsl(kem)); util::Status status = hpke_key->Init(recipient_private_key); if (!status.ok()) { return status; } return hpke_key; } util::Status HpkeKeyBoringSsl::Init(absl::string_view recipient_private_key) { util::StatusOr<const EVP_HPKE_KEM*> hpke_kem = KemParam(kem_); if (!hpke_kem.ok()) { return hpke_kem.status(); } if (!EVP_HPKE_KEY_init( recipient_private_key_.get(), *hpke_kem, reinterpret_cast<const uint8_t*>(recipient_private_key.data()), recipient_private_key.size())) { return util::Status( util::error::UNKNOWN, "Unable to initialize BoringSSL HPKE recipient private key."); } return util::OkStatus(); } } // namespace internal } // namespace tink } // namespace crypto
738
2,542
<reponame>gridgentoo/ServiceFabricAzure // ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #pragma once namespace Serialization { class FabricSerializable : public IFabricSerializable, public KObject<FabricSerializable> { public: FabricSerializable(); virtual ~FabricSerializable(); FabricSerializable(__in FabricSerializable const & serializable); FabricSerializable(__in FabricSerializable && serializable); FabricSerializable & operator=(__in FabricSerializable && serializable); FabricSerializable & operator=(__in FabricSerializable const & serializable); bool operator==(__in FabricSerializable const & serializable) const; virtual NTSTATUS GetTypeInformation(__out FabricTypeInformation & typeInformation) const; virtual NTSTATUS Write(__in IFabricSerializableStream * stream); virtual NTSTATUS Read(__in IFabricSerializableStream * stream); virtual NTSTATUS GetUnknownData(__in ULONG scope, __out FabricIOBuffer & data); virtual NTSTATUS SetUnknownData(__in ULONG scope, __in FabricIOBuffer buffer); virtual void ClearUnknownData() { if (unknownScopeData_ != nullptr) { this->unknownScopeData_->Clear(); } } private: struct DataContext : public KObject<DataContext> { public: DataContext() : _length(0) { } DataContext(DataContext && context); DataContext& operator=(DataContext const & context); DataContext& operator=(DataContext && context); bool operator==(DataContext & context); DataContext(ULONG length, KUniquePtr<UCHAR, ArrayDeleter<UCHAR>> && data) : _length(length) , _data(Ktl::Move(data)) { } ULONG _length; KUniquePtr<UCHAR, ArrayDeleter<UCHAR>> _data; }; KArray<DataContext> * unknownScopeData_; }; }
988
190,993
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include <string> #include <vector> #include <gmock/gmock.h> #include <gtest/gtest.h> #include "tensorflow/lite/toco/graph_transformations/graph_transformations.h" #include "tensorflow/lite/toco/model.h" #include "tensorflow/lite/toco/tooling_util.h" namespace { using ::testing::Test; class RemoveSuccessiveTransposeTest : public Test { protected: RemoveSuccessiveTransposeTest() {} void SetUp() override { model_.reset(new toco::Model); } void CreateArray(const std::string& name, const std::vector<int>& shape) { toco::Array& array = model_->GetOrCreateArray(name); array.data_type = toco::ArrayDataType::kFloat; toco::Shape* array_shape = array.mutable_shape(); *(array_shape->mutable_dims()) = shape; } void CreateConstantArray(const std::string& name, const std::vector<int>& shape, const std::vector<float>& data) { CreateArray(name, shape); toco::Array& array = model_->GetOrCreateArray(name); auto& array_buffer = array.GetMutableBuffer<toco::ArrayDataType::kFloat>(); int bufsize = 1; for (int dim : shape) { bufsize *= dim; } array_buffer.data.resize(bufsize); float* buf_ptr = array_buffer.data.data(); for (int i = 0; i < bufsize; ++i) { buf_ptr[i] = data[i]; } } void CreateGraph(const std::vector<int>& perm1, const std::vector<int>& perm2) { CreateArray("InputA", {2, 2}); CreateArray("InputB", {2, 2}); 
CreateArray("Input", {2, 2}); CreateArray("InputTranspose", {2, 2}); CreateArray("InputTransposeTranspose", {2, 2}); CreateArray("InputTransposeTransposePlusB", {2, 2}); auto* add_op = new toco::AddOperator; add_op->inputs = {"InputA", "InputB"}; add_op->outputs = {"Input"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(add_op)); auto* transpose_op = new toco::TransposeOperator; transpose_op->inputs = {"Input"}; transpose_op->perm = perm1; transpose_op->outputs = {"InputTranspose"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(transpose_op)); auto* transpose2_op = new toco::TransposeOperator; transpose2_op->inputs = {"InputTranspose"}; transpose2_op->perm = perm2; transpose2_op->outputs = {"InputTransposeTranspose"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(transpose2_op)); auto* add2_op = new toco::AddOperator; add2_op->inputs = {"InputTransposeTranspose", "InputB"}; add2_op->outputs = {"InputTransposeTransposePlusB"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(add2_op)); } std::unique_ptr<toco::Model> model_; }; TEST_F(RemoveSuccessiveTransposeTest, RemoveTranspose) { // Creating a model. CreateGraph({1, 0}, {1, 0}); toco::RemoveSuccessiveTranspose transformation; bool modified; ASSERT_TRUE(transformation.Run(model_.get(), /*op_index=*/1, &modified).ok()); EXPECT_TRUE(modified); ASSERT_EQ(model_->operators.size(), 2); ASSERT_EQ(model_->operators[0]->type, toco::OperatorType::kAdd); ASSERT_EQ(model_->operators[1]->type, toco::OperatorType::kAdd); ASSERT_EQ(model_->operators[1]->inputs[0], model_->operators[0]->outputs[0]); } TEST_F(RemoveSuccessiveTransposeTest, DontRemoveNotIdentityTranspose) { // Creating a model. 
CreateGraph({0, 2, 1}, {1, 0, 2}); toco::RemoveSuccessiveTranspose transformation; bool modified; ASSERT_TRUE(transformation.Run(model_.get(), /*op_index=*/1, &modified).ok()); EXPECT_FALSE(modified); } TEST_F(RemoveSuccessiveTransposeTest, DontRemoveTransposeOutputUnused) { CreateArray("InputA", {2, 2}); CreateArray("InputB", {2, 2}); CreateArray("Input", {2, 2}); CreateArray("InputTranspose", {2, 2}); CreateArray("InputTransposeTranspose", {2, 2}); auto* add_op = new toco::AddOperator; add_op->inputs = {"InputA", "InputB"}; add_op->outputs = {"Input"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(add_op)); auto* transpose_op = new toco::TransposeOperator; transpose_op->inputs = {"Input"}; transpose_op->perm = {0, 2, 1}; transpose_op->outputs = {"InputTranspose"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(transpose_op)); auto* transpose2_op = new toco::TransposeOperator; transpose2_op->inputs = {"InputTranspose"}; transpose2_op->perm = {0, 2, 1}; transpose2_op->outputs = {"InputTransposeTranspose"}; model_->operators.push_back(std::unique_ptr<toco::Operator>(transpose2_op)); toco::RemoveSuccessiveTranspose transformation; bool modified; ASSERT_TRUE(transformation.Run(model_.get(), /*op_index=*/1, &modified).ok()); EXPECT_FALSE(modified); } } // namespace
1,982
1,414
/** * Glide64 Video Plugin - winlnxdefs.h * Copyright (C) 2002 Dave2001 * * Mupen64Plus homepage: http://code.google.com/p/mupen64plus/ * * This program is free software; you can redistribute it and/ * or modify it under the terms of the GNU General Public Li- * cence as published by the Free Software Foundation; either * version 2 of the License, or any later version. * * This program is distributed in the hope that it will be use- * ful, but WITHOUT ANY WARRANTY; without even the implied war- * ranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU General Public Licence for more details. * * You should have received a copy of the GNU General Public * Licence along with this program; if not, write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA * **/ #ifndef WINLNXDEFS_H #define WINLNXDEFS_H #ifndef WIN32 typedef int BOOL; typedef unsigned char BYTE; typedef unsigned short WORD; typedef unsigned int DWORD; typedef int INT; typedef long long LONGLONG; typedef int __int32; typedef void* HINSTANCE; typedef int PROPSHEETHEADER; typedef int PROPSHEETPAGE; typedef int HWND; #define FALSE false #define TRUE true #define __stdcall #define __declspec(dllexport) #define _cdecl #define WINAPI typedef union _LARGE_INTEGER { struct { DWORD LowPart; INT HighPart; } s; struct { DWORD LowPart; INT HighPart; } u; LONGLONG QuadPart; } LARGE_INTEGER, *PLARGE_INTEGER; #define HIWORD(a) ((unsigned int)(a) >> 16) #define LOWORD(a) ((a) & 0xFFFF) #endif #endif // WINLNXDEFS_H
587
1,963
/* ******************************************************************************* * Copyright (c) 2021, STMicroelectronics * All rights reserved. * * This software component is licensed by ST under BSD 3-Clause license, * the "License"; You may not use this file except in compliance with the * License. You may obtain a copy of the License at: * opensource.org/licenses/BSD-3-Clause * ******************************************************************************* */ #pragma once /*---------------------------------------------------------------------------- * STM32 pins number *----------------------------------------------------------------------------*/ #define PA0 0 // GPS_PPS_PIN #define PA8 1 #define PA9 2 // UART_TX #define PA10 3 // UART_RX #define PA12 4 // LED1 #define PA13 5 #define PA14 6 #define PA15 7 // GPS_POWER_ON_PIN #define PB2 8 #define PB3 9 #define PB4 10 // LED2 #define PB5 11 #define PB8 12 // I2C_SCL #define PB9 13 // I2C_SDA #define PB10 14 // GPS_UART_TX #define PB11 15 // GPS_UART_RX #define PA1 PIN_A0 #define PA2 PIN_A1 // ADC_VBAT #define PB12 PIN_A2 #define PB14 19 // LIS3DH_INT1_PIN #define PB15 20 // LIS3DH_INT2_PIN #define PB13 21 // RADIO_RESET #define PH1 22 // RADIO_XTAL_EN #define PA7 23 // RADIO_MOSI #define PA6 24 // RADIO_MISO #define PA5 25 // RADIO_SCLK #define PB0 26 // RADIO_NSS #define PA11 27 // RADIO_DIO_0 #define PB1 28 // RADIO_DIO_1 #define PA3 29 // RADIO_DIO_2 #define PH0 30 // RADIO_DIO_3 #define PC13 31 // RADIO_DIO_4 #define PB6 32 // RADIO_RF_CRX_RX #define PB7 33 // RADIO_RF_CBT_HF #define PA4 34 // RADIO_RF_CTX_PA // Alternate pins number #define PA2_ALT1 (PA2 | ALT1) #define PA3_ALT1 (PA3 | ALT1) #define PA6_ALT1 (PA6 | ALT1) #define PA7_ALT1 (PA7 | ALT1) #define PB8_ALT1 (PB8 | ALT1) #define PB9_ALT1 (PB9 | ALT1) #define NUM_DIGITAL_PINS 35 #define NUM_ANALOG_INPUTS 3 // On-board LED pin number #define LED_BUILTIN PA12 #define LED1 LED_BUILTIN #define LED2 PB4 // SPI Definitions #define PIN_SPI_SS PB0 
#define PIN_SPI_MOSI PA7 #define PIN_SPI_MISO PA6 #define PIN_SPI_SCK PA5 // I2C Definitions #define PIN_WIRE_SDA PB9 #define PIN_WIRE_SCL PB8 // Timer Definitions // Use TIM6/TIM7 when possible as servo and tone don't need GPIO output pin #ifndef TIMER_TONE #define TIMER_TONE TIM6 #endif #ifndef TIMER_SERVO #define TIMER_SERVO TIM7 #endif // UART Definitions #define SERIAL_UART_INSTANCE 1 // Default pin used for 'Serial' instance (ex: ST-Link) // Mandatory for Firmata #define PIN_SERIAL_RX PA10 #define PIN_SERIAL_TX PA9 // LoRa Definitions #define RADIO_RESET PB13 #define RADIO_XTAL_EN PH1 #define RADIO_MOSI PA7 #define RADIO_MISO PA6 #define RADIO_SCLK PA5 #define RADIO_NSS PB0 #define RADIO_DIO_0 PA11 #define RADIO_DIO_1 PB1 #define RADIO_DIO_2 PA3 #define RADIO_DIO_3 PH0 #define RADIO_DIO_4 PC13 #define RADIO_RF_CRX_RX PB6 //CRF3 #define RADIO_RF_CBT_HF PB7 //CRF2 HF #define RADIO_RF_CTX_PA PA4 //CRF1 PA // GPS Definitions #define GPS_PPS_PIN PA0 #define GPS_UART USART3 #define GPS_POWER_ON_PIN PA15 #define GPS_UART_TX PB10 #define GPS_UART_RX PB11 // MEMS (LIS3DH) #define LIS3DH_INT1_PIN PB14 #define LIS3DH_INT2_PIN PB15 /* HAL configuration */ #define HSE_VALUE 12000000U /*---------------------------------------------------------------------------- * Arduino objects - C++ only *----------------------------------------------------------------------------*/ #ifdef __cplusplus // These serial port names are intended to allow libraries and architecture-neutral // sketches to automatically default to the correct port name for a particular type // of use. For example, a GPS module would normally connect to SERIAL_PORT_HARDWARE_OPEN, // the first hardware serial port whose RX/TX pins are not dedicated to another use. 
// // SERIAL_PORT_MONITOR Port which normally prints to the Arduino Serial Monitor // // SERIAL_PORT_USBVIRTUAL Port which is USB virtual serial // // SERIAL_PORT_LINUXBRIDGE Port which connects to a Linux system via Bridge library // // SERIAL_PORT_HARDWARE Hardware serial port, physical RX & TX pins. // // SERIAL_PORT_HARDWARE_OPEN Hardware serial ports which are open for use. Their RX & TX // pins are NOT connected to anything by default. #define SERIAL_PORT_MONITOR Serial #define SERIAL_PORT_HARDWARE Serial #endif
2,809
327
<reponame>TaffyBlog/jQWidgets { "main": "react_jqxtree.umd.js", "module": "react_jqxtree.esm.js", "typings": "react_jqxtree.d.ts", "name": "jqwidgets-react/jqxtree", "sideEffects": false }
108
389
/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~ Copyright 2020 Adobe ~ ~ Licensed under the Apache License, Version 2.0 (the "License"); ~ you may not use this file except in compliance with the License. ~ You may obtain a copy of the License at ~ ~ http://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ package com.adobe.cq.wcm.core.components.internal.models.v1; import javax.annotation.PostConstruct; import javax.inject.Inject; import javax.json.Json; import javax.json.JsonObjectBuilder; import com.adobe.cq.wcm.core.components.util.AbstractComponentImpl; import org.apache.commons.lang3.StringUtils; import org.apache.sling.api.SlingHttpServletRequest; import org.apache.sling.api.resource.Resource; import org.apache.sling.caconfig.ConfigurationBuilder; import org.apache.sling.models.annotations.Exporter; import org.apache.sling.models.annotations.Model; import org.apache.sling.models.annotations.injectorspecific.InjectionStrategy; import org.apache.sling.models.annotations.injectorspecific.ValueMapValue; import org.jetbrains.annotations.NotNull; import com.adobe.cq.export.json.ComponentExporter; import com.adobe.cq.export.json.ExporterConstants; import com.adobe.cq.wcm.core.components.internal.services.pdfviewer.PdfViewerCaConfig; import com.adobe.cq.wcm.core.components.models.PdfViewer; import org.jetbrains.annotations.Nullable; @Model(adaptables = SlingHttpServletRequest.class, adapters = { PdfViewer.class, ComponentExporter.class }, resourceType = { PdfViewerImpl.RESOURCE_TYPE }) @Exporter(name = ExporterConstants.SLING_MODEL_EXPORTER_NAME, 
extensions = ExporterConstants.SLING_MODEL_EXTENSION) public class PdfViewerImpl extends AbstractComponentImpl implements PdfViewer { protected static final String RESOURCE_TYPE = "core/wcm/components/pdfviewer/v1/pdfviewer"; protected static final String FIELD_EMBED_MODE = "embedMode"; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) @Nullable private String documentPath; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) @Nullable private String type; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) @Nullable private String defaultViewMode; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean borderless; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean showAnnotationTools; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean showFullScreen; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean showLeftHandPanel; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean showDownloadPdf; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean showPrintPdf; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean showPageControls; @ValueMapValue(injectionStrategy = InjectionStrategy.OPTIONAL) private boolean dockPageControls; @Inject private Resource resource; private PdfViewerCaConfig caConfig; @PostConstruct protected void initModel() { ConfigurationBuilder cb = resource.adaptTo(ConfigurationBuilder.class); if (cb != null) { caConfig = cb.as(PdfViewerCaConfig.class); } } @Override public String getClientId() { return caConfig.clientId(); } @Override public String getReportSuiteId() { return caConfig.reportSuiteId(); } @Override @Nullable public String getDocumentPath() { return documentPath; } @Override public String getDocumentFileName() { if (this.documentPath != null) { return StringUtils.substringAfterLast(this.documentPath, "/"); } return null; } 
@Override @Nullable public String getType() { return type; } @Override @Nullable public String getDefaultViewMode() { return defaultViewMode; } @Override public boolean isBorderless() { return borderless; } @Override public boolean isShowAnnotationTools() { return showAnnotationTools; } @Override public boolean isShowFullScreen() { return showFullScreen; } @Override public boolean isShowLeftHandPanel() { return showLeftHandPanel; } @Override public boolean isShowDownloadPdf() { return showDownloadPdf; } @Override public boolean isShowPrintPdf() { return showPrintPdf; } @Override public boolean isShowPageControls() { return showPageControls; } @Override public boolean isDockPageControls() { return dockPageControls; } @Override public String getViewerConfigJson() { JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); jsonObjectBuilder.add(FIELD_EMBED_MODE, type); if (StringUtils.equals(type, FULL_WINDOW)) { jsonObjectBuilder.add(PN_DEFAULT_VIEW_MODE, defaultViewMode); jsonObjectBuilder.add(PN_SHOW_ANNOTATION_TOOLS, showAnnotationTools); jsonObjectBuilder.add(PN_SHOW_LEFT_HAND_PANEL, showLeftHandPanel); } if (StringUtils.equals(type, SIZED_CONTAINER)) { jsonObjectBuilder.add(PN_SHOW_FULL_SCREEN, showFullScreen); } if (StringUtils.equals(type, FULL_WINDOW) || StringUtils.equals(type, SIZED_CONTAINER)) { jsonObjectBuilder.add(PN_SHOW_PAGE_CONTROLS, showPageControls); jsonObjectBuilder.add(PN_DOCK_PAGE_CONTROLS, dockPageControls); } jsonObjectBuilder.add(PN_SHOW_DOWNLOAD_PDF, showDownloadPdf); jsonObjectBuilder.add(PN_SHOW_PRINT_PDF, showPrintPdf); return jsonObjectBuilder.build().toString(); } @Override public String getContainerClass() { if(!StringUtils.isEmpty(type)) { if (type.equals(FULL_WINDOW) && borderless) { return CSS_BORDERLESS; } else if (type.equals(SIZED_CONTAINER)) { return CSS_SIZED_CONTAINER; } else if (type.equals(IN_LINE)) { return CSS_IN_LINE; } } return CSS_FULL_WINDOW; } @NotNull @Override public String getExportedType() { return 
resource.getResourceType(); } }
2,573
764
<gh_stars>100-1000 {"symbol": "JWL","address": "0x8275eBF521Dc217aa79C88132017A5BCEf001dd9","overview":{"en": ""},"email": "","website": "https://jewelpay.org/","state": "NORMAL","links": {"blog": "https://medium.com/@jewelpay","twitter": "https://twitter.com/jewelpay_org","telegram": "https://t.me/jewelpay","github": "https://github.com/jeveldev"}}
138
410
<gh_stars>100-1000 { "makeDropdownMenuItems": { "close": "Close Pane", "untitledTab": "untitled", "noTabs": "<No Tabs>" }, "contextMenu": { "close": "Close", "closeOthers": "Close Others", "closeAll": "Close All", "verticalSplit": "Vertical Split", "horizontalSplit": "Horizontal Split" } }
156
396
package com.mapbox.turf; import com.mapbox.geojson.Feature; import com.mapbox.geojson.FeatureCollection; import com.mapbox.geojson.GeometryCollection; import com.mapbox.geojson.LineString; import com.mapbox.geojson.MultiLineString; import com.mapbox.geojson.MultiPoint; import com.mapbox.geojson.MultiPolygon; import com.mapbox.geojson.Point; import com.mapbox.geojson.Polygon; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import java.io.IOException; import java.util.Arrays; import static org.hamcrest.CoreMatchers.startsWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; public class TurfConversionTest extends TestUtils { private static final String TURF_EXPLODE_MULTI_POINT = "turf-explode/multipoint.geojson"; private static final String TURF_EXPLODE_LINESTRING = "turf-explode/linestring.geojson"; private static final String TURF_EXPLODE_MULTILINESTRING = "turf-explode/multilinestring.geojson"; private static final String TURF_EXPLODE_MULTIPOLYGON = "turf-explode/multipolygon.geojson"; private static final String TURF_EXPLODE_GEOMETRY_COLLECTION = "turf-explode/geometrycollection.geojson"; private static final String TURF_COMBINE_FEATURE_COLLECTION_TO_COMBINE = "turf-combine/feature_collection_to_combine.geojson"; private static final String TURF_POLYGON_TO_LINE_PATH_IN = "turf-polygon-to-line/in/"; private static final String TURF_POLYGON_TO_LINE_PATH_OUT = "turf-polygon-to-line/expected/"; private static final String TURF_POLYGON_TO_LINE_FILENAME_POLYGON= "polygon.geojson"; private static final String TURF_POLYGON_TO_LINE_FILENAME_GEOMETRY_POLYGON= "geometry-polygon.geojson"; private static final String TURF_POLYGON_TO_LINE_FILENAME_POLYGON_WITH_HOLE = "polygon-with-hole.geojson"; private static final String TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON = "multi-polygon.geojson"; private static final String TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON_OUTER_DOUGHNUT = 
"multi-polygon-outer-doughnut.geojson"; private static final String TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON_WITH_HOLES = "multi-polygon-with-holes.geojson"; @Rule public ExpectedException thrown = ExpectedException.none(); @Test public void radiansToDistance() { assertEquals( 1, TurfConversion.radiansToLength(1, TurfConstants.UNIT_RADIANS), DELTA); assertEquals( 6373, TurfConversion.radiansToLength(1, TurfConstants.UNIT_KILOMETERS), DELTA); assertEquals( 3960, TurfConversion.radiansToLength(1, TurfConstants.UNIT_MILES), DELTA); } @Test public void distanceToRadians() { assertEquals( 1, TurfConversion.lengthToRadians(1, TurfConstants.UNIT_RADIANS), DELTA); assertEquals( 1, TurfConversion.lengthToRadians(6373, TurfConstants.UNIT_KILOMETERS), DELTA); assertEquals( 1, TurfConversion.lengthToRadians(3960, TurfConstants.UNIT_MILES), DELTA); } @Test public void distanceToDegrees() { assertEquals( 57.29577951308232, TurfConversion.lengthToDegrees(1, TurfConstants.UNIT_RADIANS), DELTA); assertEquals( 0.8990393772647469, TurfConversion.lengthToDegrees(100, TurfConstants.UNIT_KILOMETERS), DELTA); assertEquals( 0.14468631190172304, TurfConversion.lengthToDegrees(10, TurfConstants.UNIT_MILES), DELTA); } @Test public void convertDistance() throws TurfException { assertEquals(1, TurfConversion.convertLength(1000, TurfConstants.UNIT_METERS), DELTA); assertEquals(0.6213714106386318, TurfConversion.convertLength(1, TurfConstants.UNIT_KILOMETERS, TurfConstants.UNIT_MILES), DELTA); assertEquals(1.6093434343434343, TurfConversion.convertLength(1, TurfConstants.UNIT_MILES, TurfConstants.UNIT_KILOMETERS), DELTA); assertEquals(1.851999843075488, TurfConversion.convertLength(1, TurfConstants.UNIT_NAUTICAL_MILES), DELTA); assertEquals(100, TurfConversion.convertLength(1, TurfConstants.UNIT_METERS, TurfConstants.UNIT_CENTIMETERS), DELTA); } @Test public void combinePointsToMultiPoint() throws Exception { FeatureCollection pointFeatureCollection = FeatureCollection.fromFeatures( 
Arrays.asList( Feature.fromGeometry(Point.fromLngLat(-2.46, 27.6835)), Feature.fromGeometry(Point.fromLngLat(41.83, 7.3624)) )); FeatureCollection featureCollectionWithNewMultiPointObject = TurfConversion.combine(pointFeatureCollection); assertNotNull(featureCollectionWithNewMultiPointObject); MultiPoint multiPoint = (MultiPoint) featureCollectionWithNewMultiPointObject.features().get(0).geometry(); assertNotNull(multiPoint); assertEquals(-2.46, multiPoint.coordinates().get(0).longitude(), DELTA); assertEquals(27.6835, multiPoint.coordinates().get(0).latitude(), DELTA); assertEquals(41.83, multiPoint.coordinates().get(1).longitude(), DELTA); assertEquals(7.3624, multiPoint.coordinates().get(1).latitude(), DELTA); } @Test public void combinePointAndMultiPointToMultiPoint() throws Exception { FeatureCollection pointAndMultiPointFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry(Point.fromLngLat(-2.46, 27.6835)), Feature.fromGeometry(MultiPoint.fromLngLats( Arrays.asList(Point.fromLngLat(41.83, 7.3624), Point.fromLngLat(100, 101))) ))); FeatureCollection combinedFeatureCollection = TurfConversion.combine(pointAndMultiPointFeatureCollection); assertNotNull(combinedFeatureCollection); MultiPoint multiPoint = (MultiPoint) combinedFeatureCollection.features().get(0).geometry(); assertNotNull(multiPoint); assertEquals(-2.46, multiPoint.coordinates().get(0).longitude(), DELTA); assertEquals(27.6835, multiPoint.coordinates().get(0).latitude(), DELTA); assertEquals(41.83, multiPoint.coordinates().get(1).longitude(), DELTA); assertEquals(7.3624, multiPoint.coordinates().get(1).latitude(), DELTA); assertEquals(100, multiPoint.coordinates().get(2).longitude(), DELTA); assertEquals(101, multiPoint.coordinates().get(2).latitude(), DELTA); } @Test public void combineTwoLineStringsToMultiLineString() throws Exception { FeatureCollection lineStringFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( 
Feature.fromGeometry(LineString.fromLngLats( Arrays.asList(Point.fromLngLat(-11.25, 55.7765), Point.fromLngLat(41.1328, 22.91792)))), Feature.fromGeometry(LineString.fromLngLats( Arrays.asList(Point.fromLngLat(3.8671, 19.3111), Point.fromLngLat(20.742, -20.3034)))) )); FeatureCollection featureCollectionWithNewMultiLineStringObject = TurfConversion.combine(lineStringFeatureCollection); assertNotNull(featureCollectionWithNewMultiLineStringObject); MultiLineString multiLineString = (MultiLineString) featureCollectionWithNewMultiLineStringObject.features().get(0).geometry(); assertNotNull(multiLineString); // Checking the first LineString in the MultiLineString assertEquals(-11.25, multiLineString.coordinates().get(0).get(0).longitude(), DELTA); assertEquals(55.7765, multiLineString.coordinates().get(0).get(0).latitude(), DELTA); // Checking the second LineString in the MultiLineString assertEquals(41.1328, multiLineString.coordinates().get(0).get(1).longitude(), DELTA); assertEquals(22.91792, multiLineString.coordinates().get(0).get(1).latitude(), DELTA); } @Test public void combineLineStringAndMultiLineStringToMultiLineString() throws Exception { FeatureCollection lineStringFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry(LineString.fromLngLats(Arrays.asList( Point.fromLngLat(-11.25, 55.7765), Point.fromLngLat(41.1328, 22.91792)))), Feature.fromGeometry( MultiLineString.fromLineStrings(Arrays.asList( LineString.fromLngLats(Arrays.asList( Point.fromLngLat(102, -10), Point.fromLngLat(130.0, 4.0) )), LineString.fromLngLats(Arrays.asList( Point.fromLngLat(40.0, -20.0), Point.fromLngLat(150.0, 18.0) )) ))) )); FeatureCollection featureCollectionWithNewMultiLineStringObject = TurfConversion.combine(lineStringFeatureCollection); assertNotNull(featureCollectionWithNewMultiLineStringObject); MultiLineString multiLineString = (MultiLineString) featureCollectionWithNewMultiLineStringObject. 
features().get(0).geometry(); assertNotNull(multiLineString); // Checking the first LineString in the MultiLineString assertEquals(-11.25, multiLineString.coordinates().get(0).get(0).longitude(), DELTA); assertEquals(55.7765, multiLineString.coordinates().get(0).get(0).latitude(), DELTA); assertEquals(41.1328, multiLineString.coordinates().get(0).get(1).longitude(), DELTA); assertEquals(22.91792, multiLineString.coordinates().get(0).get(1).latitude(), DELTA); // Checking the second LineString in the MultiLineString assertEquals(102, multiLineString.coordinates().get(1).get(0).longitude(), DELTA); assertEquals(-10, multiLineString.coordinates().get(1).get(0).latitude(), DELTA); assertEquals(130.0, multiLineString.coordinates().get(1).get(1).longitude(), DELTA); assertEquals(4.0, multiLineString.coordinates().get(1).get(1).latitude(), DELTA); // Checking the third LineString in the MultiLineString assertEquals(40.0, multiLineString.coordinates().get(2).get(0).longitude(), DELTA); assertEquals(-20.0, multiLineString.coordinates().get(2).get(0).latitude(), DELTA); assertEquals(150.0, multiLineString.coordinates().get(2).get(1).longitude(), DELTA); assertEquals(18.0, multiLineString.coordinates().get(2).get(1).latitude(), DELTA); } @Test public void combinePolygonToMultiPolygon() throws Exception { FeatureCollection polygonFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry(Polygon.fromLngLats(Arrays.asList( Arrays.asList( Point.fromLngLat(61.938950426660604, 5.9765625), Point.fromLngLat(52.696361078274485, 33.046875), Point.fromLngLat(69.90011762668541, 28.828124999999996), Point.fromLngLat(61.938950426660604, 5.9765625))))), Feature.fromGeometry(Polygon.fromLngLats(Arrays.asList( Arrays.asList( Point.fromLngLat(11.42578125, 16.636191878397664), Point.fromLngLat(7.91015625, -9.102096738726443), Point.fromLngLat(31.113281249999996, 17.644022027872726), Point.fromLngLat(11.42578125, 16.636191878397664) )))) )); FeatureCollection 
featureCollectionWithNewMultiPolygonObject = TurfConversion.combine(polygonFeatureCollection); assertNotNull(featureCollectionWithNewMultiPolygonObject); MultiPolygon multiPolygon = (MultiPolygon) featureCollectionWithNewMultiPolygonObject.features().get(0).geometry(); assertNotNull(multiPolygon); // Checking the first Polygon in the MultiPolygon // Checking the first Point assertEquals(61.938950426660604, multiPolygon.coordinates().get(0).get(0).get(0).longitude(), DELTA); assertEquals(5.9765625, multiPolygon.coordinates().get(0).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(52.696361078274485, multiPolygon.coordinates().get(0).get(0).get(1).longitude(), DELTA); assertEquals(33.046875, multiPolygon.coordinates().get(0).get(0).get(1).latitude(), DELTA); // Checking the second Polygon in the MultiPolygon // Checking the first Point assertEquals(11.42578125, multiPolygon.coordinates().get(1).get(0).get(0).longitude(), DELTA); assertEquals(16.636191878397664, multiPolygon.coordinates().get(1).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(7.91015625, multiPolygon.coordinates().get(1).get(0).get(1).longitude(), DELTA); assertEquals(-9.102096738726443, multiPolygon.coordinates().get(1).get(0).get(1).latitude(), DELTA); } @Test public void combinePolygonAndMultiPolygonToMultiPolygon() throws Exception { FeatureCollection polygonFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry( Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(61.938950426660604, 5.9765625), Point.fromLngLat(52.696361078274485, 33.046875), Point.fromLngLat(69.90011762668541, 28.828124999999996), Point.fromLngLat(61.938950426660604, 5.9765625))))), Feature.fromGeometry(MultiPolygon.fromPolygons(Arrays.asList( Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(11.42578125, 16.636191878397664), Point.fromLngLat(7.91015625, -9.102096738726443), Point.fromLngLat(31.113281249999996, 
17.644022027872726), Point.fromLngLat(11.42578125, 16.636191878397664) ))), Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(30.0, 0.0), Point.fromLngLat(102.0, 0.0), Point.fromLngLat(103.0, 1.0), Point.fromLngLat(30.0, 0.0) ))) ))) )); FeatureCollection combinedFeatureCollection = TurfConversion.combine(polygonFeatureCollection); assertNotNull(combinedFeatureCollection); MultiPolygon multiPolygon = (MultiPolygon) combinedFeatureCollection.features().get(0).geometry(); assertNotNull(multiPolygon); // Checking the first Polygon in the MultiPolygon // Checking the first Point assertEquals(61.938950426660604, multiPolygon.coordinates().get(0).get(0).get(0).longitude(), DELTA); assertEquals(5.9765625, multiPolygon.coordinates().get(0).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(52.696361078274485, multiPolygon.coordinates().get(0).get(0).get(1).longitude(), DELTA); assertEquals(33.046875, multiPolygon.coordinates().get(0).get(0).get(1).latitude(), DELTA); // Checking the second Polygon in the MultiPolygon // Checking the first Point assertEquals(11.42578125, multiPolygon.coordinates().get(1).get(0).get(0).longitude(), DELTA); assertEquals(16.636191878397664, multiPolygon.coordinates().get(1).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(7.91015625, multiPolygon.coordinates().get(1).get(0).get(1).longitude(), DELTA); assertEquals(-9.102096738726443, multiPolygon.coordinates().get(1).get(0).get(1).latitude(), DELTA); // Checking the third Polygon in the MultiPolygon // Checking the first Point assertEquals(30.0, multiPolygon.coordinates().get(2).get(0).get(0).longitude(), DELTA); assertEquals(0.0, multiPolygon.coordinates().get(2).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(102.0, multiPolygon.coordinates().get(2).get(0).get(1).longitude(), DELTA); assertEquals(0.0, multiPolygon.coordinates().get(2).get(0).get(1).latitude(), DELTA); } @Test public void 
combinePolygonAndMultiPolygonAndPointToMultiPolygon() throws Exception { FeatureCollection featureCollectionWithPointPolygonAndMultiPolygon = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry( Point.fromLngLat(-2.46, 27.6835)), Feature.fromGeometry( Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(61.938950426660604, 5.9765625), Point.fromLngLat(52.696361078274485, 33.046875), Point.fromLngLat(69.90011762668541, 28.828124999999996), Point.fromLngLat(61.938950426660604, 5.9765625))))), Feature.fromGeometry( MultiPolygon.fromPolygons(Arrays.asList( Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(11.42578125, 16.636191878397664), Point.fromLngLat(7.91015625, -9.102096738726443), Point.fromLngLat(31.113281249999996, 17.644022027872726), Point.fromLngLat(11.42578125, 16.636191878397664) ))), Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(30.0, 0.0), Point.fromLngLat(102.0, 0.0), Point.fromLngLat(103.0, 1.0), Point.fromLngLat(30.0, 0.0) ))) ))) )); FeatureCollection combinedFeatureCollection = TurfConversion.combine(featureCollectionWithPointPolygonAndMultiPolygon); assertNotNull(combinedFeatureCollection); MultiPolygon multiPolygon = null; MultiPoint multiPoint = null; for (int x = 0; x < combinedFeatureCollection.features().size(); x++) { Feature singleFeature = combinedFeatureCollection.features().get(x); if (singleFeature.geometry() instanceof MultiPolygon) { multiPolygon = (MultiPolygon) combinedFeatureCollection.features().get(x).geometry(); } if (singleFeature.geometry() instanceof MultiPoint) { multiPoint = (MultiPoint) combinedFeatureCollection.features().get(x).geometry(); } } assertNotNull(multiPolygon); assertNotNull(multiPoint); // Checking the first Polygon in the MultiPolygon // Checking the first Point assertEquals(61.938950426660604, multiPolygon.coordinates().get(0).get(0).get(0).longitude(), DELTA); assertEquals(5.9765625, multiPolygon.coordinates().get(0).get(0).get(0).latitude(), 
DELTA); // Checking the second Point assertEquals(52.696361078274485, multiPolygon.coordinates().get(0).get(0).get(1).longitude(), DELTA); assertEquals(33.046875, multiPolygon.coordinates().get(0).get(0).get(1).latitude(), DELTA); // Checking the second Polygon in the MultiPolygon // Checking the first Point assertEquals(11.42578125, multiPolygon.coordinates().get(1).get(0).get(0).longitude(), DELTA); assertEquals(16.636191878397664, multiPolygon.coordinates().get(1).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(7.91015625, multiPolygon.coordinates().get(1).get(0).get(1).longitude(), DELTA); assertEquals(-9.102096738726443, multiPolygon.coordinates().get(1).get(0).get(1).latitude(), DELTA); // Checking the third Polygon in the MultiPolygon // Checking the first Point assertEquals(30.0, multiPolygon.coordinates().get(2).get(0).get(0).longitude(), DELTA); assertEquals(0.0, multiPolygon.coordinates().get(2).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(102.0, multiPolygon.coordinates().get(2).get(0).get(1).longitude(), DELTA); assertEquals(0.0, multiPolygon.coordinates().get(2).get(0).get(1).latitude(), DELTA); } @Test public void combinePointAndLineStringGeometry() throws Exception { FeatureCollection pointAndLineStringFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry(Point.fromLngLat(-2.46, 27.6835)), Feature.fromGeometry( LineString.fromLngLats( Arrays.asList(Point.fromLngLat(-11.25, 55.7765), Point.fromLngLat(41.1328, 22.91792))) ))); FeatureCollection combinedFeatureCollection = TurfConversion.combine(pointAndLineStringFeatureCollection); assertNotNull(combinedFeatureCollection); MultiPoint multiPoint = null; MultiLineString multiLineString = null; for (int x = 0; x < combinedFeatureCollection.features().size(); x++) { Feature singleFeature = combinedFeatureCollection.features().get(x); if (singleFeature.geometry() instanceof MultiPoint) { multiPoint = (MultiPoint) 
combinedFeatureCollection.features().get(x).geometry(); } if (singleFeature.geometry() instanceof MultiLineString) { multiLineString = (MultiLineString) combinedFeatureCollection.features().get(x).geometry(); } } assertNotNull(multiPoint); assertNotNull(multiLineString); // Checking the LineString in the MultiLineString // Checking the first LineString location assertEquals(-11.25, multiLineString.coordinates().get(0).get(0).longitude(), DELTA); assertEquals(55.7765, multiLineString.coordinates().get(0).get(0).latitude(), DELTA); // Checking the second LineString location assertEquals(41.1328, multiLineString.coordinates().get(0).get(1).longitude(), DELTA); assertEquals(22.91792, multiLineString.coordinates().get(0).get(1).latitude(), DELTA); // Checking the Point in the MultiPoint // Checking the first and only Point assertEquals(-2.46, multiPoint.coordinates().get(0).longitude(), DELTA); assertEquals(27.6835, multiPoint.coordinates().get(0).latitude(), DELTA); } @Test public void combinePointAndMultiPolygonAndLineStringGeometry() throws Exception { FeatureCollection pointMultiPolygonAndLineStringFeatureCollection = FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry(Point.fromLngLat(-2.46, 27.6835)), Feature.fromGeometry(MultiPolygon.fromPolygons(Arrays.asList( Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(11.42578125, 16.636191878397664), Point.fromLngLat(7.91015625, -9.102096738726443), Point.fromLngLat(31.113281249999996, 17.644022027872726), Point.fromLngLat(11.42578125, 16.636191878397664) )))))), Feature.fromGeometry(LineString.fromLngLats( Arrays.asList(Point.fromLngLat(-11.25, 55.7765), Point.fromLngLat(41.1328, 22.91792))) ))); FeatureCollection combinedFeatureCollection = TurfConversion.combine(pointMultiPolygonAndLineStringFeatureCollection); assertNotNull(combinedFeatureCollection); MultiPoint multiPoint = null; MultiLineString multiLineString = null; MultiPolygon multiPolygon = null; for (int x = 0; x < 
combinedFeatureCollection.features().size(); x++) { Feature singleFeature = combinedFeatureCollection.features().get(x); if (singleFeature.geometry() instanceof MultiPoint) { multiPoint = (MultiPoint) combinedFeatureCollection.features().get(x).geometry(); } if (singleFeature.geometry() instanceof MultiLineString) { multiLineString = (MultiLineString) combinedFeatureCollection.features().get(x).geometry(); } if (singleFeature.geometry() instanceof MultiPolygon) { multiPolygon = (MultiPolygon) combinedFeatureCollection.features().get(x).geometry(); } } assertNotNull(multiPoint); assertNotNull(multiLineString); assertNotNull(multiPolygon); // Checking the Polygon in the MultiPolygon // Checking the first Point assertEquals(11.42578125, multiPolygon.coordinates().get(0).get(0).get(0).longitude(), DELTA); assertEquals(16.636191878397664, multiPolygon.coordinates().get(0).get(0).get(0).latitude(), DELTA); // Checking the second Point assertEquals(7.91015625, multiPolygon.coordinates().get(0).get(0).get(1).longitude(), DELTA); assertEquals(-9.102096738726443, multiPolygon.coordinates().get(0).get(0).get(1).latitude(), DELTA); // Checking the LineString in the MultiLineString // Checking the first LineString location assertEquals(-11.25, multiLineString.coordinates().get(0).get(0).longitude(), DELTA); assertEquals(55.7765, multiLineString.coordinates().get(0).get(0).latitude(), DELTA); // Checking the second LineString location assertEquals(41.1328, multiLineString.coordinates().get(0).get(1).longitude(), DELTA); assertEquals(22.91792, multiLineString.coordinates().get(0).get(1).latitude(), DELTA); // Checking the Point in the MultiPoint // Checking the first and only Point assertEquals(-2.46, multiPoint.coordinates().get(0).longitude(), DELTA); assertEquals(27.6835, multiPoint.coordinates().get(0).latitude(), DELTA); } @Test public void combine_featureCollectionSizeCheck() throws Exception { FeatureCollection pointMultiPolygonAndLineStringFeatureCollection = 
FeatureCollection.fromFeatures( Arrays.asList( Feature.fromGeometry(Point.fromLngLat(-2.46, 27.6835)), Feature.fromGeometry(MultiPolygon.fromPolygons(Arrays.asList( Polygon.fromLngLats(Arrays.asList(Arrays.asList( Point.fromLngLat(11.42578125, 16.636191878397664), Point.fromLngLat(7.91015625, -9.102096738726443), Point.fromLngLat(31.113281249999996, 17.644022027872726), Point.fromLngLat(11.42578125, 16.636191878397664) )))))), Feature.fromGeometry(LineString.fromLngLats( Arrays.asList(Point.fromLngLat(-11.25, 55.7765), Point.fromLngLat(41.1328, 22.91792))) ))); FeatureCollection combinedFeatureCollection = TurfConversion.combine(pointMultiPolygonAndLineStringFeatureCollection); assertNotNull(combinedFeatureCollection); assertEquals(3, combinedFeatureCollection.features().size()); } @Test public void combineEmptyFeatureCollectionThrowsException() throws Exception { thrown.expect(TurfException.class); thrown.expectMessage(startsWith("Your FeatureCollection doesn't have any Feature objects in it.")); TurfConversion.combine(FeatureCollection.fromJson( "{\n" + " \"type\": \"FeatureCollection\",\n" + " \"features\": []\n" + "}" )); } @Test public void explodePointSingleFeature() throws NullPointerException { Point point = Point.fromLngLat(102, 0.5); assertEquals(1, TurfConversion.explode(Feature.fromGeometry(point)).features().size()); } @Test public void explodeMultiPointSingleFeature() throws NullPointerException { MultiPoint multiPoint = MultiPoint.fromJson(loadJsonFixture(TURF_EXPLODE_MULTI_POINT)); assertEquals(4, TurfConversion.explode(Feature.fromGeometry(multiPoint)).features().size()); } @Test public void explodeLineStringSingleFeature() throws NullPointerException { LineString lineString = LineString.fromJson(loadJsonFixture(TURF_EXPLODE_LINESTRING)); assertEquals(4, TurfConversion.explode(Feature.fromGeometry(lineString)).features().size()); } @Test public void explodePolygonSingleFeature() throws NullPointerException { Polygon polygon = 
Polygon.fromLngLats(Arrays.asList( Arrays.asList( Point.fromLngLat(0, 101), Point.fromLngLat(1, 101), Point.fromLngLat(1, 100), Point.fromLngLat(0, 100)))); assertEquals(3, TurfConversion.explode(Feature.fromGeometry(polygon)).features().size()); } @Test public void explodeMultiLineStringSingleFeature() throws NullPointerException { MultiLineString multiLineString = MultiLineString.fromJson(loadJsonFixture(TURF_EXPLODE_MULTILINESTRING)); assertEquals(4, TurfConversion.explode(Feature.fromGeometry(multiLineString)).features().size()); } @Test public void explodeMultiPolygonSingleFeature() throws NullPointerException { MultiPolygon multiPolygon = MultiPolygon.fromJson(loadJsonFixture(TURF_EXPLODE_MULTIPOLYGON)); assertEquals(12, TurfConversion.explode(Feature.fromGeometry(multiPolygon)).features().size()); } @Test public void explodeGeometryCollectionSingleFeature() throws NullPointerException { GeometryCollection geometryCollection = GeometryCollection.fromJson(loadJsonFixture(TURF_EXPLODE_GEOMETRY_COLLECTION)); assertEquals(3, TurfConversion.explode(Feature.fromGeometry(geometryCollection)).features().size()); } @Test public void explodeFeatureCollection() throws NullPointerException { FeatureCollection featureCollection = FeatureCollection.fromFeatures(new Feature[] { Feature.fromGeometry(MultiLineString.fromJson(loadJsonFixture(TURF_EXPLODE_MULTILINESTRING))), Feature.fromGeometry(MultiPolygon.fromJson(loadJsonFixture(TURF_EXPLODE_MULTIPOLYGON))) }); assertEquals(16, TurfConversion.explode(featureCollection).features().size()); } @Test public void polygonToLine_GeometryPolygon() throws NullPointerException { Polygon polygon = Polygon.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_IN + TURF_POLYGON_TO_LINE_FILENAME_GEOMETRY_POLYGON)); Feature expected = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_OUT + TURF_POLYGON_TO_LINE_FILENAME_GEOMETRY_POLYGON)); compareJson(expected.toJson(), TurfConversion.polygonToLine(polygon).toJson()); } @Test public 
void polygonToLine_Polygon() throws NullPointerException { Feature polygon = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_IN + TURF_POLYGON_TO_LINE_FILENAME_POLYGON)); Feature expected = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_OUT + TURF_POLYGON_TO_LINE_FILENAME_POLYGON)); compareJson(expected.toJson(), TurfConversion.polygonToLine(polygon).toJson()); } @Test public void polygonToLine_PolygonWithHole() throws NullPointerException { Feature polygon = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_IN + TURF_POLYGON_TO_LINE_FILENAME_POLYGON_WITH_HOLE)); Feature expected = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_OUT + TURF_POLYGON_TO_LINE_FILENAME_POLYGON_WITH_HOLE)); compareJson(expected.toJson(), TurfConversion.polygonToLine(polygon).toJson()); } @Test public void polygonToLine_MultiPolygon() throws NullPointerException { Feature multiPolygon = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_IN + TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON)); FeatureCollection expected = FeatureCollection.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_OUT + TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON)); compareJson(expected.toJson(), TurfConversion.multiPolygonToLine(multiPolygon).toJson()); } @Test public void polygonToLine_MultiPolygonWithHoles() throws NullPointerException { Feature multiPolygon = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_IN + TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON_WITH_HOLES)); FeatureCollection expected = FeatureCollection.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_OUT + TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON_WITH_HOLES)); compareJson(expected.toJson(), TurfConversion.multiPolygonToLine(multiPolygon).toJson()); } @Test public void polygonToLine_MultiPolygonWithOuterDoughnut() throws NullPointerException { Feature multiPolygon = Feature.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_IN + TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON_OUTER_DOUGHNUT)); 
FeatureCollection expected = FeatureCollection.fromJson(loadJsonFixture(TURF_POLYGON_TO_LINE_PATH_OUT + TURF_POLYGON_TO_LINE_FILENAME_MULTIPOLYGON_OUTER_DOUGHNUT)); compareJson(expected.toJson(), TurfConversion.multiPolygonToLine(multiPolygon).toJson()); } }
13,084
1,825
package com.github.unidbg.debugger.gdb; import com.github.unidbg.Emulator; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; class SetThreadCommand implements GdbStubCommand { private static final Log log = LogFactory.getLog(SetThreadCommand.class); @Override public boolean processCommand(Emulator<?> emulator, GdbStub stub, String command) { char type = command.charAt(1); int thread = Integer.parseInt(command.substring(2), 16); if (log.isDebugEnabled()) { log.debug("Set thread type=" + type + ", thread=" + thread); } switch (type) { case 'c': case 'g': stub.makePacketAndSend("OK"); break; default: stub.makePacketAndSend("E22"); break; } return true; } }
399
388
#pragma once enum { avCapturePermissionUnknown = 0, avCapturePermissionGranted = 1, avCapturePermissionDenied = 2, }; enum { avVideoCapture = 1, avAudioCapture = 2, }; extern "C" int UnityGetAVCapturePermission(int captureType); extern "C" void UnityRequestAVCapturePermission(int captureType);
109
319
<reponame>ajmadsen/magnum # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid from oslo_log import log as logging import pecan import wsme from wsme import types as wtypes from magnum.api import attr_validator from magnum.api.controllers import base from magnum.api.controllers import link from magnum.api.controllers.v1 import collection from magnum.api.controllers.v1 import types from magnum.api import expose from magnum.api import utils as api_utils from magnum.api import validation from magnum.common import exception from magnum.common import name_generator from magnum.common import policy import magnum.conf from magnum import objects from magnum.objects import fields LOG = logging.getLogger(__name__) CONF = magnum.conf.CONF class FederationID(wtypes.Base): """API representation of a federation ID This class enforces type checking and value constraints, and converts between the internal object model and the API representation of a federation ID. """ uuid = types.uuid def __init__(self, uuid): self.uuid = uuid class Federation(base.APIBase): """API representation of a federation. This class enforces type checking and value constraints, and converts between the internal object model and the API representation of a Federation. """ # Unique UUID for this federation. uuid = types.uuid # Name of this federation, max length is limited to 242 because heat stack # requires max length limit to 255, and Magnum amend a uuid length. 
name = wtypes.StringType(min_length=1, max_length=242, pattern='^[a-zA-Z][a-zA-Z0-9_.-]*$') # UUID of the hostcluster of the federation, i.e. the cluster that # hosts the COE Federated API. hostcluster_id = wsme.wsattr(wtypes.text) # List of UUIDs of all the member clusters of the federation. member_ids = wsme.wsattr([wtypes.text]) # Status of the federation. status = wtypes.Enum(wtypes.text, *fields.FederationStatus.ALL) # Status reason of the federation. status_reason = wtypes.text # Set of federation metadata (COE-specific in some cases). properties = wtypes.DictType(wtypes.text, wtypes.text) # A list containing a self link and associated federations links links = wsme.wsattr([link.Link], readonly=True) def __init__(self, **kwargs): super(Federation, self).__init__() self.fields = [] for field in objects.Federation.fields: # Skip fields we do not expose. if not hasattr(self, field): continue self.fields.append(field) setattr(self, field, kwargs.get(field, wtypes.Unset)) @staticmethod def _convert_with_links(federation, url, expand=True): if not expand: federation.unset_fields_except(['uuid', 'name', 'hostcluster_id', 'member_ids', 'status', 'properties']) federation.links = [link.Link.make_link('self', url, 'federations', federation.uuid), link.Link.make_link('bookmark', url, 'federations', federation.uuid, bookmark=True)] return federation @classmethod def convert_with_links(cls, rpc_federation, expand=True): federation = Federation(**rpc_federation.as_dict()) return cls._convert_with_links(federation, pecan.request.host_url, expand) @classmethod def sample(cls, expand=True): sample = cls(uuid='4221a353-8368-475f-b7de-3429d3f724b3', name='example', hostcluster_id='49dc23f5-ffc9-40c3-9d34-7be7f9e34d63', member_ids=['49dc23f5-ffc9-40c3-9d34-7be7f9e34d63', 'f2439bcf-02a2-4278-9d8a-f07a2042230a', 'e549e0a5-3d3c-406f-bd7c-0e0182fb211c'], properties={'dns-zone': 'example.com.'}, status=fields.FederationStatus.CREATE_COMPLETE, status_reason="CREATE completed 
successfully") return cls._convert_with_links(sample, 'http://localhost:9511', expand) class FederationPatchType(types.JsonPatchType): _api_base = Federation @staticmethod def internal_attrs(): """"Returns a list of internal attributes. Internal attributes can't be added, replaced or removed. """ internal_attrs = [] return types.JsonPatchType.internal_attrs() + internal_attrs class FederationCollection(collection.Collection): """API representation of a collection of federations.""" # A list containing federation objects. federations = [Federation] def __init__(self, **kwargs): self._type = 'federations' @staticmethod def convert_with_links(rpc_federation, limit, url=None, expand=False, **kwargs): collection = FederationCollection() collection.federations = [Federation.convert_with_links(p, expand) for p in rpc_federation] collection.next = collection.get_next(limit, url=url, **kwargs) return collection @classmethod def sample(cls): sample = cls() sample.federations = [Federation.sample(expand=False)] return sample class FederationsController(base.Controller): """REST controller for federations.""" def __init__(self): super(FederationsController, self).__init__() _custom_actions = { 'detail': ['GET'], } def _generate_name_for_federation(self, context): """Generate a random name like: phi-17-federation.""" name_gen = name_generator.NameGenerator() name = name_gen.generate() return name + '-federation' def _get_federation_collection(self, marker, limit, sort_key, sort_dir, expand=False, resource_url=None): limit = api_utils.validate_limit(limit) sort_dir = api_utils.validate_sort_dir(sort_dir) marker_obj = None if marker: marker_obj = objects.Federation.get_by_uuid(pecan.request.context, marker) federations = objects.Federation.list(pecan.request.context, limit, marker_obj, sort_key=sort_key, sort_dir=sort_dir) return FederationCollection.convert_with_links(federations, limit, url=resource_url, expand=expand, sort_key=sort_key, sort_dir=sort_dir) 
@expose.expose(FederationCollection, types.uuid, int, wtypes.text, wtypes.text) def get_all(self, marker=None, limit=None, sort_key='id', sort_dir='asc'): """Retrieve a list of federations. :param marker: pagination marker for large data sets. :param limit: maximum number of resources to return in a single result. :param sort_key: column to sort results by. Default: id. :param sort_dir: direction to sort. "asc" or "desc". Default: asc. """ context = pecan.request.context policy.enforce(context, 'federation:get_all', action='federation:get_all') return self._get_federation_collection(marker, limit, sort_key, sort_dir) @expose.expose(FederationCollection, types.uuid, int, wtypes.text, wtypes.text) def detail(self, marker=None, limit=None, sort_key='id', sort_dir='asc'): """Retrieve a list of federation with detail. :param marker: pagination marker for large data sets. :param limit: maximum number of resources to return in a single result. :param sort_key: column to sort results by. Default: id. :param sort_dir: direction to sort. "asc" or "desc". Default: asc. """ context = pecan.request.context policy.enforce(context, 'federation:detail', action='federation:detail') # NOTE(lucasagomes): /detail should only work against collections parent = pecan.request.path.split('/')[:-1][-1] if parent != "federations": raise exception.HTTPNotFound expand = True resource_url = '/'.join(['federations', 'detail']) return self._get_federation_collection(marker, limit, sort_key, sort_dir, expand, resource_url) @expose.expose(Federation, types.uuid_or_name) def get_one(self, federation_ident): """Retrieve information about a given Federation. :param federation_ident: UUID or logical name of the Federation. 
""" context = pecan.request.context federation = api_utils.get_resource('Federation', federation_ident) policy.enforce(context, 'federation:get', federation.as_dict(), action='federation:get') federation = Federation.convert_with_links(federation) return federation @expose.expose(FederationID, body=Federation, status_code=202) def post(self, federation): """Create a new federation. :param federation: a federation within the request body. """ context = pecan.request.context policy.enforce(context, 'federation:create', action='federation:create') federation_dict = federation.as_dict() # Validate `hostcluster_id` hostcluster_id = federation_dict.get('hostcluster_id') attr_validator.validate_federation_hostcluster(hostcluster_id) # Validate `properties` dict. properties_dict = federation_dict.get('properties') attr_validator.validate_federation_properties(properties_dict) federation_dict['project_id'] = context.project_id # If no name is specified, generate a random human-readable name name = (federation_dict.get('name') or self._generate_name_for_federation(context)) federation_dict['name'] = name new_federation = objects.Federation(context, **federation_dict) new_federation.uuid = uuid.uuid4() # TODO(clenimar): remove hard-coded `create_timeout`. pecan.request.rpcapi.federation_create_async(new_federation, create_timeout=15) return FederationID(new_federation.uuid) @expose.expose(FederationID, types.uuid_or_name, types.boolean, body=[FederationPatchType], status_code=202) def patch(self, federation_ident, rollback=False, patch=None): """Update an existing Federation. Please note that the join/unjoin operation is performed by patching `member_ids`. :param federation_ident: UUID or logical name of a federation. :param rollback: whether to rollback federation on update failure. :param patch: a json PATCH document to apply to this federation. 
""" federation = self._patch(federation_ident, patch) pecan.request.rpcapi.federation_update_async(federation, rollback) return FederationID(federation.uuid) def _patch(self, federation_ident, patch): context = pecan.request.context federation = api_utils.get_resource('Federation', federation_ident) policy.enforce(context, 'federation:update', federation.as_dict(), action='federation:update') # NOTE(clenimar): Magnum does not allow one to append items to existing # fields through an `add` operation using HTTP PATCH (please check # `magnum.api.utils.apply_jsonpatch`). In order to perform the join # and unjoin operations, intercept the original JSON PATCH document # and change the operation from either `add` or `remove` to `replace`. patch_path = patch[0].get('path') patch_value = patch[0].get('value') patch_op = patch[0].get('op') if patch_path == '/member_ids': if patch_op == 'add' and patch_value is not None: patch = self._join_wrapper(federation_ident, patch) elif patch_op == 'remove' and patch_value is not None: patch = self._unjoin_wrapper(federation_ident, patch) try: federation_dict = federation.as_dict() new_federation = Federation( **api_utils.apply_jsonpatch(federation_dict, patch)) except api_utils.JSONPATCH_EXCEPTIONS as e: raise exception.PatchError(patch=patch, reason=e) # Retrieve only what changed after the patch. 
delta = self._update_changed_fields(federation, new_federation) validation.validate_federation_properties(delta) return federation def _update_changed_fields(self, federation, new_federation): """Update only the patches that were modified and return the diff.""" for field in objects.Federation.fields: try: patch_val = getattr(new_federation, field) except AttributeError: # Ignore fields that aren't exposed in the API continue if patch_val == wtypes.Unset: patch_val = None if federation[field] != patch_val: federation[field] = patch_val return federation.obj_what_changed() def _join_wrapper(self, federation_ident, patch): """Intercept PATCH JSON documents for join operations. Take a PATCH JSON document with `add` operation:: { 'op': 'add', 'value': 'new_member_id', 'path': '/member_ids' } and transform it into a document with `replace` operation:: { 'op': 'replace', 'value': ['current_member_id1', ..., 'new_member_id'], 'path': '/member_ids' } """ federation = api_utils.get_resource('Federation', federation_ident) new_member_uuid = patch[0]['value'] # Check if the cluster exists c = objects.Cluster.get_by_uuid(pecan.request.context, new_member_uuid) # Check if the cluster is already a member of the federation if new_member_uuid not in federation.member_ids and c is not None: # Retrieve all current members members = federation.member_ids # Add the new member members.append(c.uuid) else: kw = {'uuid': new_member_uuid, 'federation_name': federation.name} raise exception.MemberAlreadyExists(**kw) # Set `value` to the updated member list. Change `op` to `replace` patch[0]['value'] = members patch[0]['op'] = 'replace' return patch def _unjoin_wrapper(self, federation_ident, patch): """Intercept PATCH JSON documents for unjoin operations. 
Take a PATCH JSON document with `remove` operation:: { 'op': 'remove', 'value': 'former_member_id', 'path': '/member_ids' } and transform it into a document with `replace` operation:: { 'op': 'replace', 'value': ['current_member_id1', ..., 'current_member_idn'], 'path': '/member_ids' } """ federation = api_utils.get_resource('Federation', federation_ident) cluster_uuid = patch[0]['value'] # Check if the cluster exists c = objects.Cluster.get_by_uuid(pecan.request.context, cluster_uuid) # Check if the cluster is a member cluster and if it exists if cluster_uuid in federation.member_ids and c is not None: # Retrieve all current members members = federation.member_ids # Unjoin the member members.remove(cluster_uuid) else: raise exception.HTTPNotFound("Cluster %s is not a member of the " "federation %s." % (cluster_uuid, federation.name)) # Set `value` to the updated member list. Change `op` to `replace` patch[0]['value'] = members patch[0]['op'] = 'replace' return patch @expose.expose(None, types.uuid_or_name, status_code=204) def delete(self, federation_ident): """Delete a federation. :param federation_ident: UUID of federation or logical name of the federation. """ context = pecan.request.context federation = api_utils.get_resource('Federation', federation_ident) policy.enforce(context, 'federation:delete', federation.as_dict(), action='federation:delete') pecan.request.rpcapi.federation_delete_async(federation.uuid)
7,965
2,109
<reponame>zaza568/yo #!/usr/bin/python # AxCrypt 1.x encrypted file parser for JtR # 2016 by Fist0urs <eddy.maaalou at gmail.com>. # This software is Copyright (c) 2016, Fist0urs <eddy.maaalou at gmail.com>, # and it is hereby released to the general public under the following terms: # Redistribution and use in source and binary forms, with or without modification, # are permitted. import sys, struct # file is beginning with 16bytes constant header GUID='\xc0\xb9\x07\x2e\x4f\x93\xf1\x46\xa0\x15\x79\x2c\xa1\xd9\xe8\x21' OFFSET_TYPE=4 SIZE_KEYDATA=24 # size of constant in keywrap (0xA6*8) + size of DEK (16) SIZE_SALT=16 SIZE_ITERATION=4 StructKeys=[] def usage(): print >> sys.stderr, 'usage: %s <axxfile> [KEY-FILE]\n' % sys.argv[0] print >> sys.stderr, 'Script to extract hash from AxCrypt encrypted file or self-decrypting binary\n' print >> sys.stderr, 'optional arguments:\n KEY-FILE path to optional key-file provided' sys.exit(1) def DWORD_to_int(string_dword): string_dword_reversed = string_dword[::-1] return int('0x'+str(string_dword_reversed.encode('hex')), 16) def parse_PE(axxdata): i = 0 while(axxdata[i:i+16] != GUID): i += 1 return axxdata[i:] def parse_axxfile(axxfile): stream=open(axxfile, 'rb') axxdata=stream.read() stream.close() # if header is 'MZ' if axxdata[:2] == '\x4D\x5a': offset_PE_magic = struct.unpack('<L', axxdata[60:64])[0] # if 'PE' assume PE if axxdata[offset_PE_magic:offset_PE_magic+2] == '\x50\x45': axxdata = parse_PE(axxdata) sizeof_file=len(axxdata) if (axxdata[:16] != GUID): print "Be Careful, GUID is different from axcrypt's one..." 
header_datalen_offset = 16 headertype = '\x02' # first type encountered # headertype of dataencrypted section is 0x3f while(headertype != 63): header_datalen = ord(axxdata[header_datalen_offset]) headertype = ord(axxdata[header_datalen_offset + OFFSET_TYPE]) # probably a StructKey if (header_datalen == 49 and headertype == 04): offset_to_keydata = header_datalen_offset + OFFSET_TYPE + 1 offset_to_salt = offset_to_keydata + SIZE_KEYDATA offset_to_iteration = offset_to_salt + SIZE_SALT dword_str = axxdata[offset_to_iteration:offset_to_iteration + SIZE_ITERATION] StructKeys.append({'KeyData' : axxdata[offset_to_keydata:offset_to_salt] , 'Salt' : axxdata[offset_to_salt:offset_to_iteration] ,'Iteration' : DWORD_to_int(dword_str)}) header_datalen_offset += header_datalen if (header_datalen_offset >= sizeof_file): print "Could not parse file, exiting" sys.exit(0) return StructKeys[0]['KeyData'],StructKeys[0]['Salt'],StructKeys[0]['Iteration'] if __name__=="__main__": if (len(sys.argv) != 2 and len(sys.argv) != 3): usage() # A_DEK == wrappedKey wrappedKey, Salt, nb_iteration = parse_axxfile(sys.argv[1]) version = 1 keyfile_content = '' key_file_name = '' # dummy strip to relative path axxfile = sys.argv[1][sys.argv[1].rfind("/")+1:] if (len(sys.argv) == 3): keyfile = open(sys.argv[2], 'r') keyfile_content = '*' + keyfile.read().encode("hex") key_file_name = '*' + sys.argv[2][sys.argv[2].rfind("/")+1:] keyfile.close() print axxfile + key_file_name + ":$axcrypt$" + "*" + str(version) + "*" + str(nb_iteration) + "*" + Salt.encode("hex") + "*" + wrappedKey.encode("hex") + keyfile_content
1,405
1,844
"""Log facilitator """ import os import sys class Log: """Create a log instance on a log file """ def __init__(self, path, out=sys.stdout): self.out = out self.fp = open(path, "a") assert self.fp, "Unable to open log file" """Log a new message to the opened log file, and optionnaly on stdout or stderr too """ def log(self, msg): assert self.fp, "Unable to write in log file" self.fp.write(msg + os.linesep) self.fp.flush() if self.out: print(msg, file=self.out)
245
543
<gh_stars>100-1000 package com.riiablo.table.schema; import com.riiablo.table.Injector; public class MonStatsInjectorImpl implements Injector<MonStats, Object> { @Override public MonStats inject(Object manifest, MonStats record) { throw new UnsupportedOperationException(); } }
93
679
<filename>main/odk/examples/java/Inspector/Introspector.java /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ import com.sun.star.beans.IllegalTypeException; import com.sun.star.beans.MethodConcept; import com.sun.star.beans.NamedValue; import com.sun.star.beans.Property; import com.sun.star.beans.XIntrospection; import com.sun.star.beans.XIntrospectionAccess; import com.sun.star.beans.XPropertySet; import com.sun.star.bridge.UnoUrlResolver; import com.sun.star.bridge.XUnoUrlResolver; import com.sun.star.comp.helper.Bootstrap; import com.sun.star.container.XEnumeration; import com.sun.star.container.XEnumerationAccess; import com.sun.star.container.XHierarchicalNameAccess; import com.sun.star.container.XIndexAccess; import com.sun.star.container.XNameAccess; import com.sun.star.lang.XMultiComponentFactory; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.lang.XServiceInfo; import com.sun.star.lang.XTypeProvider; import com.sun.star.lib.uno.helper.WeakBase; import com.sun.star.reflection.ParamInfo; import com.sun.star.reflection.TypeDescriptionSearchDepth; import 
com.sun.star.reflection.XConstantTypeDescription; import com.sun.star.reflection.XConstantsTypeDescription; import com.sun.star.reflection.XIdlClass; import com.sun.star.reflection.XIdlField; import com.sun.star.reflection.XIdlMethod; import com.sun.star.reflection.XIdlReflection; import com.sun.star.reflection.XIndirectTypeDescription; import com.sun.star.reflection.XInterfaceTypeDescription; import com.sun.star.reflection.XInterfaceTypeDescription2; import com.sun.star.reflection.XPropertyTypeDescription; import com.sun.star.reflection.XServiceTypeDescription; import com.sun.star.reflection.XTypeDescription; import com.sun.star.reflection.XTypeDescriptionEnumeration; import com.sun.star.reflection.XTypeDescriptionEnumerationAccess; import com.sun.star.ucb.CommandAbortedException; import com.sun.star.ucb.XSimpleFileAccess; import com.sun.star.uno.AnyConverter; import com.sun.star.uno.Type; import com.sun.star.uno.TypeClass; import com.sun.star.uno.UnoRuntime; import com.sun.star.uno.XComponentContext; import com.sun.star.util.URL; import java.util.List; import java.util.Vector; import javax.swing.JOptionPane; public class Introspector extends WeakBase{ private XIntrospection m_xIntrospection; private XMultiComponentFactory m_xMultiComponentFactory; private XComponentContext m_xComponentContext; private XTypeDescriptionEnumerationAccess m_xTDEnumerationAccess; private static XComponentContext xOfficeComponentContext; private XIdlReflection mxIdlReflection; private URL openHyperlink; private static Introspector m_oIntrospector = null; private XSimpleFileAccess xSimpleFileAccess = null; public static Introspector getIntrospector(){ if (m_oIntrospector == null){ throw new NullPointerException(); } else{ return m_oIntrospector; } } public static Introspector getIntrospector(XComponentContext _xComponentContext){ if (m_oIntrospector == null){ m_oIntrospector = new Introspector(_xComponentContext); } return m_oIntrospector; } /** Creates a new instance of Introspection 
*/ private Introspector(XComponentContext _xComponentContext) { try{ m_xComponentContext = _xComponentContext; m_xMultiComponentFactory = m_xComponentContext.getServiceManager(); Object o = m_xMultiComponentFactory.createInstanceWithContext("com.sun.star.beans.Introspection", m_xComponentContext); m_xIntrospection = ( XIntrospection ) UnoRuntime.queryInterface(XIntrospection.class, o ); Object oCoreReflection = getXMultiComponentFactory().createInstanceWithContext("com.sun.star.reflection.CoreReflection", getXComponentContext()); mxIdlReflection = (XIdlReflection) UnoRuntime.queryInterface(XIdlReflection.class, oCoreReflection); initTypeDescriptionManager(); } catch( Exception exception ) { System.err.println( exception ); }} protected XComponentContext getXComponentContext(){ return m_xComponentContext; } protected XMultiComponentFactory getXMultiComponentFactory(){ return m_xMultiComponentFactory; } protected XIntrospectionAccess getXIntrospectionAccess(Object _oUnoComponent){ return m_xIntrospection.inspect(_oUnoComponent); } public boolean isContainer(Object _oUnoObject){ boolean bIsContainer = false; try { XIntrospectionAccess xIntrospectionAccessObject = getXIntrospectionAccess(_oUnoObject); if (xIntrospectionAccessObject != null){ XEnumerationAccess xEnumerationAccess = (XEnumerationAccess) UnoRuntime.queryInterface(XEnumerationAccess.class, xIntrospectionAccessObject.queryAdapter( new Type( XEnumerationAccess.class ) ) ); if (xEnumerationAccess != null){ XEnumeration xEnumeration = xEnumerationAccess.createEnumeration(); bIsContainer = xEnumeration.hasMoreElements(); } if (!bIsContainer){ XIndexAccess xIndexAccess = (XIndexAccess) UnoRuntime.queryInterface( XIndexAccess.class, xIntrospectionAccessObject.queryAdapter(new Type( XIndexAccess.class ))); if (xIndexAccess != null){ bIsContainer = (xIndexAccess.getCount() > 0); } } } } catch (IllegalTypeException ex) { ex.printStackTrace(System.out); } return bIsContainer; } // add all containers for the given 
object to the tree under the node // parent public Object[] getUnoObjectsOfContainer(Object _oUnoParentObject) { Object[] oRetComponents = null; try { Vector oRetComponentsVector = new Vector(); XIntrospectionAccess xIntrospectionAccessObject = getXIntrospectionAccess(_oUnoParentObject); if ( xIntrospectionAccessObject != null ) { XEnumerationAccess xEnumerationAccess = (XEnumerationAccess) UnoRuntime.queryInterface(XEnumerationAccess.class, xIntrospectionAccessObject.queryAdapter( new Type( XEnumerationAccess.class ) ) ); if ( xEnumerationAccess != null ) { XEnumeration xEnumeration = xEnumerationAccess.createEnumeration(); while ( xEnumeration.hasMoreElements() ) { oRetComponentsVector.add(xEnumeration.nextElement()); } } XIndexAccess xIndexAccess = (XIndexAccess) UnoRuntime.queryInterface( XIndexAccess.class, xIntrospectionAccessObject.queryAdapter(new Type( XIndexAccess.class ))); if ( xIndexAccess != null ) { XIdlMethod mMethod = xIntrospectionAccessObject.getMethod("getByIndex", com.sun.star.beans.MethodConcept.INDEXCONTAINER); for ( int i = 0; i < xIndexAccess.getCount(); i++ ) { Object[][] aParamInfo = new Object[1][1]; aParamInfo[0] = new Integer[] { new Integer(i) }; oRetComponentsVector.add(mMethod.invoke(_oUnoParentObject, aParamInfo)); } } } if (oRetComponentsVector != null){ oRetComponents = new Object[oRetComponentsVector.size()]; oRetComponentsVector.toArray(oRetComponents); } } catch( Exception exception ) { System.err.println( exception ); } return oRetComponents; } protected XIdlMethod[] getMethodsOfInterface(Type _aType){ try{ XIdlClass xIdlClass = mxIdlReflection.forName(_aType.getTypeName()); return xIdlClass.getMethods(); } catch( Exception e ) { System.err.println( e ); return null; }} protected XIdlField[] getFieldsOfType(Type _aType){ try{ XIdlClass xIdlClass = mxIdlReflection.forName(_aType.getTypeName()); return xIdlClass.getFields(); } catch( Exception e ) { System.err.println( e ); return null; }} public boolean hasMethods(Object 
_oUnoObject){ boolean bHasMethods = (getMethods(_oUnoObject).length > 0); return bHasMethods; } // add all methods for the given object to the tree under the node parent public XIdlMethod[] getMethods(Object _oUnoParentObject) { try { XIntrospectionAccess xIntrospectionAccess = getXIntrospectionAccess(_oUnoParentObject); if (xIntrospectionAccess != null){ XIdlMethod[] xIdlMethods = xIntrospectionAccess.getMethods(MethodConcept.ALL - MethodConcept.DANGEROUS); return xIdlMethods; } } catch( Exception e ) { System.err.println( e ); } return null; } public boolean hasProperties(Object _oUnoObject){ boolean bHasProperties = (getProperties(_oUnoObject).length > 0); return bHasProperties; } protected Property[] getProperties( Object _oUnoParentObject){ try { XIntrospectionAccess xIntrospectionAccess = getXIntrospectionAccess(_oUnoParentObject); if (xIntrospectionAccess != null){ Property[] aProperties = xIntrospectionAccess.getProperties(com.sun.star.beans.PropertyConcept.ATTRIBUTES + com.sun.star.beans.PropertyConcept.PROPERTYSET); return aProperties; } } catch( Exception e ) { System.err.println( e ); } return null; } protected Property[] getProperties(Object _oUnoObject, String _sServiceName){ Property[] aProperties = getProperties(_oUnoObject); List aListOfProperties = java.util.Arrays.asList(aProperties); Vector aPropertiesVector = new Vector(aListOfProperties); if (aProperties != null){ XPropertyTypeDescription[] xPropertyTypeDescriptions = getPropertyDescriptionsOfService(_sServiceName); for (int i = aProperties.length - 1; i >= 0; i--){ if (!hasByName(xPropertyTypeDescriptions, _sServiceName + "." 
+ aProperties[i].Name)){ aPropertiesVector.remove(i); } } } Property[] aRetProperties = new Property[aPropertiesVector.size()]; aPropertiesVector.toArray(aRetProperties); return aRetProperties; } protected Type[] getInterfaces(Object _oUnoObject, String _sServiceName){ Type[] aTypes = getInterfaces(_oUnoObject); List aListOfTypes = java.util.Arrays.asList(aTypes); Vector aTypesVector = new Vector(aListOfTypes); if (aTypes != null){ XInterfaceTypeDescription[] xInterfaceTypeDescriptions = getInterfaceDescriptionsOfService(_sServiceName); for (int i = aTypes.length - 1; i >= 0; i--){ if (!hasByName(xInterfaceTypeDescriptions, aTypes[i].getTypeName())){ aTypesVector.remove(i); } } } Type[] aRetTypes = new Type[aTypesVector.size()]; aTypesVector.toArray(aRetTypes); return aRetTypes; } public boolean hasInterfaces(Object _oUnoObject){ return (getInterfaces(_oUnoObject).length > 0); } protected Type[] getInterfaces(Object _oUnoParentObject){ Type[] aTypes = new Type[]{}; XTypeProvider xTypeProvider = ( XTypeProvider ) UnoRuntime.queryInterface( XTypeProvider.class, _oUnoParentObject); if ( xTypeProvider != null ) { aTypes = xTypeProvider.getTypes(); } return aTypes; } public static boolean isObjectSequence(Object _oUnoObject){ Type aType = AnyConverter.getType(_oUnoObject); return aType.getTypeClass().getValue() == TypeClass.SEQUENCE_value; } public static boolean isObjectPrimitive(Object _oUnoObject){ boolean breturn = false; if (_oUnoObject != null){ Type aType = AnyConverter.getType(_oUnoObject); breturn = isObjectPrimitive(_oUnoObject.getClass(), aType.getTypeClass()); } return breturn; } public static boolean isPrimitive(TypeClass _typeClass){ return (( _typeClass == TypeClass.BOOLEAN ) || ( _typeClass == TypeClass.BYTE ) || ( _typeClass == TypeClass.CHAR ) || ( _typeClass == TypeClass.DOUBLE ) || ( _typeClass == TypeClass.ENUM ) || ( _typeClass == TypeClass.FLOAT ) || ( _typeClass == TypeClass.HYPER ) || ( _typeClass == TypeClass.LONG ) || ( _typeClass == 
TypeClass.SHORT ) || ( _typeClass == TypeClass.STRING ) || ( _typeClass == TypeClass.UNSIGNED_HYPER ) || ( _typeClass == TypeClass.UNSIGNED_LONG ) || ( _typeClass == TypeClass.UNSIGNED_SHORT )); } public static boolean isObjectPrimitive(Class _oUnoClass, TypeClass _typeClass){ return !( ( !_oUnoClass.isPrimitive() ) && ( _typeClass != TypeClass.ARRAY ) && ( _typeClass != TypeClass.BOOLEAN ) && ( _typeClass != TypeClass.BYTE ) && ( _typeClass != TypeClass.CHAR ) && ( _typeClass != TypeClass.DOUBLE ) && ( _typeClass != TypeClass.ENUM ) && ( _typeClass != TypeClass.FLOAT ) && ( _typeClass != TypeClass.HYPER ) && ( _typeClass != TypeClass.LONG ) && ( _typeClass != TypeClass.SHORT ) && ( _typeClass != TypeClass.STRING ) && ( _typeClass != TypeClass.UNSIGNED_HYPER ) && ( _typeClass != TypeClass.UNSIGNED_LONG ) && ( _typeClass != TypeClass.UNSIGNED_SHORT )); } protected void initTypeDescriptionManager() { try { Object oTypeDescriptionManager = getXComponentContext().getValueByName("/singletons/com.sun.star.reflection.theTypeDescriptionManager"); m_xTDEnumerationAccess = (XTypeDescriptionEnumerationAccess) UnoRuntime.queryInterface(XTypeDescriptionEnumerationAccess.class, oTypeDescriptionManager); } catch ( java.lang.Exception e) { System.out.println(System.out); }} protected XTypeDescriptionEnumerationAccess getXTypeDescriptionEnumerationAccess(){ return m_xTDEnumerationAccess; } protected XConstantTypeDescription[] getFieldsOfConstantGroup(String _sTypeClass){ XConstantTypeDescription[] xConstantTypeDescriptions = null; try { TypeClass[] eTypeClasses = new com.sun.star.uno.TypeClass[1]; eTypeClasses[0] = com.sun.star.uno.TypeClass.CONSTANTS; XTypeDescriptionEnumeration xTDEnumeration = m_xTDEnumerationAccess.createTypeDescriptionEnumeration(getModuleName(_sTypeClass), eTypeClasses, TypeDescriptionSearchDepth.INFINITE); while (xTDEnumeration.hasMoreElements()) { XTypeDescription xTD = xTDEnumeration.nextTypeDescription(); if (xTD.getName().equals(_sTypeClass)){ 
XConstantsTypeDescription xConstantsTypeDescription = (XConstantsTypeDescription) UnoRuntime.queryInterface(XConstantsTypeDescription.class, xTD); xConstantTypeDescriptions = xConstantsTypeDescription.getConstants(); } String sName = xTD.getName(); } return xConstantTypeDescriptions; } catch ( java.lang.Exception e) { System.out.println(System.out); } return null; } private XServiceTypeDescription getServiceTypeDescription(String _sServiceName, TypeClass _eTypeClass){ try{ if (_sServiceName.length() > 0){ TypeClass[] eTypeClasses = new com.sun.star.uno.TypeClass[2]; eTypeClasses[0] = com.sun.star.uno.TypeClass.SERVICE; eTypeClasses[1] = _eTypeClass; XTypeDescriptionEnumeration xTDEnumeration = getXTypeDescriptionEnumerationAccess().createTypeDescriptionEnumeration(Introspector.getModuleName(_sServiceName), eTypeClasses, TypeDescriptionSearchDepth.INFINITE); while (xTDEnumeration.hasMoreElements()) { XTypeDescription xTD = xTDEnumeration.nextTypeDescription(); if (xTD.getName().equals(_sServiceName)){ XServiceTypeDescription xServiceTypeDescription = (XServiceTypeDescription) UnoRuntime.queryInterface(XServiceTypeDescription.class, xTD); return xServiceTypeDescription; } } } return null; } catch (Exception ex) { ex.printStackTrace(System.out); return null; }} public XPropertyTypeDescription[] getPropertyDescriptionsOfService(String _sServiceName){ try { XServiceTypeDescription xServiceTypeDescription = getServiceTypeDescription(_sServiceName, com.sun.star.uno.TypeClass.PROPERTY); if (xServiceTypeDescription != null){ XPropertyTypeDescription[] xPropertyTypeDescriptions = xServiceTypeDescription.getProperties(); return xPropertyTypeDescriptions; } } catch ( java.lang.Exception e) { System.out.println(System.out); } return new XPropertyTypeDescription[]{}; } public XTypeDescription getReferencedType(String _sTypeName){ XTypeDescription xTypeDescription = null; try{ XHierarchicalNameAccess xHierarchicalNameAccess = (XHierarchicalNameAccess) 
UnoRuntime.queryInterface(XHierarchicalNameAccess.class, m_xTDEnumerationAccess); if (xHierarchicalNameAccess != null){ if (xHierarchicalNameAccess.hasByHierarchicalName(_sTypeName)){ XIndirectTypeDescription xIndirectTypeDescription = (XIndirectTypeDescription) UnoRuntime.queryInterface(XIndirectTypeDescription.class, xHierarchicalNameAccess.getByHierarchicalName(_sTypeName)); if (xIndirectTypeDescription != null){ xTypeDescription = xIndirectTypeDescription.getReferencedType(); } } } } catch (Exception ex) { ex.printStackTrace(System.out); } return xTypeDescription; } public XInterfaceTypeDescription[] getInterfaceDescriptionsOfService(String _sServiceName){ try { XServiceTypeDescription xServiceTypeDescription = getServiceTypeDescription(_sServiceName, com.sun.star.uno.TypeClass.INTERFACE); if (xServiceTypeDescription != null){ XInterfaceTypeDescription[] xInterfaceTypeDescriptions = xServiceTypeDescription.getMandatoryInterfaces(); return xInterfaceTypeDescriptions; } } catch ( java.lang.Exception e) { System.out.println(System.out); } return new XInterfaceTypeDescription[]{}; } static boolean hasByName(XTypeDescription[] _xTypeDescriptions, String _sTypeName){ for (int i = 0; i < _xTypeDescriptions.length; i++){ if (_xTypeDescriptions[i].getName().equals(_sTypeName)){ return true; } } return false; } public static String getModuleName(String _sTypeClass){ int nlastindex = _sTypeClass.lastIndexOf("."); if (nlastindex > -1){ return _sTypeClass.substring(0, nlastindex); } else{ return ""; } } public static String getShortClassName(String _sClassName){ String sShortClassName = _sClassName; int nindex = _sClassName.lastIndexOf("."); if ((nindex < _sClassName.length()) && nindex > -1){ sShortClassName = _sClassName.substring(nindex + 1); } return sShortClassName; } public static boolean isUnoTypeObject(Object _oUnoObject){ return isOfUnoType(_oUnoObject, "com.sun.star.uno.Type"); } public static boolean isUnoPropertyTypeObject(Object _oUnoObject){ return 
isOfUnoType(_oUnoObject, "com.sun.star.beans.Property"); } public static boolean isUnoPropertyValueTypeObject(Object _oUnoObject){ return isOfUnoType(_oUnoObject, "com.sun.star.beans.PropertyValue"); } public static boolean isOfUnoType(Object _oUnoObject, String _sTypeName){ boolean bIsUnoObject = false; if (_oUnoObject != null){ if (_oUnoObject.getClass().isArray()){ if (!_oUnoObject.getClass().getComponentType().isPrimitive()){ Object[] oUnoArray = (Object[]) _oUnoObject; if (oUnoArray.length > 0){ bIsUnoObject = ( oUnoArray[0].getClass().getName().equals(_sTypeName)); } } } } else{ bIsUnoObject = (_oUnoObject.getClass().getName().equals(_sTypeName)); } return bIsUnoObject; } public String getConstantDisplayString(int _nValue, XConstantTypeDescription[] _xConstantTypeDescription, String _sDisplayString){ String sPrefix = ""; int[] nbits = new int[_xConstantTypeDescription.length]; for (int i = 0; i < _xConstantTypeDescription.length; i++){ short nConstantValue = ((Short) _xConstantTypeDescription[i].getConstantValue()).shortValue(); nbits[i] = _nValue & nConstantValue; if (nbits[i] > 0){ _sDisplayString += sPrefix + _xConstantTypeDescription[i].getName(); sPrefix = " + "; } } return _sDisplayString; } public static boolean isValid(Object[] _oObject){ if (_oObject != null){ if (_oObject.length > 0){ return true; } } return false; } public static boolean isValid(Object _oObject){ if (_oObject != null){ return (!AnyConverter.isVoid(_oObject)); } return false; } public static boolean isArray(Object _oObject){ return _oObject.getClass().isArray(); } public boolean hasSupportedServices(Object _oUnoObject){ boolean bHasSupportedServices = false; XServiceInfo xServiceInfo = ( XServiceInfo ) UnoRuntime.queryInterface( XServiceInfo.class, _oUnoObject); if ( xServiceInfo != null ){ String[] sSupportedServiceNames = xServiceInfo.getSupportedServiceNames(); bHasSupportedServices = sSupportedServiceNames.length > 0; } return bHasSupportedServices; } public Object 
getValueOfText(TypeClass aTypeClass, String sText){ Object oReturn = null; switch (aTypeClass.getValue()){ case TypeClass.CHAR_value: break; case TypeClass.DOUBLE_value: oReturn = Double.valueOf(sText); break; case TypeClass.ENUM_value: break; case TypeClass.FLOAT_value: oReturn = Float.valueOf(sText); break; case TypeClass.HYPER_value: oReturn = Long.valueOf(sText); break; case TypeClass.LONG_value: oReturn = Integer.valueOf(sText); break; case TypeClass.SHORT_value: oReturn = Byte.valueOf(sText); break; case TypeClass.STRING_value: oReturn = sText; break; case TypeClass.UNSIGNED_HYPER_value: oReturn = Long.valueOf(sText); break; case TypeClass.UNSIGNED_LONG_value: oReturn = Integer.valueOf(sText); break; case TypeClass.UNSIGNED_SHORT_value: oReturn = Byte.valueOf(sText); break; default: } return oReturn; } public XSimpleFileAccess getXSimpleFileAccess(){ try { if (xSimpleFileAccess == null){ Object oSimpleFileAccess = m_xComponentContext.getServiceManager().createInstanceWithContext("com.sun.star.ucb.SimpleFileAccess", m_xComponentContext); xSimpleFileAccess = (XSimpleFileAccess) com.sun.star.uno.UnoRuntime.queryInterface(XSimpleFileAccess.class, oSimpleFileAccess); } return xSimpleFileAccess; } catch (com.sun.star.uno.Exception ex) { ex.printStackTrace(System.out); return null; }} public boolean isValidSDKInstallationPath(String _sSDKInstallationPath){ boolean bIsValid = false; try { String sIDLFolder = Introspector.addToPath(_sSDKInstallationPath, Inspector.sIDLDOCUMENTSUBFOLDER); String sIndexFile = Introspector.addToPath(_sSDKInstallationPath, "index.html"); if (getXSimpleFileAccess() != null){ bIsValid = (getXSimpleFileAccess().exists(sIDLFolder) && getXSimpleFileAccess().exists(sIndexFile)); } } catch (com.sun.star.uno.Exception ex) { ex.printStackTrace(System.out); } return bIsValid; } public static String addToPath(String _sPath, String _sSubPath){ if (!_sPath.endsWith("/")){ _sPath += "/"; } return _sPath + _sSubPath; } }
11,849
368
<filename>mac/TeamTalk/interface/UserData/DDGroupInfoManager.h // // DDGroupInfoManager.h // Duoduo // // Created by 独嘉 on 14-3-4. // Copyright (c) 2014年 zuoye. All rights reserved. // #import <Foundation/Foundation.h> @class MTGroupEntity,DDGroupDataWindow,DDGroupDataModule; @interface DDGroupInfoManager : NSObject { DDGroupDataModule* _model; DDGroupDataWindow* _groupInfoWindowController; } + (DDGroupInfoManager*)instance; /** * 显示组信息 * * @param showGroup 待显示的组 * @param context context */ - (void)showGroup:(MTGroupEntity*)showGroup context:(id)context; - (void)hideGroupUserInfo; @end
245
432
/*- * Written by: <NAME> * Copyright (c) 2002 Adaptec Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
* * $FreeBSD: src/sys/dev/ips/ips_ioctl.c,v 1.5 2004/05/30 04:01:29 scottl Exp $ */ #include <dev/raid/ips/ips.h> #include <dev/raid/ips/ips_ioctl.h> static void ips_ioctl_finish(ips_command_t *command) { ips_ioctl_t *ioctl_cmd = command->arg; if (ioctl_cmd->readwrite & IPS_IOCTL_READ) { bus_dmamap_sync(ioctl_cmd->dmatag, ioctl_cmd->dmamap, BUS_DMASYNC_POSTREAD); } else if (ioctl_cmd->readwrite & IPS_IOCTL_WRITE) { bus_dmamap_sync(ioctl_cmd->dmatag, ioctl_cmd->dmamap, BUS_DMASYNC_POSTWRITE); } bus_dmamap_sync(command->sc->command_dmatag, command->command_dmamap, BUS_DMASYNC_POSTWRITE); bus_dmamap_unload(ioctl_cmd->dmatag, ioctl_cmd->dmamap); ioctl_cmd->status.value = command->status.value; ips_insert_free_cmd(command->sc, command); } static void ips_ioctl_callback(void *cmdptr, bus_dma_segment_t *segments, int segnum, int error) { ips_command_t *command; ips_ioctl_t *ioctl_cmd; ips_generic_cmd *command_buffer; command = cmdptr; ioctl_cmd = command->arg; command_buffer = command->command_buffer; if (error) { ioctl_cmd->status.value = IPS_ERROR_STATUS; ips_insert_free_cmd(command->sc, command); return; } command_buffer->id = command->id; command_buffer->buffaddr = segments[0].ds_addr; if (ioctl_cmd->readwrite & IPS_IOCTL_WRITE) { bus_dmamap_sync(ioctl_cmd->dmatag, ioctl_cmd->dmamap, BUS_DMASYNC_PREWRITE); } else if (ioctl_cmd->readwrite & IPS_IOCTL_READ) { bus_dmamap_sync(ioctl_cmd->dmatag, ioctl_cmd->dmamap, BUS_DMASYNC_PREREAD); } bus_dmamap_sync(command->sc->command_dmatag, command->command_dmamap, BUS_DMASYNC_PREWRITE); command->sc->ips_issue_cmd(command); } static int ips_ioctl_start(ips_command_t *command) { ips_ioctl_t *ioctl_cmd = command->arg; memcpy(command->command_buffer, ioctl_cmd->command_buffer, sizeof(ips_generic_cmd)); command->callback = ips_ioctl_finish; bus_dmamap_load(ioctl_cmd->dmatag, ioctl_cmd->dmamap, ioctl_cmd->data_buffer, ioctl_cmd->datasize, ips_ioctl_callback, command, 0); return 0; } static int ips_ioctl_cmd(ips_softc_t *sc, ips_ioctl_t 
*ioctl_cmd, ips_user_request *user_request) { ips_command_t *command; int error = EINVAL; if (bus_dma_tag_create( /* parent */ sc->adapter_dmatag, /* alignment */ 1, /* boundary */ 0, /* lowaddr */ BUS_SPACE_MAXADDR_32BIT, /* highaddr */ BUS_SPACE_MAXADDR, /* filter */ NULL, /* filterarg */ NULL, /* maxsize */ ioctl_cmd->datasize, /* numsegs */ 1, /* maxsegsize*/ ioctl_cmd->datasize, /* flags */ 0, &ioctl_cmd->dmatag) != 0) { return ENOMEM; } if (bus_dmamem_alloc(ioctl_cmd->dmatag, &ioctl_cmd->data_buffer, 0, &ioctl_cmd->dmamap)) { error = ENOMEM; goto exit; } if (copyin(user_request->data_buffer, ioctl_cmd->data_buffer, ioctl_cmd->datasize)) goto exit; ioctl_cmd->status.value = 0xffffffff; lockmgr(&sc->queue_lock, LK_EXCLUSIVE|LK_RETRY); if ((error = ips_get_free_cmd(sc, &command, 0)) > 0) { error = ENOMEM; lockmgr(&sc->queue_lock, LK_RELEASE); goto exit; } command->arg = ioctl_cmd; ips_ioctl_start(command); while (ioctl_cmd->status.value == 0xffffffff) tsleep(ioctl_cmd, 0, "ips", hz / 10); if (COMMAND_ERROR(&ioctl_cmd->status)) error = EIO; else error = 0; lockmgr(&sc->queue_lock, LK_RELEASE); if (copyout(ioctl_cmd->data_buffer, user_request->data_buffer, ioctl_cmd->datasize)) error = EINVAL; exit: bus_dmamem_free(ioctl_cmd->dmatag, ioctl_cmd->data_buffer, ioctl_cmd->dmamap); bus_dma_tag_destroy(ioctl_cmd->dmatag); return error; } int ips_ioctl_request(ips_softc_t *sc, u_long ioctl_request, caddr_t addr, int32_t flags) { ips_ioctl_t *ioctl_cmd; ips_user_request *user_request; int error = EINVAL; switch (ioctl_request) { case IPS_USER_CMD: user_request = (ips_user_request *)addr; ioctl_cmd = kmalloc(sizeof(ips_ioctl_t), M_IPSBUF, M_WAITOK); ioctl_cmd->command_buffer = kmalloc(sizeof(ips_generic_cmd), M_IPSBUF, M_WAITOK); if (copyin(user_request->command_buffer, ioctl_cmd->command_buffer, sizeof(ips_generic_cmd))) { kfree(ioctl_cmd->command_buffer, M_IPSBUF); kfree(ioctl_cmd, M_IPSBUF); break; } ioctl_cmd->readwrite = IPS_IOCTL_READ | IPS_IOCTL_WRITE; 
ioctl_cmd->datasize = IPS_IOCTL_BUFFER_SIZE; error = ips_ioctl_cmd(sc, ioctl_cmd, user_request); kfree(ioctl_cmd->command_buffer, M_IPSBUF); kfree(ioctl_cmd, M_IPSBUF); break; } return error; }
2,448
3,084
<reponame>ixjf/Windows-driver-samples<filename>wpd/WpdMultiTransportDriver/Device.cpp<gh_stars>1000+ #include "stdafx.h" #include "Device.h" #include "WpdMultiTransportDriver_i.c" #include "Device.tmh" STDMETHODIMP_(HRESULT) CDevice::OnD0Entry(_In_ IWDFDevice* /*pDevice*/, WDF_POWER_DEVICE_STATE /*previousState*/) { return S_OK; } STDMETHODIMP_(HRESULT) CDevice::OnD0Exit(_In_ IWDFDevice* /*pDevice*/, WDF_POWER_DEVICE_STATE /*newState*/) { return S_OK; } STDMETHODIMP_(VOID) CDevice::OnSurpriseRemoval(_In_ IWDFDevice* /*pDevice*/) { return; } STDMETHODIMP_(HRESULT) CDevice::OnQueryRemove(_In_ IWDFDevice* /*pDevice*/) { return S_OK; } STDMETHODIMP_(HRESULT) CDevice::OnQueryStop(_In_ IWDFDevice* /*pDevice*/) { return S_OK; } STDMETHODIMP_(VOID) CDevice::OnSelfManagedIoCleanup(_In_ IWDFDevice* /*pDevice*/) { return; } STDMETHODIMP_(VOID) CDevice::OnSelfManagedIoFlush(_In_ IWDFDevice* /*pDevice*/) { return; } STDMETHODIMP_(HRESULT) CDevice::OnSelfManagedIoInit(_In_ IWDFDevice* /*pDevice*/) { return S_OK; } STDMETHODIMP_(HRESULT) CDevice::OnSelfManagedIoSuspend(_In_ IWDFDevice* /*pDevice*/) { return S_OK; } STDMETHODIMP_(HRESULT) CDevice::OnSelfManagedIoRestart(_In_ IWDFDevice* /*pDevice*/) { return S_OK; } STDMETHODIMP_(HRESULT) CDevice::OnSelfManagedIoStop(_In_ IWDFDevice* /*pDevice*/) { return S_OK; } STDMETHODIMP_(HRESULT) CDevice::OnPrepareHardware(_In_ IWDFDevice* pDevice) { HRESULT hr = S_OK; if (m_pWpdBaseDriver != NULL) { hr = m_pWpdBaseDriver->Initialize(); CHECK_HR(hr, "Failed to Initialize the driver class"); } // Initialize the WPD Class Extension. This will enable the appropriate WPD interface GUID, // as well as do any additional initialization (e.g. enabling Legacy Compatibility layers for those drivers // which requested support in their INF). 
if (hr == S_OK && m_pPortableDeviceClassExtension == NULL) { CComPtr<IPortableDeviceValues> pOptions; CComPtr<IPortableDevicePropVariantCollection> pContentTypes; hr = CoCreateInstance(CLSID_PortableDeviceClassExtension, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceClassExtension, (VOID**)&m_pPortableDeviceClassExtension); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceClassExtension"); if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pOptions); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues"); if (hr == S_OK) { CComPtr<IPortableDeviceValues> pIDs; // ATTENTION: The following GUID value is provided for illustrative // purposes only. // // Rather than hard-coding a GUID value in your driver, the driver // must obtain a GUID value from the device. The GUID value on the // device can be provisioned by the driver (upon first-connect) by // using CoCreateGUID and setting that value into non-volatile storage // on the device. The same GUID value will then be reported by each // of your device's transports. To avoid a provisioning race condition, // always read the value from the device after provisioning. Only // provision the GUID once. Thereafter, always use the value provided // by the device. GUID guidFUID = { 0x245e5e81, 0x2c17, 0x40a4, { 0x8b, 0x10, 0xe9, 0x43, 0xc5, 0x4c, 0x97, 0xb2 } }; // Initialize the PortableDeviceClassExtension with a list of supported content types for the // connected device. This will ensure that the correct application compatibility settings will // be applied for your device. 
// Get supported content types if (hr == S_OK) { hr = GetSupportedContentTypes(&pContentTypes); CHECK_HR(hr, "Failed to get supported content types"); } // Add the supported types to the options if (hr == S_OK) { hr = pOptions->SetIPortableDevicePropVariantCollectionValue(WPD_CLASS_EXTENSION_OPTIONS_SUPPORTED_CONTENT_TYPES, pContentTypes); CHECK_HR(hr, "Failed to set WPD_CLASS_EXTENSION_OPTIONS_SUPPORTED_CONTENT_TYPES"); } if (hr == S_OK) { m_pWpdBaseDriver->m_pQueueCallback = NULL; HRESULT hrTemp = m_pPortableDeviceClassExtension->QueryInterface( __uuidof(IQueueCallbackDeviceIoControl), (void**)&m_pWpdBaseDriver->m_pQueueCallback ); CHECK_HR(hrTemp, "Failed to obtain IQueueCallbackDeviceIoControl interface from class extension"); if (hrTemp == S_OK) { // Enable the Multi-Transport Mode option hr = pOptions->SetBoolValue(WPD_CLASS_EXTENSION_OPTIONS_MULTITRANSPORT_MODE, TRUE); CHECK_HR(hr, "Failed to enable multi-transport mode"); // Create a PnP ID value collection if (hr == S_OK) { hr = CreateIDValues(DEVICE_MANUFACTURER_VALUE, DEVICE_MODEL_VALUE, DEVICE_FIRMWARE_VERSION_VALUE, guidFUID, &pIDs); CHECK_HR(hr, "Failed to Create the ID value collection"); } // Add the PnP ID value collection to the options if (hr == S_OK) { hr = pOptions->SetIPortableDeviceValuesValue(WPD_CLASS_EXTENSION_OPTIONS_DEVICE_IDENTIFICATION_VALUES, pIDs); CHECK_HR(hr, "Failed to set WPD_CLASS_EXTENSION_OPTIONS_DEVICE_IDENTIFICATION_VALUES"); } // Add the transport bandwidth (in kilobits per second units) to the options // (0 indicates bandwidth unknown) if (hr == S_OK) { // Set the transport bandwidth (optional) hr = pOptions->SetUnsignedIntegerValue(WPD_CLASS_EXTENSION_OPTIONS_TRANSPORT_BANDWIDTH, 0L); CHECK_HR(hr, "Failed to set WPD_CLASS_EXTENSION_OPTIONS_TRANSPORT_BANDWIDTH"); } } } if (hr == S_OK) { hr = m_pPortableDeviceClassExtension->Initialize(pDevice, pOptions); CHECK_HR(hr, "Failed to Initialize portable device class extension object"); } // Since users commonly have the abiltity 
to customize their device even when it is not // connected to the PC, we need to make sure the PC is current when the driver loads. // // Send the latest device friendly name to the PortableDeviceClassExtension component // so the system is always updated with the current device name. // // This call should also be made after a successful property set operation of // WPD_DEVICE_FRIENDLY_NAME. LPWSTR wszDeviceFriendlyName = NULL; if (hr == S_OK) { hr = GetDeviceFriendlyName(&wszDeviceFriendlyName); CHECK_HR(hr, "Failed to get device's friendly name"); } if (hr == S_OK && wszDeviceFriendlyName != NULL) { hr = UpdateDeviceFriendlyName(m_pPortableDeviceClassExtension, wszDeviceFriendlyName); CHECK_HR(hr, "Failed to update device's friendly name"); } // Free the memory. CoTaskMemFree ignores NULLs so no need to check. CoTaskMemFree(wszDeviceFriendlyName); } } } return hr; } STDMETHODIMP_(HRESULT) CDevice::OnReleaseHardware(_In_ IWDFDevice* /*pDevice*/) { if (m_pWpdBaseDriver != NULL) { m_pWpdBaseDriver->Uninitialize(); } if (m_pPortableDeviceClassExtension != NULL) { m_pPortableDeviceClassExtension = NULL; } return S_OK; } HRESULT CDevice::GetSupportedContentTypes( _Outptr_ IPortableDevicePropVariantCollection** ppContentTypes) { HRESULT hr = S_OK; CComPtr<IPortableDeviceValues> pParams; CComPtr<IPortableDeviceValues> pResults; if (ppContentTypes == NULL) { hr = E_INVALIDARG; return hr; } // CoCreate a collection to store the WPD_COMMAND_CAPABILITIES_GET_SUPPORTED_CONTENT_TYPES command parameters. if(SUCCEEDED(hr)) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pParams); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues"); } // CoCreate a collection to store the WPD_COMMAND_CAPABILITIES_GET_SUPPORTED_CONTENT_TYPES command results. 
if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pResults); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues"); } // Set the params if(SUCCEEDED(hr)) { hr = pParams->SetGuidValue(WPD_PROPERTY_COMMON_COMMAND_CATEGORY, WPD_COMMAND_CAPABILITIES_GET_SUPPORTED_CONTENT_TYPES.fmtid); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_COMMON_COMMAND_CATEGORY")); } if(SUCCEEDED(hr)) { hr = pParams->SetUnsignedIntegerValue(WPD_PROPERTY_COMMON_COMMAND_ID, WPD_COMMAND_CAPABILITIES_GET_SUPPORTED_CONTENT_TYPES.pid); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_COMMON_COMMAND_ID")); } if(SUCCEEDED(hr)) { hr = pParams->SetGuidValue(WPD_PROPERTY_CAPABILITIES_FUNCTIONAL_CATEGORY, WPD_FUNCTIONAL_CATEGORY_ALL); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_CAPABILITIES_FUNCTIONAL_CATEGORY")); } // Make the call if(SUCCEEDED(hr)) { hr = m_pWpdBaseDriver->DispatchWpdMessage(pParams, pResults); CHECK_HR(hr, ("Failed to dispatch message to get supported content types")); } // Get the results if(SUCCEEDED(hr)) { hr = pResults->GetIPortableDevicePropVariantCollectionValue(WPD_PROPERTY_CAPABILITIES_CONTENT_TYPES, ppContentTypes); CHECK_HR(hr, ("Failed to get WPD_PROPERTY_CAPABILITIES_CONTENT_TYPES")); } return hr; } HRESULT CDevice::GetDeviceFriendlyName( _Outptr_result_maybenull_ LPWSTR* pwszDeviceFriendlyName) { HRESULT hr = S_OK; CComPtr<IPortableDeviceValues> pParams; CComPtr<IPortableDeviceValues> pResults; CComPtr<IPortableDeviceKeyCollection> pKeys; CComPtr<IPortableDeviceValues> pValues; if (pwszDeviceFriendlyName == NULL) { hr = E_INVALIDARG; return hr; } *pwszDeviceFriendlyName = NULL; // CoCreate a collection to store the WPD_COMMAND_OBJECT_PROPERTIES_GET command parameters. 
if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pParams); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues"); } // CoCreate a collection to store the WPD_COMMAND_OBJECT_PROPERTIES_GET command results. if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pResults); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues"); } // CoCreate a collection to store the requested property keys. In our case, we are requesting just the device friendly name // (WPD_DEVICE_FRIENDLY_NAME) if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceKeyCollection, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceKeyCollection, (VOID**)&pKeys); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceKeyCollection for results"); } // Set the params if (hr == S_OK) { hr = pParams->SetGuidValue(WPD_PROPERTY_COMMON_COMMAND_CATEGORY, WPD_COMMAND_OBJECT_PROPERTIES_GET.fmtid); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_COMMON_COMMAND_CATEGORY")); } if (hr == S_OK) { hr = pParams->SetUnsignedIntegerValue(WPD_PROPERTY_COMMON_COMMAND_ID, WPD_COMMAND_OBJECT_PROPERTIES_GET.pid); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_COMMON_COMMAND_ID")); } if (hr == S_OK) { hr = pParams->SetStringValue(WPD_PROPERTY_OBJECT_PROPERTIES_OBJECT_ID, WPD_DEVICE_OBJECT_ID); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_OBJECT_PROPERTIES_OBJECT_ID")); } if (hr == S_OK) { hr = pKeys->Add(WPD_DEVICE_FRIENDLY_NAME); CHECK_HR(hr, ("Failed to add WPD_DEVICE_FRIENDLY_NAME to key collection")); } if (hr == S_OK) { hr = pParams->SetIPortableDeviceKeyCollectionValue(WPD_PROPERTY_OBJECT_PROPERTIES_PROPERTY_KEYS, pKeys); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_OBJECT_PROPERTIES_PROPERTY_KEYS")); } // Make the call if (hr == S_OK) { hr = m_pWpdBaseDriver->DispatchWpdMessage(pParams, pResults); CHECK_HR(hr, ("Failed to dispatch message to get supported content 
types")); } // Get the results if (hr == S_OK) { hr = pResults->GetIPortableDeviceValuesValue(WPD_PROPERTY_OBJECT_PROPERTIES_PROPERTY_VALUES, &pValues); CHECK_HR(hr, ("Failed to get WPD_PROPERTY_OBJECT_PROPERTIES_PROPERTY_VALUES")); } if (hr == S_OK) { hr = pValues->GetStringValue(WPD_DEVICE_FRIENDLY_NAME, pwszDeviceFriendlyName); CHECK_HR(hr, ("Failed to get WPD_DEVICE_FRIENDLY_NAME")); } return hr; } HRESULT CDevice::CreateIDValues( _In_ LPCWSTR pszManufacturer, _In_ LPCWSTR pszModel, _In_opt_ LPCWSTR pszVersion, _In_ REFGUID guidFUID, _COM_Outptr_ IPortableDeviceValues** ppValues) { HRESULT hr = S_OK; CComPtr<IPortableDeviceValues> pValues; *ppValues = NULL; // Create the object to hold the ID values hr = CoCreateInstance( CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pValues); if (SUCCEEDED(hr)) { hr = pValues->SetStringValue(WPD_DEVICE_MANUFACTURER, pszManufacturer); } if (SUCCEEDED(hr)) { hr = pValues->SetStringValue(WPD_DEVICE_MODEL, pszModel); } if (SUCCEEDED(hr) && pszVersion) { hr = pValues->SetStringValue(WPD_DEVICE_FIRMWARE_VERSION, pszVersion); } if (SUCCEEDED(hr)) { hr = pValues->SetGuidValue(WPD_DEVICE_FUNCTIONAL_UNIQUE_ID, guidFUID); } if (SUCCEEDED(hr)) { *ppValues = pValues.Detach(); } return hr; } HRESULT UpdateDeviceFriendlyName( _In_ IPortableDeviceClassExtension* pPortableDeviceClassExtension, _In_ LPCWSTR wszDeviceFriendlyName) { HRESULT hr = S_OK; // If we were passed NULL parameters we have nothing to do, return S_OK. 
if ((pPortableDeviceClassExtension == NULL) || (wszDeviceFriendlyName == NULL)) { return S_OK; } CComPtr<IPortableDeviceValues> pParams; CComPtr<IPortableDeviceValues> pResults; CComPtr<IPortableDeviceValues> pValues; // Prepare to make a call to set the device information if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pParams); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues"); } if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pResults); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues for results"); } if (hr == S_OK) { hr = CoCreateInstance(CLSID_PortableDeviceValues, NULL, CLSCTX_INPROC_SERVER, IID_IPortableDeviceValues, (VOID**)&pValues); CHECK_HR(hr, "Failed to CoCreate CLSID_PortableDeviceValues for results"); } // Get the information values to update and set them in WPD_PROPERTY_CLASS_EXTENSION_DEVICE_INFORMATION_VALUES if (hr == S_OK) { hr = pValues->SetStringValue(WPD_DEVICE_FRIENDLY_NAME, wszDeviceFriendlyName); CHECK_HR(hr, ("Failed to set WPD_DEVICE_FRIENDLY_NAME")); } // Set the params if (hr == S_OK) { hr = pParams->SetGuidValue(WPD_PROPERTY_COMMON_COMMAND_CATEGORY, WPD_COMMAND_CLASS_EXTENSION_WRITE_DEVICE_INFORMATION.fmtid); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_COMMON_COMMAND_CATEGORY")); } if (hr == S_OK) { hr = pParams->SetUnsignedIntegerValue(WPD_PROPERTY_COMMON_COMMAND_ID, WPD_COMMAND_CLASS_EXTENSION_WRITE_DEVICE_INFORMATION.pid); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_COMMON_COMMAND_ID")); } if (hr == S_OK) { hr = pParams->SetIPortableDeviceValuesValue(WPD_PROPERTY_CLASS_EXTENSION_DEVICE_INFORMATION_VALUES, pValues); CHECK_HR(hr, ("Failed to set WPD_PROPERTY_CLASS_EXTENSION_DEVICE_INFORMATION_VALUES")); } // Make the call if (hr == S_OK) { hr = pPortableDeviceClassExtension->ProcessLibraryMessage(pParams, pResults); CHECK_HR(hr, ("Failed to process 
update device information message")); } // A Failed ProcessLibraryMessage operation for updating this value is not considered // fatal and should return S_OK. return S_OK; }
10,571
3,651
/* * * * Copyright 2010-2016 OrientDB LTD (info(-at-)orientdb.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package com.orientechnologies.orient.etl.block; import com.orientechnologies.orient.console.OConsoleDatabaseApp; import com.orientechnologies.orient.core.command.OCommandContext; import com.orientechnologies.orient.core.exception.OConfigurationException; import com.orientechnologies.orient.core.record.impl.ODocument; import java.util.List; /** Executes the OrientDB console. Useful to execute batches. 
*/ public class OETLConsoleBlock extends OETLAbstractBlock { protected String file; protected List<String> commands; protected OConsoleDatabaseApp console; @Override public ODocument getConfiguration() { return new ODocument() .fromJSON( "{parameters:[" + getCommonConfigurationParameters() + "{file:{optional:true,description:'Input filename with commands.sh to execute'}}" + "{commands.sh:{optional:true,description:'Commands to execute in sequence as an array of strings'}}" + "]}"); } @Override public void configure(final ODocument iConfiguration, OCommandContext iContext) { super.configure(iConfiguration, iContext); if (iConfiguration.containsField("file")) file = iConfiguration.field("file"); if (iConfiguration.containsField("commands.sh")) commands = iConfiguration.field("commands.sh"); if (file == null && commands == null) throw new OConfigurationException("file or commands.sh are mandatory"); if (file != null) console = new OConsoleDatabaseApp(new String[] {file}); else console = new OConsoleDatabaseApp(commands.toArray(new String[commands.size()])); } @Override public String getName() { return "console"; } @Override public Object executeBlock() { return console.run(); } }
762
634
# %% [markdown] # # Decision tree for regression # # In this notebook, we present how decision trees are working in regression # problems. We show differences with the decision trees previously presented in # a classification setting. # # First, we load the penguins dataset specifically for solving a regression # problem. # %% [markdown] # ```{note} # If you want a deeper overview regarding this dataset, you can refer to the # Appendix - Datasets description section at the end of this MOOC. # ``` # %% import pandas as pd penguins = pd.read_csv("../datasets/penguins_regression.csv") data_columns = ["Flipper Length (mm)"] target_column = "Body Mass (g)" data_train, target_train = penguins[data_columns], penguins[target_column] # %% [markdown] # To illustrate how decision trees are predicting in a regression setting, we # will create a synthetic dataset containing all possible flipper length from # the minimum to the maximum of the original data. # %% import numpy as np data_test = pd.DataFrame(np.arange(data_train[data_columns[0]].min(), data_train[data_columns[0]].max()), columns=data_columns) # %% import matplotlib.pyplot as plt import seaborn as sns sns.scatterplot(data=penguins, x="Flipper Length (mm)", y="Body Mass (g)", color="black", alpha=0.5) _ = plt.title("Illustration of the regression dataset used") # %% [markdown] # We will first illustrate the difference between a linear model and a decision # tree. # %% from sklearn.linear_model import LinearRegression linear_model = LinearRegression() linear_model.fit(data_train, target_train) target_predicted = linear_model.predict(data_test) # %% sns.scatterplot(data=penguins, x="Flipper Length (mm)", y="Body Mass (g)", color="black", alpha=0.5) plt.plot(data_test, target_predicted, label="Linear regression") plt.legend() _ = plt.title("Prediction function using a LinearRegression") # %% [markdown] # On the plot above, we see that a non-regularized `LinearRegression` is able # to fit the data. 
A feature of this model is that all new predictions # will be on the line. # %% ax = sns.scatterplot(data=penguins, x="Flipper Length (mm)", y="Body Mass (g)", color="black", alpha=0.5) plt.plot(data_test, target_predicted, label="Linear regression", linestyle="--") plt.scatter(data_test[::3], target_predicted[::3], label="Test predictions", color="tab:orange") plt.legend() _ = plt.title("Prediction function using a LinearRegression") # %% [markdown] # Contrary to linear models, decision trees are non-parametric models: # they do not make assumptions about the way data is distributed. # This will affect the prediction scheme. Repeating the above experiment # will highlight the differences. # %% from sklearn.tree import DecisionTreeRegressor tree = DecisionTreeRegressor(max_depth=1) tree.fit(data_train, target_train) target_predicted = tree.predict(data_test) # %% sns.scatterplot(data=penguins, x="Flipper Length (mm)", y="Body Mass (g)", color="black", alpha=0.5) plt.plot(data_test, target_predicted, label="Decision tree") plt.legend() _ = plt.title("Prediction function using a DecisionTreeRegressor") # %% [markdown] # We see that the decision tree model does not have an *a priori* distribution # for the data and we do not end-up with a straight line to regress flipper # length and body mass. # # Instead, we observe that the predictions of the tree are piecewise constant. # Indeed, our feature space was split into two partitions. Let's check the # tree structure to see what was the threshold found during the training. # %% from sklearn.tree import plot_tree _, ax = plt.subplots(figsize=(8, 6)) _ = plot_tree(tree, feature_names=data_columns, ax=ax) # %% [markdown] # The threshold for our feature (flipper length) is 206.5 mm. The predicted # values on each side of the split are two constants: 3683.50 g and 5023.62 g. # These values corresponds to the mean values of the training samples in each # partition. 
# # In classification, we saw that increasing the depth of the tree allowed us to # get more complex decision boundaries. # Let's check the effect of increasing the depth in a regression setting: # %% tree = DecisionTreeRegressor(max_depth=3) tree.fit(data_train, target_train) target_predicted = tree.predict(data_test) # %% sns.scatterplot(data=penguins, x="Flipper Length (mm)", y="Body Mass (g)", color="black", alpha=0.5) plt.plot(data_test, target_predicted, label="Decision tree") plt.legend() _ = plt.title("Prediction function using a DecisionTreeRegressor") # %% [markdown] # Increasing the depth of the tree will increase the number of partition and # thus the number of constant values that the tree is capable of predicting. # # In this notebook, we highlighted the differences in behavior of a decision # tree used in a classification problem in contrast to a regression problem.
1,622
8,027
<filename>src/com/facebook/buck/android/toolchain/ndk/NdkCxxPlatformCompiler.java<gh_stars>1000+ /* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.android.toolchain.ndk; import com.facebook.buck.core.rulekey.AddToRuleKey; import com.facebook.buck.core.rulekey.AddsToRuleKey; import com.facebook.buck.core.util.immutables.BuckStyleValue; @BuckStyleValue public interface NdkCxxPlatformCompiler extends AddsToRuleKey { @AddToRuleKey NdkCompilerType getType(); /** * @return the compiler version, corresponding to either `gcc_version` or `clang_version` from the * .buckconfig settings, depending on which compiler family was selected. */ @AddToRuleKey String getVersion(); /** * @return the GCC compiler version. Since even runtimes which are not GCC-based need to use GCC * tools (e.g. ar, as,, ld.gold), we need to *always* have a version of GCC. */ @AddToRuleKey String getGccVersion(); static NdkCxxPlatformCompiler of(NdkCompilerType type, String version, String gccVersion) { return ImmutableNdkCxxPlatformCompiler.of(type, version, gccVersion); } }
518
479
<filename>lib/antlr-2.7.5/examples/python/xml/xml.py import sys import antlr import codecs import xml_l def warn(msg): print >>sys.stderr,"warning:",msg sys.stderr.flush() def error(msg): print >>sys.stderr,"got error:",msg sys.stderr.flush() ### Xml handling depends very much on whether ### your terminal can handle (print) xml chars. ### To be sure about it, just create a non ASCII ### letter and try to print it. If that is not going ### to work, we create an alternative method which ### maps non printable chars to '?'. c = u"\N{LATIN SMALL LETTER O WITH ACUTE}" try: print c except: warn("terminal can't display unicode chars.") sys.stderr.flush() ## I'm just going to redefine 'unicode' to return ## a ASCII string. def unicode(x): return x.__str__().encode("ascii","replace") ### Now for the input. This should ideally be done ### in the lexer .. ### replace stdin with a wrapper that spits out ### unicode chars. sys.stdin = codecs.lookup('latin1')[-2](sys.stdin) for token in xml_l.Lexer() : pass
397
333
<gh_stars>100-1000 /* For copyright information please refer to files in the COPYRIGHT directory */ #define MK_TRANSIENT_PTR(type, f) #define MK_TRANSIENT_VAL(type, f, val) #define PARAMETER(T) #define MK_VAL(type, f) #define MK_PTR(type, f) #define MK_PTR_TVAR(f, T) #define MK_PTR_TAPP(type, f, cpr) #define MK_ARRAY(type, size, f) #define MK_ARRAY_IN_STRUCT(type, size, f) #define MK_VAR_ARRAY(type, f) #define MK_VAR_ARRAY_IN_STRUCT(type, f) #define MK_PTR_ARRAY(type, size, f) #define MK_PTR_ARRAY_IN_STRUCT(type, size, f) #define MK_PTR_VAR_ARRAY(type, f) #define MK_PTR_VAR_ARRAY_IN_STRUCT(type, f) #define MK_PTR_TAPP_ARRAY(type, size, f, cpfn) #define MK_PTR_TAPP_ARRAY_IN_STRUCT(type, size, f, cpfn) #define RE_STRUCT_BEGIN(T) \ RE_STRUCT_FUNC_PROTO(T); #define RE_STRUCT_BEGIN_NO_BUF(T) \ RE_STRUCT_FUNC_PROTO_NO_BUF(T); #define RE_STRUCT_BEGIN_NO_BUF_PTR_DESC(T) \ RE_STRUCT_FUNC_PROTO_NO_BUF_PTR_DESC(T); #define RE_STRUCT_GENERIC_BEGIN(T, cpfn) \ RE_STRUCT_GENERIC_FUNC_PROTO(T, cpfn); #define RE_STRUCT_GENERIC_END(T, cpfn) #define RE_STRUCT_END(T)
533
1,592
<gh_stars>1000+ #globalVars.py #A part of NonVisual Desktop Access (NVDA) #Copyright (C) 2006-2007 NVDA Contributors <http://www.nvda-project.org/> #This file is covered by the GNU General Public License. #See the file COPYING for more details. """global variables module @var foregroundObject: holds the current foreground object. The object for the last foreground event received. @type foregroundObject: L{NVDAObjects.NVDAObject} @var focusObject: holds the current focus object @type focusObject: L{NVDAObjects.NVDAObject} @var mouseObject: holds the object that is at the position of the mouse pointer @type mouseObject: L{NVDAObjects.NVDAObject} @var mouseOldX: the last x coordinate of the mouse pointer before its current position @type oldMouseX: int @var mouseOldY: the last y coordinate of the mouse pointer before its current position @type oldMouseY: int @var navigatorObject: holds the current navigator object @type navigatorObject: L{NVDAObjects.NVDAObject} @var navigatorTracksFocus: if true, the navigator object will follow the focus as it changes @type navigatorTracksFocus: boolean """ startTime=0 desktopObject=None foregroundObject=None focusObject=None focusAncestors=[] focusDifferenceLevel=None mouseObject=None mouseOldX=None mouseOldY=None navigatorObject=None reviewPosition=None reviewPositionObj=None lastProgressValue=0 appArgs=None appArgsExtra=None settingsRing = None speechDictionaryProcessing=True exitCode=0
451
809
<reponame>nikitavlaev/embox
/**
 * @file lthread_test.c
 * @brief simple test for lthreads
 *
 * Exercises the embox lightweight-thread (lthread) API: launching,
 * joining, rescheduling from inside an lthread, priority ordering,
 * and re-launching a finished lthread.
 *
 * @author <NAME>
 * @date 16.02.2014
 */

#include <util/err.h>

#include <embox/test.h>

#include <kernel/sched.h>
#include <kernel/sched/schedee_priority.h>
#include <kernel/lthread/lthread.h>
#include <kernel/time/ktime.h>

#define LTHREAD_QUANTITY OPTION_GET(NUMBER, lthreads_quantity)

EMBOX_TEST_SUITE("test for lthread API");

/* Completion flag/counter shared between the test body (which spins on it)
 * and the lthread bodies (which increment it). volatile: written from
 * another execution context. */
static volatile int done = 0;

/* lthread body: records 'a' in the emit trace and signals completion. */
static int run1(struct lthread *self) {
	test_emit('a');
	done++;
	return 0;
}

TEST_CASE("Launch simple lthread") {
	struct lthread lt;

	done = 0;

	lthread_init(&lt, run1);
	lthread_launch(&lt);

	/* Spin, waiting lthread finished */
	while(1) {
		if(done == 1)
			break;
		ksleep(0);  /* yield so the lthread can actually run */
	}

	lthread_join(&lt);

	test_assert_emitted("a");
}

/* lthread body that itself yields to the scheduler before finishing. */
static int run_resched(struct lthread *self) {
	ksleep(0);
	done = 1;
	return 0;
}

TEST_CASE("Call sched from lthread") {
	struct lthread lt;

	done = 0;

	lthread_init(&lt, run_resched);
	lthread_launch(&lt);

	/* Spin, waiting lthread finished */
	while(1) {
		if(done == 1)
			break;
		ksleep(0);
	}

	lthread_join(&lt);
}

/* Minimal lthread body: just count one completion. */
static int run2(struct lthread *self) {
	done++;
	return 0;
}

TEST_CASE("Create lthreads with different priorities") {
	struct lthread lts[LTHREAD_QUANTITY];

	done = 0;

	/* Each lthread gets a distinct priority (MAX, MAX-1, ...). */
	for(int i = 0; i < LTHREAD_QUANTITY; i++) {
		lthread_init(&lts[i], run2);
		test_assert_zero(
			schedee_priority_set(&lts[i].schedee, SCHED_PRIORITY_MAX - i)
		);
		lthread_launch(&lts[i]);
	}

	/* deleting occurs only after finishing */
	for(int i = 0; i < LTHREAD_QUANTITY; i++) {
		lthread_join(&lts[i]);
	}

	test_assert_equal(done, LTHREAD_QUANTITY);
}

/* lthread body: records 'b' in the emit trace and signals completion. */
static int run3(struct lthread *self) {
	test_emit('b');
	done++;
	return 0;
}

TEST_CASE("Test executing order") {
	struct lthread lt1, lt2;

	done = 0;

	/* lt1 emits 'a' at lower priority; lt2 emits 'b' at higher priority,
	 * so the expected trace is "ba". */
	lthread_init(&lt1, run1);
	schedee_priority_set(&lt1.schedee, SCHED_PRIORITY_MAX - 1);

	lthread_init(&lt2, run3);
	schedee_priority_set(&lt2.schedee, SCHED_PRIORITY_MAX);

	/* prevent scheduling to avoid executing one
	 * before adding another to runq */
	sched_lock();
	{
		lthread_launch(&lt1);
		lthread_launch(&lt2);
	}
	sched_unlock();

	lthread_join(&lt1);
	lthread_join(&lt2);

	test_assert_emitted("ba");
}

TEST_CASE("Test several launches") {
	struct lthread lt;

	done = 0;

	lthread_init(&lt, run1);
	schedee_priority_set(&lt.schedee, SCHED_PRIORITY_MAX);

	/* A finished lthread may be launched again; each run increments done. */
	lthread_launch(&lt);
	ksleep(0);

	lthread_launch(&lt);
	ksleep(0);

	test_assert_equal(2, done);

	lthread_join(&lt);
}
1,063
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Fienvillers","circ":"4ème circonscription","dpt":"Somme","inscrits":453,"abs":188,"votants":265,"blancs":20,"nuls":7,"exp":238,"res":[{"nuance":"FN","nom":"<NAME>","voix":121},{"nuance":"REM","nom":"<NAME>","voix":117}]}
109
378
<reponame>puneethps/tomee<gh_stars>100-1000
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.openejb.assembler.classic;

import junit.framework.TestCase;
import org.apache.openejb.config.ConfigurationFactory;
import org.apache.openejb.loader.SystemInstance;
import org.apache.openejb.spi.ContainerSystem;

import javax.sql.DataSource;
import javax.transaction.TransactionManager;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

/**
 * Verifies the behavioral difference between the container-managed and the
 * unmanaged JDBC data sources: inside a JTA transaction, managed connections
 * refuse auto-commit changes, while unmanaged connections keep plain JDBC
 * semantics.
 *
 * @version $Rev$ $Date$
 */
public class JdbcConfigTest extends TestCase {
    public void test() throws Exception {
        final ConfigurationFactory config = new ConfigurationFactory();
        final Assembler assembler = new Assembler();

        // System services
        assembler.createProxyFactory(config.configureService(ProxyFactoryInfo.class));
        assembler.createTransactionManager(config.configureService(TransactionServiceInfo.class));
        assembler.createSecurityService(config.configureService(SecurityServiceInfo.class));

        // managed JDBC
        assembler.createResource(config.configureService("Default JDBC Database", ResourceInfo.class));

        // unmanaged JDBC
        assembler.createResource(config.configureService("Default Unmanaged JDBC Database", ResourceInfo.class));

        // Both resources must be resolvable via JNDI once assembled.
        final ContainerSystem containerSystem = SystemInstance.get().getComponent(ContainerSystem.class);
        final DataSource managedDS = (DataSource) containerSystem.getJNDIContext().lookup("openejb/Resource/Default JDBC Database");
        assertNotNull("managedDS is null", managedDS);

        final DataSource unmanagedDS = (DataSource) containerSystem.getJNDIContext().lookup("openejb/Resource/Default Unmanaged JDBC Database");
        assertNotNull("unmanagedDS is null", unmanagedDS);

        // test without a transaction
        // NOTE: without a transaction all connections work as unmanaged
        verifyUnmanagedConnections(managedDS);
        verifyUnmanagedConnections(unmanagedDS);

        // test in the context of a transaction
        final TransactionManager transactionManager = SystemInstance.get().getComponent(TransactionManager.class);
        transactionManager.begin();
        try {
            verifyManagedConnections(managedDS);
            verifyUnmanagedConnections(unmanagedDS);
        } finally {
            // commit the transaction
            transactionManager.commit();
        }
    }

    /**
     * Asserts managed (transaction-enlisted) semantics: while a JTA
     * transaction is active, setAutoCommit(true) must be rejected with a
     * SQLException, but ordinary statement use must still work.
     */
    private void verifyManagedConnections(final DataSource dataSource) throws SQLException {
        final List<Connection> managedConnections = new ArrayList<>();
        try {
            // Several connections are held open simultaneously to exercise the pool.
            for (int i = 0; i < 4; i++) {
                final Connection connection = dataSource.getConnection();
                managedConnections.add(connection);

                try {
                    connection.setAutoCommit(true);
                    fail("expected connection.setAutoCommit(true) to throw an exception");
                } catch (final SQLException expected) {
                    // auto-commit changes are forbidden inside the managed transaction
                }

                try (Statement statement = connection.createStatement()) {
                    statement.getQueryTimeout();
                }
            }
        } finally {
            // Connections are always released, even when an assertion fails.
            for (final Connection connection : managedConnections) {
                close(connection);
            }
        }
    }

    /**
     * Asserts plain JDBC semantics: auto-commit starts true, can be toggled,
     * and commit() is callable by the application.
     */
    private void verifyUnmanagedConnections(final DataSource dataSource) throws SQLException {
        final List<Connection> unmanagedConnections = new ArrayList<>();
        try {
            for (int i = 0; i < 4; i++) {
                final Connection connection = dataSource.getConnection();
                unmanagedConnections.add(connection);

                assertTrue("Expected connection.getAutoCommit() to be true", connection.getAutoCommit());
                connection.setAutoCommit(true);

                try (Statement statement = connection.createStatement()) {
                    statement.getQueryTimeout();
                }

                connection.commit();
                connection.setAutoCommit(false);
            }
        } finally {
            for (final Connection connection : unmanagedConnections) {
                close(connection);
            }
        }
    }

    /** Quietly closes a connection; null-safe, swallows close failures. */
    private static void close(final Connection connection) {
        if (connection == null) return;

        try {
            connection.close();
        } catch (final SQLException e) {
            // best-effort cleanup: a failed close must not mask the test result
        }
    }
}
1,938
324
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jclouds.ec2.util;

import static org.jclouds.ec2.domain.Tag.ResourceType.IMAGE;
import static org.jclouds.ec2.domain.Tag.ResourceType.INSTANCE;
import static org.testng.Assert.assertEquals;

import org.jclouds.ec2.domain.Tag;
import org.testng.annotations.Test;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;

/**
 * Unit tests for the {@link Tags} helper functions: key/value extraction
 * and grouping of EC2 tags into a per-resource map.
 */
@Test
public class TagsTest {
   // A tag on resource "1" with both a key and a value.
   Tag resourceTag1 = Tag.builder().resourceType(IMAGE).resourceId("1").key("key").value("value").build();

   public void testValueFunction() {
      assertEquals(Tags.valueFunction().apply(resourceTag1), "value");
   }

   public void testKeyFunction() {
      assertEquals(Tags.keyFunction().apply(resourceTag1), "key");
   }

   Tag resourceTag2 = Tag.builder().resourceType(IMAGE).resourceId("1").key("foo").value("bar").build();
   // A tag with no value set; expected to surface as "" in the grouped map.
   Tag resource2Tag1 = Tag.builder().resourceType(INSTANCE).resourceId("2").key("absent").build();
   Tag resource2Tag2 = Tag.builder().resourceType(INSTANCE).resourceId("2").key("hello").value("world").build();

   public void testResourceToTagsAsMap() {
      // Tags are grouped by resourceId; a valueless tag maps to empty string.
      assertEquals(
            Tags.resourceToTagsAsMap(ImmutableSet.of(resourceTag1, resourceTag2, resource2Tag1, resource2Tag2)),
            ImmutableMap.of("1", ImmutableMap.of("key", "value", "foo", "bar"), "2",
                  ImmutableMap.of("absent", "", "hello", "world")));
   }
}
699
448
<filename>Test/qbss_benchmark/liblfds711_modulo/inc/liblfds711.h
#ifndef LIBLFDS711_H

/***** defines *****/
#define LIBLFDS711_H

/* Umbrella header for liblfds 7.1.1: pulls in the porting abstraction
 * layers and every lock-free data structure header in dependency order.
 * Include this single header to use the library. */

/***** pragmas on *****/
#pragma warning( push )
#pragma warning( disable : 4324 )                                                 // TRD : 4324 disables MSVC warnings for structure alignment padding due to alignment specifiers
#pragma prefast( disable : 28113 28182 28183, "blah" )

/***** includes *****/
/* Porting layers first: compiler, OS, then processor abstractions. */
#include "liblfds711/lfds711_porting_abstraction_layer_compiler.h"
#include "liblfds711/lfds711_porting_abstraction_layer_operating_system.h"
#include "liblfds711/lfds711_porting_abstraction_layer_processor.h"

#include "liblfds711/lfds711_prng.h"                                              // TRD : misc requires prng
#include "liblfds711/lfds711_misc.h"                                              // TRD : everything after depends on misc

#include "liblfds711/lfds711_btree_addonly_unbalanced.h"                          // TRD : hash_addonly depends on btree_addonly_unbalanced
#include "liblfds711/lfds711_freelist.h"
#include "liblfds711/lfds711_hash_addonly.h"
#include "liblfds711/lfds711_list_addonly_singlylinked_ordered.h"
#include "liblfds711/lfds711_list_addonly_singlylinked_unordered.h"
#include "liblfds711/lfds711_queue_bounded_manyproducer_manyconsumer.h"
#include "liblfds711/lfds711_queue_bounded_singleproducer_singleconsumer.h"
#include "liblfds711/lfds711_queue_unbounded_manyproducer_manyconsumer.h"
#include "liblfds711/lfds711_ringbuffer.h"
#include "liblfds711/lfds711_stack.h"

/***** pragmas off *****/
#pragma warning( pop )

#endif
729
4,772
<reponame>fjacobs/spring-data-examples
package example.repo;

import example.model.Customer834;

import java.util.List;

import org.springframework.data.repository.CrudRepository;

/**
 * Spring Data CRUD repository for {@link Customer834} entities keyed by
 * {@link Long} ids. Spring Data generates the implementation at runtime.
 */
public interface Customer834Repository extends CrudRepository<Customer834, Long> {

	/**
	 * Finds all customers with the given last name.
	 * Derived query: implemented by Spring Data from the method name.
	 *
	 * @param lastName exact last name to match
	 * @return matching customers; empty list when none match
	 */
	List<Customer834> findByLastName(String lastName);
}
103
364
package com.jslsolucoes.nginx.admin.agent.client.api.impl.ping;

import java.util.concurrent.ScheduledExecutorService;

import com.jslsolucoes.nginx.admin.agent.client.api.NginxAgentClientApiBuilder;

/**
 * Fluent builder for {@link NginxPing}. Collects the executor, endpoint and
 * authorization key, then hands them to the {@link NginxPing} constructor in
 * {@link #build()}. Obtain an instance via {@link #newBuilder()}.
 */
public class NginxPingBuilder implements NginxAgentClientApiBuilder {

	// Executor handed to NginxPing -- presumably used to schedule ping calls; verify in NginxPing.
	private ScheduledExecutorService scheduledExecutorService;
	// Agent endpoint address (URL or host) -- TODO confirm expected format against NginxPing.
	private String endpoint;
	// Credential forwarded to the agent for authorization -- assumed; confirm in NginxPing.
	private String authorizationKey;

	// Private: instances are created through newBuilder().
	private NginxPingBuilder() {

	}

	/** Builds a {@link NginxPing} from the values configured so far. */
	@Override
	public NginxPing build() {
		return new NginxPing(scheduledExecutorService, endpoint, authorizationKey);
	}

	/** Entry point: creates a fresh, unconfigured builder. */
	public static NginxPingBuilder newBuilder() {
		return new NginxPingBuilder();
	}

	/** Sets the executor service; returns this builder for chaining. */
	public NginxPingBuilder withScheduledExecutorService(ScheduledExecutorService scheduledExecutorService) {
		this.scheduledExecutorService = scheduledExecutorService;
		return this;
	}

	/** Sets the agent endpoint; returns this builder for chaining. */
	public NginxPingBuilder withEndpoint(String endpoint) {
		this.endpoint = endpoint;
		return this;
	}

	/** Sets the authorization key; returns this builder for chaining. */
	public NginxPingBuilder withAuthorizationKey(String authorizationKey) {
		this.authorizationKey = authorizationKey;
		return this;
	}
}
319
14,668
<filename>ios/net/url_test_util.cc // Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ios/net/url_test_util.h" #include "url/gurl.h" namespace net { std::string GetContentAndFragmentForUrl(const GURL& url) { return url.GetContent() + (url.has_ref() ? "#" + url.ref() : ""); } } // namespace net
147
347
package org.ovirt.engine.core.bll;

import java.util.List;

import javax.enterprise.inject.Typed;

import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.backendcompat.CommandExecutionStatus;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A callback for commands that are executing their child commands serially. Note that this callback supports execution
 * of child commands until a failure or until successful completion.
 */
@Typed(SerialChildCommandsExecutionCallback.class)
public class SerialChildCommandsExecutionCallback extends ChildCommandsCallbackBase {

    private static final Logger log = LoggerFactory.getLogger(SerialChildCommandsExecutionCallback.class);

    /**
     * Invoked when the current batch of child commands has ended.
     * Flow:
     * <ul>
     *   <li>If the parent executed and the children succeeded (or their failure is
     *       ignorable), asks the command to perform its next serial operation;
     *       returns early (without setting an end status) while more work remains.</li>
     *   <li>If {@code performNextOperation} throws, the failure handler is invoked
     *       and the run is marked failed.</li>
     *   <li>If children failed non-ignorably, the failure handler is invoked.</li>
     *   <li>In all terminal paths, delegates to
     *       {@code setCommandEndStatus} with the accumulated failure flag.</li>
     * </ul>
     */
    @Override
    protected void childCommandsExecutionEnded(CommandBase<?> command,
            boolean anyFailed,
            List<Guid> childCmdIds,
            CommandExecutionStatus status,
            int completedChildren) {
        Guid cmdId = command.getCommandId();
        if (status == CommandExecutionStatus.EXECUTED) {
            SerialChildExecutingCommand serialChildExecutingCommand = (SerialChildExecutingCommand) command;
            if (!anyFailed || serialChildExecutingCommand.ignoreChildCommandFailure()) {
                try {
                    // performNextOperation returns true while there are more
                    // serial steps to run; in that case do NOT end the command yet.
                    boolean endCommand = !serialChildExecutingCommand.performNextOperation(completedChildren);
                    if (!endCommand) {
                        return;
                    }
                } catch (Exception e) {
                    log.error("Command '{}' id: '{}' with children {} failed when attempting to perform the next operation, marking as '{}'",
                            command.getActionType(),
                            cmdId,
                            childCmdIds,
                            command.getCommandStatus());
                    log.error(e.getMessage(), e);
                    serialChildExecutingCommand.handleFailure();
                    // Treat the thrown exception as a child failure for end-status purposes.
                    anyFailed = true;
                }
            } else {
                serialChildExecutingCommand.handleFailure();
            }
        } else {
            log.info("Command '{}' id: '{}' execution didn't complete, not proceeding to perform the next operation",
                    command.getActionType(),
                    cmdId);
        }

        setCommandEndStatus(command, anyFailed, status, childCmdIds);
    }
}
1,047
1,342
<filename>app/src/main/java/com/google/audio/StreamingAudioEncoder.java /* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.audio; import static com.google.common.base.Preconditions.checkArgument; import android.media.MediaCodec; import android.media.MediaCodecInfo; import android.media.MediaCodecList; import android.media.MediaFormat; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import com.google.common.collect.ImmutableList; import com.google.common.flogger.FluentLogger; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; /** * Converts from uncompressed 16-bit PCM data to encoded data. * * <p>You may call the sequence (init, processAudioBytes, ..., processAudioBytes, flush, stop) * multiple times. * * <p>Note that AMR-WB encoding is mandatory for handheld devices and OggOpus is supported * regardless of device. */ // Based on examples from https://developer.android.com/reference/android/media/MediaCodec // and some reference tests: // https://android.googlesource.com/platform/cts/+/jb-mr2-release/tests/tests/media/src/android/media/cts/EncoderTest.java public class StreamingAudioEncoder { private static final FluentLogger logger = FluentLogger.forEnclosingClass(); private static final int BYTES_PER_SAMPLE = 2; // This is not meaningful for OggOpus, which does not rely on the AndroidSystemEncoder. 
private boolean useDeprecatedEncoder = false; /** State variables for basic control flow management. */ private boolean flushed; private boolean initialized = false; /** An exception for anything that goes wrong with the coder as a result of misuse. */ public static class EncoderException extends Exception { public EncoderException(String message) { super(message); } } /** Describes the general class of codecs. */ public enum CodecType { UNSPECIFIED, AMRWB, FLAC, OGG_OPUS, } CodecType codecType = CodecType.UNSPECIFIED; private StreamingAudioInternalEncoder impl; /** Creates an audio encoder. */ public StreamingAudioEncoder(boolean useDeprecatedEncoder) { this.useDeprecatedEncoder = useDeprecatedEncoder; } public StreamingAudioEncoder() { this(VERSION.SDK_INT <= VERSION_CODES.KITKAT_WATCH); } private interface StreamingAudioInternalEncoder { void init(int sampleRateHz, CodecAndBitrate codecAndBitrate, boolean useVbr) throws EncoderException, IOException; byte[] processAudioBytes(byte[] input, int offset, int length); byte[] flushAndStop(); } /** * Prepares a codec to stream. This may be called only if instance is uninitialized (prior to a * call to init() or after a call to stop()). * * @throws IOException if codec cannot be created. * @throws EncoderException if sample rate is not 16kHz or if no suitable encoder exists on device * for the requested format. */ public void init(int sampleRateHz, CodecAndBitrate codecAndBitrate, boolean allowVbr) throws EncoderException, IOException { codecType = lookupCodecType(codecAndBitrate); if (codecType == CodecType.OGG_OPUS) { impl = new OggOpusEncoder(); } else { impl = new AndroidSystemEncoder(useDeprecatedEncoder); } impl.init(sampleRateHz, codecAndBitrate, allowVbr); initialized = true; flushed = false; } /** * Encodes 16-bit PCM audio. This will not always return bytes and will block until the codec has * no output to offer. Must be called after init(). 
* * @param input array of audio samples formatted as raw bytes (i.e., two bytes per sample). buffer * may be of any size. * @param offset the offset of the first byte to process * @param length the number of bytes to process from input * @return bytes of compressed audio */ public byte[] processAudioBytes(byte[] input, int offset, int length) { if (!initialized) { throw new IllegalStateException("You forgot to call init()!"); } if (flushed) { throw new IllegalStateException("Cannot process more bytes after flushing."); } return impl.processAudioBytes(input, offset, length); } public byte[] processAudioBytes(byte[] input) { return processAudioBytes(input, 0, input.length); } /** Stop the codec. Call init() before using again. */ public byte[] flushAndStop() { if (!initialized) { throw new IllegalStateException("You forgot to call init()!"); } if (flushed) { throw new IllegalStateException("Already flushed. You must reinitialize."); } flushed = true; byte[] flushedBytes = impl.flushAndStop(); initialized = false; codecType = CodecType.UNSPECIFIED; return flushedBytes; } /** * Can be used to test if codec will work or not on a given device. This will always return the * same value no matter when you call it. */ public static boolean isEncoderSupported(CodecAndBitrate encoderInfo) { CodecType type = lookupCodecType(encoderInfo); if (type == CodecType.OGG_OPUS) { // We support Opus directly via the OggOpusEncoder class. return true; } return searchAmongAndroidSupportedCodecs(getMime(type)) != null; } public CodecType getCodecType() { return codecType; } private static String getMime(CodecType codecAndBitrate) { // MediaFormat.MIMETYPE_AUDIO_AMR_WB requires SDK >= 21. switch (codecAndBitrate) { case AMRWB: return "audio/amr-wb"; case FLAC: return "audio/flac"; case OGG_OPUS: // Not supported by android system, so we don't need a MIME. 
case UNSPECIFIED: return ""; } return ""; } private static CodecType lookupCodecType(CodecAndBitrate codecAndBitrate) { switch (codecAndBitrate) { case AMRWB_BITRATE_6KBPS: case AMRWB_BITRATE_8KBPS: case AMRWB_BITRATE_12KBPS: case AMRWB_BITRATE_14KBPS: case AMRWB_BITRATE_15KBPS: case AMRWB_BITRATE_18KBPS: case AMRWB_BITRATE_19KBPS: case AMRWB_BITRATE_23KBPS: case AMRWB_BITRATE_24KBPS: return CodecType.AMRWB; case FLAC: return CodecType.FLAC; case OGG_OPUS_BITRATE_12KBPS: case OGG_OPUS_BITRATE_16KBPS: case OGG_OPUS_BITRATE_24KBPS: case OGG_OPUS_BITRATE_32KBPS: case OGG_OPUS_BITRATE_64KBPS: case OGG_OPUS_BITRATE_96KBPS: case OGG_OPUS_BITRATE_128KBPS: return CodecType.OGG_OPUS; case UNDEFINED: return CodecType.UNSPECIFIED; } return CodecType.UNSPECIFIED; } /** * Searches for a codec that implements the requested format conversion. Android framework encoder * only. */ private static MediaCodecInfo searchAmongAndroidSupportedCodecs(String mimeType) { int numCodecs = MediaCodecList.getCodecCount(); for (int i = 0; i < numCodecs; i++) { MediaCodecInfo codecAndBitrate = MediaCodecList.getCodecInfoAt(i); if (!codecAndBitrate.isEncoder()) { continue; } String[] codecTypes = codecAndBitrate.getSupportedTypes(); for (int j = 0; j < codecTypes.length; j++) { if (codecTypes[j].equalsIgnoreCase(mimeType)) { return codecAndBitrate; } } } return null; } /** An encoder that relies on the Android framework's multimedia encoder. */ private static class AndroidSystemEncoder implements StreamingAudioInternalEncoder { // If we can't supply a buffer immediately, we wait until the next one, which is timed at the // microphone & block rate of the audio supplier. Waiting less than that time and getting // samples // before the next input buffer would reduce latency. private static final long WAIT_TIME_MICROSECONDS = 1000; // Joda doesn't support microseconds. /** * Notes when the codec formatting change has occurred. This should happen only once at the * start of streaming. 
Otherwise, there is an error. */ private boolean formatChangeReportedOnce; private MediaCodec codec; private boolean useDeprecatedEncoder = false; private CodecType codecType; private int sampleRateHz; /** Prevents trying to flush multiple times. */ private boolean successfullyFlushed; /** Keeps track of whether the header was injected into the stream. */ private boolean addedHeader; /** * The number of samples that are passed to the underlying codec at once. It's not clear that * one value for this will work better than any other, but powers of two are usually fast, and a * larger CHUNK_SIZE_SAMPLES both reduces the number of buffers we have to wait for and doesn't * prevent sending smaller blocks of samples. */ private static final int CHUNK_SIZE_SAMPLES = 2048; private static final int CHUNK_SIZE_BYTES = BYTES_PER_SAMPLE * CHUNK_SIZE_SAMPLES; // Used only on very old SDKs (pre VERSION_CODES.KITKAT_WATCH). private ByteBuffer[] inputBuffersPreKitKat; private ByteBuffer[] outputBuffersPreKitKat; /** Creates an audio encoder. */ public AndroidSystemEncoder(boolean useDeprecatedEncoder) { this.useDeprecatedEncoder = useDeprecatedEncoder; this.codecType = CodecType.UNSPECIFIED; } // Note that VBR is not currently supported for the AndroidStreamingEncoder. 
@Override public void init(int sampleRateHz, CodecAndBitrate codecAndBitrate, boolean allowVbr) throws EncoderException, IOException { codecType = lookupCodecType(codecAndBitrate); if (codecType == CodecType.UNSPECIFIED || codecType == CodecType.OGG_OPUS) { throw new EncoderException("Codec not set properly."); } if (codecType == CodecType.AMRWB && sampleRateHz != 16000) { throw new EncoderException("AMR-WB encoder requires a sample rate of 16kHz."); } MediaCodecInfo codecInfo = searchAmongAndroidSupportedCodecs(getMime(codecType)); if (codecInfo == null) { throw new EncoderException("Encoder not found."); } this.codec = MediaCodec.createByCodecName(codecInfo.getName()); MediaFormat format = getMediaFormat(codecAndBitrate, sampleRateHz); codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); codec.start(); initBuffers(); addedHeader = false; successfullyFlushed = false; formatChangeReportedOnce = false; } @Override public byte[] processAudioBytes(byte[] input, int offset, int length) { ByteArrayOutputStream outputBytes = new ByteArrayOutputStream(); if (!addedHeader) { try { outputBytes.write(getHeaderBytes()); } catch (IOException e) { logger.atSevere().log("Unable to write bytes into buffer!"); } addedHeader = true; } int startByte = 0; while (startByte < length) { int thisChunkSizeBytes = Math.min(CHUNK_SIZE_BYTES, length - startByte); processAudioBytesInternal( input, offset + startByte, thisChunkSizeBytes, false, outputBytes); startByte += thisChunkSizeBytes; } return outputBytes.toByteArray(); } @Override public byte[] flushAndStop() { ByteArrayOutputStream outputBytes = new ByteArrayOutputStream(); try { processAudioBytesInternal(null, 0, 0, true, outputBytes); // Flush! codec.stop(); } catch (MediaCodec.CodecException e) { logger.atSevere().log("Something went wrong in the underlying codec!"); } codec.release(); return outputBytes.toByteArray(); } // length must be less than or equal to CHUNK_SIZE_BYTES. 
private void processAudioBytesInternal( byte[] input, int offset, int length, boolean flush, ByteArrayOutputStream outputBytes) { checkArgument( length <= CHUNK_SIZE_BYTES, "length must be less than or equal to CHUNK_SIZE_BYTES!"); boolean processedInput = false; // There are a limited number of buffers allocated in the codec. As long as we're not // holding on to them, they should always be available. Sometimes all buffers will be occupied // by the output and we need to process them before pushing input. Sometimes multiple output // buffers will be available at once. Append them together and return. It is common for // outputBytes to not receive any samples upon returning. MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); // Loop until input is processed and outputs are unavailable. while (!processedInput || flush) { if (!processedInput) { if (flush && successfullyFlushed) { throw new IllegalStateException("Already flushed!"); } // Push the input only once. int inputBufferIndex = codec.dequeueInputBuffer(WAIT_TIME_MICROSECONDS); if (inputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) { if (flush) { // Signal that the input stream is complete. codec.queueInputBuffer( inputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); successfullyFlushed = true; } else { // Push audio data into the codec. ByteBuffer inputBuffer = getInputBuffer(inputBufferIndex); inputBuffer.put(input, offset, length); codec.queueInputBuffer(inputBufferIndex, 0, length, 0, 0); } processedInput = true; } } // See if outputs are available. int outputBufferIndex = codec.dequeueOutputBuffer(bufferInfo, WAIT_TIME_MICROSECONDS); if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { // There will not be an output buffer for every input buffer. } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { // Shouldn't happen after the very first output. 
if (formatChangeReportedOnce) { throw new IllegalStateException("The codec format was unexpectedly changed."); } formatChangeReportedOnce = true; } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { // Shouldn't happen for SDK > 21. updateOutputBuffers(); } else { // Get an output buffer and add it to the stream. ByteBuffer outputBuffer = getOutputBuffer(outputBufferIndex); byte[] outData = new byte[bufferInfo.size]; outputBuffer.get(outData); codec.releaseOutputBuffer(outputBufferIndex, false); try { outputBytes.write(outData); } catch (IOException e) { logger.atSevere().log("Unable to write bytes into buffer!"); } } boolean processedAllOutput = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0; if (processedAllOutput) { if (!processedInput) { throw new IllegalStateException("Didn't process input yet."); } break; } } } /** Configure the codec at a specified bitrate for a fixed sample block size. */ private static MediaFormat getMediaFormat(CodecAndBitrate codecAndBitrate, int sampleRateHz) { MediaFormat format = new MediaFormat(); CodecType codecType = lookupCodecType(codecAndBitrate); format.setString(MediaFormat.KEY_MIME, getMime(codecType)); format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRateHz); format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1); format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, BYTES_PER_SAMPLE * CHUNK_SIZE_SAMPLES); if (codecType != CodecType.FLAC) { // FLAC is lossless, we can't request a bitrate. format.setInteger(MediaFormat.KEY_BIT_RATE, codecAndBitrate.getNumber()); } return format; } /** The data does not include a header. Some applications will require one anyhow. */ private byte[] getHeaderBytes() { switch (this.codecType) { case AMRWB: String amrWbHeader = "#!AMR-WB\n"; return amrWbHeader.getBytes(); case FLAC: byte[] noHeader = new byte[0]; return noHeader; case OGG_OPUS: throw new IllegalStateException("Should never happen! 
Use OggOpusEncoder instead."); case UNSPECIFIED: throw new IllegalStateException("Trying to make header for unspecified codec!"); } return null; } // The following methods are used to resolve differences between SDK versions. private void initBuffers() { if (useDeprecatedEncoder) { inputBuffersPreKitKat = codec.getInputBuffers(); outputBuffersPreKitKat = codec.getOutputBuffers(); } } private ByteBuffer getInputBuffer(int index) { if (useDeprecatedEncoder) { return inputBuffersPreKitKat[index]; } else { return codec.getInputBuffer(index); } } private ByteBuffer getOutputBuffer(int index) { if (useDeprecatedEncoder) { return outputBuffersPreKitKat[index]; } else { return codec.getOutputBuffer(index); } } private void updateOutputBuffers() { if (useDeprecatedEncoder) { outputBuffersPreKitKat = codec.getOutputBuffers(); } } } private static class OggOpusEncoder implements StreamingAudioInternalEncoder { // This is a pointer to the native object that we're working with. Zero when unallocated. private long instance = 0; ImmutableList<Integer> validSampleRates = ImmutableList.of(8000, 12000, 16000, 24000, 48000); public OggOpusEncoder() {} @Override public void init(int sampleRateHz, CodecAndBitrate codecAndBitrate, boolean allowVbr) throws EncoderException { if (instance != 0) { flushAndStop(); } CodecType codecType = lookupCodecType(codecAndBitrate); if (codecType != CodecType.OGG_OPUS) { throw new RuntimeException("Made OggOpusEncoder for non OGG_OPUS encoding type."); } if (!validSampleRates.contains(sampleRateHz)) { throw new EncoderException( "Opus encoder requires a sample rate of 8kHz, 12kHz, 16kHz, 24kHz, or 48kHz."); } this.instance = init(1 /* Mono audio. 
*/, codecAndBitrate.getNumber(), sampleRateHz, allowVbr); } private native long init(int channels, int bitrate, int sampleRateHz, boolean allowVbr); @Override public byte[] processAudioBytes(byte[] bytes, int offset, int length) { return processAudioBytes(instance, bytes, offset, length); } private native byte[] processAudioBytes(long instance, byte[] samples, int offset, int length); /** * Complete the input stream, return any remaining bits of the output stream, and stop. * This should only be called once. Must be called after init(). * * @return bytes of compressed audio */ @Override public byte[] flushAndStop() { if (instance != 0) { byte[] flushedBytes = flush(instance); free(instance); instance = 0; return flushedBytes; } else { logger.atSevere().log("stop() called multiple times or without call to init()!"); return new byte[0]; } } @Override protected void finalize() throws Throwable { super.finalize(); if (instance != 0) { logger.atSevere().log( "Native OggOpusEncoder resources weren't cleaned up. You must call stop()!"); free(instance); } } private native byte[] flush(long instance); private native void free(long instance); } static { System.loadLibrary("ogg_opus_encoder"); } }
7,378
429
<reponame>Dennisbonke/toaruos<filename>base/usr/include/strings.h<gh_stars>100-1000
#pragma once

#include <_cheader.h>
#include <stdlib.h>

_Begin_C_Header

/* Case-insensitive comparison of NUL-terminated strings (POSIX <strings.h>).
 * Returns <0, 0 or >0 like strcmp. */
extern int strcasecmp(const char *s1, const char *s2);

/* Same as strcasecmp, but compares at most n bytes. */
extern int strncasecmp(const char *s1, const char *s2, size_t n);

_End_C_Header
121
453
package com.waylau.spring.boot.elasticsearch.domain; import java.io.Serializable; import javax.xml.bind.annotation.XmlRootElement; import org.springframework.data.annotation.Id; import org.springframework.data.elasticsearch.annotations.Document; /** * Blog. * * @since 1.0.0 2017年3月5日 * @author <a href="https://waylau.com"><NAME></a> */ @Document(indexName = "blog", type = "blog", shards = 1, replicas = 0, refreshInterval = "-1") @XmlRootElement // MediaType 转为 XML public class Blog implements Serializable { private static final long serialVersionUID = 1L; @Id // 主键 private String id; // 用户的唯一标识 private String title; private String content; protected Blog() { // JPA 的规范要求无参构造函数;设为 protected 防止直接使用 } public Blog(String name, String content) { this.title = name; this.content = content; } public Blog(String id, String name, String content) { this.id = id; this.title = name; this.content = content; } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } @Override public String toString() { return String.format( "User[id=%d, title='%s', content='%s']", id, title, content); } }
604
30,023
"""Tests for button platform.""" from homeassistant.components import flux_led from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.flux_led.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import ( DEFAULT_ENTRY_TITLE, FLUX_DISCOVERY, IP_ADDRESS, MAC_ADDRESS, _mock_config_entry_for_bulb, _mocked_bulb, _mocked_switch, _patch_discovery, _patch_wifibulb, ) from tests.common import MockConfigEntry async def test_button_reboot(hass: HomeAssistant) -> None: """Test a smart plug can be rebooted.""" config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: IP_ADDRESS, CONF_NAME: DEFAULT_ENTRY_TITLE}, unique_id=MAC_ADDRESS, ) config_entry.add_to_hass(hass) switch = _mocked_switch() with _patch_discovery(), _patch_wifibulb(device=switch): await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}}) await hass.async_block_till_done() entity_id = "button.bulb_rgbcw_ddeeff_restart" assert hass.states.get(entity_id) await hass.services.async_call( BUTTON_DOMAIN, "press", {ATTR_ENTITY_ID: entity_id}, blocking=True ) switch.async_reboot.assert_called_once() async def test_button_unpair_remotes(hass: HomeAssistant) -> None: """Test that remotes can be unpaired.""" _mock_config_entry_for_bulb(hass) bulb = _mocked_bulb() bulb.discovery = FLUX_DISCOVERY with _patch_discovery(device=FLUX_DISCOVERY), _patch_wifibulb(device=bulb): await async_setup_component(hass, flux_led.DOMAIN, {flux_led.DOMAIN: {}}) await hass.async_block_till_done() entity_id = "button.bulb_rgbcw_ddeeff_unpair_remotes" assert hass.states.get(entity_id) await hass.services.async_call( BUTTON_DOMAIN, "press", {ATTR_ENTITY_ID: entity_id}, blocking=True ) bulb.async_unpair_remotes.assert_called_once()
876
2,151
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef IOS_CHROME_BROWSER_UI_CONTEXTUAL_SEARCH_TOUCH_TO_SEARCH_PERMISSIONS_MEDIATOR_H_ #define IOS_CHROME_BROWSER_UI_CONTEXTUAL_SEARCH_TOUCH_TO_SEARCH_PERMISSIONS_MEDIATOR_H_ #import <Foundation/Foundation.h> #import "url/gurl.h" namespace ios { class ChromeBrowserState; } namespace TouchToSearch { // Enum describing the possible state that a user's Touch-to-Search preference // is in: typedef NS_ENUM(NSInteger, TouchToSearchPreferenceState) { UNDECIDED = -1, // User has not set a preference. DISABLED, // User has disabled TTS. ENABLED // User has "opted in" and enabled TTS. }; } // namespace TouchToSearch @protocol TouchToSearchPermissionsChangeAudience<NSObject> @optional // Called synchronously when preferences are changed - (void)touchToSearchDidChangePreferenceState: (TouchToSearch::TouchToSearchPreferenceState)preferenceState; // Called (asynchronously) after some state has changed that might have affected // touch-to-search permissions. - (void)touchToSearchPermissionsUpdated; @end // An object for managing and vending permissions associated with the // Touch-to-Search feature. @interface TouchToSearchPermissionsMediator : NSObject // YES if the device supports Touch-to-Search (based on command line flags). The // return value will be the same over the lifetime of a Chrome process. + (BOOL)isTouchToSearchAvailableOnDevice; // Designated initializer. - (instancetype)initWithBrowserState:(ios::ChromeBrowserState*)browserState NS_DESIGNATED_INITIALIZER; - (instancetype)init NS_UNAVAILABLE; // Current preference state. Assigning to this property will update the internal // representation backing this state. @property(nonatomic, assign) TouchToSearch::TouchToSearchPreferenceState preferenceState; // Current audience object. 
@property(nonatomic, weak) NSObject<TouchToSearchPermissionsChangeAudience>* audience; // YES if, given the current permissions state, touch-to-search can be enabled. - (BOOL)canEnable; // YES if, given the current permissions state, surrounding text in |URL| may be // extracted. - (BOOL)canExtractTapContextForURL:(const GURL&)url; // YES if it is permitted to send page URLs to the contextual search service. - (BOOL)canSendPageURLs; // YES if search results pages can be preloaded. - (BOOL)canPreloadSearchResults; @end #endif // IOS_CHROME_BROWSER_UI_CONTEXTUAL_SEARCH_TOUCH_TO_SEARCH_PERMISSIONS_MEDIATOR_H_
817
1,350
<gh_stars>1000+ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.communication.phonenumbers.models; import com.azure.core.annotation.Immutable; import com.fasterxml.jackson.annotation.JsonProperty; import java.time.OffsetDateTime; /** The PurchasedPhoneNumber model. */ @Immutable public final class PurchasedPhoneNumber { /* * The id of the phone number, e.g. 11234567890. */ @JsonProperty(value = "id", required = true, access = JsonProperty.Access.WRITE_ONLY) private String id; /* * String of the E.164 format of the phone number, e.g. +11234567890. */ @JsonProperty(value = "phoneNumber", required = true, access = JsonProperty.Access.WRITE_ONLY) private String phoneNumber; /* * The ISO 3166-2 code of the phone number's country, e.g. US. */ @JsonProperty(value = "countryCode", required = true, access = JsonProperty.Access.WRITE_ONLY) private String countryCode; /* * The phone number's type, e.g. Geographic, TollFree. */ @JsonProperty(value = "phoneNumberType", required = true, access = JsonProperty.Access.WRITE_ONLY) private PhoneNumberType phoneNumberType; /* * Capabilities of a phone number. */ @JsonProperty(value = "capabilities", required = true, access = JsonProperty.Access.WRITE_ONLY) private PhoneNumberCapabilities capabilities; /* * The assignment type of the phone number. A phone number can be assigned * to a person, or to an application. */ @JsonProperty(value = "assignmentType", required = true, access = JsonProperty.Access.WRITE_ONLY) private PhoneNumberAssignmentType assignmentType; /* * The date and time that the phone number was purchased. */ @JsonProperty(value = "purchaseDate", required = true, access = JsonProperty.Access.WRITE_ONLY) private OffsetDateTime purchaseDate; /* * The incurred cost for a single phone number. 
*/ @JsonProperty(value = "cost", required = true, access = JsonProperty.Access.WRITE_ONLY) private PhoneNumberCost cost; /** * Get the id property: The id of the phone number, e.g. 11234567890. * * @return the id value. */ public String getId() { return this.id; } /** * Get the phoneNumber property: String of the E.164 format of the phone number, e.g. +11234567890. * * @return the phoneNumber value. */ public String getPhoneNumber() { return this.phoneNumber; } /** * Get the countryCode property: The ISO 3166-2 code of the phone number's country, e.g. US. * * @return the countryCode value. */ public String getCountryCode() { return this.countryCode; } /** * Get the phoneNumberType property: The phone number's type, e.g. Geographic, TollFree. * * @return the phoneNumberType value. */ public PhoneNumberType getPhoneNumberType() { return this.phoneNumberType; } /** * Get the capabilities property: Capabilities of a phone number. * * @return the capabilities value. */ public PhoneNumberCapabilities getCapabilities() { return this.capabilities; } /** * Get the assignmentType property: The assignment type of the phone number. A phone number can be assigned to a * person, or to an application. * * @return the assignmentType value. */ public PhoneNumberAssignmentType getAssignmentType() { return this.assignmentType; } /** * Get the purchaseDate property: The date and time that the phone number was purchased. * * @return the purchaseDate value. */ public OffsetDateTime getPurchaseDate() { return this.purchaseDate; } /** * Get the cost property: The incurred cost for a single phone number. * * @return the cost value. */ public PhoneNumberCost getCost() { return this.cost; } }
1,463
337
<reponame>acheong08/dsniff<filename>tcp_raw.c /* * tcp_raw.c * * Copyright (c) 2000 <NAME> <<EMAIL>> * * $Id: tcp_raw.c,v 1.10 2001/03/15 08:33:04 dugsong Exp $ */ #include "config.h" #include <sys/types.h> #include <sys/uio.h> #include <netinet/in.h> #include <time.h> #include <err.h> #include <libnet.h> #include "options.h" #include "tcp_raw.h" struct tha { in_addr_t src; in_addr_t dst; u_short port; }; struct tcp_seg { u_int32_t seq; u_char *data; int len; }; struct tcp_conn { struct tha tha; time_t mtime; struct tcp_seg *seg; int segcnt; int segmax; struct tcp_conn *next; }; #define TCP_TIMEOUT 60 #define TCP_HASHSIZE 919 static struct tcp_conn conntab[TCP_HASHSIZE]; static int tcp_seg_compare(const void *a, const void *b) { struct tcp_seg *sa, *sb; sa = (struct tcp_seg *) a; sb = (struct tcp_seg *) b; if (sa->seq < sb->seq) return (-1); else if (sa->seq > sb->seq) return (1); else return (0); } static void tcp_raw_delete(struct tcp_conn *conn) { struct tcp_conn *hold; int i; if (conn->next != NULL) { for (i = 0; i < conn->segcnt; i++) { if (conn->seg[i].data) free(conn->seg[i].data); } free(conn->seg); conn->seg = NULL; conn->segcnt = conn->segmax = 0; if (conn->next->next != NULL) { hold = conn->next; *conn = *conn->next; free(hold); } else { free(conn->next); conn->next = NULL; } } } static struct iovec * tcp_raw_reassemble(struct tcp_conn *conn, int minlen) { struct iovec *iov; int i, len; len = 0; for (i = 0; i < conn->segcnt; i++) len += conn->seg[i].len; if (len < minlen) return (NULL); if ((iov = (struct iovec *) malloc(sizeof(*iov))) == NULL) err(1, "tcp_raw_reassemble: malloc"); if ((iov->iov_base = (u_char *) malloc(len)) == NULL) err(1, "tcp_raw_reassemble: malloc"); iov->iov_len = 0; qsort(conn->seg, conn->segcnt, sizeof(*conn->seg), tcp_seg_compare); for (i = 0; i < conn->segcnt; i++) { len = conn->seg[i].len; memcpy(iov->iov_base + iov->iov_len, conn->seg[i].data, len); iov->iov_len += len; } return (iov); } struct iovec * tcp_raw_input(struct 
libnet_ipv4_hdr *ip, struct libnet_tcp_hdr *tcp, int len) { struct tha tha; struct tcp_conn *conn; struct tcp_seg seg; struct iovec *iov; u_short cksum; u_char *buf; int tcp_hl = tcp->th_off * 4; /* Verify TCP checksum. */ cksum = tcp->th_sum; libnet_do_checksum(NULL, (u_char *) ip, IPPROTO_TCP, len); if (cksum != tcp->th_sum) return (NULL); tha.src = ip->ip_src.s_addr; tha.dst = ip->ip_dst.s_addr; tha.port = ntohs(tcp->th_sport) << 16 | ntohs(tcp->th_dport); buf = (u_char *)tcp + tcp_hl; len -= tcp_hl; iov = NULL; /* Find half-duplex stream associated with this segment. */ for (conn = &conntab[tha.port % TCP_HASHSIZE]; conn->next != NULL; conn = conn->next) { if (memcmp((char *)&tha, (char *)&conn->tha, sizeof(tha)) == 0) break; } /* Process by TCP flags. */ if (conn->next == NULL) { if (tcp->th_flags & TH_SYN) { if (conn->next == NULL && (conn->next = (struct tcp_conn *) calloc(1, sizeof(*conn))) == NULL) { err(1, "tcp_raw_input: calloc"); } conn->tha = tha; if (conn->seg == NULL && (conn->seg = (struct tcp_seg *) malloc(sizeof(seg) * 128)) == NULL) { err(1, "tcp_raw_input: malloc"); } conn->segmax = 128; } } else if (tcp->th_flags & TH_FIN || tcp->th_flags & TH_RST) { iov = tcp_raw_reassemble(conn, 1); } else if (tcp->th_flags & TH_ACK && len > 0) { seg.seq = ntohl(tcp->th_seq); if (bsearch(&seg, conn->seg, conn->segcnt, sizeof(seg), tcp_seg_compare) == NULL) { if ((seg.data = (u_char *) malloc(len)) == NULL) err(1, "tcp_raw_input: malloc"); memcpy(seg.data, buf, len); seg.len = len; if (conn->segcnt == conn->segmax) { if ((conn->seg = (struct tcp_seg *) realloc(conn->seg, (conn->segmax * 2) * sizeof(seg))) == NULL) err(1, "tcp_raw_input: realloc"); conn->segmax *= 2; } conn->seg[conn->segcnt++] = seg; iov = tcp_raw_reassemble(conn, Opt_snaplen); } } conn->mtime = time(NULL); /* If we successfully reassembled the stream, delete its entry. 
*/ if (iov != NULL) { tcp_raw_delete(conn); } return (iov); } void tcp_raw_timeout(int timeout, tcp_raw_callback_t callback) { struct tcp_conn *conn; struct iovec *iov; time_t now; int i; now = time(NULL); for (i = 0; i < TCP_HASHSIZE; i++) { for (conn = &conntab[i]; conn != NULL && conn->next != NULL; conn = conn->next) { if (now - conn->mtime > timeout) { iov = tcp_raw_reassemble(conn, 1); if (iov != NULL) { callback(conn->tha.src, conn->tha.dst, conn->tha.port >> 16, conn->tha.port & 0xffff, iov->iov_base, iov->iov_len); free(iov->iov_base); free(iov); } tcp_raw_delete(conn); } } } }
2,401
403
<reponame>sdroege/node-gtk #pragma once #include <nan.h> #include <node.h> #include <girepository.h> #include <glib.h> #include <cairo.h> namespace GNodeJS { namespace Cairo { class Glyph: public Nan::ObjectWrap { public: static Nan::Persistent<v8::FunctionTemplate> constructorTemplate; static Nan::Persistent<v8::Function> constructor; static void Initialize(Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target); static NAN_METHOD(New); static NAN_GETTER(GetLength); static NAN_INDEX_GETTER(IndexGetter); Glyph(cairo_glyph_t* data, int64_t length); ~Glyph(); cairo_glyph_t* _data; int64_t _length; }; }; // Cairo }; // GNodeJS
286
4,879
<reponame>vicpopov/omim package com.mapswithme.maps.purchase; public interface BillingRequest { void execute(); }
41
10,225
package io.quarkus.undertow.test; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; import javax.servlet.annotation.WebListener; @WebListener public class AnnotatedListener implements ServletContextListener { @Override public void contextInitialized(ServletContextEvent sce) { sce.getServletContext().setAttribute("annotated listener", true); } }
130
1,269
package com.chiclaim.dagger.sample; public interface Callback<T> { void onSuccess(T data); void onFailed(String errorMsg); }
50
5,279
<filename>runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/counters/CounterTest.java<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.worker.counters; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import org.apache.beam.runners.dataflow.worker.NameContextsForTests; import org.apache.beam.runners.dataflow.worker.counters.Counter.CommitState; import org.apache.beam.runners.dataflow.worker.counters.CounterFactory.CounterDistribution; import org.apache.beam.runners.dataflow.worker.counters.CounterFactory.CounterMean; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Unit tests for the {@link Counter} API. 
*/ @RunWith(JUnit4.class) public class CounterTest { private final CounterName name = CounterName.named("undertest"); private final CounterName name2 = CounterName.named("othername"); private final CounterFactory counters = new CounterFactory(); @Rule public ExpectedException thrown = ExpectedException.none(); private static final double EPSILON = 0.00000000001; @Test public void testCompatibility() { // Equal counters are compatible, of all kinds. assertEquals(counters.longSum(name), counters.longSum(name)); assertEquals(counters.intSum(name), counters.intSum(name)); assertEquals(counters.doubleSum(name), counters.doubleSum(name)); assertEquals(counters.booleanOr(name), counters.booleanOr(name)); // The name, kind, and type of the counter must match. assertFalse(counters.longSum(name).equals(counters.longSum(name2))); assertFalse(counters.longSum(name).equals(counters.longMax(name))); assertFalse(counters.longSum(name).equals(counters.intSum(name))); // The value of the counters are ignored. assertEquals(counters.longSum(name).addValue(666L), counters.longSum(name).addValue(42L)); } // Tests for SUM. 
@Test public void testSumLong() { Counter<Long, Long> c = counters.longSum(name); assertEquals(0L, (long) c.getAggregate()); c.addValue(13L).addValue(42L).addValue(0L); assertEquals(13L + 42L, (long) c.getAggregate()); c.getAndReset(); c.addValue(120L).addValue(17L).addValue(37L); assertEquals(120L + 17L + 37L, (long) c.getAggregate()); c.addValue(15L).addValue(42L); assertEquals(120L + 17L + 37L + 15L + 42L, (long) c.getAggregate()); c.getAndReset(); c.addValue(100L).addValue(17L).addValue(49L); assertEquals(100L + 17L + 49L, (long) c.getAggregate()); assertEquals( "getAndReset should return previous value", 100L + 17L + 49L, (long) c.getAndReset()); assertEquals("getAndReset should have reset value", 0, (long) c.getAggregate()); } @Test public void testSumDouble() { Counter<Double, Double> c = counters.doubleSum(name); c.addValue(Math.E).addValue(Math.PI).addValue(0.0); assertEquals(Math.E + Math.PI, c.getAggregate(), EPSILON); c.getAndReset(); c.addValue(Math.sqrt(2)).addValue(2 * Math.PI).addValue(3 * Math.E); assertEquals(Math.sqrt(2) + 2 * Math.PI + 3 * Math.E, c.getAggregate(), EPSILON); assertEquals( "getAndReset should return previous value", Math.sqrt(2) + 2 * Math.PI + 3 * Math.E, c.getAndReset(), EPSILON); assertEquals("getAndReset should have reset value", 0.0, c.getAggregate(), EPSILON); } // Tests for MAX. 
@Test public void testMaxLong() { Counter<Long, Long> c = counters.longMax(name); assertEquals(Long.MIN_VALUE, (long) c.getAggregate()); c.addValue(13L).addValue(42L).addValue(0L); assertEquals(42L, (long) c.getAggregate()); c.getAndReset(); c.addValue(120L).addValue(17L).addValue(37L); assertEquals(120L, (long) c.getAggregate()); c.addValue(15L).addValue(42L); assertEquals(120L, (long) c.getAggregate()); c.addValue(137L); assertEquals(137L, (long) c.getAggregate()); c.getAndReset(); c.addValue(100L).addValue(17L).addValue(49L); assertEquals(100L, (long) c.getAggregate()); assertEquals("getAndReset should return previous value", 100L, (long) c.getAndReset()); assertEquals("getAndReset should have reset value", Long.MIN_VALUE, (long) c.getAggregate()); } @Test public void testMaxDouble() { Counter<Double, Double> c = counters.doubleMax(name); assertEquals(Double.NEGATIVE_INFINITY, c.getAggregate(), EPSILON); c.addValue(Math.E).addValue(Math.PI).addValue(Double.NEGATIVE_INFINITY); assertEquals(Math.PI, c.getAggregate(), EPSILON); c.getAndReset(); c.addValue(Math.sqrt(12345)).addValue(2 * Math.PI).addValue(3 * Math.E); assertEquals(Math.sqrt(12345), c.getAggregate(), EPSILON); assertEquals( "getAndReset should return previous value", Math.sqrt(12345), c.getAndReset(), EPSILON); assertEquals(Double.NEGATIVE_INFINITY, c.getAggregate(), EPSILON); c.addValue(7 * Math.PI).addValue(5 * Math.E); assertEquals(7 * Math.PI, c.getAggregate(), EPSILON); c.getAndReset(); c.addValue(Math.sqrt(17)).addValue(171.0).addValue(49.0); assertEquals(171.0, c.getAggregate(), EPSILON); } // Tests for MIN. 
@Test public void testMinLong() { Counter<Long, Long> c = counters.longMin(name); assertEquals(Long.MAX_VALUE, (long) c.getAggregate()); c.addValue(13L).addValue(42L).addValue(Long.MAX_VALUE); assertEquals(13L, (long) c.getAggregate()); c.getAndReset(); c.addValue(120L).addValue(17L).addValue(37L); assertEquals(17L, (long) c.getAggregate()); assertEquals("getAndReset should return previous value", 17L, (long) c.getAndReset()); assertEquals( "getAndReset should have reset the value", Long.MAX_VALUE, (long) c.getAggregate()); c.addValue(42L).addValue(18L); assertEquals(18L, (long) c.getAggregate()); } @Test public void testMinDouble() { Counter<Double, Double> c = counters.doubleMin(name); assertEquals(Double.POSITIVE_INFINITY, c.getAggregate(), EPSILON); c.addValue(Math.E).addValue(Math.PI).addValue(Double.POSITIVE_INFINITY); assertEquals(Math.E, c.getAggregate(), EPSILON); c.getAndReset(); c.addValue(Math.sqrt(12345)).addValue(2 * Math.PI).addValue(3 * Math.E); assertEquals(2 * Math.PI, c.getAggregate(), EPSILON); assertEquals("getAndReset should return previous value", 2 * Math.PI, c.getAndReset(), EPSILON); assertEquals( "getAndReset should have reset the value", Double.POSITIVE_INFINITY, c.getAggregate(), EPSILON); c.getAndReset(); c.addValue(Math.sqrt(17)).addValue(171.0).addValue(0.0); assertEquals(0.0, c.getAggregate(), EPSILON); } // Tests for MEAN. 
private void assertMean(long s, long c, Counter<Long, CounterMean<Long>> cn) { CounterMean<Long> mean = cn.getAggregate(); assertEquals(s, mean.getAggregate().longValue()); assertEquals(c, mean.getCount()); } private void assertMean(double s, long c, Counter<Double, CounterMean<Double>> cn) { CounterMean<Double> mean = cn.getAggregate(); assertEquals(s, mean.getAggregate().doubleValue(), EPSILON); assertEquals(c, mean.getCount()); } @Test public void testMeanLong() { Counter<Long, CounterMean<Long>> c = counters.longMean(name); assertMean(0, 0, c); c.addValue(13L).addValue(42L).addValue(0L); assertMean(13 + 42 + 0, 3, c); c.getAndReset(); c.addValue(120L).addValue(17L).addValue(37L); assertMean(120 + 17 + 37, 3, c); CounterMean<Long> mean = c.getAndReset(); assertEquals( "getAndReset should return previous value", 120 + 17 + 37, (long) mean.getAggregate()); assertEquals("getAndReset should return previous count", 3, mean.getCount()); // getAndReset should reset the value assertMean(0, 0, c); c.getAndReset(); c.addValue(33L).addValue(33L).addValue(34L).addValue(17L).addValue(49L); assertMean(166, 5, c); } @Test public void testMeanDouble() { Counter<Double, CounterMean<Double>> c = counters.doubleMean(name); double expTotal = 0.0; long expCountTotal = 0; assertMean(expTotal, expCountTotal, c); c.addValue(Math.E).addValue(Math.PI).addValue(0.0); expTotal += Math.E + Math.PI; expCountTotal += 3; assertMean(expTotal, expCountTotal, c); c.getAndReset(); c.addValue(Math.sqrt(2)).addValue(2 * Math.PI).addValue(3 * Math.E); expTotal = Math.sqrt(2) + 2 * Math.PI + 3 * Math.E; assertMean(expTotal, expCountTotal, c); CounterMean<Double> mean = c.getAndReset(); assertEquals( "getAndReset should return previous value", expTotal, (double) mean.getAggregate(), EPSILON); assertEquals("getAndReset should return previous count", expCountTotal, mean.getCount()); assertMean(0, 0, c); c.getAndReset(); c.addValue(Math.sqrt(17)).addValue(0.0).addValue(0.0).addValue(17.0).addValue(49.0); 
expTotal = Math.sqrt(17.0) + 17.0 + 49.0; expCountTotal = 5; assertMean(expTotal, expCountTotal, c); } @Test public void testDistribution() { Counter<Long, CounterDistribution> c = counters.distribution(name); CounterDistribution expected = CounterDistribution.builder() .minMax(Long.MAX_VALUE, 0L) .count(0L) .sum(0L) .sumOfSquares(0f) .buckets(0, new ArrayList<>()) .build(); assertEquals(expected, c.getAggregate()); c.addValue(2L).addValue(10L).addValue(4L); expected = CounterDistribution.builder() .minMax(2L, 10L) .count(3) .sum(2L + 10L + 4L) .sumOfSquares(4L + 100L + 16L) .buckets(2, Lists.newArrayList(2L, 0L, 1L)) .build(); assertEquals(expected, c.getAggregate()); c.getAndReset(); c.addValue(0L).addValue(0L); expected = CounterDistribution.builder() .minMax(0L, 0L) .count(2L) .sum(0L) .sumOfSquares(0f) .buckets(0, Lists.newArrayList(2L)) .build(); assertEquals(expected, c.getAggregate()); CounterDistribution distribution = c.getAndReset(); assertEquals("getAndReset should return previous value", expected, distribution); expected = CounterDistribution.builder() .minMax(Long.MAX_VALUE, 0L) .count(0L) .sum(0L) .sumOfSquares(0f) .buckets(0, new ArrayList<>()) .build(); assertEquals(expected, c.getAggregate()); } @Test public void testBoolAnd() { Counter<Boolean, Boolean> c = counters.booleanAnd(name); assertTrue(c.getAggregate()); c.addValue(true); assertTrue(c.getAggregate()); c.addValue(false); assertFalse(c.getAggregate()); c.getAndReset(); c.addValue(true).addValue(true); assertTrue(c.getAggregate()); c.addValue(false); assertFalse(c.getAggregate()); assertFalse(c.getAndReset()); assertTrue(c.getAggregate()); c.addValue(false); assertFalse(c.getAggregate()); } @Test public void testBoolOr() { Counter<Boolean, Boolean> c = counters.booleanOr(name); assertFalse(c.getAggregate()); c.addValue(false); assertFalse(c.getAggregate()); c.addValue(true); assertTrue(c.getAggregate()); c.getAndReset(); c.addValue(false).addValue(false); assertFalse(c.getAggregate()); 
c.addValue(true); assertTrue(c.getAggregate()); assertTrue(c.getAndReset()); assertFalse(c.getAggregate()); c.addValue(true); assertTrue(c.getAggregate()); } @Test public void testDirtyBit() { verifyDirtyBit(counters.longSum(CounterName.named("long-sum")), 1L); verifyDirtyBit(counters.longMean(CounterName.named("long-mean")), 1L); verifyDirtyBit(counters.doubleSum(CounterName.named("double-sum")), 1.0); verifyDirtyBit(counters.doubleMean(CounterName.named("double-mean")), 1.0); verifyDirtyBit(counters.intSum(CounterName.named("int-sum")), 1); verifyDirtyBit(counters.intMean(CounterName.named("int-mean")), 1); verifyDirtyBit(counters.booleanAnd(CounterName.named("and")), true); } /** Verify dirty bit is set correctly through various Counter state transitions */ private <InputT> void verifyDirtyBit(Counter<InputT, ?> counter, InputT sampleValue) { String name = String.format("counter '%s'", counter.getName().name()); // Test counters are not dirty and are COMMITTED initially. assertFalse( String.format("%s should not be dirty on initialization.", name), counter.isDirty()); assertEquals( String.format("%s should not be COMMITTED on initialization.", name), CommitState.COMMITTED, counter.commitState.get()); // Test counters are dirty after mutating. counter.addValue(sampleValue); assertTrue(String.format("%s should be dirty after mutating.", name), counter.isDirty()); assertEquals( String.format("%s should have DIRTY state after mutating.", name), CommitState.DIRTY, counter.commitState.get()); // Test counters are dirty and are COMMITTING. assertTrue( String.format("Committing %s should succeed when in DIRTY state.", name), counter.committing()); assertTrue(String.format("%s should be dirty after committing.", name), counter.isDirty()); assertEquals( String.format("%s should have COMMITTING state after mutating.", name), CommitState.COMMITTING, counter.commitState.get()); // Test counters are dirty again after mutating. 
counter.addValue(sampleValue); assertFalse( String.format("Marking %s committed should succeed after mutating.", name), counter.committed()); assertTrue( String.format("%s should be dirty after marking committed.", name), counter.isDirty()); assertEquals( String.format("%s state should be DIRTY after marking committed.", name), CommitState.DIRTY, counter.commitState.get()); // Test counters are not dirty and are COMMITTED. assertTrue( String.format("Committing %s should succeed when in DIRTY state.", name), counter.committing()); assertTrue( String.format("Marking %s committed should succeed after committing.", name), counter.committed()); assertFalse( String.format("%s should be dirty after being marked committed.", name), counter.isDirty()); assertEquals( String.format("%s should have COMMITTED state after marking committed.", name), CommitState.COMMITTED, counter.commitState.get()); } @Test public void testStructuredNames() { Counter<?, ?> unstructured = counters.intSum(name); Counter<?, ?> structuredOriginal = counters.intSum(name.withOriginalName(NameContextsForTests.nameContextForTest())); Counter<?, ?> structuredSystem = counters.intSum(name.withSystemName(NameContextsForTests.nameContextForTest())); Counter<?, ?> structuredCompatible = counters.intSum(name.withOriginalName(NameContextsForTests.nameContextForTest())); // unstructured is equal to nothing assertFalse(unstructured.equals(structuredOriginal)); assertFalse(unstructured.equals(structuredSystem)); assertFalse(unstructured.equals(structuredCompatible)); // structuredOriginal is only equal to structuredCompatible assertEquals(structuredOriginal, structuredCompatible); assertFalse(structuredOriginal.equals(structuredSystem)); // structuredSystem is equal to nothing assertFalse(structuredSystem.equals(structuredCompatible)); } }
6,420
318
package org.apache.maven.surefire.junit; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.surefire.api.filter.NonAbstractClassFilter; import org.apache.maven.surefire.common.junit3.JUnit3TestChecker; import org.apache.maven.surefire.api.util.ScannerFilter; /** * @author <NAME> */ public class PojoAndJUnit3Checker implements ScannerFilter { private final JUnit3TestChecker jUnit3TestChecker; private final NonAbstractClassFilter nonAbstractClassFilter = new NonAbstractClassFilter(); public PojoAndJUnit3Checker( JUnit3TestChecker jUnit3TestChecker ) { this.jUnit3TestChecker = jUnit3TestChecker; } @Override public boolean accept( Class testClass ) { return jUnit3TestChecker.accept( testClass ) || nonAbstractClassFilter.accept( testClass ) && isPojoTest( testClass ); } private boolean isPojoTest( Class<?> testClass ) { try { testClass.getConstructor(); return true; } catch ( Exception e ) { return false; } } }
633
524
<filename>core/tools/win32/system.win32.cpp #include "stdafx.h" #include <lm.h> #include "../system.h" #include "dll.h" #include "../../profiling/profiler.h" #pragma comment(lib, "Rpcrt4.lib") #pragma comment(lib, "netapi32.lib") namespace fs = boost::filesystem; CORE_TOOLS_SYSTEM_NS_BEGIN namespace { std::wstring get_user_downloads_dir_xp(); std::wstring get_user_downloads_dir_vista(); } unsigned long get_current_thread_id() { return ::GetCurrentThreadId(); } bool is_dir_writable(const std::wstring &_dir_path_str) { const fs::wpath dir_path(_dir_path_str); boost::system::error_code error; const auto is_dir = fs::is_directory(dir_path, error); assert(is_dir); if (!is_dir) { return false; } const auto test_path = (dir_path / generate_guid()); { std::ofstream out(test_path.wstring()); if (out.fail()) { return false; } } fs::remove(test_path, error); return true; } bool core::tools::system::delete_file(const std::wstring& _file_name) { boost::system::error_code error; boost::filesystem::remove(_file_name, error); return !error; } bool move_file(const std::wstring& _old_file, const std::wstring& _new_file) { return !!::MoveFileEx(_old_file.c_str(), _new_file.c_str(), MOVEFILE_COPY_ALLOWED | MOVEFILE_REPLACE_EXISTING); } bool copy_file(const std::wstring& _old_file, const std::wstring& _new_file) { return !!::CopyFile(_old_file.c_str(), _new_file.c_str(), false); } bool compare_dirs(const std::wstring& _dir1, const std::wstring& _dir2) { if (_dir1.empty() || _dir2.empty()) return false; boost::system::error_code error; return fs::equivalent(fs::path(_dir1), fs::path(_dir2), error); } std::wstring get_file_directory(const std::wstring& file) { fs::wpath p(file); return p.parent_path().wstring(); } std::wstring get_file_name(const std::wstring& file) { fs::wpath p(file); return p.filename().wstring(); } std::wstring get_temp_directory() { wchar_t path[MAX_PATH + 1]; if (::GetTempPath(MAX_PATH, path) != 0) return path; return std::wstring(); } std::wstring 
get_user_downloads_dir() { static std::wstring cached_path; if (!cached_path.empty()) { return cached_path; } cached_path = get_user_downloads_dir_vista(); if (!cached_path.empty()) { return cached_path; } cached_path = get_user_downloads_dir_xp(); return cached_path; } std::string to_upper(std::string_view str) { return boost::locale::to_upper(str.data(), str.data() + str.size()); } std::string to_lower(std::string_view str) { return boost::locale::to_lower(str.data(), str.data() + str.size()); } namespace { std::wstring get_user_downloads_dir_xp() { WCHAR path[MAX_PATH] = { 0 }; const auto error = ::SHGetFolderPath(nullptr, CSIDL_PERSONAL|CSIDL_FLAG_CREATE, nullptr, 0, Out path); if (FAILED(error)) { return std::wstring(); } boost::system::error_code e; assert(fs::is_directory(path, e)); return path; } std::wstring get_user_downloads_dir_vista() { PWSTR path = nullptr; static auto proc = tools::win32::import_proc<decltype(&::SHGetKnownFolderPath)>(L"Shell32.dll", "SHGetKnownFolderPath"); if (!proc) { return std::wstring(); } const auto error = proc->get()(FOLDERID_Downloads, 0, nullptr, Out &path); if (FAILED(error)) { return std::wstring(); } std::wstring result(path); boost::system::error_code e; assert(fs::is_directory(result, e)); ::CoTaskMemFree(path); return result; } } bool GetWinMajorMinorVersion(DWORD& major, DWORD& minor) { bool bRetCode = false; LPBYTE pinfoRawData = 0; if (NERR_Success == NetWkstaGetInfo(NULL, 100, &pinfoRawData)) { WKSTA_INFO_100* pworkstationInfo = (WKSTA_INFO_100*)pinfoRawData; major = pworkstationInfo->wki100_ver_major; minor = pworkstationInfo->wki100_ver_minor; ::NetApiBufferFree(pinfoRawData); bRetCode = true; } return bRetCode; } std::string get_os_version_string() { std::string winver; OSVERSIONINFOEX osver; SYSTEM_INFO sysInfo; typedef void(__stdcall *GETSYSTEMINFO) (LPSYSTEM_INFO); __pragma(warning(push)) __pragma(warning(disable:4996)) memset(&osver, 0, sizeof(osver)); osver.dwOSVersionInfoSize = sizeof(osver); auto 
getResult = GetVersionEx((LPOSVERSIONINFO)&osver); __pragma(warning(pop)) if (getResult == 0) return "unknown"; DWORD major = 0; DWORD minor = 0; if (GetWinMajorMinorVersion(major, minor)) { osver.dwMajorVersion = major; osver.dwMinorVersion = minor; } else if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 2) { OSVERSIONINFOEXW osvi; ULONGLONG cm = 0; cm = VerSetConditionMask(cm, VER_MINORVERSION, VER_EQUAL); ZeroMemory(&osvi, sizeof(osvi)); osvi.dwOSVersionInfoSize = sizeof(osvi); osvi.dwMinorVersion = 3; if (VerifyVersionInfoW(&osvi, VER_MINORVERSION, cm)) { osver.dwMinorVersion = 3; } } GETSYSTEMINFO getSysInfo = (GETSYSTEMINFO)GetProcAddress(GetModuleHandle(L"kernel32.dll"), "GetNativeSystemInfo"); if (getSysInfo == NULL) getSysInfo = ::GetSystemInfo; getSysInfo(&sysInfo); if (osver.dwMajorVersion == 10 && osver.dwMinorVersion >= 0 && osver.wProductType != VER_NT_WORKSTATION) winver = "Windows 10 Server"; if (osver.dwMajorVersion == 10 && osver.dwMinorVersion >= 0 && osver.wProductType == VER_NT_WORKSTATION) winver = "Windows 10"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 3 && osver.wProductType != VER_NT_WORKSTATION) winver = "Windows Server 2012 R2"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 3 && osver.wProductType == VER_NT_WORKSTATION) winver = "Windows 8.1"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 2 && osver.wProductType != VER_NT_WORKSTATION) winver = "Windows Server 2012"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 2 && osver.wProductType == VER_NT_WORKSTATION) winver = "Windows 8"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 1 && osver.wProductType != VER_NT_WORKSTATION) winver = "Windows Server 2008 R2"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 1 && osver.wProductType == VER_NT_WORKSTATION) winver = "Windows 7"; if (osver.dwMajorVersion == 6 && osver.dwMinorVersion == 0 && osver.wProductType != VER_NT_WORKSTATION) winver = "Windows Server 2008"; if 
(osver.dwMajorVersion == 6 && osver.dwMinorVersion == 0 && osver.wProductType == VER_NT_WORKSTATION) winver = "Windows Vista"; if (osver.dwMajorVersion == 5 && osver.dwMinorVersion == 2 && osver.wProductType == VER_NT_WORKSTATION && sysInfo.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_AMD64) winver = "Windows XP x64"; if (osver.dwMajorVersion == 5 && osver.dwMinorVersion == 2) winver = "Windows Server 2003"; if (osver.dwMajorVersion == 5 && osver.dwMinorVersion == 1) winver = "Windows XP"; if (osver.dwMajorVersion == 5 && osver.dwMinorVersion == 0) winver = "Windows 2000"; if (osver.dwMajorVersion < 5) winver = "unknown"; if (osver.wServicePackMajor != 0) { std::string sp; char buf[128] = { 0 }; sp = " Service Pack "; sprintf_s(buf, sizeof(buf), "%d", osver.wServicePackMajor); sp.append(buf); winver += sp; } return winver; } std::string get_short_file_name(const std::wstring& _file_name) { auto length = GetShortPathName(_file_name.c_str(), NULL, 0); std::string result; if (length == 0) return result; std::vector<wchar_t> wide_buffer(length); length = GetShortPathName(_file_name.c_str(), wide_buffer.data(), length); if (length == 0) return result; length = WideCharToMultiByte(CP_ACP, 0, wide_buffer.data(), length, NULL, 0, NULL, NULL); if (length == 0) return result; std::vector<char> buffer(length); length = WideCharToMultiByte(CP_ACP, 0, wide_buffer.data(), length, buffer.data(), length, NULL, NULL); if (length == 0) return result; return result.assign(buffer.begin(), buffer.end()); } bool is_windows_vista_or_higher() { OSVERSIONINFO os_version; ZeroMemory(&os_version, sizeof(OSVERSIONINFO)); os_version.dwOSVersionInfoSize = sizeof(OSVERSIONINFO); GetVersionEx(&os_version); return os_version.dwMajorVersion >= 6; } bool is_do_not_dirturb_on() { return false; } CORE_TOOLS_SYSTEM_NS_END
4,103
852
<reponame>ckamtsikis/cmssw<gh_stars>100-1000
# DQM offline trigger monitoring configuration for the HLT_DiPFJetAve40 path,
# cloned from the generic dijetMonitoring module and specialized below.
import FWCore.ParameterSet.Config as cms

from DQMOffline.Trigger.dijetMonitoring_cfi import dijetMonitoring

DiPFjetAve40_Prommonitoring = dijetMonitoring.clone()
# Destination folder for the produced histograms.
DiPFjetAve40_Prommonitoring.FolderName = cms.string('HLT/JME/Jets/AK4/PF/HLT_DiPFJetAve40/')
# Dijet histogram binning: 200 bins over [0, 1000].
DiPFjetAve40_Prommonitoring.histoPSet.dijetPSet = cms.PSet(
    nbins = cms.uint32 ( 200 ),
    xmin = cms.double( 0),
    xmax = cms.double(1000.),
)
# Threshold-histogram binning: 50 bins over [0, 100].
DiPFjetAve40_Prommonitoring.histoPSet.dijetPtThrPSet = cms.PSet(
    nbins = cms.uint32 ( 50 ),
    xmin = cms.double( 0.),
    xmax = cms.double(100.),
)
# Input collections.
DiPFjetAve40_Prommonitoring.met = cms.InputTag("pfMetEI") # pfMet
#DiPFjetAve40_Prommonitoring.pfjets = cms.InputTag("ak4PFJets") # ak4PFJets, ak4PFJetsCHS
DiPFjetAve40_Prommonitoring.dijetSrc = cms.InputTag("ak4PFJets") # ak4PFJets, ak4PFJetsCHS
DiPFjetAve40_Prommonitoring.electrons = cms.InputTag("gedGsfElectrons") # while pfIsolatedElectronsEI are reco::PFCandidate !
DiPFjetAve40_Prommonitoring.muons = cms.InputTag("muons") # while pfIsolatedMuonsEI are reco::PFCandidate !
DiPFjetAve40_Prommonitoring.ptcut = cms.double(20) # pt threshold

# Numerator trigger selection (the monitored HLT path).
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.andOr = cms.bool( False )
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.dbLabel = cms.string("JetMETDQMTrigger") # it does not exist yet, we should consider the possibility of using the DB, but as it is now it will need a label per path !
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.andOrHlt = cms.bool(True)# True:=OR; False:=AND
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.hltInputTag = cms.InputTag( "TriggerResults::HLT" )
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.hltPaths = cms.vstring("HLT_DiPFJetAve40_v*") # HLT_ZeroBias_v*
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.errorReplyHlt = cms.bool( False )
DiPFjetAve40_Prommonitoring.numGenericTriggerEventPSet.verbosityLevel = cms.uint32(1)

# Denominator selection: DCS-based detector-on requirement.
DiPFjetAve40_Prommonitoring.denGenericTriggerEventPSet.andOr = cms.bool( False )
DiPFjetAve40_Prommonitoring.denGenericTriggerEventPSet.dcsInputTag = cms.InputTag( "scalersRawToDigi" )
DiPFjetAve40_Prommonitoring.denGenericTriggerEventPSet.dcsPartitions = cms.vint32 ( 24, 25, 26, 27, 28, 29 ) # 24-27: strip, 28-29: pixel, we should add all other detectors !
DiPFjetAve40_Prommonitoring.denGenericTriggerEventPSet.andOrDcs = cms.bool( False )
DiPFjetAve40_Prommonitoring.denGenericTriggerEventPSet.errorReplyDcs = cms.bool( True )
DiPFjetAve40_Prommonitoring.denGenericTriggerEventPSet.verbosityLevel = cms.uint32(1)
1,096
615
#include "Sqlite.h" #include "catch.hpp" #include <string> #include <vector> #include "SqliteError.h" #include "TempDir.h" #include "CaptureLog.h" TEST_CASE("opens databases", "[Sqlite]") { TempDir tempDir; CaptureLog log; Sqlite sqlite(log); auto dbPath = tempDir.path + "/db.sqlite"; SECTION("creates a new db") { sqlite.open(dbPath, false); } SECTION("fails to open a non-existent db") { REQUIRE_THROWS(sqlite.open(dbPath, true)); } SECTION("reopens a new db") { sqlite.open(dbPath, false); sqlite.close(); Sqlite anotherSqlite(log); anotherSqlite.open(dbPath, true); } } TEST_CASE("prepares statements", "[Sqlite]") { TempDir tempDir; CaptureLog log; Sqlite sqlite(log); auto dbPath = tempDir.path + "/db.sqlite"; sqlite.open(dbPath, false); auto s = sqlite.prepare("create table t(one varchar(10), two integer)"); SECTION("handles exceptions") { std::string threw; try { sqlite.prepare("SELECT * FROM foo"); } catch (const SqliteError &e) { threw = e.what(); } CHECK(threw == "no such table: foo (SELECT * FROM foo)"); } } TEST_CASE("executes statements", "[Sqlite]") { TempDir tempDir; CaptureLog log; Sqlite sqlite(log); auto dbPath = tempDir.path + "/db.sqlite"; sqlite.open(dbPath, false); REQUIRE(sqlite.prepare("create table t(one varchar(10), two integer)").step() == true); REQUIRE(sqlite.prepare("insert into t values('moo', 1)").step() == true); REQUIRE(sqlite.prepare("insert into t values('foo', 2)").step() == true); auto select = sqlite.prepare("select * from t order by two desc"); CHECK(select.columnCount() == 2); int num = 0; for (;;) { if (select.step()) break; CHECK(select.columnName(0) == "one"); CHECK(select.columnName(1) == "two"); if (num == 0) { CHECK(select.columnString(0) == "foo"); CHECK(select.columnInt64(1) == 2); } else if (num == 1) { CHECK(select.columnString(0) == "moo"); CHECK(select.columnInt64(1) == 1); } ++num; } CHECK(num == 2); } TEST_CASE("handles binds and blobs", "[Sqlite]") { TempDir tempDir; CaptureLog log; Sqlite sqlite(log); auto dbPath = 
tempDir.path + "/db.sqlite"; sqlite.open(dbPath, false); REQUIRE(sqlite.prepare("create table t(offset integer, data blob)").step() == true); auto inserter = sqlite.prepare("insert into t values(:id, :data)"); inserter.bindInt64(":id", 1234); constexpr auto byteLen = 1024; char bytes[byteLen]; for (int i = 0; i < byteLen; ++i) bytes[i] = i & 0xff; inserter.bindBlob(":data", bytes, byteLen); CHECK(inserter.step() == true); inserter.reset(); inserter.bindInt64(":id", 5678); for (int i = 0; i < byteLen; ++i) bytes[i] = (i & 0xff) ^ 0xff; inserter.bindBlob(":data", bytes, byteLen); CHECK(inserter.step() == true); auto select = sqlite.prepare("select * from t order by offset"); CHECK(select.columnCount() == 2); CHECK(select.step() == false); CHECK(select.columnName(0) == "offset"); CHECK(select.columnName(1) == "data"); CHECK(select.columnInt64(0) == 1234); auto blob1 = select.columnBlob(1); REQUIRE(blob1.size() == byteLen); for (int i = 0; i < byteLen; ++i) REQUIRE(blob1[i] == (i & 0xff)); CHECK(select.step() == false); CHECK(select.columnInt64(0) == 5678); auto blob2 = select.columnBlob(1); REQUIRE(blob2.size() == byteLen); for (int i = 0; i < byteLen; ++i) REQUIRE(blob2[i] == (0xff ^ (i & 0xff))); CHECK(select.step() == true); }
1,646
578
<filename>src/backend/job-execute/service-job-execute/src/main/java/com/tencent/bk/job/execute/engine/gse/model/GSEFileTaskResult.java<gh_stars>100-1000
/*
 * Tencent is pleased to support the open source community by making BK-JOB蓝鲸智云作业平台 available.
 *
 * Copyright (C) 2021 THL A29 Limited, a Tencent company.  All rights reserved.
 *
 * BK-JOB蓝鲸智云作业平台 is licensed under the MIT License.
 *
 * License for BK-JOB蓝鲸智云作业平台:
 * --------------------------------------------------------------------
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
 * documentation files (the "Software"), to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
 * to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of
 * the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
 * THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
 * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

package com.tencent.bk.job.execute.engine.gse.model;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.tencent.bk.job.execute.common.constants.FileDistModeEnum;
import com.tencent.bk.job.execute.engine.util.FilePathUtils;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
import org.apache.commons.lang3.StringUtils;

import java.util.StringJoiner;

/**
 * Result of a single GSE file-transfer task.
 */
@Getter
@Setter
@ToString
public class GSEFileTaskResult {
    /**
     * Transfer direction: 0:upload, 1:download
     */
    private Integer mode;
    /**
     * Completion progress, in the range 0-100.
     */
    private Integer process;
    /**
     * Total file size in bytes.
     */
    private Long size;
    /**
     * Transfer speed, in bps.
     */
    private Integer speed;
    /**
     * Task phase description, e.g. checksum/upload/download.
     */
    private String type;
    /**
     * Task start time.
     */
    @JsonProperty("start_time")
    private Long startTime;
    /**
     * Task start time - legacy compatibility field (string form).
     */
    @JsonProperty("starttime")
    private String startTimeStr;
    /**
     * Task end time - legacy compatibility field (string form).
     */
    @JsonProperty("endtime")
    private String endTimeStr;
    /**
     * Task end time.
     */
    @JsonProperty("end_time")
    private Long endTime;
    /**
     * Source host IP.
     */
    @JsonProperty("source")
    private String sourceIp;
    /**
     * Cloud area id of the source host.
     */
    @JsonProperty("source_cloudid")
    private Long sourceCloudId;
    /**
     * Distribution target host IP.
     */
    @JsonProperty("dest")
    private String destIp;
    /**
     * Cloud area id of the target host.
     */
    @JsonProperty("dest_cloudid")
    private Long destCloudId;
    /**
     * Source file directory.
     */
    @JsonProperty("source_file_dir")
    private String srcDirPath;
    /**
     * Source file name.
     */
    @JsonProperty("source_file_name")
    private String srcFileName;
    /**
     * Target file directory.
     */
    @JsonProperty("dest_file_dir")
    private String destDirPath;
    /**
     * Target file name.
     */
    @JsonProperty("dest_file_name")
    private String destFileName;
    /**
     * GSE file task status code.
     */
    @JsonProperty("status_code")
    private Integer status;
    /**
     * GSE file task status description.
     */
    @JsonProperty("status_info")
    private String statusDesc;
    /**
     * GSE protocol version (0 - unknown; 1 - initial version; 2 - version without the valuekey dependency).
     */
    @JsonProperty("protover")
    private Integer protocolVersion;

    // ------------------ fields below are NOT part of the GSE protocol ----------------------
    /**
     * Identifier of this file task (lazily built, see getTaskId()).
     */
    private String taskId;
    /**
     * Normalized source file path (lazily built).
     */
    private String standardSourceFilePath;
    /**
     * Normalized target file path (lazily built).
     */
    private String standardDestFilePath;
    /**
     * Task type: 1 - file distribution, 2 - directory distribution, 3 - regex distribution, 4 - wildcard distribution.
     */
    private TaskType taskType;

    // "cloudId:ip" of the source host, or null when either part is missing.
    public String getSourceCloudIp() {
        if (sourceCloudId == null || sourceIp == null) {
            return null;
        }
        return sourceCloudId + ":" + sourceIp;
    }

    // "cloudId:ip" of the target host, or null when either part is missing.
    public String getDestCloudIp() {
        if (destCloudId == null || destIp == null) {
            return null;
        }
        return destCloudId + ":" + destIp;
    }

    public boolean isDownloadMode() {
        return FileDistModeEnum.DOWNLOAD.getValue().equals(this.mode);
    }

    // Source dir + file name joined into one path; computed once and cached.
    public String getStandardSourceFilePath() {
        if (standardSourceFilePath != null) {
            return standardSourceFilePath;
        } else {
            standardSourceFilePath = FilePathUtils.appendFileName(srcDirPath, srcFileName);
        }
        return standardSourceFilePath;
    }

    // Target path; computed once and cached. Protocol v2 derives the name from
    // the source, earlier versions use the dest file name reported by GSE.
    public String getStandardDestFilePath() {
        if (standardDestFilePath != null) {
            return standardDestFilePath;
        } else {
            if (protocolVersion != null && protocolVersion == 2) {
                if (StringUtils.isEmpty(srcFileName)) {
                    // directory distribution
                    String srcDirName = FilePathUtils.parseDirName(srcDirPath);
                    standardDestFilePath = FilePathUtils.appendDirName(destDirPath, srcDirName);
                } else {
                    // file / regex / wildcard distribution
                    standardDestFilePath = FilePathUtils.appendFileName(destDirPath, srcFileName);
                }
            } else {
                standardDestFilePath = FilePathUtils.appendFileName(destDirPath, destFileName);
            }
        }
        return standardDestFilePath;
    }

    // Builds (and caches) a task id from mode, endpoints and paths. Download
    // ids include both endpoints, upload ids only the source side.
    public String getTaskId() {
        if (taskId != null) {
            return taskId;
        }
        if (isDownloadMode()) {
            if (StringUtils.isNotEmpty(sourceIp)) {
                this.taskId = concat(mode.toString(), getSourceCloudIp(), getStandardSourceFilePath(),
                    getDestCloudIp(), getStandardDestFilePath());
            } else {
                // workaround for a GSE bug: source IP may be missing, use "*"
                this.taskId = concat(mode.toString(), "*", getStandardSourceFilePath(),
                    getDestCloudIp(), getStandardDestFilePath());
            }
        } else {
            this.taskId = concat(mode.toString(), getSourceCloudIp(), getStandardSourceFilePath());
        }
        return this.taskId;
    }

    // Joins the arguments with ':', mapping null/empty entries to "".
    private String concat(String... strArgs) {
        StringJoiner sj = new StringJoiner(":");
        for (String strArg : strArgs) {
            if (StringUtils.isEmpty(strArg)) {
                sj.add("");
            } else {
                sj.add(strArg);
            }
        }
        return sj.toString();
    }

    // Prefers the numeric field; falls back to parsing the legacy string field.
    public Long getStartTime() {
        if (startTime != null) {
            return startTime;
        }
        if (StringUtils.isNotEmpty(startTimeStr)) {
            return Long.valueOf(startTimeStr);
        }
        return null;
    }

    // Prefers the numeric field; falls back to parsing the legacy string field.
    public Long getEndTime() {
        if (endTime != null) {
            return endTime;
        }
        if (StringUtils.isNotEmpty(endTimeStr)) {
            return Long.valueOf(endTimeStr);
        }
        return null;
    }

    private enum TaskType {
        FILE(1), DIR(2), REGEX(3), WILDCARD(4);
        private final int value;

        TaskType(int taskType) {
            this.value = taskType;
        }

        // Maps a numeric task type to the enum constant; null when unknown.
        public static TaskType valueOf(Integer taskType) {
            if (taskType == null) {
                return null;
            }
            for (TaskType inst : values()) {
                if (inst.value == taskType) {
                    return inst;
                }
            }
            return null;
        }

        public final int getValue() {
            return value;
        }
    }
}
3,892
14,668
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef REMOTING_IOS_APP_HOST_SETUP_FOOTER_VIEW_H_
#define REMOTING_IOS_APP_HOST_SETUP_FOOTER_VIEW_H_

#import <UIKit/UIKit.h>

// The view with the "Email link and instructions" button.
// Subclasses UICollectionReusableView — presumably registered as the footer
// supplementary view of the host setup collection view (confirm at call site).
@interface HostSetupFooterView : UICollectionReusableView
@end

#endif  // REMOTING_IOS_APP_HOST_SETUP_FOOTER_VIEW_H_
172
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_basegfx.hxx"
// autogenerated file with codegen.pl

#include "preextstl.h"
#include "gtest/gtest.h"
#include "postextstl.h"

#include <basegfx/tools/b2dclipstate.hxx>
#include <basegfx/range/b2dpolyrange.hxx>
#include <basegfx/polygon/b2dpolygon.hxx>
#include <basegfx/polygon/b2dpolygontools.hxx>
#include <basegfx/polygon/b2dpolypolygontools.hxx>
#include <basegfx/polygon/b2dpolypolygon.hxx>
#include <basegfx/numeric/ftools.hxx>

#include <boost/bind.hpp>

using namespace ::basegfx;

// FIXME:
// Some assertions below are known to fail; they are compiled out until fixed.
#define RUN_OLD_FAILING_TESTS 0

namespace basegfx2d
{
// Fixture building the same plus-shaped clip region (center square with
// N/W/S/E squares on its edges) through union, intersect, xor and subtract.
class clipstate : public ::testing::Test
{
protected:
    tools::B2DClipState aUnion1;
    tools::B2DClipState aUnion2;
    tools::B2DClipState aIntersect;
    tools::B2DClipState aXor;
    tools::B2DClipState aSubtract;

public:
    virtual void SetUp()
    {
        B2DRange aCenter(100, 100, -100, -100);
        B2DRange aNorth(-10, -110, 10, -90);
        B2DRange aWest(-110, -10, -90, 10);
        B2DRange aSouth(-10, 110, 10, 90);
        B2DRange aEast(110, -10, 90, 10);

        // aUnion1 starts from the default-constructed ("cleared") state,
        // aUnion2 from an explicitly null state.
        aUnion1.unionRange(aCenter);
        aUnion1.unionRange(aNorth);
        aUnion1.unionRange(aWest);
        aUnion1.unionRange(aSouth);
        aUnion1.unionRange(aEast);

        aUnion2.makeNull();
        aUnion2.unionRange(aCenter);
        aUnion2.unionRange(aNorth);
        aUnion2.unionRange(aWest);
        aUnion2.unionRange(aSouth);
        aUnion2.unionRange(aEast);

        aIntersect.intersectRange(aCenter);
        aIntersect.intersectRange(aNorth);
        aIntersect.intersectRange(aWest);
        aIntersect.intersectRange(aSouth);
        aIntersect.intersectRange(aEast);

        aXor.makeNull();
        aXor.xorRange(aCenter);
        aXor.xorRange(aNorth);
        aXor.xorRange(aWest);
        aXor.xorRange(aSouth);
        aXor.xorRange(aEast);

        aSubtract.intersectRange(aCenter);
        aSubtract.subtractRange(aNorth);
        aSubtract.subtractRange(aWest);
        aSubtract.subtractRange(aSouth);
        aSubtract.subtractRange(aEast);
    }

    virtual void TearDown() {}

    // Asserts that toTest's clip polygon equals the expected SVG-d path
    // (both sides round-tripped through importFromSvgD for normalization).
    void verifyPoly(const char* sName, const char* sSvg, const tools::B2DClipState& toTest)
    {
#if defined(VERBOSE)
        fprintf(stderr, "%s - svg:d=\"%s\"\n",
                sName, rtl::OUStringToOString(
                    basegfx::tools::exportToSvgD(toTest.getClipPoly(), true, true, false),
                    RTL_TEXTENCODING_UTF8).getStr() );
#endif

        B2DPolyPolygon aTmp1;
        ASSERT_TRUE(tools::importFromSvgD(
                        aTmp1, rtl::OUString::createFromAscii(sSvg), false, 0)) << sName;

        const rtl::OUString aSvg=
            tools::exportToSvgD(toTest.getClipPoly(), true, true, false);
        B2DPolyPolygon aTmp2;
        ASSERT_TRUE(tools::importFromSvgD(aTmp2, aSvg, false, 0)) << sName;

        ASSERT_TRUE(aTmp2 == aTmp1) << sName;
    }
};

TEST_F(clipstate, verifySimpleRange)
{
    const char* unionSvg="m100 10v90h-90v10h-20v-10h-90v-90h-10v-20h10v-90h90v-10h20v10h90v90h10v20z";
    const char* intersectSvg="m-100 10v-20h10v20zm80 90v-10h20v10zm-20-190v-10h20v10zm80 100v-20h10v20z";
    const char* xorSvg="m-100 10h10v-20h-10zm90 110h20v-10h-20zm0-180h20v-10h-20zm100 110h10v-20h-10zm10 20v90h-90v10h-20v-10h-90v-90h-10v-20h10v-90h90v-10h20v10h90v90h10v20z";
    const char* subtractSvg="m-90 10v-20h-10v-90h90v10h20v-10h90v90h-10v20h10v90h-90v-10h-20v10h-90v-90z";

    ASSERT_TRUE(aUnion1.isCleared()) << "cleared clip stays empty under union operation";
    verifyPoly("union", unionSvg, aUnion2);
#if RUN_OLD_FAILING_TESTS
    verifyPoly("intersect", intersectSvg, aIntersect);
    verifyPoly("xor", xorSvg, aXor);
#endif
    verifyPoly("subtract", subtractSvg, aSubtract);
}

TEST_F(clipstate, verifyMixedClips)
{
    tools::B2DClipState aMixedClip;

    const char* unionSvg="m100 10v90h-90v10h-20v-10h-90v-90h-10v-20h10v-90h90v-10h20v10h90v90h10v20z";

    B2DPolyPolygon aTmp1;
    tools::importFromSvgD(
        aTmp1, rtl::OUString::createFromAscii(unionSvg), false, 0);

    // Mix polygon intersect, range subtract and range xor in one clip state.
    aMixedClip.intersectPolyPolygon(aTmp1);
    aMixedClip.subtractRange(B2DRange(-20,-150,20,0));
    aMixedClip.subtractRange(B2DRange(-150,-20,0,20));
    aMixedClip.xorRange(B2DRange(-150,-150,150,150));

    const char* mixedClipSvg="m0 0v20h-100v80h90v10h20v-10h90v-90h10v-20h-10v-90h-80v100zm-40-20v-80h-80v80zm-50 170v-300h300v300z";
#if RUN_OLD_FAILING_TESTS
    verifyPoly("mixed clip", mixedClipSvg, aMixedClip);
#endif
}

// -----------------------------------------------------------------------------

} // namespace basegfx2d
2,533
1,947
#include "test.h" #include <iostream> #include <stdexcept> #include "midgard/distanceapproximator.h" #include "midgard/encoded.h" #include "midgard/logging.h" #include "midgard/util.h" #include "tyr/actor.h" #if !defined(VALHALLA_SOURCE_DIR) #define VALHALLA_SOURCE_DIR #endif using namespace valhalla; using namespace valhalla::midgard; namespace { // fake config const auto conf = test::make_config("test/data/utrecht_tiles"); TEST(ShapeAttributes, test_shape_attributes_included) { tyr::actor_t actor(conf); auto result_json = actor.trace_attributes( R"({"shape":[ {"lat":52.09110,"lon":5.09806}, {"lat":52.09050,"lon":5.09769}, {"lat":52.09098,"lon":5.09679} ],"costing":"auto","shape_match":"map_snap", "filters":{"attributes":["edge.length","edge.speed","edge.begin_shape_index", "edge.end_shape_index","shape","shape_attributes.length","shape_attributes.time","shape_attributes.speed"], "action":"include"}})"); rapidjson::Document doc; doc.Parse(result_json); EXPECT_FALSE(doc.HasParseError()) << "Could not parse json response"; auto shape = midgard::decode<std::vector<PointLL>>(rapidjson::Pointer("/shape").Get(doc)->GetString()); auto shape_attributes_time = rapidjson::Pointer("/shape_attributes/time").Get(doc)->GetArray(); auto shape_attributes_length = rapidjson::Pointer("/shape_attributes/length").Get(doc)->GetArray(); auto shape_attributes_speed = rapidjson::Pointer("/shape_attributes/speed").Get(doc)->GetArray(); auto edges = rapidjson::Pointer("/edges").Get(doc)->GetArray(); EXPECT_EQ(shape_attributes_time.Size(), shape.size() - 1); EXPECT_EQ(shape_attributes_length.Size(), shape.size() - 1); EXPECT_EQ(shape_attributes_speed.Size(), shape.size() - 1); // Measures the length between point for (int i = 1; i < shape.size(); i++) { auto distance = shape[i].Distance(shape[i - 1]) * .001f; // Measuring that the length between shape pts is approx. 
to the shape attributes length EXPECT_NEAR(distance, shape_attributes_length[i - 1].GetFloat(), .01f); } // Assert that the shape attributes (time, length, speed) are equal to their corresponding edge // attributes for (int e = 0; e < edges.Size(); e++) { auto edge_length = edges[e]["length"].GetDouble(); auto edge_speed = edges[e]["speed"].GetDouble(); double sum_times = 0; double sum_lengths = 0; for (int j = edges[e]["begin_shape_index"].GetInt(); j < edges[e]["end_shape_index"].GetInt(); j++) { sum_times += shape_attributes_time[j].GetDouble(); sum_lengths += shape_attributes_length[j].GetDouble(); EXPECT_NEAR(edge_speed, shape_attributes_speed[j].GetDouble(), .15); } // Can't assert that sum of shape times equals edge's elapsed_time because elapsed_time includes // transition costs and shape times do not. EXPECT_NEAR(3600 * edge_length / edge_speed, sum_times, .1); EXPECT_NEAR(edge_length, sum_lengths, .1); } } TEST(ShapeAttributes, test_shape_attributes_duplicated_point) { tyr::actor_t actor(conf); auto result_json = actor.trace_attributes( R"({"shape":[ {"lat":52.09110,"lon":5.09806}, {"lat":52.09110,"lon":5.09806}, {"lat":52.09050,"lon":5.09769}, {"lat":52.09098,"lon":5.09679} ],"costing":"auto","shape_match":"map_snap", "filters":{"attributes":["edge.length","edge.speed","edge.begin_shape_index", "edge.end_shape_index","shape","shape_attributes.length","shape_attributes.time","shape_attributes.speed"], "action":"include"}})"); rapidjson::Document doc; doc.Parse(result_json); EXPECT_FALSE(doc.HasParseError()) << "Could not parse json response"; auto shape = midgard::decode<std::vector<PointLL>>(rapidjson::Pointer("/shape").Get(doc)->GetString()); auto shape_attributes_time = rapidjson::Pointer("/shape_attributes/time").Get(doc)->GetArray(); auto shape_attributes_length = rapidjson::Pointer("/shape_attributes/length").Get(doc)->GetArray(); auto shape_attributes_speed = rapidjson::Pointer("/shape_attributes/speed").Get(doc)->GetArray(); auto edges = 
rapidjson::Pointer("/edges").Get(doc)->GetArray(); EXPECT_EQ(shape_attributes_time.Size(), shape.size() - 1); EXPECT_EQ(shape_attributes_length.Size(), shape.size() - 1); EXPECT_EQ(shape_attributes_speed.Size(), shape.size() - 1); // Measures the length between point for (int i = 1; i < shape.size(); i++) { auto distance = shape[i].Distance(shape[i - 1]) * .001f; // Measuring that the length between shape pts is approx. to the shape attributes length EXPECT_NEAR(distance, shape_attributes_length[i - 1].GetFloat(), .01f); } // Assert that the shape attributes (time, length, speed) are equal to their corresponding edge // attributes for (int e = 0; e < edges.Size(); e++) { auto edge_length = edges[e]["length"].GetDouble(); auto edge_speed = edges[e]["speed"].GetDouble(); double sum_times = 0; double sum_lengths = 0; for (int j = edges[e]["begin_shape_index"].GetInt(); j < edges[e]["end_shape_index"].GetInt(); j++) { sum_times += shape_attributes_time[j].GetDouble(); sum_lengths += shape_attributes_length[j].GetDouble(); EXPECT_NEAR(edge_speed, shape_attributes_speed[j].GetDouble(), .15); } // Can't assert that sum of shape times equals edge's elapsed_time because elapsed_time includes // transition costs and shape times do not. 
EXPECT_NEAR(3600 * edge_length / edge_speed, sum_times, .1); EXPECT_NEAR(edge_length, sum_lengths, .1); } } TEST(ShapeAttributes, test_shape_attributes_no_turncosts) { tyr::actor_t actor(conf); auto result_json = actor.trace_attributes( R"({"shape":[ {"lat":52.09110,"lon":5.09806}, {"lat":52.091050,"lon":5.097556} ],"costing":"auto","shape_match":"map_snap", "filters":{"attributes":["edge.length","edge.speed","node.elapsed_time", "edge.begin_shape_index","edge.end_shape_index","shape", "shape_attributes.length","shape_attributes.time","shape_attributes.speed"], "action":"include"}})"); rapidjson::Document doc; doc.Parse(result_json); EXPECT_FALSE(doc.HasParseError()) << "Could not parse json response"; auto shape = midgard::decode<std::vector<PointLL>>(rapidjson::Pointer("/shape").Get(doc)->GetString()); auto shape_attributes_time = rapidjson::Pointer("/shape_attributes/time").Get(doc)->GetArray(); auto shape_attributes_length = rapidjson::Pointer("/shape_attributes/length").Get(doc)->GetArray(); auto shape_attributes_speed = rapidjson::Pointer("/shape_attributes/speed").Get(doc)->GetArray(); auto edges = rapidjson::Pointer("/edges").Get(doc)->GetArray(); EXPECT_EQ(shape_attributes_time.Size(), shape.size() - 1); EXPECT_EQ(shape_attributes_length.Size(), shape.size() - 1); EXPECT_EQ(shape_attributes_speed.Size(), shape.size() - 1); // Measures the length between point for (int i = 1; i < shape.size(); i++) { auto distance = shape[i].Distance(shape[i - 1]) * .001f; // Measuring that the length between shape pts is approx. 
to the shape attributes length EXPECT_NEAR(distance, shape_attributes_length[i - 1].GetFloat(), .01f); } // Assert that the shape attributes (time, length, speed) are equal to their corresponding edge // attributes auto edge_length = edges[0]["length"].GetDouble(); auto edge_speed = edges[0]["speed"].GetDouble(); auto edge_elapsed_time = edges[0]["end_node"]["elapsed_time"].GetDouble(); double sum_times = 0; double sum_lengths = 0; sum_times += shape_attributes_time[0].GetDouble(); sum_lengths += shape_attributes_length[0].GetDouble(); EXPECT_NEAR(edge_speed, shape_attributes_speed[0].GetDouble(), .15); // Can't assert that sum of shape times equals edge's elapsed_time because elapsed_time includes // transition costs and shape times do not. EXPECT_NEAR(edge_elapsed_time, sum_times, .1); EXPECT_NEAR(edge_length, sum_lengths, .1); } } // namespace int main(int argc, char* argv[]) { testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); }
2,990
353
<gh_stars>100-1000 package org.nutz.plugins.nop.core.sign; import javax.servlet.http.HttpServletRequest; import org.nutz.log.Log; import org.nutz.log.Logs; /** * 签名器 * * @author kerbores * */ public interface Signer { public Log log = Logs.get(); /** * 名称 * * @return */ public String name(); /** * * @param appSecret * 密钥 * @param timestamp * 时间戳 * @param gateway * 网关/方法名称 * @param nonce * 随机串 * @param dataMate * 数据元数据 * @return */ public String sign(String appSecret, String timestamp, String gateway, String nonce, String dataMate); /** * 签名检查 * * @param request * 请求 * @param fetcher * 密钥获取器 * @return */ public boolean check(HttpServletRequest request, AppsecretFetcher fetcher); }
444
1,338
<reponame>Kirishikesan/haiku /* * Copyright 2006, Haiku. * Distributed under the terms of the MIT License. * * Authors: * <NAME> <<EMAIL>> */ #include "ShapeContainer.h" #include <stdio.h> #include <string.h> #include <OS.h> #include "Shape.h" #ifdef ICON_O_MATIC // constructor ShapeContainerListener::ShapeContainerListener() { } // destructor ShapeContainerListener::~ShapeContainerListener() { } #endif // ICON_O_MATIC // constructor ShapeContainer::ShapeContainer() : fShapes(16) #ifdef ICON_O_MATIC , fListeners(2) #endif { } // destructor ShapeContainer::~ShapeContainer() { #ifdef ICON_O_MATIC int32 count = fListeners.CountItems(); if (count > 0) { debugger("~ShapeContainer() - there are still" "listeners attached\n"); } #endif // ICON_O_MATIC _MakeEmpty(); } // #pragma mark - // AddShape bool ShapeContainer::AddShape(Shape* shape) { return AddShape(shape, CountShapes()); } // AddShape bool ShapeContainer::AddShape(Shape* shape, int32 index) { if (!shape) return false; // prevent adding the same shape twice if (HasShape(shape)) return false; if (fShapes.AddItem((void*)shape, index)) { #ifdef ICON_O_MATIC _NotifyShapeAdded(shape, index); #endif return true; } fprintf(stderr, "ShapeContainer::AddShape() - out of memory!\n"); return false; } // RemoveShape bool ShapeContainer::RemoveShape(Shape* shape) { if (fShapes.RemoveItem((void*)shape)) { #ifdef ICON_O_MATIC _NotifyShapeRemoved(shape); #endif return true; } return false; } // RemoveShape Shape* ShapeContainer::RemoveShape(int32 index) { Shape* shape = (Shape*)fShapes.RemoveItem(index); #ifdef ICON_O_MATIC if (shape) { _NotifyShapeRemoved(shape); } #endif return shape; } // MakeEmpty void ShapeContainer::MakeEmpty() { _MakeEmpty(); } // #pragma mark - // CountShapes int32 ShapeContainer::CountShapes() const { return fShapes.CountItems(); } // HasShape bool ShapeContainer::HasShape(Shape* shape) const { return fShapes.HasItem((void*)shape); } // IndexOf int32 ShapeContainer::IndexOf(Shape* shape) const { return 
fShapes.IndexOf((void*)shape); } // ShapeAt Shape* ShapeContainer::ShapeAt(int32 index) const { return (Shape*)fShapes.ItemAt(index); } // ShapeAtFast Shape* ShapeContainer::ShapeAtFast(int32 index) const { return (Shape*)fShapes.ItemAtFast(index); } // #pragma mark - #ifdef ICON_O_MATIC // AddListener bool ShapeContainer::AddListener(ShapeContainerListener* listener) { if (listener && !fListeners.HasItem((void*)listener)) return fListeners.AddItem((void*)listener); return false; } // RemoveListener bool ShapeContainer::RemoveListener(ShapeContainerListener* listener) { return fListeners.RemoveItem((void*)listener); } #endif // ICON_O_MATIC // #pragma mark - // _MakeEmpty void ShapeContainer::_MakeEmpty() { int32 count = CountShapes(); for (int32 i = 0; i < count; i++) { Shape* shape = ShapeAtFast(i); #ifdef ICON_O_MATIC _NotifyShapeRemoved(shape); shape->ReleaseReference(); #else delete shape; #endif } fShapes.MakeEmpty(); } // #pragma mark - #ifdef ICON_O_MATIC // _NotifyShapeAdded void ShapeContainer::_NotifyShapeAdded(Shape* shape, int32 index) const { BList listeners(fListeners); int32 count = listeners.CountItems(); for (int32 i = 0; i < count; i++) { ShapeContainerListener* listener = (ShapeContainerListener*)listeners.ItemAtFast(i); listener->ShapeAdded(shape, index); } } // _NotifyShapeRemoved void ShapeContainer::_NotifyShapeRemoved(Shape* shape) const { BList listeners(fListeners); int32 count = listeners.CountItems(); for (int32 i = 0; i < count; i++) { ShapeContainerListener* listener = (ShapeContainerListener*)listeners.ItemAtFast(i); listener->ShapeRemoved(shape); } } #endif // ICON_O_MATIC
1,386
1,444
<reponame>FateRevoked/mage package org.mage.test.cards.abilities.enters; import mage.constants.PhaseStep; import mage.constants.Zone; import mage.counters.CounterType; import org.junit.Test; import org.mage.test.serverside.base.CardTestPlayerBase; /** * @author JRHerlehy */ public class GreenbeltRampagerTest extends CardTestPlayerBase { @Test public void testFirstCast() { addCard(Zone.HAND, playerA, "Greenbelt Rampager"); addCard(Zone.BATTLEFIELD, playerA, "Forest", 1); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertHandCount(playerA, "Greenbelt Rampager", 1); assertPermanentCount(playerA, "Greenbelt Rampager", 0); assertCounterCount(playerA, CounterType.ENERGY, 1); } @Test public void testScondCast() { addCard(Zone.HAND, playerA, "Greenbelt Rampager"); addCard(Zone.BATTLEFIELD, playerA, "Forest", 2); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertHandCount(playerA, "Greenbelt Rampager", 1); assertPermanentCount(playerA, "Greenbelt Rampager", 0); assertCounterCount(playerA, CounterType.ENERGY, 2); } @Test public void testThirdCast() { addCard(Zone.HAND, playerA, "Greenbelt Rampager"); addCard(Zone.BATTLEFIELD, playerA, "Forest", 3); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); setStopAt(1, PhaseStep.POSTCOMBAT_MAIN); execute(); assertHandCount(playerA, "Greenbelt Rampager", 0); assertPermanentCount(playerA, "Greenbelt Rampager", 1); assertCounterCount(playerA, CounterType.ENERGY, 0); } @Test public void testCastNotOwned() { addCard(Zone.BATTLEFIELD, playerA, "Swamp", 10); // Deathtouch // When Gonti, Lord of Luxury enters the battlefield, look at the top four cards of 
target opponent's library, exile one of them face down, // then put the rest on the bottom of that library in a random order. For as long as that card remains exiled, // you may look at it, you may cast it, and you may spend mana as though it were mana of any type to cast it. addCard(Zone.HAND, playerA, "Gonti, Lord of Luxury"); // Creature {2}{B}{B} // When Greenbelt Rampager enters the battlefield, pay {E}{E}. If you can't, return Greenbelt Rampager to its owner's hand and you get {E}. addCard(Zone.LIBRARY, playerB, "Greenbelt Rampager"); // Creature {G} 3/4 skipInitShuffling(); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Gonti, Lord of Luxury"); addTarget(playerA, playerB); setChoice(playerA, "<NAME>"); castSpell(1, PhaseStep.PRECOMBAT_MAIN, playerA, "Greenbelt Rampager"); setStopAt(1, PhaseStep.BEGIN_COMBAT); execute(); assertCounterCount(playerA, CounterType.ENERGY, 1); assertPermanentCount(playerA, "Greenbelt Rampager", 0); assertHandCount(playerA, "<NAME>", 0); assertHandCount(playerB, "Greenbelt Rampager", 1); } }
1,371
346
<reponame>ZJCRT/drishti /*! @file EyeModelEstimator.cpp @author <NAME> @brief Internal eye model estimator implementation. \copyright Copyright 2014-2016 Elucideye, Inc. All rights reserved. \license{This project is released under the 3 Clause BSD License.} This file contains the implementation of the internal SDK eye model estimator, which does the actual work associated with generating eye models. */ #include "drishti/eye/EyeModelEstimatorImpl.h" #include "drishti/core/drishti_stdlib_string.h" // FIRST #include "drishti/core/drishti_cereal_pba.h" #include "drishti/core/make_unique.h" #include "drishti/ml/RegressionTreeEnsembleShapeEstimator.h" #include "drishti/rcpr/CPR.h" #include <fstream> #define DRISHTI_EYE_USE_DARK_CHANNEL 0 DRISHTI_EYE_NAMESPACE_BEGIN #if DRISHTI_EYE_USE_DARK_CHANNEL static cv::Mat getDarkChannel(const cv::Mat& I); #endif static float resizeEye(const cv::Mat& src, cv::Mat& dst, float width); EyeModelEstimator::Impl::Impl() { init(); } EyeModelEstimator::Impl::Impl(const std::string& eyeRegressor, const std::string& irisRegressor, const std::string& pupilRegressor) { m_eyeEstimator = drishti::core::make_unique<drishti::ml::RegressionTreeEnsembleShapeEstimator>(eyeRegressor); if (!irisRegressor.empty()) { m_irisEstimator = make_unique_cpb<drishti::rcpr::CPR>(irisRegressor); if (m_irisEstimator && !pupilRegressor.empty()) { m_pupilEstimator = make_unique_cpb<drishti::rcpr::CPR>(pupilRegressor); } } init(); } EyeModelEstimator::Impl::~Impl() = default; void EyeModelEstimator::Impl::init() { // Jitter iris defaults (normalized by eyelid extents): m_jitterIrisParams.theta = { static_cast<float>(-M_PI / 64.f), static_cast<float>(+M_PI / 64.f) }; m_jitterIrisParams.scale = { 7.f / 8.f, 8.f / 7.f }; m_jitterIrisParams.deltaX = { -0.125f, +0.125f }; m_jitterIrisParams.deltaY = { -0.050f, +0.050f }; // Jitter eyelid defaults (normalized by image extents): m_jitterEyelidParams.theta = { static_cast<float>(-M_PI / 32.f), static_cast<float>(+M_PI / 32.f) 
}; m_jitterEyelidParams.scale = { 3.0f / 4.0f, 1.0f }; // m_jitterEyelidParams.deltaX = { -0.05f, +0.05f }; m_jitterEyelidParams.deltaY = { -0.05f, +0.05f }; m_eyeSpec = EyeModelSpecification::create(16, 9, true, true, true, true, true); } void EyeModelEstimator::Impl::setStreamLogger(std::shared_ptr<spdlog::logger>& logger) { m_streamLogger = logger; if (m_irisEstimator) { m_irisEstimator->setStreamLogger(logger); m_eyeEstimator->setStreamLogger(logger); } } // Input: grayscale for contour regression // Red channel is closest to NIR for iris // TODO: Need a lazy image conversion type int EyeModelEstimator::Impl::operator()(const cv::Mat& crop, EyeModel& eye) const { cv::Mat I; float scale = resizeEye(crop, I, m_targetWidth), scaleInv = (1.0 / scale); cv::Mat Ic[3]{ I }, dark, blue, red; if (I.channels() == 3) { cv::split(I, Ic); #if DRISHTI_EYE_USE_DARK_CHANNEL // Dark channel: dark = getDarkChannel(I); #endif blue = Ic[0]; red = Ic[2]; } else { //dark = I; blue = red = I; } // ######## Find the eyelids ######### segmentEyelids(blue, eye); if (m_doIndependentIrisAndPupil) { float openness = 0.f; if ((openness = eye.openness()) > m_opennessThrehsold) { // ((((( Do iris estimate ))))) if (m_irisEstimator) { segmentIris(red, eye); { // If point-wise estimates match the iris regressor, then update our landmarks cv::Point2f irisCenter, innerLimbus, outerLimbus; eye.estimateIrisLandmarks(irisCenter, innerLimbus, outerLimbus); eye.irisCenter = irisCenter; eye.irisInner = innerLimbus; eye.irisOuter = outerLimbus; } eye.iris = 0.f; // drop the circular initial estimate eye.pupil = 0.f; eye.pupilEllipse.center = eye.irisEllipse.center; if (m_pupilEstimator && m_doPupil && eye.irisEllipse.size.area() > 0.f) { segmentPupil(red, eye); } } } else { // for squinting eyes defer to limbus point estimate: eye.irisEllipse = estimateIrisFromLimbusPoints(eye); eye.pupilEllipse.center = eye.irisEllipse.center; } } // Scale up the model if (scaleInv != 1.0f) { eye = eye * scaleInv; } 
return 0; } EyeModelEstimator::EyeModelEstimator() = default; EyeModelEstimator::EyeModelEstimator(std::istream& is, const std::string& hint) { load_cpb(is, *this); } EyeModelEstimator::EyeModelEstimator(const std::string& filename) { load_cpb(filename, *this); } EyeModelEstimator::EyeModelEstimator(const RegressorConfig& config) { m_impl = drishti::core::make_unique<EyeModelEstimator::Impl>(config.eyeRegressor, config.irisRegressor, config.pupilRegressor); } EyeModelEstimator::~EyeModelEstimator() = default; void EyeModelEstimator::setDoIndependentIrisAndPupil(bool flag) { m_impl->setDoIndependentIrisAndPupil(flag); } bool EyeModelEstimator::good() const { return static_cast<bool>(m_impl.get()); } EyeModelEstimator::operator bool() const { return good(); } void EyeModelEstimator::setStreamLogger(std::shared_ptr<spdlog::logger>& logger) { m_streamLogger = logger; if (m_impl) { m_impl->setStreamLogger(m_streamLogger); } } int EyeModelEstimator::operator()(const cv::Mat& crop, EyeModel& eye) const { return (*m_impl)(crop, eye); } void EyeModelEstimator::normalize(const cv::Mat& crop, const EyeModel& eye, const cv::Size& size, NormalizedIris& code, int padding) const { return m_impl->normalize(crop, eye, size, code, padding); } void EyeModelEstimator::setEyelidInits(int n) { m_impl->setEyelidInits(n); } int EyeModelEstimator::getEyelidInits() const { return m_impl->getEyelidInits(); } void EyeModelEstimator::setIrisInits(int n) { m_impl->setIrisInits(n); } int EyeModelEstimator::getIrisInits() const { return m_impl->getIrisInits(); } void EyeModelEstimator::setOptimizationLevel(int level) { m_impl->setOptimizationLevel(level); } void EyeModelEstimator::setTargetWidth(int width) { m_impl->setTargetWidth(width); } void EyeModelEstimator::setOpennessThreshold(float threshold) { m_impl->setOpennessThreshold(threshold); } float EyeModelEstimator::getOpennessThreshold() const { return m_impl->getOpennessThreshold(); } void EyeModelEstimator::setDoPupil(bool flag) { 
m_impl->setDoPupil(flag); } bool EyeModelEstimator::getDoPupil() const { return m_impl->getDoPupil(); } void EyeModelEstimator::setDoVerbose(bool flag) { m_impl->setDoVerbose(flag); } bool EyeModelEstimator::getDoVerbose() const { return m_impl->getDoVerbose(); } cv::Mat EyeModelEstimator::drawMeanShape(const cv::Size& size) const { return m_impl->drawMeanShape(size); } DRISHTI_EYE::EyeModel EyeModelEstimator::getMeanShape(const cv::Size& size) const { return m_impl->getMeanShape(size); } bool EyeModelEstimator::getDoMask() const { return m_impl->getDoMask(); } void EyeModelEstimator::setDoMask(bool flag) { m_impl->setDoMask(flag); } bool EyeModelEstimator::getUseHierarchy() const { return m_impl->getUseHierarchy(); } void EyeModelEstimator::setUseHierarchy(bool flag) { m_impl->setUseHierarchy(flag); } void EyeModelEstimator::setEyelidStagesHint(int stages) { m_impl->setEyelidStagesHint(stages); } int EyeModelEstimator::getEyelidStagesHint() const { return m_impl->getEyelidStagesHint(); } void EyeModelEstimator::setIrisStagesHint(int stages) { m_impl->setIrisStagesHint(stages); } int EyeModelEstimator::getIrisStagesHint() const { return m_impl->getIrisStagesHint(); } static float resizeEye(const cv::Mat& src, cv::Mat& dst, float width) { float scale = 1.f; if (src.cols < width) { dst = src; } else { scale = float(width) / float(src.cols); cv::resize(src, dst, {}, scale, scale, cv::INTER_CUBIC); } return scale; } #if DRISHTI_EYE_USE_DARK_CHANNEL static cv::Mat getDarkChannel(const cv::Mat& I) { cv::Mat dark; cv::Mat Ic = I.isContinuous() ? I : I.clone(); cv::reduce(Ic.reshape(1, I.size().area()), dark, 1, CV_REDUCE_MIN); dark = dark.reshape(1, I.rows); return dark; } #endif DRISHTI_EYE_NAMESPACE_END
3,861
1,350
<reponame>Manny27nyc/azure-sdk-for-java<filename>sdk/commerce/azure-resourcemanager-commerce/src/main/java/com/azure/resourcemanager/commerce/models/InfoField.java // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.commerce.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; /** Key-value pairs of instance details in the legacy format. */ @Fluent public final class InfoField { @JsonIgnore private final ClientLogger logger = new ClientLogger(InfoField.class); /* * Identifies the name of the instance provisioned by the user. */ @JsonProperty(value = "project") private String project; /** * Get the project property: Identifies the name of the instance provisioned by the user. * * @return the project value. */ public String project() { return this.project; } /** * Set the project property: Identifies the name of the instance provisioned by the user. * * @param project the project value to set. * @return the InfoField object itself. */ public InfoField withProject(String project) { this.project = project; return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ public void validate() { } }
544
311
<gh_stars>100-1000 /** * Copyright 2019 The JoyQueue Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.joyqueue.nsr.network.codec; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.serializer.SerializerFeature; import org.joyqueue.domain.AllMetadata; import org.joyqueue.network.transport.command.Header; import org.joyqueue.network.transport.command.Type; import org.joyqueue.nsr.network.NsrPayloadCodec; import org.joyqueue.nsr.network.command.GetAllMetadataResponse; import org.joyqueue.nsr.network.command.NsrCommandType; import org.joyqueue.toolkit.io.ZipUtil; import io.netty.buffer.ByteBuf; /** * GetAllMetadataResponseCodec * author: gaohaoxiang * date: 2019/8/29 */ public class GetAllMetadataResponseCodec implements NsrPayloadCodec<GetAllMetadataResponse>, Type { @Override public GetAllMetadataResponse decode(Header header, ByteBuf buffer) throws Exception { int length = buffer.readInt(); byte[] json = new byte[length]; buffer.readBytes(json); GetAllMetadataResponse allMetadataResponse = new GetAllMetadataResponse(); allMetadataResponse.setMetadata((AllMetadata) parseJson(json, AllMetadata.class)); return allMetadataResponse; } @Override public void encode(GetAllMetadataResponse payload, ByteBuf buffer) throws Exception { byte[] json = null; if (payload.getResponse() != null) { json = payload.getResponse(); } else { json = toJson(payload); } buffer.writeInt(json.length); buffer.writeBytes(json); } public static Object parseJson(byte[] json, 
Class<?> type) { try { return JSON.parseObject(ZipUtil.decompress(json), type); } catch (Exception e) { throw new RuntimeException(e); } } public static byte[] toJson(Object value) { try { String json = JSON.toJSONString(value, SerializerFeature.DisableCircularReferenceDetect); return ZipUtil.compress(json); } catch (Exception e) { throw new RuntimeException(e); } } @Override public int type() { return NsrCommandType.NSR_GET_ALL_METADATA_RESPONSE; } }
1,001
1,279
<reponame>hawflau/serverless-application-model from integration.helpers.base_test import BaseTest from parameterized import parameterized class TestFunctionWithSelfManagedKafka(BaseTest): @parameterized.expand( [ "combination/function_with_self_managed_kafka", "combination/function_with_self_managed_kafka_intrinsics", ] ) def test_function_with_self_managed_kafka(self, file_name): self.create_and_verify_stack(file_name) # Get the notification configuration and make sure Lambda Function connection is added lambda_client = self.client_provider.lambda_client function_name = self.get_physical_id_by_type("AWS::Lambda::Function") lambda_function_arn = lambda_client.get_function_configuration(FunctionName=function_name)["FunctionArn"] event_source_mapping_id = self.get_physical_id_by_type("AWS::Lambda::EventSourceMapping") event_source_mapping_result = lambda_client.get_event_source_mapping(UUID=event_source_mapping_id) event_source_mapping_function_arn = event_source_mapping_result["FunctionArn"] self.assertEqual(event_source_mapping_function_arn, lambda_function_arn)
456
428
<reponame>radoslawcybulski/CastXML template <typename T> class A; typedef A<int> start;
39
1,162
<filename>KafkaCenter-Core/src/main/java/org/nesc/ec/bigdata/job/InitRunJob.java package org.nesc.ec.bigdata.job; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.nesc.ec.bigdata.cache.HomeCache; import org.nesc.ec.bigdata.config.InitConfig; import org.nesc.ec.bigdata.model.ClusterInfo; import org.nesc.ec.bigdata.service.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import javax.annotation.PostConstruct; import java.util.List; import java.util.concurrent.*; /** * @author Truman.P.Du * @date 2020/08/06 * @description */ @Component public class InitRunJob { private static final Logger LOG = LoggerFactory.getLogger(InitRunJob.class); @Autowired ClusterService clusterService; @Autowired AlertService alertService; @Autowired InitConfig initConfig; @Autowired HomeService homeService; @Autowired KafkaAdminService kafkaAdminService; @Autowired ZKService zkService; @Autowired CollectTopicJob collectTopicJob; @Autowired CollectMetricsJob collectMetricsJob; @Autowired CollectConsumerLagJob collectConsumerLagJob; @Autowired NoticeJob noticeJob; @Autowired CollectKsqlInfoJob collectKsqlInfoJob; @Autowired CollectConnectorJob connectorJob; private ScheduledExecutorService scheduledExecutorService = new ScheduledThreadPoolExecutor(10, new BasicThreadFactory.Builder().build()); @PostConstruct public void init() { this.runCollectClusterStatistics(); if (initConfig.isMonitorCollectEnable()) { this.runCollectConsumerLagJob(); this.runCollectMetricsJob(); noticeJob.runNoticeJob(); } if(initConfig.isCollectTopicEnable()) { this.runCollectTopicData(); } if(initConfig.isCollectKsqlInfoJobEnable()){ runCollectKsqlInfoJob(); } if(initConfig.isCollectorJobEnable()){ runCollectorJob(); } } private void runCollectorJob(){ 
scheduledExecutorService.scheduleWithFixedDelay(()->connectorJob.runJob(),1,initConfig.getCollectorJobPeriodMinutes(),TimeUnit.MINUTES); } private void runCollectKsqlInfoJob(){ scheduledExecutorService.scheduleWithFixedDelay(()->collectKsqlInfoJob.runJob(),1,initConfig.getCollectKsqlInfoJobPeriodMinutes(),TimeUnit.MINUTES); } private void runCollectConsumerLagJob() { scheduledExecutorService.scheduleWithFixedDelay(() -> collectConsumerLagJob.collectConsumerLag(), 1, initConfig.getMonitorCollectPeriod(), TimeUnit.MINUTES); } private void runCollectMetricsJob() { scheduledExecutorService.scheduleWithFixedDelay(() -> collectMetricsJob.collectMetric(),1, initConfig.getMonitorCollectPeriod(), TimeUnit.MINUTES); } private void runCollectTopicData() { scheduledExecutorService.scheduleWithFixedDelay(() -> collectTopicJob.collectionTopicData(), 1, initConfig.getCollectTopicPeriod(), TimeUnit.MINUTES); } private void runCollectClusterStatistics(){ scheduledExecutorService.scheduleWithFixedDelay(this::clusterStatistics, 1, initConfig.getCollectTopicPeriod(), TimeUnit.MINUTES); } /** * 更新HomePage 集群相关统计信息 */ private void clusterStatistics(){ try { List<ClusterInfo> clusterList = clusterService.getTotalData(); int groups = 0; int zk = 0; for(ClusterInfo cluster:clusterList) { groups += kafkaAdminService.getKafkaAdmins(cluster.getId().toString()).listConsumerGroups().size(); zk += zkService.getZK(cluster.getId().toString()).listConsumerGroups().size(); } HomeCache.HomePageCache pageCache = HomeCache.getConfigCache(); int group = pageCache.getGroupSize(); int cluster = pageCache.getClusterSize(); int topic = pageCache.getTopicSize(); int alert = pageCache.getAlertSize(); pageCache.setGroupSize((zk+groups)==0?group:(zk+groups)); pageCache.setTopicSize(topic==0?topic:homeService.getTopicList(clusterList)); pageCache.setClusterSize(clusterList.isEmpty()?cluster:clusterList.size()); pageCache.setAlertSize(alert==0?alert:alertService.countData()); }catch (Exception e){ 
LOG.error("cluster statistics cache fail,please check",e); } } }
1,803
637
/* * Copyright (c) 2017-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the license found in the * LICENSE-examples file in the root directory of this source tree. */ package com.instagram.lazyload.demoapp; import android.app.Service; import android.content.Intent; import android.os.IBinder; import android.util.Log; import com.instagram.lazyload.base.LazyLoadListener; import com.instagram.lazyload.base.LazyLoadingException; import com.instagram.lazyload.base.LazyModuleLoaderHelper; import com.instagram.lazyload.base.ServiceLike; /** * The only responsibility of this service is to load a implementation of the real service (from * a file from assets folder) and delegate all the calls to that service. * <p> * This service is located in the main classes.dex file and can be created any time (no need to * load a service form secondary dex file before creating this service). */ public class ServiceProxy extends Service { private final static String TAG = "ServiceProxy"; private static final String CLASS_NAME = "com.instagram.lazyload.lazyloadedservice.LazyLoadedService"; private static final LazyLoadListener mLazyLoadListener = new LazyLoadListener() { @Override public void moduleLazilyLoaded(String module, long loadTimeMs) { Log.i(TAG, "Service successfully loaded in " + loadTimeMs + "ms"); } @Override public void moduleLazilyInstalled(String module, long loadTimeMs) { Log.i(TAG, "Service successfully installed in " + loadTimeMs + "ms"); } }; private ServiceLike mLazyLoadedService; @Override public void onCreate() { try { mLazyLoadedService = LazyModuleLoaderHelper.createLoaderWithoutNativeLibrariesSupport( this, new ManifestReader(), mLazyLoadListener).loadServiceModule( ManifestReader.LazyLoadedService, CLASS_NAME); } catch (LazyLoadingException e) { Log.e(TAG, "Failed to lazy loaded a service", e); } mLazyLoadedService.onCreate(); } @Override public IBinder onBind(Intent intent) { return mLazyLoadedService.onBind(intent); } 
@Override public boolean onUnbind(Intent intent) { return mLazyLoadedService.onUnbind(intent); } @Override public void onDestroy() { mLazyLoadedService.onDestroy(); } }
756
1,414
<filename>waterbox/bsnescore/bsnes/sfc/coprocessor/dip/serialization.cpp auto DIP::serialize(serializer& s) -> void { s.integer(value); }
55
971
<filename>dl-biz/src/main/java/com/ucar/datalink/biz/utils/flinker/job/SddlJobConfigServiceImpl.java package com.ucar.datalink.biz.utils.flinker.job; import com.alibaba.fastjson.JSONObject; import com.ucar.datalink.biz.meta.MetaMapping; import com.ucar.datalink.biz.utils.flinker.FlinkerJobConfigConstant; import com.ucar.datalink.biz.utils.flinker.module.JobExtendProperty; import com.ucar.datalink.biz.utils.flinker.module.MySqlJobExtendProperty; import com.ucar.datalink.common.utils.DLConfig; import com.ucar.datalink.domain.media.MediaSourceInfo; import com.ucar.datalink.domain.media.parameter.MediaSrcParameter; import com.ucar.datalink.domain.media.parameter.rdb.RdbMediaSrcParameter; import com.ucar.datalink.domain.media.parameter.sddl.SddlMediaSrcParameter; import com.ucar.datalink.domain.meta.ColumnMeta; import com.ucar.datalink.domain.meta.MediaMeta; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Random; /** * Created by user on 2017/9/12. 
*/ public class SddlJobConfigServiceImpl extends AbstractJobConfigService { private static final Logger LOGGER = LoggerFactory.getLogger(SddlJobConfigServiceImpl.class); @Override public String createReaderJson(MediaSourceInfo info, List<ColumnMeta> metas, JobExtendProperty property, String mediaName) { checkType(info.getParameterObj()); SddlMediaSrcParameter sddlParameter = (SddlMediaSrcParameter)info.getParameterObj(); sddlParameter.getProxyDbId(); RdbMediaSrcParameter parameter = getMediaSourceInfoById(sddlParameter.getProxyDbId()).getParameterObj(); Map<String,String> srcExtendJson = property.getReader(); Random rand = new Random(); String ip = null; if(parameter.getReadConfig().getHosts()!=null && parameter.getReadConfig().getHosts().size()>0) { ip = parameter.getReadConfig().getHosts().get( rand.nextInt(parameter.getReadConfig().getHosts().size()) ); } else { throw new RuntimeException("mysql read ip is emtpy"); } String etlHost = parameter.getReadConfig().getEtlHost(); if(StringUtils.isBlank(etlHost)) { etlHost = ip; } String port = parameter.getPort()+""; String schema = info.getParameterObj().getNamespace(); String username = parameter.getReadConfig().getUsername(); String password = parameter.getReadConfig().getDecryptPassword(); String json = ""; try{ String etlUrl = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, etlHost, port, schema); String url = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, ip, port, schema); String columns = buildColumnParm( metas ); String reader = loadJobConfig(FlinkerJobConfigConstant.MYSQL_READER); json = replace(reader,etlUrl,username,password,columns); json = replaceSingleTable(json,mediaName); //json = createSharingStrateg(dataxJobConfig,json); json = processSplitPrimaryKey(metas,json); json = processReaderExtendJson(json, srcExtendJson,url); }catch (Exception e){ LOGGER.error("mysql createReaderJson error ",e); } return json; } @Override public String createWriterJson(MediaSourceInfo srcInfo, 
MediaSourceInfo info, MediaMeta srcMediaMeta, JobExtendProperty property, String mediaName) { checkType(info.getParameterObj()); RdbMediaSrcParameter parameter = (RdbMediaSrcParameter)info.getParameterObj(); Map<String,String> destExtendJson = property.getWriter(); String ip = parameter.getWriteConfig().getWriteHost(); String port = parameter.getPort()+""; String schema = info.getParameterObj().getNamespace(); String username = parameter.getWriteConfig().getUsername(); String password = parameter.getWriteConfig().getDecryptPassword(); String json = ""; try{ MediaMeta target = MetaMapping.transformToRDBMS(srcMediaMeta); String url = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, ip, port, schema); String columns = buildColumnParm( target.getColumn() ); String reader = loadJobConfig(FlinkerJobConfigConstant.MYSQL_WRITER); json = replace(reader,url,username,password,columns); json = replaceSingleTable(json, parseMediaName(mediaName) ); json = processWriterExtendJson(json,destExtendJson); }catch (Exception e){ LOGGER.error("mysql createWriterJson error ",e); } return json; } @Override public String createReaderJson(MediaSourceInfo info, List<ColumnMeta> metas, JobExtendProperty property, List<String> names) { checkType(info.getParameterObj()); RdbMediaSrcParameter parameter = (RdbMediaSrcParameter)info.getParameterObj(); Map<String,String> srcExtendJson = property.getReader(); Random rand = new Random(); String ip = null; if(parameter.getReadConfig().getHosts()!=null && parameter.getReadConfig().getHosts().size()>0) { ip = parameter.getReadConfig().getHosts().get( rand.nextInt(parameter.getReadConfig().getHosts().size()) ); } else { ip = parameter.getWriteConfig().getWriteHost(); } String etlHost = parameter.getReadConfig().getEtlHost(); if(StringUtils.isBlank(etlHost)) { etlHost = ip; } String port = parameter.getPort()+""; String schema = info.getParameterObj().getNamespace(); String username = parameter.getReadConfig().getUsername(); String password = 
parameter.getReadConfig().getDecryptPassword(); String json = ""; try{ String etlUrl = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, etlHost, port, schema); String url = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, ip, port, schema); String columns = buildColumnParm( metas ); String reader = loadJobConfig(FlinkerJobConfigConstant.MYSQL_READER); json = replace(reader,etlUrl,username,password,columns ); json = replaceMultiTable(json,names); json = processSplitPrimaryKey(metas,json); json = processReaderExtendJson(json, srcExtendJson,url); }catch (Exception e){ LOGGER.error("mysql createReaderJson error ",e); } return json; } private String processReaderExtendJson(String json, Map<String, String> srcExtendJson,String url) { if(srcExtendJson==null || srcExtendJson.size()==0) { return json; } String extendJson = JSONObject.toJSONString(srcExtendJson); MySqlJobExtendProperty jobExtend = JSONObject.parseObject(extendJson, MySqlJobExtendProperty.class); filterSpace(jobExtend); DLConfig connConf = DLConfig.parseFrom(json); if( StringUtils.isNotBlank(jobExtend.getWhere()) ) { connConf.set("job.content[0].reader.parameter.where", jobExtend.getWhere()); } if( StringUtils.isNotBlank(jobExtend.getQuerySql()) ) { //connConf.remove("job.content[0].reader.parameter.splitPk"); connConf.remove("job.content[0].reader.parameter.connection[0].table"); connConf.remove("job.content[0].reader.parameter.where"); List<String> list = new ArrayList<>(); list.add(jobExtend.getQuerySql()); connConf.set("job.content[0].reader.parameter.connection[0].querySql", list); } if( StringUtils.isNotBlank(jobExtend.getJdbcReaderUrl()) ) { List<String> list = (List<String>)connConf.get("job.content[0].reader.parameter.connection[0].jdbcUrl"); list.add(jobExtend.getJdbcReaderUrl()); connConf.remove("job.content[0].reader.parameter.connection[0].jdbcUrl"); connConf.set("job.content[0].reader.parameter.connection[0].jdbcUrl", list); } else { List<String> list = 
(List<String>)connConf.get("job.content[0].reader.parameter.connection[0].jdbcUrl"); list.add(url); if(StringUtils.isNotBlank(jobExtend.getJdbcReaderUrl())) { list.add(jobExtend.getJdbcReaderUrl()); } connConf.remove("job.content[0].reader.parameter.connection[0].jdbcUrl"); connConf.set("job.content[0].reader.parameter.connection[0].jdbcUrl", list); } json = connConf.toJSON(); return json; } private String processWriterExtendJson(String json, Map<String, String> destExtendJson) { if(destExtendJson==null || destExtendJson.size()==0) { return json; } String extendJson = JSONObject.toJSONString(destExtendJson); MySqlJobExtendProperty jobExtend = JSONObject.parseObject(extendJson, MySqlJobExtendProperty.class); filterSpace(jobExtend); DLConfig connConf = DLConfig.parseFrom(json); if( StringUtils.isNotBlank(jobExtend.getPreSql()) ) { List<String> list = new ArrayList<>(); list.add(jobExtend.getPreSql()); connConf.set("job.content[0].writer.parameter.preSql", list); } if( StringUtils.isNotBlank(jobExtend.getPostSql()) ) { List<String> list = new ArrayList<>(); list.add(jobExtend.getPostSql()); connConf.set("job.content[0].writer.parameter.postSql", list); } json = connConf.toJSON(); return json; } private void checkType(MediaSrcParameter parameter) { if( !(parameter instanceof SddlMediaSrcParameter)) { throw new RuntimeException("media source type error "+parameter); } } /** * 处理job配置中的 splitPk 参数,这个值不再强制指定为 id,而是根据读取到的列元信息自动选择 * @param list */ private String processSplitPrimaryKey(List<ColumnMeta> list, String json) { for(ColumnMeta cm : list) { if(cm.isPrimaryKey()) { json = json.replaceAll(FlinkerJobConfigConstant.RMDBS_SPLIT_PK,cm.getName()); break; } } //如果当前表没有配置主键信息则将 splitPk这个字段设置为空字符串 if( json.contains(FlinkerJobConfigConstant.RMDBS_SPLIT_PK) ) { json = json.replaceAll(FlinkerJobConfigConstant.RMDBS_SPLIT_PK,""); } return json; } private String replace(String json,String url,String userName,String passWord,String column){ if(StringUtils.isNotBlank(url)){ json = 
json.replaceAll(FlinkerJobConfigConstant.JDBCURL, url); } if(StringUtils.isNotBlank(userName)){ json = json.replaceAll(FlinkerJobConfigConstant.USERNAME,userName); } if(StringUtils.isNotBlank(passWord)){ json = json.replaceAll(FlinkerJobConfigConstant.PASSWORD,passWord); } if(StringUtils.isNotBlank(column)){ //json = json.replaceAll(FlinkerJobConfigConstant.COLUMN,column); json = replaceColumns(json,column); } if(json.contains(FlinkerJobConfigConstant.COLUMN)) { json.replaceAll(FlinkerJobConfigConstant.COLUMN,""); } return json; } private String replaceSingleTable(String json, String name) { if(StringUtils.isNotBlank(name)){ json = json.replaceAll(FlinkerJobConfigConstant.TABLE,name); } return json; } private String replaceMultiTable(String json,List<String> names) { if(names!=null && names.size()>0) { DLConfig connConf = DLConfig.parseFrom(json); connConf.remove("job.content[0].reader.parameter.connection[0].table"); connConf.set("job.content[0].reader.parameter.connection[0].table", names); json = connConf.toJSON(); } return json; } private void filterSpace(MySqlJobExtendProperty property) { if(property == null) { return; } // if(StringUtils.isNotBlank( property.getJdbcReaderUrl() )) { // String jdbcReader_url = removeEnter(property.getJdbcReaderUrl()); // property.setJdbcReaderUrl(jdbcReader_url); // } if(StringUtils.isNotBlank(property.getPostSql())) { String postSql = removeEnter(property.getPostSql()); property.setPostSql(postSql); } if(StringUtils.isNotBlank(property.getPreSql())) { String preSql = removeEnter(property.getPreSql()); property.setPreSql(preSql); } if(StringUtils.isNotBlank(property.getQuerySql())) { String querySql = removeEnter(property.getQuerySql()); property.setQuerySql(querySql); } if(StringUtils.isNotBlank(property.getWhere())) { String where = removeEnter(property.getWhere()); property.setWhere(where); } } private String removeEnter(String content) { if(content.contains("\n")) { content = content.replaceAll("\n"," "); return content; } 
return content; } public String reloadReader(String json,MediaSourceInfo info) { try { checkType(info.getParameterObj()); RdbMediaSrcParameter parameter = (RdbMediaSrcParameter)info.getParameterObj(); Random rand = new Random(); String ip = null; if(parameter.getReadConfig().getHosts()!=null && parameter.getReadConfig().getHosts().size()>0) { ip = parameter.getReadConfig().getHosts().get( rand.nextInt(parameter.getReadConfig().getHosts().size()) ); } else { throw new RuntimeException("mysql read ip is emtpy"); } String etlHost = parameter.getReadConfig().getEtlHost(); if(StringUtils.isBlank(etlHost)) { etlHost = ip; } String port = parameter.getPort()+""; String schema = info.getParameterObj().getNamespace(); String username = parameter.getReadConfig().getUsername(); String password = parameter.getReadConfig().getDecryptPassword(); String etlUrl = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, etlHost, port, schema); String url = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, ip, port, schema); DLConfig connConf = DLConfig.parseFrom(json); List<String> list = new ArrayList<>(); list.add(etlUrl); list.add(url); connConf.remove("job.content[0].reader.parameter.connection[0].jdbcUrl"); connConf.set("job.content[0].reader.parameter.connection[0].jdbcUrl", list); connConf.remove("job.content[0].reader.parameter.username"); connConf.remove("job.content[0].reader.parameter.password"); connConf.set("job.content[0].reader.parameter.username",username); connConf.set("job.content[0].reader.parameter.password",password); json = connConf.toJSON(); } catch(Exception e) { LOGGER.error("reload reader json failure,",e); } return json; } public String reloadWriter(String json, MediaSourceInfo info) { try { checkType(info.getParameterObj()); RdbMediaSrcParameter parameter = (RdbMediaSrcParameter)info.getParameterObj(); String ip = parameter.getWriteConfig().getWriteHost(); String port = parameter.getPort()+""; String schema = info.getParameterObj().getNamespace(); 
String username = parameter.getWriteConfig().getUsername(); String password = parameter.getWriteConfig().getDecryptPassword(); String url = MessageFormat.format(FlinkerJobConfigConstant.MYSQL_URL, ip, port, schema); DLConfig connConf = DLConfig.parseFrom(json); connConf.remove("job.content[0].writer.parameter.connection[0].jdbcUrl"); connConf.set("job.content[0].writer.parameter.connection[0].jdbcUrl", url); connConf.remove("job.content[0].writer.parameter.username"); connConf.remove("job.content[0].writer.parameter.password"); connConf.set("job.content[0].writer.parameter.username",username); connConf.set("job.content[0].writer.parameter.password",password); json = connConf.toJSON(); } catch(Exception e) { LOGGER.error("reload writer json failure.",e); } return json; } @Override public String replaceJsonResult(String json, Object object, MediaSourceInfo srcInfo) { String dbName = (String)object; DLConfig connConf = DLConfig.parseFrom(json); Object obj = connConf.get("job.content[0].reader.parameter.connection[0].jdbcUrl"); if(obj instanceof String) { String url = (String)connConf.get("job.content[0].reader.parameter.connection[0].jdbcUrl"); url = replaceJdbcUrl(url,dbName); connConf.remove("job.content[0].reader.parameter.connection[0].jdbcUrl"); connConf.set("job.content[0].reader.parameter.connection[0].jdbcUrl", url); } else { List<String> list = (List<String>)connConf.get("job.content[0].reader.parameter.connection[0].jdbcUrl"); List<String> newUrl = new ArrayList<>(); for(String url : list) { String tmp = replaceJdbcUrl(url,dbName); newUrl.add(tmp); } connConf.remove("job.content[0].reader.parameter.connection[0].jdbcUrl"); connConf.set("job.content[0].reader.parameter.connection[0].jdbcUrl", newUrl); } json = connConf.toJSON(); return json; } private String replaceJdbcUrl(String url, String dbName) { if(url!=null && url.lastIndexOf("/")!=-1) { int index = url.lastIndexOf("/"); String prefix = url.substring(0,index); url = prefix + "/" + dbName; } return url; 
} }
7,955
5,169
<filename>Specs/2/3/3/MRestClient/0.1.0/MRestClient.podspec.json { "name": "MRestClient", "version": "0.1.0", "summary": "MRestClient is the HTTP client implementation library written in swift", "description": "\"MRestClient is the HTTP client implementation library written in swift, It is small , light weight & simple to use library for HTTP communication .The goal of this library is to reduce the effort to call REST-API with swift codable object. This is the generic implementation of swift object for request & response body so you can directly interact with codable object without worrying JSON conversion\"", "homepage": "https://github.com/afsaredrisy/MRestClient", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "afsaredrisy": "<EMAIL>" }, "source": { "git": "https://github.com/afsaredrisy/MRestClient.git", "tag": "0.1.0" }, "social_media_url": "https://github.com/afsaredrisy", "platforms": { "ios": "11.0" }, "swift_versions": "5.0", "source_files": "Source/**/*", "frameworks": "UIKit", "swift_version": "5.0" }
379
3,787
<gh_stars>1000+ # # Copyright 2019 The Eggroll Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import itertools import typing from fate_arch.abc import CTableABC from fate_arch.common import log from fate_arch.common.profile import computing_profile LOGGER = log.getLogger() class Table(CTableABC): def __init__(self, table): self._table = table def __getstate__(self): pass @property def partitions(self): return self._table.partitions @computing_profile def save(self, address, partitions, schema, **kwargs): from fate_arch.common.address import StandaloneAddress if isinstance(address, StandaloneAddress): self._table.save_as( name=address.name, namespace=address.namespace, partition=partitions, need_cleanup=False, ) schema.update(self.schema) return from fate_arch.common.address import PathAddress if isinstance(address, PathAddress): from fate_arch.computing.non_distributed import LocalData return LocalData(address.path) raise NotImplementedError( f"address type {type(address)} not supported with standalone backend" ) @computing_profile def count(self) -> int: return self._table.count() @computing_profile def collect(self, **kwargs): return self._table.collect(**kwargs) @computing_profile def take(self, n=1, **kwargs): if n <= 0: raise ValueError(f"{n} <= 0") return list(itertools.islice(self._table.collect(**kwargs), n)) @computing_profile def first(self, **kwargs): resp = list(itertools.islice(self._table.collect(**kwargs), 1)) if len(resp) < 1: raise 
RuntimeError("table is empty") return resp[0] @computing_profile def reduce(self, func, **kwargs): return self._table.reduce(func) @computing_profile def map(self, func): return Table(self._table.map(func)) @computing_profile def mapValues(self, func): return Table(self._table.mapValues(func)) @computing_profile def flatMap(self, func): return Table(self._table.flatMap(func)) @computing_profile def applyPartitions(self, func): return Table(self._table.applyPartitions(func)) @computing_profile def mapPartitions( self, func, use_previous_behavior=True, preserves_partitioning=False ): if use_previous_behavior is True: LOGGER.warning( "please use `applyPartitions` instead of `mapPartitions` " "if the previous behavior was expected. " "The previous behavior will not work in future" ) return Table(self._table.applyPartitions(func)) return Table( self._table.mapPartitions( func, preserves_partitioning=preserves_partitioning ) ) @computing_profile def mapReducePartitions(self, mapper, reducer, **kwargs): return Table(self._table.mapReducePartitions(mapper, reducer)) @computing_profile def glom(self): return Table(self._table.glom()) @computing_profile def sample( self, *, fraction: typing.Optional[float] = None, num: typing.Optional[int] = None, seed=None, ): if fraction is not None: return Table(self._table.sample(fraction=fraction, seed=seed)) if num is not None: total = self._table.count() if num > total: raise ValueError( f"not enough data to sample, own {total} but required {num}" ) frac = num / float(total) while True: sampled_table = self._table.sample(fraction=frac, seed=seed) sampled_count = sampled_table.count() if sampled_count < num: frac += 0.1 else: break if sampled_count > num: drops = sampled_table.take(sampled_count - num) for k, v in drops: sampled_table.delete(k) return Table(sampled_table) raise ValueError( f"exactly one of `fraction` or `num` required, fraction={fraction}, num={num}" ) @computing_profile def filter(self, func): return 
Table(self._table.filter(func)) @computing_profile def join(self, other: "Table", func): return Table(self._table.join(other._table, func)) @computing_profile def subtractByKey(self, other: "Table"): return Table(self._table.subtractByKey(other._table)) @computing_profile def union(self, other: "Table", func=lambda v1, v2: v1): return Table(self._table.union(other._table, func))
2,358
854
<gh_stars>100-1000 __________________________________________________________________________________________________ sample 100 ms submission class Solution: def minDeletionSize(self, A: List[str]) -> int: remained = [1] * len(A[0]) for i in range(len(A[0]) - 2, -1, -1): for j in range(i + 1, len(A[0])): all_good = True for row in A: if row[i] > row[j]: all_good = False break if all_good: remained[i] = max(remained[i], remained[j] + 1) return len(A[0]) - max(remained) __________________________________________________________________________________________________ sample 112 ms submission class Solution: def minDeletionSize(self, A: List[str]) -> int: n = len(A[0]) dp = list(range(n)) for r in range(1, n): for l in range(r - 1, -1, -1): good = True for s in A: if s[l] > s[r]: good = False break if good: dp[r] = min(dp[r], dp[l] + r - l - 1) mn = n for i,d in enumerate(dp): mn = min(mn, d + n - (i + 1)) return mn __________________________________________________________________________________________________
718
339
<reponame>Razakhel/RaZ #include "RaZ/Render/Renderer.hpp" #include "RaZ/Render/Shader.hpp" #include "RaZ/Utils/FilePath.hpp" #include "RaZ/Utils/Logger.hpp" #include <fstream> #include <limits> #include <sstream> #include <vector> namespace Raz { Shader::Shader(Shader&& shader) noexcept : m_index{ std::exchange(shader.m_index, std::numeric_limits<unsigned int>::max()) }, m_path{ std::move(shader.m_path) } {} bool Shader::isValid() const { return (m_index != std::numeric_limits<unsigned int>::max()); } void Shader::import(FilePath filePath) { m_path = std::move(filePath); load(); } void Shader::load() const { if (m_path.getPath().empty()) // Shader imported directly from source, no path available return; Logger::debug("[Shader] Loading (ID: " + std::to_string(m_index) + ", path: '" + m_path + "')..."); std::ifstream shaderSource(m_path, std::ios::in | std::ios::binary | std::ios::ate); if (!shaderSource) throw std::runtime_error("Error: Couldn't open the file '" + m_path + "'"); const auto fileSize = static_cast<std::size_t>(shaderSource.tellg()); shaderSource.seekg(0, std::ios::beg); std::vector<char> bytes(fileSize); shaderSource.read(bytes.data(), static_cast<std::streamsize>(fileSize)); Renderer::sendShaderSource(m_index, bytes.data(), static_cast<int>(fileSize)); Logger::debug("[Shader] Loaded"); } void Shader::compile() const { Logger::debug("[Shader] Compiling (ID: " + std::to_string(m_index) + ")..."); Renderer::compileShader(m_index); Logger::debug("[Shader] Compiled"); } bool Shader::isCompiled() const { return Renderer::isShaderCompiled(m_index); } void Shader::loadSource(const std::string& source) const { Logger::debug("[Shader] Loading source (ID: " + std::to_string(m_index) + ")..."); Renderer::sendShaderSource(m_index, source); Logger::debug("[Shader] Loaded source"); } void Shader::destroy() { if (!isValid()) return; Logger::debug("[Shader] Destroying (ID: " + std::to_string(m_index) + ")..."); Renderer::deleteShader(m_index); m_index = 
std::numeric_limits<unsigned int>::max(); Logger::debug("[Shader] Destroyed"); } Shader& Shader::operator=(Shader&& shader) noexcept { std::swap(m_index, shader.m_index); m_path = std::move(shader.m_path); return *this; } VertexShader::VertexShader() { Logger::debug("[Shader] Creating vertex shader..."); m_index = Renderer::createShader(ShaderType::VERTEX); Logger::debug("[Shader] Created vertex shader (ID: " + std::to_string(m_index) + ')'); } VertexShader VertexShader::loadFromSource(const std::string& source) { VertexShader vertShader; vertShader.loadSource(source); return vertShader; } FragmentShader::FragmentShader() { Logger::debug("[Shader] Creating fragment shader..."); m_index = Renderer::createShader(ShaderType::FRAGMENT); Logger::debug("[Shader] Created fragment shader (ID: " + std::to_string(m_index) + ')'); } FragmentShader FragmentShader::loadFromSource(const std::string& source) { FragmentShader fragShader; fragShader.loadSource(source); return fragShader; } GeometryShader::GeometryShader() { Logger::debug("[Shader] Creating geometry shader..."); m_index = Renderer::createShader(ShaderType::GEOMETRY); Logger::debug("[Shader] Created geometry shader (ID: " + std::to_string(m_index) + ')'); } GeometryShader GeometryShader::loadFromSource(const std::string& source) { GeometryShader geomShader; geomShader.loadSource(source); return geomShader; } } // namespace Raz
1,282
583
# Author: <NAME> # Last update: 7/07/2017 from __future__ import division, print_function from PyEMD import EEMD import numpy as np import pylab as plt # Define signal t = np.linspace(0, 1, 200) sin = lambda x,p: np.sin(2*np.pi*x*t+p) S = 3*sin(18,0.2)*(t-0.2)**2 S += 5*sin(11,2.7) S += 3*sin(14,1.6) S += 1*np.sin(4*2*np.pi*(t-0.8)**2) S += t**2.1 -t # Assign EEMD to `eemd` variable eemd = EEMD() # Say we want detect extrema using parabolic method emd = eemd.EMD emd.extrema_detection="parabol" # Execute EEMD on S eIMFs = eemd.eemd(S, t) nIMFs = eIMFs.shape[0] # Plot results plt.figure(figsize=(12,9)) plt.subplot(nIMFs+1, 1, 1) plt.plot(t, S, 'r') for n in range(nIMFs): plt.subplot(nIMFs+1, 1, n+2) plt.plot(t, eIMFs[n], 'g') plt.ylabel("eIMF %i" %(n+1)) plt.locator_params(axis='y', nbins=5) plt.xlabel("Time [s]") plt.tight_layout() plt.savefig('eemd_example', dpi=120) plt.show()
470
331
/** * UITableViewCell父类 * @author 郑业强 2017-06-12 创建文件 */ #import <UIKit/UIKit.h> @interface BaseTableCell : UITableViewCell @end
74
344
/*************************************************************************** * * Copyright (c) 2019 Chatopera.Inc, Inc. All Rights Reserved * **************************************************************************/ /** * @file /Users/hain/chatopera/chatopera.io/clause/src/intent/src/publisher.cpp * @author <NAME>(<EMAIL>) * @date 2019-08-21_11:05:35 * @brief * **/ #include "publisher.h" using namespace std; namespace chatopera { namespace bot { namespace clause { BrokerPublisher* BrokerPublisher::_instance = NULL; BrokerPublisher::BrokerPublisher() : destination(NULL) { }; BrokerPublisher::~BrokerPublisher() { this->cleanup(); }; // 初始化 bool BrokerPublisher::init() { destination = _conn->session->createQueue(FLAGS_activemq_queue_to_intent); producer = _conn->session->createProducer(destination); producer->setDeliveryMode(DeliveryMode::NON_PERSISTENT); VLOG(3) << "[publisher] init done."; return true; } BrokerPublisher* BrokerPublisher::getInstance() { if(_instance == NULL) { _instance = new BrokerPublisher(); (*_instance)._conn = BrokerConnection::getInstance(); } return _instance; }; /** * Publish event with payload */ bool BrokerPublisher::publish(const string& chatbotID, const string& action, const string& payload) { VLOG(3) << "[publish] chatbotID: " << chatbotID << ", action " << action; std::auto_ptr<TextMessage> message; message.reset(_conn->session->createTextMessage(payload)); message->setStringProperty("action", action); message->setStringProperty("chatbotID", chatbotID); producer->send(message.get()); return true; }; void BrokerPublisher::cleanup() { //************************************************* // Always close destination, consumers and producers before // you destroy their sessions and connection. //************************************************* // Destroy resources. 
try { if( destination != NULL ) delete destination; } catch (CMSException& e) {} destination = NULL; _instance = NULL; }; } // namespace intent } // namespace bot } // namespace chatopera /* vim: set expandtab ts=4 sw=4 sts=4 tw=100: */
698
899
""" Copyright (c) 2019-2020 Uber Technologies, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ __author__ = "<NAME>" from plato.agent.component.dialogue_state_tracker.dialogue_state_tracker \ import DialogueStateTracker from ludwig.api import LudwigModel from os import path import pandas as pd """ LudwigDST provides an interface to Ludwig models for dialogue State Tracking. """ class LudwigDST(DialogueStateTracker): def __init__(self, args): """ Load the Ludwig DST model. :param args: a dictionary containing the path to the model. """ super(LudwigDST, self).__init__() model_path = None if 'model_path' in args: model_path = args['model_path'] self.model = None self.load(model_path) def __del__(self): """ Close the model. :return: nothing """ if self.model: self.model.close() def initialize(self, args): """ Nothing to do here :return: """ pass def update_state(self, inpt): """ Retrieve updated state by querying the Ludwig model. :param inpt: the current input (usually the nlu output) :return: """ if not self.model: print('ERROR! Ludwig DST model not initialized!') return pd.DataFrame({'empty': [0]}) # Warning: Make sure the same tokenizer that was used to train the # model is used during prediction return self.model.predict(pd.DataFrame(data=inpt)) def update_state_db(self, db_result): """ Nothing to do here. :param db_result: :return: nothing """ pass def train(self, dialogue_episodes): """ Not implemented. We can use Ludwig's API to train the model given the experience. 
:param dialogue_episodes: dialogue experience :return: """ pass def save(self, model_path=None): """ Saves the Ludwig model. :return: """ self.model.save(model_path) def load(self, model_path): """ Load the Ludwig model from the path provided :param model_path: the path to load the model from :return: nothing """ if isinstance(model_path, str): if path.isdir(model_path): print('Loading Ludwig DST model...') self.model = LudwigModel.load(model_path) print('done!') else: raise FileNotFoundError('Ludwig DST model directory {0} not ' 'found'.format(model_path)) else: raise ValueError('Ludwig DST: Unacceptable value for model file ' 'name: {0}'.format(model_path))
1,411
332
/* Copyright (c) 2016 Microsoft Corporation. All rights reserved. Released under Apache 2.0 license as described in the file LICENSE. Author: <NAME> */ #include <string> #include <algorithm> #include "kernel/expr_maps.h" #include "library/trace.h" #include "library/util.h" #include "library/reducible.h" #include "library/app_builder.h" #include "library/class.h" #include "library/constants.h" #include "library/kernel_serializer.h" #include "library/vm/vm_expr.h" #include "library/tactic/tactic_state.h" #include "library/tactic/ac_tactics.h" namespace lean { struct ac_manager_old::cache { environment m_env; expr_map<optional<expr>> m_assoc_cache[2]; expr_map<optional<expr>> m_comm_cache[2]; cache(environment const & env): m_env(env) { } void clear() { for (unsigned i = 0; i < 2; i++) { m_assoc_cache[i].clear(); m_comm_cache[i].clear(); } } }; static ac_manager_old::cache_ptr get_cache(environment const & env) { return std::make_shared<ac_manager_old::cache>(env); } ac_manager_old::ac_manager_old(type_context_old & ctx): m_ctx(ctx), m_cache_ptr(get_cache(ctx.env())) { } ac_manager_old::~ac_manager_old() { } optional<expr> ac_manager_old::is_assoc(expr const & e) { auto op = get_binary_op(e); if (!op) return none_expr(); bool idx = has_local(e); auto it = m_cache_ptr->m_assoc_cache[idx].find(*op); if (it != m_cache_ptr->m_assoc_cache[idx].end()) return it->second; optional<expr> r; try { expr assoc_class = mk_app(m_ctx, get_is_associative_name(), *op); if (auto assoc_inst = m_ctx.mk_class_instance(assoc_class)) r = some_expr(mk_app(m_ctx, get_is_associative_assoc_name(), 3, *op, *assoc_inst)); } catch (app_builder_exception & ex) {} m_cache_ptr->m_assoc_cache[idx].insert(mk_pair(*op, r)); return r; } optional<expr> ac_manager_old::is_comm(expr const & e) { auto op = get_binary_op(e); if (!op) return none_expr(); bool idx = has_local(e); auto it = m_cache_ptr->m_comm_cache[idx].find(*op); if (it != m_cache_ptr->m_comm_cache[idx].end()) return it->second; optional<expr> 
r; try { expr comm_class = mk_app(m_ctx, get_is_commutative_name(), *op); if (auto comm_inst = m_ctx.mk_class_instance(comm_class)) r = some_expr(mk_app(m_ctx, get_is_commutative_comm_name(), 3, *op, *comm_inst)); } catch (app_builder_exception & ex) {} m_cache_ptr->m_comm_cache[idx].insert(mk_pair(*op, r)); return r; } static name * g_ac_app_name = nullptr; static macro_definition * g_ac_app_macro = nullptr; static std::string * g_ac_app_opcode = nullptr; static expr expand_ac_core(expr const & e) { unsigned nargs = macro_num_args(e); unsigned i = nargs - 1; expr const & op = macro_arg(e, i); --i; expr r = macro_arg(e, i); while (i > 0) { --i; r = mk_app(op, macro_arg(e, i), r); } return r; } class ac_app_macro_cell : public macro_definition_cell { public: virtual name get_name() const { return *g_ac_app_name; } virtual unsigned trust_level() const { return LEAN_AC_MACROS_TRUST_LEVEL; } virtual expr check_type(expr const & e, abstract_type_context & ctx, bool) const { return ctx.infer(macro_arg(e, 0)); } virtual optional<expr> expand(expr const & e, abstract_type_context &) const { return some_expr(expand_ac_core(e)); } virtual void write(serializer & s) const { s.write_string(*g_ac_app_opcode); } #ifdef LEAN_JSON virtual void write_json(abstract_ast_exporter &, json &) const override {} #endif virtual bool operator==(macro_definition_cell const & other) const { ac_app_macro_cell const * other_ptr = dynamic_cast<ac_app_macro_cell const *>(&other); return other_ptr; } virtual unsigned hash() const { return 37; } }; static expr mk_ac_app_core(unsigned nargs, expr const * args_op) { lean_assert(nargs >= 3); return mk_macro(*g_ac_app_macro, nargs, args_op); } static expr mk_ac_app_core(expr const & op, buffer<expr> & args) { lean_assert(args.size() >= 2); args.push_back(op); expr r = mk_ac_app_core(args.size(), args.data()); args.pop_back(); return r; } expr mk_ac_app(expr const & op, buffer<expr> & args) { lean_assert(args.size() > 0); if (args.size() == 1) { return 
args[0]; } else { std::sort(args.begin(), args.end(), is_hash_lt); return mk_ac_app_core(op, args); } } bool is_ac_app(expr const & e) { return is_macro(e) && is_eqp(macro_def(e), *g_ac_app_macro); } expr const & get_ac_app_op(expr const & e) { lean_assert(is_ac_app(e)); return macro_arg(e, macro_num_args(e) - 1); } unsigned get_ac_app_num_args(expr const & e) { lean_assert(is_ac_app(e)); return macro_num_args(e) - 1; } expr const * get_ac_app_args(expr const & e) { lean_assert(is_ac_app(e)); return macro_args(e); } /* Return true iff e1 occurs in e2. Example ac_mem(b, a*a*b*c) returns true. */ static bool ac_mem(expr const & e1, expr const & e2) { unsigned nargs2 = get_ac_app_num_args(e2); expr const * args2 = get_ac_app_args(e2); return std::find(args2, args2+nargs2, e1) != args2+nargs2; } /* Return true iff e1 is a "subset" of e2. Example: The result is true for e1 := (a*a*a*b*d) and e2 := (a*a*a*a*b*b*c*d*d) */ bool is_ac_subset(expr const & e1, expr const & e2) { if (is_ac_app(e1)) { if (is_ac_app(e2)) { if (get_ac_app_op(e1) == get_ac_app_op(e2)) { unsigned nargs1 = get_ac_app_num_args(e1); unsigned nargs2 = get_ac_app_num_args(e2); if (nargs1 > nargs2) return false; expr const * args1 = get_ac_app_args(e1); expr const * args2 = get_ac_app_args(e2); unsigned i1 = 0; unsigned i2 = 0; while (i1 < nargs1 && i2 < nargs2) { if (args1[i1] == args2[i2]) { i1++; i2++; } else if (is_hash_lt(args2[i2], args1[i1])) { i2++; } else { lean_assert(is_hash_lt(args1[i1], args2[i2])); return false; } } return i1 == nargs1; } else { lean_assert(get_ac_app_op(e1) != get_ac_app_op(e2)); /* treat e1 as an atomic value that may occur in e2 */ return ac_mem(e1, e2); } } else { return false; } } else { if (is_ac_app(e2)) { return ac_mem(e1, e2); } else { return e1 == e2; } } } /* Store in r e1\e2. 
Example: given e1 := (a*a*a*a*b*b*c*d*d*d) and e2 := (a*a*a*b*b*d), the result is (a, c, d, d) \pre is_ac_subset(e2, e1) */ void ac_diff(expr const & e1, expr const & e2, buffer<expr> & r) { lean_assert(is_ac_subset(e2, e1)); if (is_ac_app(e1)) { if (is_ac_app(e2) && get_ac_app_op(e1) == get_ac_app_op(e2)) { unsigned nargs1 = get_ac_app_num_args(e1); unsigned nargs2 = get_ac_app_num_args(e2); lean_assert(nargs1 >= nargs2); expr const * args1 = get_ac_app_args(e1); expr const * args2 = get_ac_app_args(e2); unsigned i2 = 0; for (unsigned i1 = 0; i1 < nargs1; i1++) { if (i2 == nargs2) { r.push_back(args1[i1]); } else if (args1[i1] == args2[i2]) { i2++; } else { lean_assert(is_hash_lt(args1[i1], args2[i2])); r.push_back(args1[i1]); } } } else { bool found = false; unsigned nargs1 = get_ac_app_num_args(e1); expr const * args1 = get_ac_app_args(e1); for (unsigned i = 0; i < nargs1; i++) { if (!found && args1[i] == e2) { found = true; } else { r.push_back(args1[i]); } } lean_assert(found); } } else { lean_assert(!is_ac_app(e1)); lean_assert(!is_ac_app(e2)); lean_assert(e1 == e2); } } void ac_append(expr const & op, expr const & e, buffer<expr> & r) { if (is_ac_app(e) && get_ac_app_op(e) == op) { r.append(get_ac_app_num_args(e), get_ac_app_args(e)); } else { r.push_back(e); } } void ac_intersection(expr const & e1, expr const & e2, buffer<expr> & r) { lean_assert(is_ac_app(e1)); lean_assert(is_ac_app(e2)); lean_assert(get_ac_app_op(e1) == get_ac_app_op(e2)); unsigned nargs1 = get_ac_app_num_args(e1); unsigned nargs2 = get_ac_app_num_args(e2); expr const * args1 = get_ac_app_args(e1); expr const * args2 = get_ac_app_args(e2); unsigned i1 = 0; unsigned i2 = 0; while (i1 < nargs1 && i2 < nargs2) { if (args1[i1] == args2[i2]) { r.push_back(args1[i1]); i1++; i2++; } else if (is_hash_lt(args2[i2], args1[i1])) { i2++; } else { lean_assert(is_hash_lt(args1[i1], args2[i2])); i1++; } } } expr mk_ac_flat_app(expr const & op, expr const & e1, expr const & e2) { buffer<expr> new_args; 
ac_append(op, e1, new_args); ac_append(op, e2, new_args); return mk_ac_app(op, new_args); } /* lexdeg order */ bool ac_lt(expr const & e1, expr const & e2) { if (is_ac_app(e1)) { if (is_ac_app(e2) && get_ac_app_op(e1) == get_ac_app_op(e2)) { unsigned nargs1 = get_ac_app_num_args(e1); unsigned nargs2 = get_ac_app_num_args(e2); if (nargs1 < nargs2) return true; if (nargs1 > nargs2) return false; expr const * args1 = get_ac_app_args(e1); expr const * args2 = get_ac_app_args(e2); for (unsigned i = 0; i < nargs1; i++) { if (args1[i] != args2[i]) return is_hash_lt(args1[i], args2[i]); } return false; } else { return false; } } else { if (is_ac_app(e2)) { return true; } else { return is_hash_lt(e1, e2); } } } static expr expand_if_ac_app(expr const & e) { if (is_ac_app(e)) return expand_ac_core(e); else return e; } struct flat_assoc_fn { abstract_type_context & m_ctx; expr m_op; expr m_assoc; flat_assoc_fn(abstract_type_context & ctx, expr const & op, expr const & assoc): m_ctx(ctx), m_op(op), m_assoc(assoc) {} bool is_op_app(expr const & e, expr & lhs, expr & rhs) { if (!is_app(e)) return false; expr const & fn1 = app_fn(e); if (!is_app(fn1)) return false; if (app_fn(fn1) != m_op) return false; lhs = app_arg(fn1); rhs = app_arg(e); return true; } bool is_op_app(expr const & e) { if (!is_app(e)) return false; expr const & fn1 = app_fn(e); if (!is_app(fn1)) return false; return app_fn(fn1) == m_op; } expr mk_op(expr const & a, expr const & b) { return mk_app(m_op, a, b); } expr mk_assoc(expr const & a, expr const & b, expr const & c) { return mk_app(m_assoc, a, b, c); } expr mk_eq_refl(expr const & a) { return ::lean::mk_eq_refl(m_ctx, a); } expr mk_eq_trans(expr const & H1, expr const & H2) { return ::lean::mk_eq_trans(m_ctx, H1, H2); } expr mk_eq_trans(expr const & H1, optional<expr> const & H2) { if (!H2) return H1; return mk_eq_trans(H1, *H2); } optional<expr> mk_eq_trans(optional<expr> const & H1, optional<expr> const & H2) { if (!H1) return H2; if (!H2) return H1; 
return some_expr(mk_eq_trans(*H1, *H2)); } expr mk_eq_symm(expr const & H) { return ::lean::mk_eq_symm(m_ctx, H); } optional<expr> mk_eq_symm(optional<expr> const & H) { if (!H) return none_expr(); return some_expr(mk_eq_symm(*H)); } expr mk_congr_arg(expr const & fn, expr const & H) { return ::lean::mk_congr_arg(m_ctx, fn, H); } pair<expr, optional<expr>> flat_with(expr const & e, expr const & rest) { expr lhs, rhs; if (is_op_app(e, lhs, rhs)) { lhs = expand_if_ac_app(lhs); rhs = expand_if_ac_app(rhs); auto p1 = flat_with(rhs, rest); if (p1.second) { auto p2 = flat_with(lhs, p1.first); // H3 is a proof for (lhs `op` rhs) `op` rest = lhs `op` (rhs `op` rest) expr H3 = mk_assoc(lhs, rhs, rest); // H4 is a proof for lhs `op` (rhs `op` rest) = lhs `op` p1.first expr H4 = mk_congr_arg(mk_app(m_op, lhs), *p1.second); expr H = mk_eq_trans(mk_eq_trans(H3, H4), p2.second); return mk_pair(p2.first, some_expr(H)); } else { if (is_op_app(lhs)) { auto p2 = flat_with(lhs, p1.first); // H3 is a proof for (lhs `op` rhs) `op` rest = lhs `op` (rhs `op` rest) expr H3 = mk_assoc(lhs, rhs, rest); expr H = mk_eq_trans(H3, p2.second); return mk_pair(p2.first, some_expr(H)); } else { return mk_pair(mk_op(lhs, p1.first), some_expr(mk_assoc(lhs, rhs, rest))); } } } else { return mk_pair(mk_op(e, rest), none_expr()); } } pair<expr, optional<expr>> flat_core(expr e) { expr lhs, rhs; e = expand_if_ac_app(e); if (is_op_app(e, lhs, rhs)) { lhs = expand_if_ac_app(lhs); rhs = expand_if_ac_app(rhs); auto p1 = flat_core(rhs); if (p1.second) { if (is_op_app(lhs)) { auto p2 = flat_with(lhs, p1.first); expr H3 = mk_congr_arg(mk_app(m_op, lhs), *p1.second); expr H = mk_eq_trans(H3, p2.second); return mk_pair(p2.first, some_expr(H)); } else { expr r = mk_op(lhs, p1.first); expr H = mk_congr_arg(mk_app(m_op, lhs), *p1.second); return mk_pair(r, some_expr(H)); } } else { if (is_op_app(lhs)) { return flat_with(lhs, rhs); } else { return mk_pair(e, none_expr()); } } } else { return mk_pair(e, none_expr()); 
} } pair<expr, expr> flat(expr const & e) { auto p = flat_core(e); if (p.second) { return mk_pair(p.first, *p.second); } else { return mk_pair(e, mk_eq_refl(e)); } } }; #define lean_perm_ac_trace(code) lean_trace(name({"tactic", "perm_ac"}), scope_trace_env _scope1(m_ctx.env(), m_ctx); code) struct perm_ac_fn : public flat_assoc_fn { expr m_comm; optional<expr> m_left_comm; perm_ac_fn(abstract_type_context & ctx, expr const & op, expr const & assoc, expr const & comm): flat_assoc_fn(ctx, op, assoc), m_comm(comm) { } [[ noreturn ]] void throw_failed() { throw exception("perm_ac failed, arguments are not equal modulo AC"); } expr mk_comm(expr const & a, expr const & b) { return mk_app(m_comm, a, b); } level dec_level(level const & l) { if (auto r = ::lean::dec_level(l)) return *r; throw_failed(); } expr mk_left_comm(expr const & a, expr const & b, expr const & c) { if (!m_left_comm) { expr A = m_ctx.infer(a); level lvl = dec_level(get_level(m_ctx, A)); m_left_comm = mk_app(mk_constant(get_left_comm_name(), {lvl}), A, m_op, m_comm, m_assoc); } return mk_app(*m_left_comm, a, b, c); } /* Given a term \c e of the form (op t_1 (op t_2 ... (op t_{n-1} t_n))), if for some i, t_i == t, then produce the term (op t_i (op t_2 ... (op t_{n-1} t_n))) and a proof they are equal AC. Throw exception if t is not found. 
*/ pair<expr, expr> pull_term(expr const & t, expr const & e) { expr lhs1, rhs1; if (!is_op_app(e, lhs1, rhs1)) { lean_perm_ac_trace(tout() << "right-hand-side does not contain:\n" << t << "\n";); throw_failed(); } if (t == rhs1) { return mk_pair(mk_op(rhs1, lhs1), mk_comm(lhs1, rhs1)); } expr lhs2, rhs2; if (!is_op_app(rhs1, lhs2, rhs2)) { lean_perm_ac_trace(tout() << "right-hand-side does not contain:\n" << t << "\n";); throw_failed(); } if (t == lhs2) { return mk_pair(mk_op(lhs2, mk_op(lhs1, rhs2)), mk_left_comm(lhs1, lhs2, rhs2)); } /* We have e := lhs1 `op` lhs2 `op` rhs2 */ auto p = pull_term(t, rhs1); expr lhs3, rhs3; lean_verify(is_op_app(p.first, lhs3, rhs3)); lean_assert(t == lhs3); /* p.second : rhs1 = t `op` rhs3 */ expr H1 = mk_congr_arg(mk_app(m_op, lhs1), p.second); /* H1 : lhs1 `op` rhs1 = lhs1 `op` t `op` rhs3 */ expr H2 = mk_left_comm(lhs1, t, rhs3); /* H2 : lhs1 `op` t `op` rhs3 = t `op` lhs1 `op` rhs3 */ return mk_pair(mk_op(t, mk_op(lhs1, rhs3)), mk_eq_trans(H1, H2)); } /* Return a proof that e1 == e2 modulo AC. Return none if reflexivity. 
Throw exception if failure */ optional<expr> perm_flat(expr const & e1, expr const & e2) { expr lhs1, rhs1; expr lhs2, rhs2; bool b1 = is_op_app(e1, lhs1, rhs1); bool b2 = is_op_app(e2, lhs2, rhs2); if (b1 != b2) { lean_perm_ac_trace(tout() << "left and right-hand-sides have different number of terms\n";); throw_failed(); } if (!b1 && !b2) { if (e1 == e2) { return none_expr(); // reflexivity } else { lean_perm_ac_trace(tout() << "the left and right hand sides contain the terms:\n" << e1 << "\n" << e2 << "\n";); throw_failed(); } } lean_assert(b1 && b2); if (lhs1 == lhs2) { optional<expr> H = perm_flat(rhs1, rhs2); if (!H) return none_expr(); return some_expr(mk_congr_arg(mk_app(m_op, lhs1), *H)); } else { auto p = pull_term(lhs2, e1); is_op_app(p.first, lhs1, rhs1); lean_assert(lhs1 == lhs2); optional<expr> H1 = perm_flat(rhs1, rhs2); if (!H1) return some_expr(p.second); expr H2 = mk_congr_arg(mk_app(m_op, lhs1), *H1); return some_expr(mk_eq_trans(p.second, H2)); } } /* Return a proof that lhs == rhs modulo AC. Return none if reflexivity. 
Throw exception if failure */ optional<expr> perm_core(expr const & lhs, expr const & rhs) { auto p1 = flat_core(lhs); auto p2 = flat_core(rhs); auto H = perm_flat(p1.first, p2.first); return mk_eq_trans(p1.second, mk_eq_trans(H, mk_eq_symm(p2.second))); } expr perm(expr const & lhs, expr const & rhs) { if (auto H = perm_core(lhs, rhs)) return *H; else return mk_eq_refl(lhs); } }; pair<expr, optional<expr>> flat_assoc(abstract_type_context & ctx, expr const & op, expr const & assoc, expr const & e) { return flat_assoc_fn(ctx, op, assoc).flat_core(e); } expr perm_ac(abstract_type_context & ctx, expr const & op, expr const & assoc, expr const & comm, expr const & e1, expr const & e2) { return perm_ac_fn(ctx, op, assoc, comm).perm(e1, e2); } static name * g_perm_ac_name = nullptr; static macro_definition * g_perm_ac_macro = nullptr; static std::string * g_perm_ac_opcode = nullptr; class perm_ac_macro_cell : public macro_definition_cell { public: virtual name get_name() const { return *g_perm_ac_name; } virtual expr check_type(expr const & e, abstract_type_context & ctx, bool) const { return mk_eq(ctx, macro_arg(e, 2), macro_arg(e, 3)); } virtual unsigned trust_level() const { return LEAN_AC_MACROS_TRUST_LEVEL; } virtual optional<expr> expand(expr const & e, abstract_type_context & ctx) const { expr const & assoc = macro_arg(e, 0); expr const & comm = macro_arg(e, 1); expr e1 = expand_if_ac_app(macro_arg(e, 2)); expr e2 = expand_if_ac_app(macro_arg(e, 3)); expr const & op = app_fn(app_fn(e1)); return some_expr(perm_ac(ctx, op, assoc, comm, e1, e2)); } virtual void write(serializer & s) const { s.write_string(*g_perm_ac_opcode); } #ifdef LEAN_JSON virtual void write_json(abstract_ast_exporter &, json &) const override {} #endif virtual bool operator==(macro_definition_cell const & other) const { perm_ac_macro_cell const * other_ptr = dynamic_cast<perm_ac_macro_cell const *>(&other); return other_ptr; } virtual unsigned hash() const { return 31; } }; expr 
mk_perm_ac_macro_core(expr const & assoc, expr const & comm, expr const & e1, expr const & e2) { lean_assert((get_binary_op(e1) || is_ac_app(e1)) && (get_binary_op(e2) || is_ac_app(e2))); expr args[4] = {assoc, comm, e1, e2}; return mk_macro(*g_perm_ac_macro, 4, args); } expr mk_perm_ac_macro(abstract_type_context & ctx, expr const & assoc, expr const & comm, expr const & e1, expr const & e2) { if (e1 == e2) { return mk_eq_refl(ctx, e1); } else { return mk_perm_ac_macro_core(assoc, comm, e1, e2); } } #define TRY LEAN_TACTIC_TRY #define CATCH LEAN_TACTIC_CATCH(tactic::to_state(s)) vm_obj tactic_flat_assoc(vm_obj const & op, vm_obj const & assoc, vm_obj const & e, vm_obj const & s) { TRY; type_context_old ctx = mk_type_context_for(s); pair<expr, expr> p = flat_assoc_fn(ctx, to_expr(op), to_expr(assoc)).flat(to_expr(e)); return tactic::mk_success(mk_vm_pair(to_obj(p.first), to_obj(p.second)), tactic::to_state(s)); CATCH; } vm_obj tactic_perm_ac(vm_obj const & op, vm_obj const & assoc, vm_obj const & comm, vm_obj const & e1, vm_obj const & e2, vm_obj const & s) { TRY; type_context_old ctx = mk_type_context_for(s); expr H = perm_ac_fn(ctx, to_expr(op), to_expr(assoc), to_expr(comm)).perm(to_expr(e1), to_expr(e2)); return tactic::mk_success(to_obj(H), tactic::to_state(s)); CATCH; } void initialize_ac_tactics() { register_trace_class(name{"tactic", "perm_ac"}); DECLARE_VM_BUILTIN(name({"tactic", "flat_assoc"}), tactic_flat_assoc); DECLARE_VM_BUILTIN(name({"tactic", "perm_ac"}), tactic_perm_ac); g_ac_app_name = new name("ac_app"); g_ac_app_opcode = new std::string("ACApp"); g_ac_app_macro = new macro_definition(new ac_app_macro_cell()); register_macro_deserializer(*g_ac_app_opcode, [](deserializer &, unsigned num, expr const * args) { return mk_ac_app_core(num, args); }); g_perm_ac_name = new name("perm_ac"); g_perm_ac_opcode = new std::string("PermAC"); g_perm_ac_macro = new macro_definition(new perm_ac_macro_cell()); register_macro_deserializer(*g_perm_ac_opcode, 
[](deserializer &, unsigned num, expr const * args) { if (num != 4) corrupted_stream_exception(); return mk_perm_ac_macro_core(args[0], args[1], args[2], args[3]); }); } void finalize_ac_tactics() { delete g_ac_app_name; delete g_ac_app_opcode; delete g_ac_app_macro; delete g_perm_ac_name; delete g_perm_ac_opcode; delete g_perm_ac_macro; } }
12,649
1,396
<filename>stringtemplate4/src/main/java/org/jdbi/v3/stringtemplate4/UseStringTemplateSqlLocator.java /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdbi.v3.stringtemplate4; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import org.jdbi.v3.sqlobject.config.ConfiguringAnnotation; import org.jdbi.v3.stringtemplate4.internal.UseStringTemplateSqlLocatorImpl; /** * Configures SQL Object to locate SQL using the {@link StringTemplateSqlLocator#findStringTemplate(Class, String)} * method. If the SQL annotation (e.g. <code>@SqlQuery</code>) defines a value (e.g. <code>@SqlQuery("hello")</code>), * that value (<code>"hello"</code>) will be used for the <code>name</code> parameter; if undefined, the name of the SQL * object method will be used: * * <pre> * &#064;UseStringTemplateSqlLocator * interface Viccini { * &#064;SqlUpdate * void doTheThing(long id); // =&gt; StringTemplateSqlLocator.findStringTemplateSql(Viccini.class, "doTheThing") * * &#064;SqlUpdate("thatOtherThing") * void doTheThing(String name); // =&gt; StringTemplateSqlLocator.findStringTemplateSql(Viccini.class, "thatOtherThing") * } * </pre> */ @ConfiguringAnnotation(UseStringTemplateSqlLocatorImpl.class) @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE, ElementType.METHOD}) public @interface UseStringTemplateSqlLocator {}
660
1,338
/* * PCL5Cap.h * Copyright 1999-2000 Y.Takagi. All Rights Reserved. */ #ifndef __PCL5CAP_H #define __PCL5CAP_H #include "PrinterCap.h" class PCL5Cap : public PrinterCap { public: PCL5Cap(const PrinterData* printer_data); virtual int CountCap(CapID) const; virtual bool Supports(CapID) const; virtual const BaseCap **GetCaps(CapID) const; }; #endif // __PCL5CAP_H
158
190,993
<reponame>yage99/tensorflow /* Copyright 2018 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include <string> #include <vector> #include <gmock/gmock.h> #include <gtest/gtest.h> #include "tensorflow/lite/toco/graph_transformations/graph_transformations.h" #include "tensorflow/lite/toco/model.h" #include "tensorflow/lite/toco/tooling_util.h" namespace toco { namespace { // A gmock matcher that check that elements of a float vector match to a given // tolerance. 
std::vector<testing::Matcher<float>> ArrayFloatNear( const std::vector<float>& values, float max_abs_error = 1e-5) { std::vector<testing::Matcher<float>> matchers; matchers.reserve(values.size()); for (const float& v : values) { matchers.emplace_back(testing::FloatNear(v, max_abs_error)); } return matchers; } } // namespace class FuseBinaryIntoFollowingAffineTest : public ::testing::Test { protected: FuseBinaryIntoFollowingAffineTest() {} void SetUp() override { model_.reset(new Model); } void CreateArray(const std::string& name, const std::vector<int>& shape) { Array& array = model_->GetOrCreateArray(name); array.data_type = ArrayDataType::kFloat; Shape* array_shape = array.mutable_shape(); *(array_shape->mutable_dims()) = shape; } void CreateConstantArray(const std::string& name, const std::vector<int>& shape, const std::vector<float>& data) { CreateArray(name, shape); Array& array = model_->GetOrCreateArray(name); auto& array_buffer = array.GetMutableBuffer<ArrayDataType::kFloat>(); int bufsize = 1; for (int dim : shape) { bufsize *= dim; } array_buffer.data.resize(bufsize); float* buf_ptr = array_buffer.data.data(); for (int i = 0; i < bufsize; ++i) { buf_ptr[i] = data[i]; } } std::unique_ptr<Model> model_; }; TEST_F(FuseBinaryIntoFollowingAffineTest, FuseMulIntoFullyConnected) { // Creating a model. 
{ CreateArray("Input", {2, 2}); CreateConstantArray("MulInput2", {1}, {2.0}); CreateArray("MulOutput", {2, 2}); CreateConstantArray("FCWeight", {2, 2}, {1.0, 2.0, 3.0, 4.0}); CreateConstantArray("FCBias", {1}, {1.0}); CreateArray("Output", {2, 2}); auto* mul_op = new MulOperator; mul_op->inputs = {"Input", "MulInput2"}; mul_op->outputs = {"MulOutput"}; model_->operators.push_back(std::unique_ptr<Operator>(mul_op)); auto* fc_op = new FullyConnectedOperator; fc_op->inputs = {"MulOutput", "FCWeight", "FCBias"}; fc_op->outputs = {"Output"}; model_->operators.push_back(std::unique_ptr<Operator>(fc_op)); } toco::FuseBinaryIntoFollowingAffine transformation; bool modified; ASSERT_TRUE(transformation.Run(model_.get(), /*op_index=*/0, &modified).ok()); EXPECT_TRUE(modified); // `Mul` should be fused into `FullyConnected`. Only 1 op is left. ASSERT_EQ(model_->operators.size(), 1); const auto& op = model_->operators[0]; ASSERT_EQ(op->type, OperatorType::kFullyConnected); ASSERT_EQ(op->inputs.size(), 3); auto& weights_array = model_->GetArray(op->inputs[1]); EXPECT_THAT(weights_array.GetBuffer<toco::ArrayDataType::kFloat>().data, ElementsAreArray(ArrayFloatNear({2.0, 4.0, 6.0, 8.0}))); auto& bias_array = model_->GetArray(op->inputs[2]); EXPECT_THAT(bias_array.GetBuffer<toco::ArrayDataType::kFloat>().data, ElementsAreArray(ArrayFloatNear({1.0}))); } // This is a regression test of b/121287325. Toco crashes before the fix. TEST_F(FuseBinaryIntoFollowingAffineTest, DoNotFuseWithMultipleConsumers) { // Creating a model. 
{ CreateArray("Input", {2, 2}); CreateConstantArray("MulInput2", {1}, {2.0}); CreateArray("MulOutput", {2, 2}); CreateConstantArray("FCWeight", {2, 2}, {1.0, 2.0, 3.0, 4.0}); CreateConstantArray("FCBias", {1}, {1.0}); CreateArray("Output", {2, 2}); CreateArray("AnotherOutput", {2, 2}); auto* mul_op = new MulOperator; mul_op->inputs = {"Input", "MulInput2"}; mul_op->outputs = {"MulOutput"}; model_->operators.push_back(std::unique_ptr<Operator>(mul_op)); auto* fc_op = new FullyConnectedOperator; fc_op->inputs = {"MulOutput", "FCWeight", "FCBias"}; fc_op->outputs = {"Output"}; model_->operators.push_back(std::unique_ptr<Operator>(fc_op)); auto identity_op = new TensorFlowIdentityOperator; identity_op->inputs = {"MulOutput"}; identity_op->outputs = {"AnotherOutput"}; model_->operators.push_back(std::unique_ptr<Operator>(identity_op)); } toco::FuseBinaryIntoFollowingAffine transformation; bool modified; ASSERT_TRUE(transformation.Run(model_.get(), /*op_index=*/0, &modified).ok()); // Do not modify the graph if the binary operator has another output. EXPECT_FALSE(modified); EXPECT_EQ(model_->operators.size(), 3); } } // namespace toco
2,061
2,219
// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef BASE_ALLOCATOR_PARTITION_ALLOCATOR_ADDRESS_POOL_MANAGER_H_ #define BASE_ALLOCATOR_PARTITION_ALLOCATOR_ADDRESS_POOL_MANAGER_H_ #include <bitset> #include <limits> #include "base/allocator/partition_allocator/address_pool_manager_bitmap.h" #include "base/allocator/partition_allocator/address_pool_manager_types.h" #include "base/allocator/partition_allocator/partition_alloc_check.h" #include "base/allocator/partition_allocator/partition_alloc_config.h" #include "base/allocator/partition_allocator/partition_alloc_constants.h" #include "base/synchronization/lock.h" #include "base/thread_annotations.h" #include "build/build_config.h" namespace base { template <typename Type> struct LazyInstanceTraitsBase; namespace internal { // (64bit version) // AddressPoolManager takes a reserved virtual address space and manages address // space allocation. // // AddressPoolManager (currently) supports up to 2 pools. Each pool manages a // contiguous reserved address space. Alloc() takes a pool_handle and returns // address regions from the specified pool. Free() also takes a pool_handle and // returns the address region back to the manager. // // (32bit version) // AddressPoolManager wraps AllocPages and FreePages and remembers allocated // address regions using bitmaps. IsManagedByPartitionAllocBRPPool and // IsManagedByPartitionAllocNonBRPPool use the bitmaps to judge whether a given // address is in a pool that supports BackupRefPtr or in a pool that doesn't. // All PartitionAlloc allocations must be in either of the pools. class BASE_EXPORT AddressPoolManager { public: static AddressPoolManager* GetInstance(); #if defined(PA_HAS_64_BITS_POINTERS) pool_handle Add(uintptr_t address, size_t length); void Remove(pool_handle handle); // Populate a |used| bitset of superpages currently in use. 
void GetPoolUsedSuperPages(pool_handle handle, std::bitset<kMaxSuperPages>& used); // Return the base address of a pool. uintptr_t GetPoolBaseAddress(pool_handle handle); #endif // Reserves address space from GigaCage. char* Reserve(pool_handle handle, void* requested_address, size_t length); // Frees address space back to GigaCage and decommits underlying system pages. void UnreserveAndDecommit(pool_handle handle, void* ptr, size_t length); void ResetForTesting(); #if !defined(PA_HAS_64_BITS_POINTERS) void MarkUsed(pool_handle handle, const void* address, size_t size); void MarkUnused(pool_handle handle, const void* address, size_t size); static bool IsManagedByNonBRPPool(const void* address) { return AddressPoolManagerBitmap::IsManagedByNonBRPPool(address); } static bool IsManagedByBRPPool(const void* address) { return AddressPoolManagerBitmap::IsManagedByBRPPool(address); } #endif // !defined(PA_HAS_64_BITS_POINTERS) private: friend class AddressPoolManagerForTesting; AddressPoolManager(); ~AddressPoolManager(); #if defined(PA_HAS_64_BITS_POINTERS) class Pool { public: Pool(); ~Pool(); void Initialize(uintptr_t ptr, size_t length); bool IsInitialized(); void Reset(); uintptr_t FindChunk(size_t size); void FreeChunk(uintptr_t address, size_t size); bool TryReserveChunk(uintptr_t address, size_t size); void GetUsedSuperPages(std::bitset<kMaxSuperPages>& used); uintptr_t GetBaseAddress(); private: base::Lock lock_; // The bitset stores the allocation state of the address pool. 1 bit per // super-page: 1 = allocated, 0 = free. std::bitset<kMaxSuperPages> alloc_bitset_ GUARDED_BY(lock_); // An index of a bit in the bitset before which we know for sure there all // 1s. This is a best-effort hint in the sense that there still may be lots // of 1s after this index, but at least we know there is no point in // starting the search before it. 
size_t bit_hint_ GUARDED_BY(lock_); size_t total_bits_ = 0; uintptr_t address_begin_ = 0; #if DCHECK_IS_ON() uintptr_t address_end_ = 0; #endif }; ALWAYS_INLINE Pool* GetPool(pool_handle handle) { PA_DCHECK(0 < handle && handle <= kNumPools); return &pools_[handle - 1]; } static constexpr size_t kNumPools = 2; Pool pools_[kNumPools]; #else // defined(PA_HAS_64_BITS_POINTERS) // BRP stands for BackupRefPtr. GigaCage is split into pools, one which // supports BackupRefPtr and one that doesn't. static constexpr pool_handle kNonBRPPoolHandle = 1; static constexpr pool_handle kBRPPoolHandle = 2; friend pool_handle GetNonBRPPool(); friend pool_handle GetBRPPool(); #endif // defined(PA_HAS_64_BITS_POINTERS) friend struct base::LazyInstanceTraitsBase<AddressPoolManager>; DISALLOW_COPY_AND_ASSIGN(AddressPoolManager); }; #if !defined(PA_HAS_64_BITS_POINTERS) ALWAYS_INLINE pool_handle GetNonBRPPool() { return AddressPoolManager::kNonBRPPoolHandle; } ALWAYS_INLINE pool_handle GetBRPPool() { return AddressPoolManager::kBRPPoolHandle; } #endif // !defined(PA_HAS_64_BITS_POINTERS) } // namespace internal } // namespace base #endif // BASE_ALLOCATOR_PARTITION_ALLOCATOR_ADDRESS_POOL_MANAGER_H_
1,786
507
<reponame>playday3008/VAC<gh_stars>100-1000 #pragma once #include <Windows.h> // E8 ? ? ? ? 59 8B F8 (relative jump) PVOID ProcessMonitor_readFileMapping(PBOOLEAN md5Computed, PBYTE md5, DWORD out[2]);
87
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_MEDIA_ROUTER_PROVIDERS_OPENSCREEN_NETWORK_SERVICE_ASYNC_PACKET_SENDER_H_ #define CHROME_BROWSER_MEDIA_ROUTER_PROVIDERS_OPENSCREEN_NETWORK_SERVICE_ASYNC_PACKET_SENDER_H_ #include "base/callback.h" #include "mojo/public/cpp/bindings/remote.h" #include "net/base/net_errors.h" #include "services/network/public/mojom/ip_endpoint.mojom.h" #include "services/network/public/mojom/network_context.mojom.h" #include "services/network/public/mojom/udp_socket.mojom.h" namespace media_router { class AsyncPacketSender { public: virtual ~AsyncPacketSender() {} virtual net::Error SendTo(const net::IPEndPoint& dest_addr, base::span<const uint8_t> data, base::OnceCallback<void(int32_t)> callback) = 0; }; class NetworkServiceAsyncPacketSender : public AsyncPacketSender { public: explicit NetworkServiceAsyncPacketSender( network::mojom::NetworkContext* network_context); explicit NetworkServiceAsyncPacketSender(NetworkServiceAsyncPacketSender&&); NetworkServiceAsyncPacketSender(const NetworkServiceAsyncPacketSender&) = delete; NetworkServiceAsyncPacketSender& operator=( const NetworkServiceAsyncPacketSender&) = delete; ~NetworkServiceAsyncPacketSender() override; // network::mojom::UDPSocket forwards. net::Error SendTo(const net::IPEndPoint& dest_addr, base::span<const uint8_t> data, base::OnceCallback<void(int32_t)> callback) override; private: mojo::Remote<network::mojom::UDPSocket> socket_; }; } // namespace media_router #endif // CHROME_BROWSER_MEDIA_ROUTER_PROVIDERS_OPENSCREEN_NETWORK_SERVICE_ASYNC_PACKET_SENDER_H_
726
2,325
<filename>tests/losses/adv_losses_test.py<gh_stars>1000+
#
"""
Tests adversarial loss related functions.
"""

import tensorflow as tf

from texar.tf.losses.adv_losses import binary_adversarial_losses


class AdvLossesTest(tf.test.TestCase):
    """Tests adversarial losses.
    """

    def test_binary_adversarial_losses(self):
        """Tests :meth:`~texar.tf.losses.adv_losses.binary_adversarial_losses`.

        Feeds constant "real" and "fake" batches through a degenerate
        discriminator that always outputs logits=0, then checks the
        relationship between the two generator-loss modes.
        """
        batch_size = 16
        data_dim = 64
        real_data = tf.zeros([batch_size, data_dim], dtype=tf.float32)
        fake_data = tf.ones([batch_size, data_dim], dtype=tf.float32)
        const_logits = tf.zeros([batch_size], dtype=tf.float32)
        # Use a dumb discriminator that always outputs logits=0.
        gen_loss, disc_loss = binary_adversarial_losses(
            real_data, fake_data, lambda x: const_logits)
        # Same inputs, but with the alternative generator objective.
        gen_loss_2, disc_loss_2 = binary_adversarial_losses(
            real_data, fake_data, lambda x: const_logits, mode="min_fake")

        with self.test_session() as sess:
            gen_loss_, disc_loss_ = sess.run([gen_loss, disc_loss])
            gen_loss_2_, disc_loss_2_ = sess.run([gen_loss_2, disc_loss_2])
            # With logits=0 the two generator modes are exact negatives of
            # each other, while the discriminator loss is mode-independent.
            self.assertAlmostEqual(gen_loss_, -gen_loss_2_)
            self.assertAlmostEqual(disc_loss_, disc_loss_2_)


if __name__ == "__main__":
    tf.test.main()
610
913
/*
 * CANopen data storage object for storing data into block device (eeprom)
 *
 * @file CO_storageEeprom.c
 * @author <NAME>
 * @copyright 2021 <NAME>
 *
 * This file is part of CANopenNode, an opensource CANopen Stack.
 * Project home page is <https://github.com/CANopenNode/CANopenNode>.
 * For more information on CANopen see <http://www.can-cia.org/>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "storage/CO_storageEeprom.h"
#include "storage/CO_eeprom.h"
#include "301/crc16-ccitt.h"

#if (CO_CONFIG_STORAGE) & CO_CONFIG_STORAGE_ENABLE

/*
 * Function for writing data on "Store parameters" command - OD object 1010
 *
 * Copies the entry's RAM block into eeprom under the OD lock (so the data
 * cannot change mid-write), verifies the write by comparing CRC16 checksums,
 * then writes and read-back-verifies a 32-bit signature composed of the CRC
 * (upper 16 bits) and the data length (lower 16 bits). Returns ODR_HW on any
 * write or verification failure, ODR_OK otherwise.
 *
 * For more information see file CO_storage.h, CO_storage_entry_t.
 */
static ODR_t storeEeprom(CO_storage_entry_t *entry, CO_CANmodule_t *CANmodule) {
    bool_t writeOk;

    /* save data to the eeprom */
    CO_LOCK_OD(CANmodule);
    writeOk = CO_eeprom_writeBlock(entry->storageModule, entry->addr,
                                   entry->eepromAddr, entry->len);
    /* CRC is computed inside the lock, over the same snapshot that was
     * written. */
    entry->crc = crc16_ccitt(entry->addr, entry->len, 0);
    CO_UNLOCK_OD(CANmodule);

    /* Verify, if data in eeprom are equal */
    uint16_t crc_read = CO_eeprom_getCrcBlock(entry->storageModule,
                                              entry->eepromAddr, entry->len);
    if (entry->crc != crc_read || !writeOk) {
        return ODR_HW;
    }

    /* Write signature (see CO_storageEeprom_init() for info) */
    uint16_t signatureOfEntry = (uint16_t)entry->len;
    uint32_t signature = (((uint32_t)entry->crc) << 16) | signatureOfEntry;
    writeOk = CO_eeprom_writeBlock(entry->storageModule, (uint8_t *)&signature,
                                   entry->eepromAddrSignature,
                                   sizeof(signature));

    /* verify signature and write */
    uint32_t signatureRead;
    CO_eeprom_readBlock(entry->storageModule, (uint8_t *)&signatureRead,
                        entry->eepromAddrSignature, sizeof(signatureRead));
    if(signature != signatureRead || !writeOk) {
        return ODR_HW;
    }

    return ODR_OK;
}

/*
 * Function for restoring data on "Restore default parameters" command - OD 1011
 *
 * Invalidates the stored entry by overwriting its signature with 0xFFFFFFFF
 * (the erased-eeprom value) and read-back-verifying it; the data block itself
 * is left untouched. On the next CO_storageEeprom_init() the signature check
 * then fails, so the application's default values remain in effect.
 *
 * For more information see file CO_storage.h, CO_storage_entry_t.
 */
static ODR_t restoreEeprom(CO_storage_entry_t *entry,
                           CO_CANmodule_t *CANmodule)
{
    (void) CANmodule;
    bool_t writeOk;

    /* Write empty signature */
    uint32_t signature = 0xFFFFFFFF;
    writeOk = CO_eeprom_writeBlock(entry->storageModule, (uint8_t *)&signature,
                                   entry->eepromAddrSignature,
                                   sizeof(signature));

    /* verify signature and protection */
    uint32_t signatureRead;
    CO_eeprom_readBlock(entry->storageModule, (uint8_t *)&signatureRead,
                        entry->eepromAddrSignature, sizeof(signatureRead));
    if(signature != signatureRead || !writeOk) {
        return ODR_HW;
    }

    return ODR_OK;
}

/******************************************************************************/
/*
 * Initialize eeprom-backed storage: registers storeEeprom()/restoreEeprom()
 * with the CO_storage object, lays out one 32-bit signature slot plus one
 * data block per entry inside the eeprom, and loads previously stored data
 * into RAM for every entry whose signature (and, for non-auto entries, CRC)
 * matches. Per-entry corruption is reported via bits of *storageInitError
 * and CO_ERROR_DATA_CORRUPT, but does not abort initialization.
 */
CO_ReturnError_t CO_storageEeprom_init(CO_storage_t *storage,
                                       CO_CANmodule_t *CANmodule,
                                       void *storageModule,
                                       OD_entry_t *OD_1010_StoreParameters,
                                       OD_entry_t *OD_1011_RestoreDefaultParam,
                                       CO_storage_entry_t *entries,
                                       uint8_t entriesCount,
                                       uint32_t *storageInitError)
{
    CO_ReturnError_t ret;
    bool_t eepromOvf = false;

    /* verify arguments */
    if (storage == NULL || entries == NULL || entriesCount == 0
        || storageInitError == NULL
    ) {
        return CO_ERROR_ILLEGAL_ARGUMENT;
    }

    storage->enabled = false;

    /* Initialize storage hardware */
    if (!CO_eeprom_init(storageModule)) {
        *storageInitError = 0xFFFFFFFF;
        return CO_ERROR_DATA_CORRUPT;
    }

    /* initialize storage and OD extensions */
    ret = CO_storage_init(storage, CANmodule, OD_1010_StoreParameters,
                          OD_1011_RestoreDefaultParam, storeEeprom,
                          restoreEeprom, entries, entriesCount);
    if (ret != CO_ERROR_NO) {
        return ret;
    }

    /* Read entry signatures from the eeprom */
    /* NOTE: variable-length array sized by entriesCount (uint8_t, so at most
     * 255 * 4 bytes of stack). */
    uint32_t signatures[entriesCount];
    size_t signaturesAddress = CO_eeprom_getAddr(storageModule, false,
                                                 sizeof(signatures),
                                                 &eepromOvf);
    CO_eeprom_readBlock(storageModule, (uint8_t *)signatures,
                        signaturesAddress, sizeof(signatures));

    /* initialize entries */
    *storageInitError = 0;
    for (uint8_t i = 0; i < entriesCount; i++) {
        CO_storage_entry_t *entry = &entries[i];
        bool_t isAuto = (entry->attr & CO_storage_auto) != 0;

        /* verify arguments */
        if (entry->addr == NULL || entry->len == 0 || entry->subIndexOD < 2) {
            *storageInitError = i;
            return CO_ERROR_ILLEGAL_ARGUMENT;
        }

        /* calculate addresses inside eeprom */
        entry->eepromAddrSignature = signaturesAddress + sizeof(uint32_t) * i;
        entry->eepromAddr = CO_eeprom_getAddr(storageModule, isAuto,
                                              entry->len, &eepromOvf);
        entry->offset = 0;

        /* verify if eeprom is too small */
        if (eepromOvf) {
            *storageInitError = i;
            return CO_ERROR_OUT_OF_MEMORY;
        }

        /* 32bit signature (which was stored in eeprom) is combined from
         * 16bit signature of the entry and 16bit CRC checksum of the data
         * block. 16bit signature of the entry is entry->len. */
        uint32_t signature = signatures[i];
        uint16_t signatureInEeprom = (uint16_t)signature;
        entry->crc = (uint16_t)(signature >> 16);
        uint16_t signatureOfEntry = (uint16_t)entry->len;

        /* Verify two signatures */
        bool_t dataCorrupt = false;
        if (signatureInEeprom != signatureOfEntry) {
            dataCorrupt = true;
        }
        else {
            /* Read data into storage location */
            /* NOTE(review): entry->storageModule is read here but never
             * assigned in this file — presumably populated by
             * CO_storage_init() or by the caller; verify. */
            CO_eeprom_readBlock(entry->storageModule, entry->addr,
                                entry->eepromAddr, entry->len);
            /* Verify CRC, except for auto storage variables */
            if (!isAuto) {
                uint16_t crc = crc16_ccitt(entry->addr, entry->len, 0);
                if (crc != entry->crc) {
                    dataCorrupt = true;
                }
            }
        }

        /* additional info in case of error */
        if (dataCorrupt) {
            /* One error bit per OD sub-index, saturated at bit 31. */
            uint32_t errorBit = entry->subIndexOD;
            if (errorBit > 31) errorBit = 31;
            *storageInitError |= ((uint32_t) 1) << errorBit;
            ret = CO_ERROR_DATA_CORRUPT;
        }
    } /* for (entries) */

    storage->enabled = true;
    return ret;
}

/******************************************************************************/
/*
 * Background auto-save for entries flagged CO_storage_auto. With
 * saveAll==false, each call writes (at most) one changed byte per entry and
 * advances entry->offset round-robin, spreading eeprom wear and keeping each
 * call short; with saveAll==true, it loops until every byte of every auto
 * entry has been written.
 */
void CO_storageEeprom_auto_process(CO_storage_t *storage, bool_t saveAll) {
    /* verify arguments */
    if (storage == NULL || !storage->enabled) {
        return;
    }

    /* loop through entries */
    for (uint8_t i = 0; i < storage->entriesCount; i++) {
        CO_storage_entry_t *entry = &storage->entries[i];

        if ((entry->attr & CO_storage_auto) == 0) continue;

        if (saveAll) {
            /* update all bytes */
            /* NOTE: the inner 'i' intentionally shadows the outer entry
             * index; it only advances when the byte update succeeds, so a
             * busy eeprom is retried. */
            for (size_t i = 0; i < entry->len; ) {
                uint8_t dataByteToUpdate = ((uint8_t *)(entry->addr))[i];
                size_t eepromAddr = entry->eepromAddr + i;
                if (CO_eeprom_updateByte(entry->storageModule,
                                         dataByteToUpdate, eepromAddr)
                ) {
                    i++;
                }
            }
        }
        else {
            /* update one data byte and if successful increment to next */
            uint8_t dataByteToUpdate = ((uint8_t*)(entry->addr))[entry->offset];
            size_t eepromAddr = entry->eepromAddr + entry->offset;
            if (CO_eeprom_updateByte(entry->storageModule, dataByteToUpdate,
                                     eepromAddr)
            ) {
                if (++entry->offset >= entry->len) {
                    entry->offset = 0;
                }
            }
        }
    }
}

#endif /* (CO_CONFIG_STORAGE) & CO_CONFIG_STORAGE_ENABLE */
5,110
341
package com.aventstack.extentreports.reporter.klov.entity;

/**
 * REST endpoint paths on the Klov server used by the Klov reporter.
 */
public class KlovURI {
    /** Endpoint for project resources. */
    public static final String PROJECT = "/api/projects";
    /** Endpoint for report resources. */
    public static final String REPORT = "/api/reports";
}
61
843
package io.fullstack.firestack; import android.content.Context; import android.util.Log; import android.os.Bundle; import java.util.Iterator; import java.util.Map; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import com.facebook.react.bridge.Arguments; import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; import com.facebook.react.bridge.Callback; import com.facebook.react.bridge.WritableMap; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.ReactContext; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnFailureListener; import com.google.android.gms.tasks.Task; import com.google.firebase.FirebaseApp; import com.google.firebase.analytics.FirebaseAnalytics; import com.google.firebase.analytics.FirebaseAnalytics.Event.*; import com.google.firebase.analytics.FirebaseAnalytics.Param; class FirestackAnalyticsModule extends ReactContextBaseJavaModule { private static final String TAG = "FirestackAnalytics"; private Context context; private ReactContext mReactContext; private FirebaseAnalytics mFirebaseAnalytics; public FirestackAnalyticsModule(ReactApplicationContext reactContext) { super(reactContext); this.context = reactContext; mReactContext = reactContext; Log.d(TAG, "New instance"); mFirebaseAnalytics = FirebaseAnalytics.getInstance(this.context); } @Override public String getName() { return TAG; } @ReactMethod public void logEventWithName(final String name, final ReadableMap props, final Callback callback) { // TODO // FirestackUtils.todoNote(TAG, "logEventWithName", callback); Map<String, Object> m = FirestackUtils.recursivelyDeconstructReadableMap(props); final String eventName = getEventName(name); final Bundle bundle = makeEventBundle(name, m); Log.d(TAG, "Logging event " + eventName); mFirebaseAnalytics.logEvent(name, bundle); } private 
String getEventName(final String name) { if (name == FirebaseAnalytics.Event.ADD_PAYMENT_INFO) {return FirebaseAnalytics.Event.ADD_PAYMENT_INFO; } else if (name == FirebaseAnalytics.Event.ADD_TO_CART) {return FirebaseAnalytics.Event.ADD_TO_CART;} else if (name == FirebaseAnalytics.Event.ADD_TO_WISHLIST) {return FirebaseAnalytics.Event.ADD_TO_WISHLIST;} else if (name == FirebaseAnalytics.Event.APP_OPEN) {return FirebaseAnalytics.Event.APP_OPEN;} else if (name == FirebaseAnalytics.Event.BEGIN_CHECKOUT) {return FirebaseAnalytics.Event.BEGIN_CHECKOUT;} else if (name == FirebaseAnalytics.Event.ECOMMERCE_PURCHASE) {return FirebaseAnalytics.Event.ECOMMERCE_PURCHASE;} else if (name == FirebaseAnalytics.Event.GENERATE_LEAD) {return FirebaseAnalytics.Event.GENERATE_LEAD;} else if (name == FirebaseAnalytics.Event.JOIN_GROUP) {return FirebaseAnalytics.Event.JOIN_GROUP;} else if (name == FirebaseAnalytics.Event.LEVEL_UP) {return FirebaseAnalytics.Event.LEVEL_UP;} else if (name == FirebaseAnalytics.Event.LOGIN) {return FirebaseAnalytics.Event.LOGIN;} else if (name == FirebaseAnalytics.Event.POST_SCORE) {return FirebaseAnalytics.Event.POST_SCORE;} else if (name == FirebaseAnalytics.Event.PRESENT_OFFER) {return FirebaseAnalytics.Event.PRESENT_OFFER;} else if (name == FirebaseAnalytics.Event.PURCHASE_REFUND) {return FirebaseAnalytics.Event.PURCHASE_REFUND;} else if (name == FirebaseAnalytics.Event.SEARCH) {return FirebaseAnalytics.Event.SEARCH;} else if (name == FirebaseAnalytics.Event.SELECT_CONTENT) {return FirebaseAnalytics.Event.SELECT_CONTENT;} else if (name == FirebaseAnalytics.Event.SHARE) {return FirebaseAnalytics.Event.SHARE;} else if (name == FirebaseAnalytics.Event.SIGN_UP) {return FirebaseAnalytics.Event.SIGN_UP;} else if (name == FirebaseAnalytics.Event.SPEND_VIRTUAL_CURRENCY) {return FirebaseAnalytics.Event.SPEND_VIRTUAL_CURRENCY;} else if (name == FirebaseAnalytics.Event.TUTORIAL_BEGIN) {return FirebaseAnalytics.Event.TUTORIAL_BEGIN;} else if (name == 
FirebaseAnalytics.Event.TUTORIAL_COMPLETE) {return FirebaseAnalytics.Event.TUTORIAL_COMPLETE;} else if (name == FirebaseAnalytics.Event.UNLOCK_ACHIEVEMENT) {return FirebaseAnalytics.Event.UNLOCK_ACHIEVEMENT;} else if (name == FirebaseAnalytics.Event.VIEW_ITEM) {return FirebaseAnalytics.Event.VIEW_ITEM;} else if (name == FirebaseAnalytics.Event.VIEW_ITEM_LIST) {return FirebaseAnalytics.Event.VIEW_ITEM_LIST;} else if (name == FirebaseAnalytics.Event.VIEW_SEARCH_RESULTS) {return FirebaseAnalytics.Event.VIEW_SEARCH_RESULTS;} else return name; } private Bundle makeEventBundle(final String name, final Map<String, Object> map) { Bundle bundle = new Bundle(); // Available from the Analytics event if (map.containsKey("id")) { String id = (String) map.get("id"); bundle.putString(FirebaseAnalytics.Param.ITEM_ID, id); } if (map.containsKey("name")) { String val = (String) map.get("name"); bundle.putString(FirebaseAnalytics.Param.ITEM_NAME, val); } if (map.containsKey("category")) { String val = (String) map.get("category"); bundle.putString(FirebaseAnalytics.Param.ITEM_NAME, val); } if (map.containsKey("quantity")) { double val = (double) map.get("quantity"); bundle.putDouble(FirebaseAnalytics.Param.QUANTITY, val); } if (map.containsKey("price")) { double val = (double) map.get("price"); bundle.putDouble(FirebaseAnalytics.Param.PRICE, val); } if (map.containsKey("value")) { double val = (double) map.get("value"); bundle.putDouble(FirebaseAnalytics.Param.VALUE, val); } if (map.containsKey("currency")) { String val = (String) map.get("currency"); bundle.putString(FirebaseAnalytics.Param.CURRENCY, val); } if (map.containsKey("origin")) { String val = (String) map.get("origin"); bundle.putString(FirebaseAnalytics.Param.ORIGIN, val); } if (map.containsKey("item_location_id")) { String val = (String) map.get("item_location_id"); bundle.putString(FirebaseAnalytics.Param.ITEM_LOCATION_ID, val); } if (map.containsKey("location")) { String val = (String) map.get("location"); 
bundle.putString(FirebaseAnalytics.Param.LOCATION, val); } if (map.containsKey("destination")) { String val = (String) map.get("destination"); bundle.putString(FirebaseAnalytics.Param.DESTINATION, val); } if (map.containsKey("start_date")) { String val = (String) map.get("start_date"); bundle.putString(FirebaseAnalytics.Param.START_DATE, val); } if (map.containsKey("end_date")) { String val = (String) map.get("end_date"); bundle.putString(FirebaseAnalytics.Param.END_DATE, val); } if (map.containsKey("transaction_id")) { String val = (String) map.get("transaction_id"); bundle.putString(FirebaseAnalytics.Param.TRANSACTION_ID, val); } if (map.containsKey("number_of_nights")) { long val = (long) map.get("number_of_nights"); bundle.putLong(FirebaseAnalytics.Param.NUMBER_OF_NIGHTS, val); } if (map.containsKey("number_of_rooms")) { long val = (long) map.get("number_of_rooms"); bundle.putLong(FirebaseAnalytics.Param.NUMBER_OF_ROOMS, val); } if (map.containsKey("number_of_passengers")) { long val = (long) map.get("number_of_passengers"); bundle.putLong(FirebaseAnalytics.Param.NUMBER_OF_PASSENGERS, val); } if (map.containsKey("travel_class")) { String val = (String) map.get("travel_class"); bundle.putString(FirebaseAnalytics.Param.TRAVEL_CLASS, val); } if (map.containsKey("coupon")) { String val = (String) map.get("coupon"); bundle.putString(FirebaseAnalytics.Param.COUPON, val); } if (map.containsKey("tax")) { long val = (long) map.get("tax"); bundle.putLong(FirebaseAnalytics.Param.TAX, val); } if (map.containsKey("shipping")) { double val = (double) map.get("shipping"); bundle.putDouble(FirebaseAnalytics.Param.SHIPPING, val); } if (map.containsKey("group_id")) { String val = (String) map.get("group_id"); bundle.putString(FirebaseAnalytics.Param.GROUP_ID, val); } if (map.containsKey("level")) { long val = (long) map.get("level"); bundle.putLong(FirebaseAnalytics.Param.LEVEL, val); } if (map.containsKey("character")) { String val = (String) map.get("character"); 
bundle.putString(FirebaseAnalytics.Param.CHARACTER, val); } if (map.containsKey("score")) { long val = (long) map.get("score"); bundle.putLong(FirebaseAnalytics.Param.SCORE, val); } if (map.containsKey("search_term")) { String val = (String) map.get("search_term"); bundle.putString(FirebaseAnalytics.Param.SEARCH_TERM, val); } if (map.containsKey("content_type")) { String val = (String) map.get("content_type"); bundle.putString(FirebaseAnalytics.Param.CONTENT_TYPE, val); } if (map.containsKey("sign_up_method")) { String val = (String) map.get("sign_up_method"); bundle.putString(FirebaseAnalytics.Param.SIGN_UP_METHOD, val); } if (map.containsKey("virtual_currency_name")) { String val = (String) map.get("virtual_currency_name"); bundle.putString(FirebaseAnalytics.Param.VIRTUAL_CURRENCY_NAME, val); } if (map.containsKey("achievement_id")) { String val = (String) map.get("achievement_id"); bundle.putString(FirebaseAnalytics.Param.ACHIEVEMENT_ID, val); } if (map.containsKey("flight_number")) { String val = (String) map.get("flight_number"); bundle.putString(FirebaseAnalytics.Param.FLIGHT_NUMBER, val); } Iterator<Map.Entry<String, Object>> entries = map.entrySet().iterator(); while (entries.hasNext()) { Map.Entry<String, Object> entry = entries.next(); if (bundle.getBundle(entry.getKey()) == null) { bundle.putString(entry.getKey(), entry.getValue().toString()); } } return bundle; } }
3,813
543
<gh_stars>100-1000 package com.riiablo; import com.badlogic.gdx.assets.AssetDescriptor; import com.badlogic.gdx.assets.AssetManager; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.riiablo.graphics.BlendMode; import com.riiablo.codec.FontTBL; import com.riiablo.loader.BitmapFontLoader; public class Fonts { public final BitmapFont consolas12; public final BitmapFont consolas16; public final FontTBL.BitmapFont font6; public final FontTBL.BitmapFont font8; public final FontTBL.BitmapFont font16; public final FontTBL.BitmapFont font24; public final FontTBL.BitmapFont font30; public final FontTBL.BitmapFont font42; public final FontTBL.BitmapFont fontformal10; public final FontTBL.BitmapFont fontformal11; public final FontTBL.BitmapFont fontformal12; public final FontTBL.BitmapFont fontexocet10; public final FontTBL.BitmapFont fontridiculous; public final FontTBL.BitmapFont ReallyTheLastSucker; public Fonts(AssetManager assets) { consolas12 = loadEx(assets, "consolas12.fnt"); consolas16 = loadEx(assets, "consolas16.fnt"); font6 = load(assets, "font6", BlendMode.LUMINOSITY_TINT); font8 = load(assets, "font8", BlendMode.LUMINOSITY_TINT); font16 = load(assets, "font16", BlendMode.LUMINOSITY_TINT); font24 = load(assets, "font24", BlendMode.ID); font30 = load(assets, "font30", BlendMode.ID); font42 = load(assets, "font42", BlendMode.ID); fontformal10 = load(assets, "fontformal10", BlendMode.LUMINOSITY_TINT); fontformal11 = load(assets, "fontformal11", BlendMode.LUMINOSITY_TINT); fontformal12 = load(assets, "fontformal12", BlendMode.LUMINOSITY_TINT); fontexocet10 = load(assets, "fontexocet10", BlendMode.TINT_BLACKS); fontridiculous = load(assets, "fontridiculous", BlendMode.TINT_BLACKS); ReallyTheLastSucker = load(assets, "ReallyTheLastSucker", BlendMode.ID); BitmapFont.BitmapFontData data; data = font8.getData(); data.lineHeight = data.xHeight = data.capHeight = 12; data.ascent = 16; data.down = -12; data = font16.getData(); data.lineHeight = data.xHeight = 
data.capHeight = 14; data.ascent = 17; data.down = -16; data = font42.getData(); data.lineHeight = data.xHeight = data.capHeight = 31; data.ascent = 48; data.down = -31; data = fontformal10.getData(); data.lineHeight = data.xHeight = data.capHeight = 14; data.ascent = 17; data.down = -14; data = fontformal11.getData(); data.lineHeight = data.xHeight = data.capHeight = 18; data.ascent = 18; data.down = -18; data = fontformal12.getData(); data.lineHeight = data.xHeight = data.capHeight = 16; data.ascent = 42; data.down = -20; data = ReallyTheLastSucker.getData(); data.lineHeight = data.xHeight = data.capHeight = 8; data.ascent = 11; data.down = -8; } private BitmapFont loadEx(AssetManager assets, String fontName) { assets.load(fontName, BitmapFont.class); assets.finishLoadingAsset(fontName); return assets.get(fontName); } private FontTBL.BitmapFont load(AssetManager assets, String fontName, int blendMode) { AssetDescriptor<FontTBL.BitmapFont> descriptor = getDescriptor(fontName, blendMode); assets.load(descriptor); assets.finishLoadingAsset(descriptor); return assets.get(descriptor); } private static AssetDescriptor<FontTBL.BitmapFont> getDescriptor(String fontName, int blendMode) { return new AssetDescriptor<>("data\\local\\font\\latin\\" + fontName + ".TBL", FontTBL.BitmapFont.class, BitmapFontLoader.Params.of(blendMode)); } }
1,431
1,144
/** Generated Model - DO NOT CHANGE */ package de.metas.printing.model; import java.sql.ResultSet; import java.util.Properties; /** Generated Model for AD_PrinterHW_MediaSize * @author metasfresh (generated) */ @SuppressWarnings("javadoc") public class X_AD_PrinterHW_MediaSize extends org.compiere.model.PO implements I_AD_PrinterHW_MediaSize, org.compiere.model.I_Persistent { private static final long serialVersionUID = -1034681283L; /** Standard Constructor */ public X_AD_PrinterHW_MediaSize (Properties ctx, int AD_PrinterHW_MediaSize_ID, String trxName) { super (ctx, AD_PrinterHW_MediaSize_ID, trxName); } /** Load Constructor */ public X_AD_PrinterHW_MediaSize (Properties ctx, ResultSet rs, String trxName) { super (ctx, rs, trxName); } /** Load Meta Data */ @Override protected org.compiere.model.POInfo initPO(Properties ctx) { return org.compiere.model.POInfo.getPOInfo(Table_Name); } @Override public de.metas.printing.model.I_AD_PrinterHW getAD_PrinterHW() { return get_ValueAsPO(COLUMNNAME_AD_PrinterHW_ID, de.metas.printing.model.I_AD_PrinterHW.class); } @Override public void setAD_PrinterHW(de.metas.printing.model.I_AD_PrinterHW AD_PrinterHW) { set_ValueFromPO(COLUMNNAME_AD_PrinterHW_ID, de.metas.printing.model.I_AD_PrinterHW.class, AD_PrinterHW); } @Override public void setAD_PrinterHW_ID (int AD_PrinterHW_ID) { if (AD_PrinterHW_ID < 1) set_Value (COLUMNNAME_AD_PrinterHW_ID, null); else set_Value (COLUMNNAME_AD_PrinterHW_ID, Integer.valueOf(AD_PrinterHW_ID)); } @Override public int getAD_PrinterHW_ID() { return get_ValueAsInt(COLUMNNAME_AD_PrinterHW_ID); } @Override public void setAD_PrinterHW_MediaSize_ID (int AD_PrinterHW_MediaSize_ID) { if (AD_PrinterHW_MediaSize_ID < 1) set_ValueNoCheck (COLUMNNAME_AD_PrinterHW_MediaSize_ID, null); else set_ValueNoCheck (COLUMNNAME_AD_PrinterHW_MediaSize_ID, Integer.valueOf(AD_PrinterHW_MediaSize_ID)); } @Override public int getAD_PrinterHW_MediaSize_ID() { return get_ValueAsInt(COLUMNNAME_AD_PrinterHW_MediaSize_ID); } 
@Override public void setName (java.lang.String Name) { set_ValueNoCheck (COLUMNNAME_Name, Name); } @Override public java.lang.String getName() { return (java.lang.String)get_Value(COLUMNNAME_Name); } }
991
690
<filename>livelessons-rest/livelessons-rest-errors/src/main/java/demo/PersonResourceProcessor.java<gh_stars>100-1000
package demo;

import org.springframework.hateoas.Link;
import org.springframework.hateoas.Resource;
import org.springframework.hateoas.ResourceProcessor;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
import org.springframework.web.util.UriComponents;

/**
 * Enriches every {@link Person} resource with a "photo" link pointing at
 * {@code /people/{id}/photo} on the current host.
 */
@Component
public class PersonResourceProcessor implements ResourceProcessor<Resource<Person>> {

	@Override
	public Resource<Person> process(Resource<Person> resource) {
		String personId = Long.toString(resource.getContent().getId());
		// Build the absolute photo URI relative to the current request's
		// context path, then expand the {id} template variable.
		String photoUri = ServletUriComponentsBuilder.fromCurrentContextPath()
				.path("/people/{id}/photo")
				.buildAndExpand(personId)
				.toUriString();
		resource.add(new Link(photoUri, "photo"));
		return resource;
	}
}
292
584
/*******************************************************************************
 * Copyright (c) 2016 <NAME> - github.com/shpralex
 * This program and the accompanying materials
 * are made available under the terms of the The MIT License (MIT)
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *******************************************************************************/
package com.sproutlife.panel.gamepanel.handler;

import com.sproutlife.panel.PanelController;
import com.sproutlife.panel.gamepanel.handler.HandlerConsts.RequiredKey;

/**
 * Default set of input handlers for the game panel: installs a scroll-drag
 * handler on the background; mouse-wheel handling is present but currently
 * disabled (commented out).
 */
public class DefaultHandlerSet extends HandlerSet {
    // Kept locally (in addition to being passed to the superclass) so the
    // factory method below can construct handlers.
    private PanelController panelController;

    public DefaultHandlerSet(PanelController panelController) {
        super(panelController);
        this.panelController = panelController;
        addHandlers();
    }

    /**
     * Creates a drag handler that scrolls the view.
     * NOTE(review): requiredKey presumably gates when the handler is active
     * (RequiredKey.None = always) — confirm in HandlerConsts/ScrollDragHandler.
     */
    public ScrollDragHandler createScrollDragHandler(RequiredKey requiredKey) {
        return new ScrollDragHandler(panelController, requiredKey);
    }

    @Override
    public void addHandlers() {
        // Background Drag Handlers
        addBackgroundDragHandler(createScrollDragHandler(RequiredKey.None));

        // Mouse Wheel Handlers
        //addMouseWheelHandler(new TestMouseWheelHandler(panelController, RequiredKey.Any));
    }
}
415
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_sfx2.hxx" #include <svl/itempool.hxx> #include <svl/poolitem.hxx> #include <svl/stritem.hxx> #include <nochaos.hxx> #include <sfx2/sfxuno.hxx> #define WID_CHAOS_START 500 //========================================================================= // // class CntStaticPoolDefaults_Impl // //========================================================================= class CntItemPool; class CntStaticPoolDefaults_Impl { sal_uInt32 m_nItems; SfxPoolItem** m_ppDefaults; SfxItemInfo* m_pItemInfos; private: // Forbidden and not implemented... 
CntStaticPoolDefaults_Impl( const CntStaticPoolDefaults_Impl& ); CntStaticPoolDefaults_Impl& operator=( const CntStaticPoolDefaults_Impl& ); inline void Insert( SfxPoolItem* pItem, sal_uInt16 nSID, sal_uInt16 nFlags ); public: CntStaticPoolDefaults_Impl( CntItemPool* pPool ); ~CntStaticPoolDefaults_Impl(); SfxPoolItem** GetDefaults() const { return m_ppDefaults; } const SfxItemInfo* GetItemInfos() const { return m_pItemInfos; } }; //---------------------------------------------------------------------------- //========================================================================= class CntItemPool: public SfxItemPool { static CntItemPool* _pThePool; sal_uInt16 _nRefs; protected: CntItemPool(); virtual ~CntItemPool(); public: static CntItemPool* Acquire(); static sal_uInt16 Release(); }; //---------------------------------------------------------------------------- //---------------------------------------------------------------------------- // static SfxItemPool* NoChaos::GetItemPool() { // Get and hold CHAOS item pool. return CntItemPool::Acquire(); } //---------------------------------------------------------------------------- // static sal_uInt16 NoChaos::ReleaseItemPool() { // Release CHAOS item pool. return CntItemPool::Release(); } //========================================================================= // // CntItemPool implementation // //========================================================================= static CntStaticPoolDefaults_Impl* pPoolDefs_Impl = NULL; // static member! CntItemPool* CntItemPool::_pThePool = NULL; //------------------------------------------------------------------------- CntItemPool::CntItemPool() : SfxItemPool( DEFINE_CONST_UNICODE("chaos"), WID_CHAOS_START, WID_CHAOS_START, NULL ), _nRefs( 0 ) { SetFileFormatVersion( SOFFICE_FILEFORMAT_50 ); FreezeIdRanges(); // Create static defaults. pPoolDefs_Impl = new CntStaticPoolDefaults_Impl( this ); // Set item infos. 
SetItemInfos( pPoolDefs_Impl->GetItemInfos() ); // Set static pool default items. SetDefaults( pPoolDefs_Impl->GetDefaults() ); } //------------------------------------------------------------------------- //virtual CntItemPool::~CntItemPool() { // Release static pool default items. ReleaseDefaults( sal_False ); } //------------------------------------------------------------------------- // static CntItemPool* CntItemPool::Acquire() { if ( !_pThePool ) _pThePool = new CntItemPool; _pThePool->_nRefs++; return _pThePool; } //------------------------------------------------------------------------- // static sal_uInt16 CntItemPool::Release() { if ( !_pThePool ) return 0; sal_uInt16& nRefs = _pThePool->_nRefs; if ( nRefs ) --nRefs; if ( !nRefs ) { DELETEZ( _pThePool ); DELETEZ( pPoolDefs_Impl ); return 0; } return nRefs; } //========================================================================= // // CntStaticPoolDefaults_Impl implementation. // //========================================================================= inline void CntStaticPoolDefaults_Impl::Insert( SfxPoolItem* pItem, /* Static Pool Default Item */ sal_uInt16 nSID, sal_uInt16 nFlags /* Item Info */ ) { sal_uInt16 nPos = pItem->Which() - WID_CHAOS_START; m_ppDefaults[ nPos ] = pItem; m_pItemInfos[ nPos ]._nSID = nSID; m_pItemInfos[ nPos ]._nFlags = nFlags; } //------------------------------------------------------------------------- CntStaticPoolDefaults_Impl::~CntStaticPoolDefaults_Impl() { for ( sal_uInt32 n = 0; n < m_nItems; ++n ) delete m_ppDefaults[ n ]; delete [] m_ppDefaults; delete [] m_pItemInfos; } //------------------------------------------------------------------------- CntStaticPoolDefaults_Impl::CntStaticPoolDefaults_Impl( CntItemPool* /*pPool*/ ) : m_nItems( 1 ), m_ppDefaults( new SfxPoolItem* [ m_nItems ] ), m_pItemInfos( new SfxItemInfo [ m_nItems ] ) { rtl_zeroMemory( m_ppDefaults, sizeof( SfxPoolItem* ) * m_nItems ); rtl_zeroMemory( m_pItemInfos, sizeof( SfxItemInfo ) * 
m_nItems ); Insert( new SfxStringItem( WID_CHAOS_START, String() ), 0, SFX_ITEM_POOLABLE ); }
1,773
772
<reponame>ryansloan/code-dot-or<gh_stars>100-1000 { "co-CO": { "data": { "short_instructions": { "2-3 Artist 12": "Difinisce u bloccu \"ripete parechje volte\" ch'ellu fia une \"ronda\" da compie l'ochju. Sè ognu muvimentu gira à a diritta d'un gradu, quantu volta devi ripete lu per compie u rebbiu . 180 ? 360 ? 500 ?", "2-3 Artist 2 new": "Sta casa hà un muru finitu à a metà. Ogni latu face 100 pixels, mà u muru \nfinitu à a metà face 50 pixels. Finisci u muru." } } } }
237
777
<gh_stars>100-1000 // Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <GLES2/gl2.h> #include <GLES2/gl2ext.h> #include <GLES2/gl2extchromium.h> #include <GLES3/gl3.h> #include "base/command_line.h" #include "base/strings/string_split.h" #include "base/strings/string_util.h" #include "gpu/command_buffer/tests/gl_manager.h" #include "gpu/command_buffer/tests/gl_test_utils.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" #include "ui/gl/gl_switches.h" #define SHADER_VERSION_300(Src) "#version 300 es\n" #Src namespace gpu { class OpenGLES3FunctionTest : public testing::Test { protected: void SetUp() override { base::CommandLine command_line(*base::CommandLine::ForCurrentProcess()); GLManager::Options options; options.context_type = gles2::CONTEXT_TYPE_OPENGLES3; gl_.InitializeWithCommandLine(options, command_line); } void TearDown() override { gl_.Destroy(); } bool IsApplicable() const { return gl_.IsInitialized(); } GLManager gl_; }; #if defined(OS_ANDROID) // Test is failing for Lollipop 64 bit Tester. // See crbug/550292. 
#define MAYBE_GetFragDataLocationInvalid DISABLED_GetFragDataLocationInvalid #else #define MAYBE_GetFragDataLocationInvalid GetFragDataLocationInvalid #endif TEST_F(OpenGLES3FunctionTest, MAYBE_GetFragDataLocationInvalid) { if (!IsApplicable()) { return; } // clang-format off static const char* kVertexShader = SHADER_VERSION_300( in vec4 position; void main() { gl_Position = position; }); static const char* kFragColorShader = SHADER_VERSION_300( precision mediump float; uniform vec4 src; out vec4 FragColor; void main() { FragColor = src; }); // clang-format on GLuint vsid = GLTestHelper::LoadShader(GL_VERTEX_SHADER, kVertexShader); GLuint fsid = GLTestHelper::LoadShader(GL_FRAGMENT_SHADER, kFragColorShader); GLuint program = glCreateProgram(); glAttachShader(program, vsid); glAttachShader(program, fsid); glDeleteShader(vsid); glDeleteShader(fsid); GLint location = glGetFragDataLocation(program, "FragColor"); EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), glGetError()); EXPECT_EQ(-1, location); location = glGetFragDataLocation(program, "Unknown"); EXPECT_EQ(static_cast<GLenum>(GL_INVALID_OPERATION), glGetError()); EXPECT_EQ(-1, location); glLinkProgram(program); location = glGetFragDataLocation(program, "FragColor"); EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError()); EXPECT_EQ(0, location); location = glGetFragDataLocation(program, "Unknown"); EXPECT_EQ(static_cast<GLenum>(GL_NO_ERROR), glGetError()); EXPECT_EQ(-1, location); glDeleteProgram(program); } TEST_F(OpenGLES3FunctionTest, GetStringiTest) { if (!IsApplicable()) { return; } std::string extensionString = reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS)); std::vector<std::string> extensions = base::SplitString(extensionString, base::kWhitespaceASCII, base::TRIM_WHITESPACE, base::SPLIT_WANT_NONEMPTY); int num_extensions = 0; glGetIntegerv(GL_NUM_EXTENSIONS, &num_extensions); EXPECT_EQ(extensions.size(), static_cast<size_t>(num_extensions)); std::set<std::string> 
extensions_from_string(extensions.begin(), extensions.end()); std::set<std::string> extensions_from_stringi; for (int i = 0; i < num_extensions; ++i) { extensions_from_stringi.insert( reinterpret_cast<const char*>(glGetStringi(GL_EXTENSIONS, i))); } EXPECT_EQ(extensions_from_string, extensions_from_stringi); } } // namespace gpu
1,495
5,169
{ "name": "CSSParser", "version": "1.0.1", "summary": "Swift CSS parser based on katana-parser.", "description": "CSSParser is a tiny Swift wrapper around the pure C katana-parser framework.", "homepage": "https://github.com/mcudich/CSSParser", "license": "MIT", "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/mcudich/CSSParser.git", "tag": "1.0.1", "submodules": true }, "social_media_url": "https://twitter.com/mcudich", "platforms": { "ios": "9.3" }, "source_files": [ "Sources/**/*", "Carthage/Checkouts/katana-parser/src/*" ], "public_header_files": "Sources/CSSParser.h", "pushed_with_swift_version": "3.0" }
291
528
from unittest.mock import MagicMock import pytest class TestAnnotate: @pytest.fixture def annotate(self): from palladium.interfaces import annotate return annotate def test_with_existing_data(self, annotate): model = MagicMock(__metadata__={'one': 1, 'two': 2}) annotate(model, {'one': '11'}) assert model.__metadata__['one'] == '11' assert model.__metadata__['two'] == 2 assert annotate(model) == {'one': '11', 'two': 2} def test_without_existing_data(self, annotate): model = TestAnnotate() assert annotate(model, {'one': '11'}) == {'one': '11'} assert model.__metadata__['one'] == '11' def load_data_decorator(func): def inner(self): X, y = func(self) return self.name + X, self.name + y return inner class TestDatasetLoader: @pytest.fixture def DatasetLoader(self): from palladium.interfaces import DatasetLoader return DatasetLoader def test_call_decorator(self, DatasetLoader, config): config['load_data_decorators'] = [ 'palladium.tests.test_interfaces.load_data_decorator' ] class MyDatasetLoader(DatasetLoader): name = 'hey' def __call__(self): return 'X', 'y' assert MyDatasetLoader()() == ('heyX', 'heyy') class TestPredictError: @pytest.fixture def PredictError(self): from palladium.interfaces import PredictError return PredictError def test_str(self, PredictError): assert str(PredictError("message", 123)) == "message (123)"
705
32,467
#ifndef PHP_DB1_H #define PHP_DB1_H #if DBA_DB1 #include "php_dba.h" DBA_FUNCS(db1); #endif #endif
60
410
#pragma once namespace LuaCpp { class TypeError : public std::exception { public: explicit TypeError(std::string expected) : _message(std::move(expected) + " expected, got no object.") {} explicit TypeError(std::string expected, const std::string &actual) : _message(std::move(expected) + " expected, got " + actual + '.') {} const char *what() const noexcept override { return _message.c_str(); } private: std::string _message; }; class CopyUnregisteredType : public std::exception { public: using TypeID = std::reference_wrapper<const std::type_info>; explicit CopyUnregisteredType(TypeID type) : _type(type) {} TypeID getType() const { return _type; } const char *what() const noexcept override { return "Tried to copy an object of an unregistered type. " "Please register classes before passing instances by value."; } private: TypeID _type; }; }
322
9,734
<gh_stars>1000+ // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #include "benchmark/benchmark.h" #include <cstdint> #include <string> #include <vector> #include "arrow/status.h" #include "arrow/testing/gtest_util.h" #include "arrow/util/trie.h" namespace arrow { namespace internal { std::vector<std::string> AllNulls() { return {"#N/A", "#N/A N/A", "#NA", "-1.#IND", "-1.#QNAN", "-NaN", "-nan", "1.#IND", "1.#QNAN", "N/A", "NA", "NULL", "NaN", "n/a", "nan", "null"}; } Trie MakeNullsTrie() { auto nulls = AllNulls(); TrieBuilder builder; for (const auto& str : AllNulls()) { ABORT_NOT_OK(builder.Append(str)); } return builder.Finish(); } std::vector<std::string> Expand(const std::vector<std::string>& base, size_t n) { std::vector<std::string> result; result.reserve(n); while (true) { for (const auto& v : base) { result.push_back(v); if (result.size() == n) { return result; } } } } static void BenchmarkTrieLookups(benchmark::State& state, // NOLINT non-const reference const std::vector<std::string>& strings) { Trie trie = MakeNullsTrie(); int32_t total = 0; auto lookups = Expand(strings, 100); for (auto _ : state) { for (const auto& s : lookups) { total += trie.Find(s); } } benchmark::DoNotOptimize(total); 
state.SetItemsProcessed(state.iterations() * lookups.size()); } static void TrieLookupFound(benchmark::State& state) { // NOLINT non-const reference BenchmarkTrieLookups(state, {"N/A", "null", "-1.#IND", "N/A"}); } static void TrieLookupNotFound(benchmark::State& state) { // NOLINT non-const reference BenchmarkTrieLookups(state, {"None", "1.0", "", "abc"}); } BENCHMARK(TrieLookupFound); BENCHMARK(TrieLookupNotFound); #ifdef ARROW_WITH_BENCHMARKS_REFERENCE static inline bool InlinedNullLookup(util::string_view s) { // An inlined version of trie lookup for a specific set of strings // (see AllNulls()) auto size = s.length(); auto data = s.data(); if (size == 0) { return false; } if (size == 1) { return false; } auto chars = reinterpret_cast<const char*>(data); auto first = chars[0]; auto second = chars[1]; switch (first) { case 'N': { // "NA", "N/A", "NaN", "NULL" if (size == 2) { return second == 'A'; } auto third = chars[2]; if (size == 3) { return (second == '/' && third == 'A') || (second == 'a' && third == 'N'); } if (size == 4) { return (second == 'U' && third == 'L' && chars[3] == 'L'); } return false; } case 'n': { // "n/a", "nan", "null" if (size == 2) { return false; } auto third = chars[2]; if (size == 3) { return (second == '/' && third == 'a') || (second == 'a' && third == 'n'); } if (size == 4) { return (second == 'u' && third == 'l' && chars[3] == 'l'); } return false; } case '1': { // '1.#IND', '1.#QNAN' if (size == 6) { // '#' is the most unlikely char here, check it first return (chars[2] == '#' && chars[1] == '.' && chars[3] == 'I' && chars[4] == 'N' && chars[5] == 'D'); } if (size == 7) { return (chars[2] == '#' && chars[1] == '.' 
&& chars[3] == 'Q' && chars[4] == 'N' && chars[5] == 'A' && chars[6] == 'N'); } return false; } case '-': { switch (second) { case 'N': // "-NaN" return (size == 4 && chars[2] == 'a' && chars[3] == 'N'); case 'n': // "-nan" return (size == 4 && chars[2] == 'a' && chars[3] == 'n'); case '1': // "-1.#IND", "-1.#QNAN" if (size == 7) { return (chars[3] == '#' && chars[2] == '.' && chars[4] == 'I' && chars[5] == 'N' && chars[6] == 'D'); } if (size == 8) { return (chars[3] == '#' && chars[2] == '.' && chars[4] == 'Q' && chars[5] == 'N' && chars[6] == 'A' && chars[7] == 'N'); } return false; default: return false; } } case '#': { // "#N/A", "#N/A N/A", "#NA" if (size < 3 || chars[1] != 'N') { return false; } auto third = chars[2]; if (size == 3) { return third == 'A'; } if (size == 4) { return third == '/' && chars[3] == 'A'; } if (size == 8) { return std::memcmp(data + 2, "/A N/A", 5) == 0; } return false; } default: return false; } } static void BenchmarkInlinedTrieLookups( benchmark::State& state, // NOLINT non-const reference const std::vector<std::string>& strings) { int32_t total = 0; auto lookups = Expand(strings, 100); for (auto _ : state) { for (const auto& s : lookups) { total += InlinedNullLookup(s); } } benchmark::DoNotOptimize(total); state.SetItemsProcessed(state.iterations() * lookups.size()); } static void InlinedTrieLookupFound( benchmark::State& state) { // NOLINT non-const reference BenchmarkInlinedTrieLookups(state, {"N/A", "null", "-1.#IND", "N/A"}); } static void InlinedTrieLookupNotFound( benchmark::State& state) { // NOLINT non-const reference BenchmarkInlinedTrieLookups(state, {"None", "1.0", "", "abc"}); } BENCHMARK(InlinedTrieLookupFound); BENCHMARK(InlinedTrieLookupNotFound); #endif } // namespace internal } // namespace arrow
2,756