max_stars_count: int64 (301 to 224k)
text: string (lengths 6 to 1.05M)
token_count: int64 (3 to 727k)
Each row below lists max_stars_count, then the file text, then token_count.
348
{"nom":"Robecq","circ":"9ème circonscription","dpt":"Pas-de-Calais","inscrits":1081,"abs":447,"votants":634,"blancs":14,"nuls":6,"exp":614,"res":[{"nuance":"MDM","nom":"Mme <NAME>","voix":164},{"nuance":"FN","nom":"M. <NAME>","voix":163},{"nuance":"LR","nom":"<NAME>","voix":77},{"nuance":"FI","nom":"M. <NAME>","voix":66},{"nuance":"RDG","nom":"M. <NAME>","voix":64},{"nuance":"ECO","nom":"Mme <NAME>","voix":25},{"nuance":"COM","nom":"M. <NAME>","voix":19},{"nuance":"DLF","nom":"M. <NAME>","voix":11},{"nuance":"DVG","nom":"M. <NAME>","voix":10},{"nuance":"DIV","nom":"M. <NAME>","voix":6},{"nuance":"ECO","nom":"M. <NAME>","voix":4},{"nuance":"EXG","nom":"Mme <NAME>","voix":4},{"nuance":"DIV","nom":"M. <NAME>","voix":1}]}
303
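The row above is a raw French legislative-election record: `exp` counts valid ballots and `res` holds per-candidate results. A minimal Python sketch of how such a record can be parsed and ranked — the truncated `record` literal below is illustrative only, and the anonymized `<NAME>` placeholders are kept exactly as they appear in the data:

import json

# Illustrative, truncated copy of the record above (most fields omitted).
record = '{"nom":"Robecq","exp":614,"res":[{"nuance":"MDM","nom":"Mme <NAME>","voix":164},{"nuance":"FN","nom":"M. <NAME>","voix":163}]}'

data = json.loads(record)
# Rank candidates by raw vote count ("voix") and report their share of valid votes.
for cand in sorted(data["res"], key=lambda c: c["voix"], reverse=True):
    share = 100.0 * cand["voix"] / data["exp"]
    print(f'{cand["nuance"]:>4}: {cand["voix"]} votes ({share:.1f}%)')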
347
<filename>frontend/webadmin/modules/gwt-common/src/main/java/org/ovirt/engine/ui/common/widget/action/ContextMenuPanelPopup.java
package org.ovirt.engine.ui.common.widget.action;

import com.google.gwt.core.client.GWT;
import com.google.gwt.resources.client.ClientBundle;

public class ContextMenuPanelPopup extends MenuPanelPopup {

    public interface Resources extends ClientBundle {
        @Source("org/ovirt/engine/ui/common/css/ContextMenuPanelPopup.css")
        Style style();
    }

    private static final Resources resources = GWT.create(Resources.class);

    public ContextMenuPanelPopup(boolean autoHide) {
        super(autoHide);
    }

    protected void ensureStyleInjected() {
        resources.style().ensureInjected();
    }

    protected String getMenuBarStyle() {
        return resources.style().actionPanelPopupMenuBar();
    }

    protected String getPopupPanelStyle() {
        return resources.style().actionPanelPopupPanel();
    }
}
335
861
package cn.springcloud.gray.client.switcher;

public interface GraySwitcher {

    boolean state();

    boolean isEnableGrayRouting();

    class DefaultGraySwitcher implements GraySwitcher {

        @Override
        public boolean state() {
            return true;
        }

        @Override
        public boolean isEnableGrayRouting() {
            return true;
        }
    }
}
167
435
{ "alias": "video/648/static-analysis-of-python-extension-modules-using", "category": "PyCon US 2012", "copyright_text": "", "description": "", "duration": null, "id": 648, "language": "eng", "quality_notes": "", "recorded": "2012-03-09", "slug": "static-analysis-of-python-extension-modules-using", "speakers": [ "<NAME>" ], "summary": "Want to analyse C/C++ code using Python? I've written a plugin for GCC\nthat embeds Python inside the compiler, allowing you to write new C/C++\ncompilation passes in Python. I've used this to build a static analysis\ntool that understands the CPython extension API, and can automatically\ndetect reference- counting bugs, and other errors.\n", "tags": [], "thumbnail_url": "https://img.youtube.com/vi/n6145JSeqWc/hqdefault.jpg", "title": "Static analysis of Python extension modules using GCC", "videos": [ { "length": 0, "type": "youtube", "url": "https://www.youtube.com/watch?v=n6145JSeqWc" } ] }
359
688
<reponame>frederikdangel/microservice-kubernetes
package com.ewolff.microservice.order.catalogstub;

import java.util.Arrays;

import org.springframework.context.annotation.Profile;
import org.springframework.hateoas.PagedModel;
import org.springframework.hateoas.PagedModel.PageMetadata;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import com.ewolff.microservice.order.clients.Item;

@RestController
@RequestMapping("/catalog")
@Profile("test")
public class CatalogStub {

    @RequestMapping(value = "/{id}", method = RequestMethod.GET)
    public ResponseEntity<Item> getById(@PathVariable("id") long id) {
        if (id != 1) {
            return new ResponseEntity<Item>(HttpStatus.NOT_FOUND);
        }
        return new ResponseEntity<Item>(new Item(1, "iPod", 42.0), HttpStatus.OK);
    }

    @RequestMapping(method = RequestMethod.GET)
    public PagedModel<Item> getAll() {
        return new PagedModel<Item>(Arrays.asList(new Item(1, "iPod", 42.0)),
                new PageMetadata(1, 0, 1));
    }
}
433
2,939
<gh_stars>1000+ #include "lumen/EIR/Conversion/AggregateOpConversions.h" namespace lumen { namespace eir { struct ConsOpConversion : public EIROpConversion<ConsOp> { using EIROpConversion::EIROpConversion; LogicalResult matchAndRewrite( ConsOp op, ArrayRef<Value> operands, ConversionPatternRewriter &rewriter) const override { auto ctx = getRewriteContext(op, rewriter); ConsOpAdaptor adaptor(operands); auto termTy = ctx.getUsizeType(); auto termPtrTy = termTy.getPointerTo(); auto i32Ty = ctx.getI32Type(); auto consTy = ctx.targetInfo.getConsType(); auto head = adaptor.head(); auto tail = adaptor.tail(); // Allocate header on heap, write values to header, then box OpaqueTermType kind = rewriter.getType<ConsType>(); Value zero = llvm_constant(i32Ty, ctx.getI32Attr(0)); Value one = llvm_constant(i32Ty, ctx.getI32Attr(1)); Value arity = llvm_zext(termTy, zero); // TODO(pauls): We should optimize this for allocating multiple // cells by providing an optional pointer and index at which to // allocate this cell, by offsetting the pointer by `index * // sizeof(cell)` and then storing directly into that memory Value cellPtr = ctx.buildMalloc(consTy, TypeKind::Cons, arity); ArrayRef<Value> headIndices{zero, zero}; Value headPtr = llvm_gep(termPtrTy, cellPtr, headIndices); llvm_store(head, headPtr); ArrayRef<Value> tailIndices{zero, one}; Value tailPtr = llvm_gep(termPtrTy, cellPtr, tailIndices); llvm_store(tail, tailPtr); auto boxed = ctx.encodeList(cellPtr); rewriter.replaceOp(op, boxed); return success(); } }; struct ListOpConversion : public EIROpConversion<ListOp> { using EIROpConversion::EIROpConversion; LogicalResult matchAndRewrite( ListOp op, ArrayRef<Value> operands, ConversionPatternRewriter &rewriter) const override { auto ctx = getRewriteContext(op, rewriter); ListOpAdaptor adaptor(operands); auto elements = adaptor.elements(); auto numElements = elements.size(); if (numElements == 0) { Value nil = eir_nil(); rewriter.replaceOp(op, nil); return success(); } // Lower to single cons cell if it fits if (numElements < 2) { Value head = elements.front(); Value list = eir_cons(head, eir_nil()); rewriter.replaceOp(op, list); return success(); } unsigned cellsRequired = numElements; unsigned currentIndex = numElements; Value list; while (currentIndex > 0) { if (!list) { Value tail = elements[--currentIndex]; Value head = elements[--currentIndex]; list = eir_cons(head, tail); } else { Value head = elements[--currentIndex]; list = eir_cons(head, list); } } rewriter.replaceOp(op, list); return success(); } }; struct TupleOpConversion : public EIROpConversion<TupleOp> { using EIROpConversion::EIROpConversion; LogicalResult matchAndRewrite( TupleOp op, ArrayRef<Value> operands, ConversionPatternRewriter &rewriter) const override { auto ctx = getRewriteContext(op, rewriter); TupleOpAdaptor adaptor(operands); auto termTy = ctx.getUsizeType(); auto termPtrTy = termTy.getPointerTo(); auto i32Ty = ctx.getI32Type(); auto elements = adaptor.elements(); auto numElements = elements.size(); auto tupleTy = ctx.getTupleType(numElements); // Allocate header on heap, write values to header, then box Value arity = llvm_constant(termTy, ctx.getIntegerAttr(numElements)); Value ptr = ctx.buildMalloc(tupleTy, TypeKind::Tuple, arity); Value zero = llvm_constant(i32Ty, ctx.getI32Attr(0)); auto headerRaw = ctx.targetInfo.encodeHeader(TypeKind::Tuple, numElements); ArrayRef<Value> headerTermIndices{zero, zero}; Value headerTermPtr = llvm_gep(termPtrTy, ptr, headerTermIndices); llvm_store(llvm_constant(termTy, 
ctx.getIntegerAttr(headerRaw)), headerTermPtr); for (auto i = 0; i < numElements; i++) { auto element = elements[i]; auto elementTy = tupleTy.getStructElementType(i + 1).getPointerTo(); auto idx = llvm_constant(i32Ty, ctx.getI32Attr(i + 1)); ArrayRef<Value> elementIndices{zero, idx}; auto elementPtr = llvm_gep(elementTy, ptr, elementIndices); llvm_store(element, elementPtr); } // Box the allocated tuple auto boxed = ctx.encodeBox(ptr); rewriter.replaceOp(op, boxed); return success(); } }; void populateAggregateOpConversionPatterns(OwningRewritePatternList &patterns, MLIRContext *context, EirTypeConverter &converter, TargetInfo &targetInfo) { patterns.insert<ConsOpConversion, ListOpConversion, TupleOpConversion>( context, converter, targetInfo); } } // namespace eir } // namespace lumen
2,400
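The `ListOpConversion` in the row above folds a variadic list of operands into cons cells from right to left; note that with two or more operands the last operand seeds the tail of the innermost cell, while a single operand gets nil as its tail. The same fold in Python for illustration, with tuples standing in for heap-allocated cells and `()` for nil — a sketch of the lowering's shape, not of the MLIR API:

def build_cons_list(elements, nil=()):
    """Fold elements into nested (head, tail) cells, right to left."""
    if not elements:
        return nil
    if len(elements) < 2:
        return (elements[0], nil)
    # Seed the accumulator with the last two operands as (head, tail),
    # then prepend the remaining operands one cell at a time.
    acc = (elements[-2], elements[-1])
    for head in reversed(elements[:-2]):
        acc = (head, acc)
    return acc

assert build_cons_list([]) == ()
assert build_cons_list([1]) == (1, ())
assert build_cons_list([1, 2, 3]) == (1, (2, 3))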
784
package com.alibaba.nacos.example.spring.boot;

import com.alibaba.nacos.spring.context.annotation.config.NacosPropertySource;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
@NacosPropertySource(dataId = "mysql.properties")
public class SpringBootMySQLApplication {

    public static void main(String[] args) {
        SpringApplication.run(SpringBootMySQLApplication.class, args);
    }
}
149
764
<reponame>641589523/token-profile { "symbol": "EPARA", "address": "0x7fc2a3a998E8a80dA7F2962C5A8803b641711e6c", "overview":{ "en": "EPARA is a digital token issued on the Ethereum network and maintains a 1:1 exchange relationship with Para coins. E-PARA can be used in both ecosystems of Ethereum and Paralism at the same time.", "zh": "EPARA是Paralism在以太坊发行的ERC20代币,和Paralism的基础货币PARA保持1比1的锚定兑换关系,EPARA可以同时在以太坊和Paralism生态中的应用使用。" }, "email": "<EMAIL>", "website": "https://www.paralism.com/", "whitepaper": "https://www.paralism.com/blog/paralism-commercial-white-paper/", "state": "NORMAL", "published_on": "2021-05-25", "initial_price":{ "ETH":"0.000000365 ETH", "USD":"0.001 USD", "BTC":"0.0000000259 BTC" }, "links": { "blog": "https://www.paralism.com/blog/", "twitter": "https://twitter.com/ParaPlatform", "telegram": "https://t.me/Paralism_Info", "github": "https://github.com/paralism", "facebook": "https://www.facebook.com/Paralism", "reddit": "", "slack": "", "medium": "" } }
539
12,278
// Copyright 2018 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef V8_OBJECTS_ALLOCATION_SITE_INL_H_ #define V8_OBJECTS_ALLOCATION_SITE_INL_H_ #include "src/objects/allocation-site.h" #include "src/heap/heap-write-barrier-inl.h" #include "src/objects/js-objects-inl.h" // Has to be the last include (doesn't have include guards): #include "src/objects/object-macros.h" namespace v8 { namespace internal { OBJECT_CONSTRUCTORS_IMPL(AllocationMemento, Struct) OBJECT_CONSTRUCTORS_IMPL(AllocationSite, Struct) NEVER_READ_ONLY_SPACE_IMPL(AllocationSite) CAST_ACCESSOR(AllocationMemento) CAST_ACCESSOR(AllocationSite) ACCESSORS(AllocationSite, transition_info_or_boilerplate, Object, kTransitionInfoOrBoilerplateOffset) ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset) INT32_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset) INT32_ACCESSORS(AllocationSite, pretenure_create_count, kPretenureCreateCountOffset) ACCESSORS(AllocationSite, dependent_code, DependentCode, kDependentCodeOffset) ACCESSORS_CHECKED(AllocationSite, weak_next, Object, kWeakNextOffset, HasWeakNext()) ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset) JSObject AllocationSite::boilerplate() const { DCHECK(PointsToLiteral()); return JSObject::cast(transition_info_or_boilerplate()); } void AllocationSite::set_boilerplate(JSObject object, WriteBarrierMode mode) { set_transition_info_or_boilerplate(object, mode); } int AllocationSite::transition_info() const { DCHECK(!PointsToLiteral()); return Smi::cast(transition_info_or_boilerplate()).value(); } void AllocationSite::set_transition_info(int value) { DCHECK(!PointsToLiteral()); set_transition_info_or_boilerplate(Smi::FromInt(value), SKIP_WRITE_BARRIER); } bool AllocationSite::HasWeakNext() const { return map() == GetReadOnlyRoots().allocation_site_map(); } void AllocationSite::Initialize() { set_transition_info_or_boilerplate(Smi::kZero); SetElementsKind(GetInitialFastElementsKind()); set_nested_site(Smi::kZero); set_pretenure_data(0); set_pretenure_create_count(0); set_dependent_code( DependentCode::cast(GetReadOnlyRoots().empty_weak_fixed_array()), SKIP_WRITE_BARRIER); } bool AllocationSite::IsZombie() const { return pretenure_decision() == kZombie; } bool AllocationSite::IsMaybeTenure() const { return pretenure_decision() == kMaybeTenure; } bool AllocationSite::PretenuringDecisionMade() const { return pretenure_decision() != kUndecided; } void AllocationSite::MarkZombie() { DCHECK(!IsZombie()); Initialize(); set_pretenure_decision(kZombie); } ElementsKind AllocationSite::GetElementsKind() const { return ElementsKindBits::decode(transition_info()); } void AllocationSite::SetElementsKind(ElementsKind kind) { set_transition_info(ElementsKindBits::update(transition_info(), kind)); } bool AllocationSite::CanInlineCall() const { return DoNotInlineBit::decode(transition_info()) == 0; } void AllocationSite::SetDoNotInlineCall() { set_transition_info(DoNotInlineBit::update(transition_info(), true)); } bool AllocationSite::PointsToLiteral() const { Object raw_value = transition_info_or_boilerplate(); DCHECK_EQ(!raw_value.IsSmi(), raw_value.IsJSArray() || raw_value.IsJSObject()); return !raw_value.IsSmi(); } // Heuristic: We only need to create allocation site info if the boilerplate // elements kind is the initial elements kind. 
bool AllocationSite::ShouldTrack(ElementsKind boilerplate_elements_kind) { return IsSmiElementsKind(boilerplate_elements_kind); } inline bool AllocationSite::CanTrack(InstanceType type) { if (FLAG_allocation_site_pretenuring) { // TurboFan doesn't care at all about String pretenuring feedback, // so don't bother even trying to track that. return type == JS_ARRAY_TYPE || type == JS_OBJECT_TYPE; } return type == JS_ARRAY_TYPE; } AllocationSite::PretenureDecision AllocationSite::pretenure_decision() const { return PretenureDecisionBits::decode(pretenure_data()); } void AllocationSite::set_pretenure_decision(PretenureDecision decision) { int32_t value = pretenure_data(); set_pretenure_data(PretenureDecisionBits::update(value, decision)); } bool AllocationSite::deopt_dependent_code() const { return DeoptDependentCodeBit::decode(pretenure_data()); } void AllocationSite::set_deopt_dependent_code(bool deopt) { int32_t value = pretenure_data(); set_pretenure_data(DeoptDependentCodeBit::update(value, deopt)); } int AllocationSite::memento_found_count() const { return MementoFoundCountBits::decode(pretenure_data()); } inline void AllocationSite::set_memento_found_count(int count) { int32_t value = pretenure_data(); // Verify that we can count more mementos than we can possibly find in one // new space collection. DCHECK((GetHeap()->MaxSemiSpaceSize() / (Heap::kMinObjectSizeInTaggedWords * kTaggedSize + AllocationMemento::kSize)) < MementoFoundCountBits::kMax); DCHECK_LT(count, MementoFoundCountBits::kMax); set_pretenure_data(MementoFoundCountBits::update(value, count)); } int AllocationSite::memento_create_count() const { return pretenure_create_count(); } void AllocationSite::set_memento_create_count(int count) { set_pretenure_create_count(count); } bool AllocationSite::IncrementMementoFoundCount(int increment) { if (IsZombie()) return false; int value = memento_found_count(); set_memento_found_count(value + increment); return memento_found_count() >= kPretenureMinimumCreated; } inline void AllocationSite::IncrementMementoCreateCount() { DCHECK(FLAG_allocation_site_pretenuring); int value = memento_create_count(); set_memento_create_count(value + 1); } bool AllocationMemento::IsValid() const { return allocation_site().IsAllocationSite() && !AllocationSite::cast(allocation_site()).IsZombie(); } AllocationSite AllocationMemento::GetAllocationSite() const { DCHECK(IsValid()); return AllocationSite::cast(allocation_site()); } Address AllocationMemento::GetAllocationSiteUnchecked() const { return allocation_site().ptr(); } template <AllocationSiteUpdateMode update_or_check> bool AllocationSite::DigestTransitionFeedback(Handle<AllocationSite> site, ElementsKind to_kind) { Isolate* isolate = site->GetIsolate(); bool result = false; if (site->PointsToLiteral() && site->boilerplate().IsJSArray()) { Handle<JSArray> boilerplate(JSArray::cast(site->boilerplate()), isolate); ElementsKind kind = boilerplate->GetElementsKind(); // if kind is holey ensure that to_kind is as well. if (IsHoleyElementsKind(kind)) { to_kind = GetHoleyElementsKind(to_kind); } if (IsMoreGeneralElementsKindTransition(kind, to_kind)) { // If the array is huge, it's not likely to be defined in a local // function, so we shouldn't make new instances of it very often. 
uint32_t length = 0; CHECK(boilerplate->length().ToArrayLength(&length)); if (length <= kMaximumArrayBytesToPretransition) { if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) { return true; } if (FLAG_trace_track_allocation_sites) { bool is_nested = site->IsNested(); PrintF("AllocationSite: JSArray %p boilerplate %supdated %s->%s\n", reinterpret_cast<void*>(site->ptr()), is_nested ? "(nested)" : " ", ElementsKindToString(kind), ElementsKindToString(to_kind)); } JSObject::TransitionElementsKind(boilerplate, to_kind); site->dependent_code().DeoptimizeDependentCodeGroup( isolate, DependentCode::kAllocationSiteTransitionChangedGroup); result = true; } } } else { // The AllocationSite is for a constructed Array. ElementsKind kind = site->GetElementsKind(); // if kind is holey ensure that to_kind is as well. if (IsHoleyElementsKind(kind)) { to_kind = GetHoleyElementsKind(to_kind); } if (IsMoreGeneralElementsKindTransition(kind, to_kind)) { if (update_or_check == AllocationSiteUpdateMode::kCheckOnly) return true; if (FLAG_trace_track_allocation_sites) { PrintF("AllocationSite: JSArray %p site updated %s->%s\n", reinterpret_cast<void*>(site->ptr()), ElementsKindToString(kind), ElementsKindToString(to_kind)); } site->SetElementsKind(to_kind); site->dependent_code().DeoptimizeDependentCodeGroup( isolate, DependentCode::kAllocationSiteTransitionChangedGroup); result = true; } } return result; } } // namespace internal } // namespace v8 #include "src/objects/object-macros-undef.h" #endif // V8_OBJECTS_ALLOCATION_SITE_INL_H_
3,305
7,482
/* * @brief RTC tick to (a more) Universal Time * Adds conversion functions to use an RTC that only provides a * seconds capability to provide "struct tm" support. * * @note * Copyright(C) NXP Semiconductors, 2014 * All rights reserved. * * @par * Software that is described herein is for illustrative purposes only * which provides customers with programming information regarding the * LPC products. This software is supplied "AS IS" without any warranties of * any kind, and NXP Semiconductors and its licensor disclaim any and * all warranties, express or implied, including all implied warranties of * merchantability, fitness for a particular purpose and non-infringement of * intellectual property rights. NXP Semiconductors assumes no responsibility * or liability for the use of the software, conveys no license or rights under any * patent, copyright, mask work right, or any other intellectual property rights in * or to any products. NXP Semiconductors reserves the right to make changes * in the software without notification. NXP Semiconductors also makes no * representation or warranty that such application will be suitable for the * specified use without further testing or modification. * * @par * Permission to use, copy, modify, and distribute this software and its * documentation is hereby granted, under NXP Semiconductors' and its * licensor's relevant copyrights in the software, without fee, provided that it * is used in conjunction with NXP Semiconductors microcontrollers. This * copyright, permission, and disclaimer notice must appear in all copies of * this code. */ #include "rtc_ut.h" /***************************************************************************** * Private types/enumerations/variables ****************************************************************************/ #define SECSPERMIN (60) #define MINSPERHOUR (60) #define SECSPERHOUR (SECSPERMIN * MINSPERHOUR) #define HOURSPERDAY (24) #define SECSPERDAY (SECSPERMIN * MINSPERHOUR * HOURSPERDAY) #define DAYSPERWEEK (7) #define MONETHSPERYEAR (12) #define DAYSPERYEAR (365) #define DAYSPERLEAPYEAR (366) /* Days per month, LY is special */ static uint8_t daysPerMonth[2][MONETHSPERYEAR] = { {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, /* Normal year */ {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, /* Leap year */ }; /***************************************************************************** * Public types/enumerations/variables ****************************************************************************/ /***************************************************************************** * Private functions ****************************************************************************/ /* Converts the number of days offset from the start year to real year data accounting for leap years */ static void GetDMLY(int dayOff, struct tm *pTime) { bool YearFound = false; int daysYear = dayOff; int leapYear, curLeapYear, year = TM_YEAR_BASE, monthYear = 0; bool MonthFound = false; /* Leap year check for less than 1 year time */ if ((year % 4) == 0) { curLeapYear = 1; } else { curLeapYear = 0; } /* Determine offset of years from days offset */ while (YearFound == false) { if ((year % 4) == 0) { /* Leap year, 366 days */ daysYear = DAYSPERLEAPYEAR; leapYear = 1; } else { /* Leap year, 365 days */ daysYear = DAYSPERYEAR; leapYear = 0; } if (dayOff > daysYear) { dayOff -= daysYear; year++; } else { YearFound = true; curLeapYear = leapYear; /* In a leap year */ } } /* Save relative year and day into year */ pTime->tm_year = year - 
TM_YEAR_BASE; /* Base year relative */ pTime->tm_yday = dayOff;/* 0 relative */ /* Determine offset of months from days offset */ while (MonthFound == false) { if ((dayOff + 1) > daysPerMonth[curLeapYear][monthYear]) { /* Next month */ dayOff -= daysPerMonth[curLeapYear][monthYear]; monthYear++; } else { /* Month found */ MonthFound = true; } } pTime->tm_mday = dayOff + 1;/* 1 relative */ pTime->tm_mon = monthYear; /* 0 relative */ } /***************************************************************************** * Public functions ****************************************************************************/ /* Converts an RTC tick time to Universal time */ void ConvertRtcTime(uint32_t rtcTick, struct tm *pTime) { int daySeconds, dayNum; /* Get day offset and seconds since start */ dayNum = (int) (rtcTick / SECSPERDAY); daySeconds = (int) (rtcTick % SECSPERDAY); /* Fill in secs, min, hours */ pTime->tm_sec = daySeconds % 60; pTime->tm_hour = daySeconds / SECSPERHOUR; pTime->tm_min = (daySeconds - (pTime->tm_hour * SECSPERHOUR)) / SECSPERMIN; /* Weekday, 0 = Sunday, 6 = Saturday */ pTime->tm_wday = (dayNum + TM_DAYOFWEEK) % DAYSPERWEEK; /* Get year, month, day of month, and day of year */ GetDMLY(dayNum, pTime); /* Not supported in this driver */ pTime->tm_isdst = 0; } /* Converts a Universal time to RTC tick time */ void ConvertTimeRtc(struct tm *pTime, uint32_t *rtcTick) { int leapYear, year = pTime->tm_year + TM_YEAR_BASE; uint32_t dayOff, monthOff, monthCur, rtcTicks = 0; /* Leap year check for less than 1 year time */ if ((year % 4) == 0) { leapYear = 1; } else { leapYear = 0; } /* Add days for each year and leap year */ while (year > TM_YEAR_BASE) { if ((year % 4) == 0) { /* Leap year, 366 days */ rtcTicks += DAYSPERLEAPYEAR * SECSPERDAY; leapYear = 1; } else { /* Leap year, 365 days */ rtcTicks += DAYSPERYEAR * SECSPERDAY; leapYear = 0; } year--; } /* Day and month are 0 relative offsets since day and month start at 1 */ dayOff = (uint32_t) pTime->tm_mday - 1; monthOff = (uint32_t) pTime->tm_mon; /* Add in seconds for passed months */ for (monthCur = 0; monthCur < monthOff; monthCur++) { rtcTicks += (uint32_t) (daysPerMonth[leapYear][monthCur] * SECSPERDAY); } /* Add in seconds for day offset into the current month */ rtcTicks += (dayOff * SECSPERDAY); /* Add in seconds for hours, minutes, and seconds */ rtcTicks += (uint32_t) (pTime->tm_hour * SECSPERHOUR); rtcTicks += (uint32_t) (pTime->tm_min * SECSPERMIN); rtcTicks += (uint32_t) pTime->tm_sec; *rtcTick = rtcTicks; }
2,080
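The NXP helpers in the row above convert a seconds-only RTC counter to a calendar date by splitting off whole days, then peeling off years and months. The same arithmetic, written compactly in Python for illustration — a sketch of the algorithm only: it inherits the driver's simplified `year % 4` leap rule, and `TM_YEAR_BASE = 1970` is an assumption (the real value comes from rtc_ut.h):

SECS_PER_DAY = 24 * 60 * 60
TM_YEAR_BASE = 1970  # assumption; defined in rtc_ut.h in the driver
DAYS_PER_MONTH = [
    [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31],  # normal year
    [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31],  # leap year
]

def rtc_to_tm(rtc_tick):
    """Mirror ConvertRtcTime: seconds since the base year -> (Y, M, D, h, m, s)."""
    day_num, day_secs = divmod(rtc_tick, SECS_PER_DAY)
    hour, rem = divmod(day_secs, 60 * 60)
    minute, sec = divmod(rem, 60)
    # Peel off whole years, using the same simplified year%4 leap rule.
    year = TM_YEAR_BASE
    while day_num >= (366 if year % 4 == 0 else 365):
        day_num -= 366 if year % 4 == 0 else 365
        year += 1
    # Peel off whole months within the final year.
    leap = 1 if year % 4 == 0 else 0
    month = 0
    while day_num >= DAYS_PER_MONTH[leap][month]:
        day_num -= DAYS_PER_MONTH[leap][month]
        month += 1
    return (year, month + 1, day_num + 1, hour, minute, sec)

assert rtc_to_tm(0) == (1970, 1, 1, 0, 0, 0)
assert rtc_to_tm(31 * SECS_PER_DAY + 3661) == (1970, 2, 1, 1, 1, 1)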
363
import os import pytest from collections import defaultdict from hubblestack.exceptions import ArgumentValueError import hubblestack.fdg.process class TestProcess(): """ Class used to test the functions in ``process.py`` """ def test__compare_raises_exception_if_arguments_have_invalid_type(self): """ Test that given invalid ``comp``, the function raises an ArgumentValueError exception """ with pytest.raises(ArgumentValueError): hubblestack.fdg.process._compare('foo', 1, 2) def test__compare_returns_correctly_with_ge_comparator(self): """ Test that given correct values, the function outputs the correct result with 'ge' comparator ge = greater equal """ ret = hubblestack.fdg.process._compare('ge', 1, 2) assert ret is False, '1 >= 2' ret = hubblestack.fdg.process._compare('ge', 2, 2) assert ret is True, '2 >= 2' ret = hubblestack.fdg.process._compare('ge', 2, 1) assert ret is True, '2 >= 1' def test__compare_returns_correctly_with_gt_comparator(self): """ Test that given correct values, the function outputs the correct result with 'gt' comparator gt = greater than """ ret = hubblestack.fdg.process._compare('gt', 10, 2) assert ret is True, '10 > 2' ret = hubblestack.fdg.process._compare('gt', 1, 2) assert ret is False, '1 > 2' ret = hubblestack.fdg.process._compare('gt', 2, 2) assert ret is False, '2 > 2' def test__compare_returns_correctly_with_lt_comparator(self): """ Test that given correct values, the function outputs the correct result with 'lt' comparator lt = lower than """ ret = hubblestack.fdg.process._compare('lt', 1, 2) assert ret is True, '1 < 2' ret = hubblestack.fdg.process._compare('lt', 2, 2) assert ret is False, '2 < 2' ret = hubblestack.fdg.process._compare('lt', 2, 1) assert ret is False, '2 < 1' def test__compare_returns_correctly_with_le_comparator(self): """ Test that given correct values, the function outputs the correct result with 'le' comparator le = lower equal """ ret = hubblestack.fdg.process._compare('le', 1, 2) assert ret is True, '1 <= 2' ret = hubblestack.fdg.process._compare('le', 2, 2) assert ret is True, '2 <= 2' ret = hubblestack.fdg.process._compare('le', 2, 1) assert ret is False, '2 <= 1' def test__compare_returns_correctly_with_eq_comparator(self): """ Test that given correct values, the function outputs the correct result with 'eq' comparator eq = equal """ ret = hubblestack.fdg.process._compare('eq', 1, 2) assert ret is False, '1 == 2' ret = hubblestack.fdg.process._compare('eq', 2, 1) assert ret is False, '2 == 1' ret = hubblestack.fdg.process._compare('eq', 1, 1) assert ret is True, '1 == 1' def test__compare_returns_correctly_with_ne_comparator(self): """ Test that given correct values, the function outputs the correct result with 'ne' comparator ne = not equal """ ret = hubblestack.fdg.process._compare('ne', 1, 2) assert ret is True, '1 != 2' ret = hubblestack.fdg.process._compare('ne', 2, 1) assert ret is True, '2 != 1' ret = hubblestack.fdg.process._compare('ne', 1, 1) assert ret is False, '1 != 1' def test__filter_dict_returns_none_if_filter_values_is_invalid(self): """ Test that given invalid ``filter_rules``, the function returns None """ ret = hubblestack.fdg.process._filter_dict( dct={1: 'a', 2: 'b'}, filter_values=False, filter_rules={'invalid': 1, 'data': 2}) assert ret is None, 'invalid filter_rules should return None' def test__filter_dict_returns_correctly_filtered_dict_by_keys(self): """ Test that given valid ``filter_values``, the function correctly filters a dict by keys """ # keep x if 1 < x <= 4 and x != 3 expected_ret = {2: 'b', 4:
'd'} ret = hubblestack.fdg.process._filter_dict( {1: 'a', 2: 'b', 3: 'c', 4: 'd'}, False, {'gt': 1, 'le': 4, 'ne': 3}) assert expected_ret == ret # keep x if 'a' <= x < 'd' and x != 'c' expected_ret = {'a': 1, 'b': 2} ret = hubblestack.fdg.process._filter_dict( {'a': 1, 'b': 2, 'c': 3, 'd': 4}, False, {'ge': 'a', 'lt': 'd', 'ne': 'c'}) assert expected_ret == ret def test__filter_dict_returns_correctly_filtered_dict_by_values(self): """ Test that given valid ``filter_values``, the function correctly filters a dict by values """ # keep x if 1 < x <= 4 and x != 3 expected_ret = {'b': 2, 'd': 4} ret = hubblestack.fdg.process._filter_dict( {'a': 1, 'b': 2, 'c': 3, 'd': 4}, True, {'gt': 1, 'le': 4, 'ne': 3}) assert expected_ret == ret # keep x if 'a' <= x < 'd' and x != 'c' expected_ret = {1: 'a', 2: 'b'} ret = hubblestack.fdg.process._filter_dict( {1: 'a', 2: 'b', 3: 'c', 4: 'd'}, True, {'ge': 'a', 'lt': 'd', 'ne': 'c'}) assert expected_ret == ret def test__filter_dict_returns_unaltered_dict_if_filter_rules_is_empty(self): """ Test that given empty ``filter_rules``, the function leaves the dict intact """ expected_ret = {1: 'a', 2: 'b'} ret = hubblestack.fdg.process._filter_dict({1: 'a', 2: 'b'}, True, {}) assert expected_ret == ret def test_filter_dict_returns_none_if_dict_is_invalid(self): """ Test that given invalid types for ``starting_dict`` or ``chained``, the function returns False and None """ # invalid starting_dict - is type list expected_status, expected_ret = False, None status, ret = hubblestack.fdg.process.filter_dict( starting_dict=[1, 2, 3], chained={1: 'a', 2: 'b'}) assert status is False, 'invalid starting_dict, should return False' assert ret is None, 'invalid starting_dict, should return None' # invalid chained dict - is type list status, ret = hubblestack.fdg.process.filter_dict( starting_dict={1: 'a', 2: 'b'}, chained=[1, 2]) assert status is False, 'invalid chained, should return False' assert ret is None, 'invalid chained, should return None' def test_filter_dict_correctly_filters_out_keys(self): """ Test that given correct input, the function correctly filters by keys """ expected_ret = {1: 'a', 2: 'b', 4: 'd'} status, ret = hubblestack.fdg.process.filter_dict( starting_dict={1: 'a', 2: 'b', 3: 'c'}, chained={1: 'b', 3: 'd', 4: 'd'}, ge=1, ne=3) assert status is True assert expected_ret == ret def test_filter_dict_correctly_filters_out_values(self): """ Test that given correct input, the function correctly filters by values """ expected_ret = {3: 'c', 4: 'd'} status, ret = hubblestack.fdg.process.filter_dict( starting_dict={1: 'a', 2: 'b', 3: 'c'}, filter_values=True, chained={1: 'b', 3: 'd', 4: 'd'}, gt='a', ne='b', le='d') assert status is True assert expected_ret == ret def test__filter_returns_none_if_input_is_invalid(self): """ Test that given invalid input, the function returns None """ ret = hubblestack.fdg.process._filter([1, 2, 3], {'foo': 1}) assert ret is None, 'invalid input type should return None' def test__filter_correctly_filters_sequence_if_input_is_valid(self): """ Test that given valid arguments of different types, the function returns the filtered sequence """ # list expected_ret = [2, 4] seq = [1, 2, 3, 4] ret = hubblestack.fdg.process._filter(seq, {"gt": 1, "ne": 3, "le": 4}) assert expected_ret == ret # set seq = set(seq) ret = hubblestack.fdg.process._filter(seq, {"gt": 1, "ne": 3, "le": 4}) assert expected_ret == ret # string seq = "test string" expected_ret = ['e', 's', ' ', 's', 'r', 'i', 'n', 'g'] ret = hubblestack.fdg.process._filter(seq, 
{"ne": 't'}) assert expected_ret == ret def test_filter_seq_returns_none_if_input_is_invalid(self): """ Test that given invalid input, the function returns None """ # invalid ``starting_seq`` status, ret = hubblestack.fdg.process.filter_seq( starting_seq=1, chained=[2, 3, 4], ge=1, lt=4) assert status is False, 'invalid starting_seq, should return False' assert ret is None, 'invalid starting_seq, should return None' # invalid ``chained`` status, ret = hubblestack.fdg.process.filter_seq( starting_seq=[1, 2], chained=4, ge=1, lt=4) assert status is False, 'invalid chained, should return False' assert ret is None, 'invalid chained, should return None' def test_filter_seq_returns_filtered_seq_with_valid_input(self): """Test that given valid input of different types, the function returns True and the filtered sequence """ # list expected_ret = [2, 4] chained = [1, 2] seq = [3, 4] status, ret = hubblestack.fdg.process.filter_seq( starting_seq=seq, chained=chained, gt=1, ne=3, le=4) assert expected_ret == ret assert status is True # set expected_ret = [3] seq = set(seq) chained = set(chained) status, ret = hubblestack.fdg.process.filter_seq( starting_seq=seq, chained=chained, ge=1, ne=2, lt=4, eq=3) assert expected_ret == ret assert status is True # string expected_ret = ['e', 's', ' ', 's', 'r', 'i', 'n', 'g'] seq = 'test {}' chained = 'string' status, ret = hubblestack.fdg.process.filter_seq( starting_seq=seq, chained=chained, ne='t') assert expected_ret == ret assert status is True def test_get_index_returns_none_if_invalid_input(self): """ Test that given invalid arguments, the function returns None """ # invalid ``chained`` status, ret = hubblestack.fdg.process.get_index(starting_list=[1, 2, 3]) assert status is False, 'invalid chained, should return False' assert ret is None, 'invalid chained, should return None' # index out of range status, ret = hubblestack.fdg.process.get_index(index=4, chained=[1, 2, 3]) assert status is False, 'index 4 out of range, list length is 3, should return False' assert ret is None, 'index 4 out of range, list length is 3, should return None' # invalid ``chained`` type status, ret = hubblestack.fdg.process.get_index(chained=set([1, 2, 3])) assert status is False, 'invalid chained type, should return False' assert ret is None, 'invalid chained type, should return None' def test_get_index_returns_correctly_if_valid_input(self): """ Test that given valid arguments, the function extracts the correct value """ # return element at index -1 from [3, 4, 1, 2] expected_ret = 2 status, ret = hubblestack.fdg.process.get_index( index=-1, starting_list=[1, 2], chained=[3, 4]) assert status is True assert expected_ret == ret # default to index 0 from [3, 4, 1, 2] expected_ret = 3 status, ret = hubblestack.fdg.process.get_index( starting_list=[1, 2], chained=[3, 4]) assert status is True assert expected_ret == ret # return element at index 2 from [3, 4, 1, 2] expected_ret = 1 status, ret = hubblestack.fdg.process.get_index( index=2, starting_list=[1, 2], chained=[3, 4]) assert status is True assert expected_ret == ret def test_get_key_returns_none_if_invalid_input(self): """ Test that given invalid arguments, the function returns None """ # invalid ``chained`` type status, ret = hubblestack.fdg.process.get_key(key='a', chained=['a', 'b', 'c']) assert status is False, 'invalid chained type, should return False' assert ret is None, 'invalid chained type, should return None' # invalid key status, ret = hubblestack.fdg.process.get_key( key='d', chained={'a': 1, 'b': 2, 'c': 3}) 
assert status is False, 'invalid key `d` in dict, should return False' assert ret is None, 'invalid key `d` in dict, should return None' def test_get_key_returns_correctly(self): """ Test that given valid arguments, the function returns the correct value """ expected_ret = 1 status, ret = hubblestack.fdg.process.get_key( key='b', starting_dict={'b': 1, 'c': 2}, chained={'a': 1, 'b': 2}) assert status is True assert expected_ret == ret def test_join_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ # invalid ``chained`` status, ret = hubblestack.fdg.process.join(chained=1) assert status is False assert ret is None # invalid ``sep`` status, ret = hubblestack.fdg.process.join(sep=[1, 2], chained=['foo', 'bar']) assert status is False assert ret is None def test_join_returns_correct_string(self): """ Test that given valid arguments, the function will return the joined string """ # no ``sep`` expected_ret = 'testwordstogether' status, ret = hubblestack.fdg.process.join( words='together', chained=['test', 'words']) assert status is True assert expected_ret == ret # valid ``sep`` expected_ret = 'test-more-words-together' status, ret = hubblestack.fdg.process.join( words=['words', 'together'], sep='-', chained=['test', 'more']) assert status is True assert expected_ret == ret def test__sort_returns_none_if_invalid_input(self): """ Test that given invalid arguments, the function returns None """ # invalid ``seq`` ret = hubblestack.fdg.process._sort(seq=1, desc=True, lexico=False) assert ret is None # invalid ``desc`` ret = hubblestack.fdg.process._sort(seq=[2, 1], desc='yes', lexico=False) assert ret is None # invalid ``lexico`` ret = hubblestack.fdg.process._sort(seq=[1, 2, 12, 13], desc=False, lexico=True) assert ret is None def test__sort_returns_sorted_seq(self): """ Test that given valid arguments, the function correctly sorts them with different parameters """ expected_ret = ['Z', 'a', 'b'] ret = hubblestack.fdg.process._sort(seq=['b', 'a', 'Z'], desc=False, lexico=False) assert expected_ret == ret expected_ret = ['b', 'a', 'B'] ret = hubblestack.fdg.process._sort( seq={'a': 1, 'b': 2, 'B': 3}, desc=True, lexico=False) assert expected_ret == ret expected_ret = ['A', 'b', 'C'] ret = hubblestack.fdg.process._sort( seq=set(['b', 'A', 'C']), desc=False, lexico=True) assert expected_ret == ret def test_sort_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ # invalid ``chained`` status, ret = hubblestack.fdg.process.sort(seq=2, chained=1) assert status is False assert ret is None # invalid ``desc`` status, ret = hubblestack.fdg.process.sort(chained=[1, 2, 3], desc='yes') assert status is False assert ret is None # invalid ``lexico`` status, ret = hubblestack.fdg.process.sort(chained=[1, 2, 3], lexico=True) assert status is False assert ret is None def test_sort_returns_sorted_seq(self): """ Test that given valid arguments, the function correctly sorts them with different parameters """ expected_ret = [3, 2, 1] # desc list status, ret = hubblestack.fdg.process.sort( seq=[1, 2], desc=True, chained=[3]) assert status is True assert expected_ret == ret # dict expected_ret = [1, 2, 3] status, ret = hubblestack.fdg.process.sort(chained={2: 'a', 1: 'b', 3: 'c'}) assert status is True assert expected_ret == ret # desc set expected_ret = ['b', 'a', 'B', 'A'] status, ret = hubblestack.fdg.process.sort( seq=['A', 'B'], chained=set(['a', 'b']), desc=True) assert status is True assert 
expected_ret == ret # lexicographic string expected_ret = ['A', 'a', 'b', 'B'] status, ret = hubblestack.fdg.process.sort( seq='A{}B', chained='ab', lexico=True) assert status is True assert expected_ret == ret def test__split_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ ret = hubblestack.fdg.process._split(phrase=[1, 2, 3], sep=" ", regex=False) assert ret is None, "can't split list, should return None" ret = hubblestack.fdg.process._split(phrase="foo bar", sep=[1, 2, 3], regex=False) assert ret is None, "separator to split by can't be list, should return None" ret = hubblestack.fdg.process._split(phrase=[1, 2, 3], sep=" ", regex=True) assert ret is None, "can't split list, should return None" ret = hubblestack.fdg.process._split(phrase="foo bar", sep=[1, 2, 3], regex=True) assert ret is None, "separator to split by can't be list, should return None" def test__split_returns_list_from_string(self): """ Test that given valid arguments, the function correctly splits the string into a list """ # simple ``sep`` expected_ret = ['foo', 'bar'] ret = hubblestack.fdg.process._split("foo bar", " ", False) assert expected_ret == ret # ``sep`` simple regex ret = hubblestack.fdg.process._split("foo bar", " ", True) assert expected_ret == ret # regex ret = hubblestack.fdg.process._split("foo bar", r"\s+", True) assert expected_ret == ret # invalid ``sep`` expected_ret = ['foo bar'] ret = hubblestack.fdg.process._split("foo bar", "?", False) assert expected_ret == ret def test_split_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ # invalid ``words`` status, ret = hubblestack.fdg.process.split([1, 2, 3], chained='ab') assert ret is None assert status is False status, ret = hubblestack.fdg.process.split({1: 'a', 2: 'b'}, chained='ab') assert ret is None assert status is False # invalid ``words`` & ``chained`` status, ret = hubblestack.fdg.process.split(1, chained=12) assert ret is None assert status is False status, ret = hubblestack.fdg.process.split('foo bar', regex=True) assert ret is None assert status is False def test_split_returns_list_from_string(self): """ Test that given valid arguments, the function correctly splits in all scenarios """ expected_ret = ['a', 'b', 'c', 'd'] # valid regex status, ret = hubblestack.fdg.process.split( phrase="a1b2c3d", sep=r"\d+", regex=True) assert status is True assert expected_ret == ret # simple sep expected_ret = ['a1', 'b2', 'c3', 'd'] status, ret = hubblestack.fdg.process.split( phrase="a1 b2 {}", sep=" ", chained='c3 d') assert status is True assert expected_ret == ret # no sep expected_ret = ['a1', 'b2', 'c3', 'd'] status, ret = hubblestack.fdg.process.split( phrase="a1 b2 \n{}", chained='c3 d') assert status is True assert expected_ret == ret # invalid regex expected_ret = ['a1b2c3d'] status, ret = hubblestack.fdg.process.split( phrase="a1b2{}", sep=r"\d+", regex=False, chained='c3d') assert status is False assert expected_ret == ret def test_dict_to_list_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ status, ret = hubblestack.fdg.process.dict_to_list( starting_dict={1: 'a'}, chained=[1, 2, 3]) assert status is False assert ret is None status, ret = hubblestack.fdg.process.dict_to_list( starting_dict='foo', chained={1: 'a', 2: 'b'}) assert status is False assert ret is None def test_dict_to_list_correctly_returns_list(self): """ Test that given valid arguments, the
function outputs a valid list """ # flat dict expected_ret = [(1, 'b'), (2, 'c')] status, ret = hubblestack.fdg.process.dict_to_list( starting_dict={1: 'a'}, update_chained=False, chained={1: 'b', 2: 'c'}) assert status is True assert expected_ret == ret # nested dict expected_ret = [(1, 'a'), (2, 'c'), (3, {1: 'a'})] status, ret = hubblestack.fdg.process.dict_to_list( starting_dict={1: 'a', 3: {1: 'a'}}, chained={1: 'b', 2: 'c'}) assert status is True assert expected_ret == ret # empty dict expected_ret = [] status, ret = hubblestack.fdg.process.dict_to_list(chained={}) assert status is False assert expected_ret == ret def test__dict_convert_none_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ ret = hubblestack.fdg.process._dict_convert_none([1, 2, 3]) assert ret is None ret = hubblestack.fdg.process._dict_convert_none(1) assert ret is None expected_ret = {} ret = hubblestack.fdg.process._dict_convert_none(defaultdict()) assert expected_ret == ret def test__dict_convert_none_replaces_empty_string_with_none_in_dict(self): """ Test that given valid arguments, the function converts empty strings to None in all scenarios """ # flat dict expected_ret = {1: None, 2: 'a', 3: "None", 4: None} ret = hubblestack.fdg.process._dict_convert_none( {1: "", 2: 'a', 3: "None", 4: None}) assert expected_ret == ret # nested dicts expected_ret = {'a': {'aa': {'aaa': 3, 'bbb': {'bbbb': 4, 'cccc': None}, 'ccc': None}, 'bb': None}, 'b': None} ret = hubblestack.fdg.process._dict_convert_none( {'a': {'aa': {'aaa': 3, 'bbb': {'bbbb': 4, 'cccc': ''}, 'ccc': ''}, 'bb': ''}, 'b': ''}) assert expected_ret == ret # nested dicts & seqs expected_ret = {'a': [{'b': [{'c': ['d', {'e': None}], 'f': None}, {'g': None}], 'h': None}, 'i'], 'j': None} ret = hubblestack.fdg.process._dict_convert_none( {'a': [{'b': ({'c': ['d', {'e': ''}], 'f': ''}, {'g': ''}), 'h': ''}, 'i'], 'j': ''}) assert expected_ret == ret def test__seq_convert_none_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ ret = hubblestack.fdg.process._seq_convert_none({1: 'a', 2: 'b'}) assert ret is None ret = hubblestack.fdg.process._seq_convert_none(1) assert ret is None ret = hubblestack.fdg.process._seq_convert_none(True) assert ret is None def test__seq_convert_none_replaces_emtpy_strings_with_none(self): """ Test that given valid arguments, the function correctly converts empty strings to None in all scenarios """ # flat seq expected_ret = ['a', {1: None}, 'b', {1: None}, 'c'] ret = hubblestack.fdg.process._seq_convert_none( ['a', {1: ''}, 'b', {1: ''}, 'c']) assert expected_ret == ret # nested seq & dict expected_ret = ['a', [{1: None, 2: [3, [4, {1: None, 2: {3: None}}]]}, 'b'], 'c'] ret = hubblestack.fdg.process._seq_convert_none( ('a', [{1: '', 2: [3, (4, {1: '', 2: {3: ''}})]}, 'b'], 'c')) assert expected_ret == ret def test_dict_convert_none_returns_none_if_invalid_argument(self): """ Test that given invalid arguments, the function returns None """ status, ret = hubblestack.fdg.process.dict_convert_none(chained='foo bar') assert status is False assert ret is None status, ret = hubblestack.fdg.process.dict_convert_none( chained={1: 'a'}, starting_seq=[1, 2]) assert status is False assert ret is None expected_ret = [] status, ret = hubblestack.fdg.process.dict_convert_none(chained=[]) assert status is False assert expected_ret == ret def test_dict_convert_none_replaces_empty_string_with_none(self): """ Test that given valid 
arguments, the function returns a valid dict with None instead of empty strings """ # flat dict expected_ret = {1: 'a', 2: None, 3: 'b', 4: None} status, ret = hubblestack.fdg.process.dict_convert_none( chained={1: 'a', 2: '', 3: 'b', 4: ''}) assert expected_ret == ret assert status is True # nested dict & tuple expected_ret = {'a': [{'b': [{'c': {'e': None}, 'f': None}, {'g': None}], 'h': None}, 'i'], 'j': None} status, ret = hubblestack.fdg.process.dict_convert_none( chained={'a': [{'b': ({'c': {'e': ''}, 'f': ''}, {'g': ''}), 'h': ''}, 'i']}, starting_seq={'j': ''}) assert status is True assert expected_ret == ret # nested dict, list & tuple expected_ret = ['a', [{1: None, 2: [3, [4, {1: None, 2: {3: None}}]]}, 'b'], 'c'] status, ret = hubblestack.fdg.process.dict_convert_none( chained=('a', [{1: '', 2: [3, (4, {1: '', 2: {3: ''}})]}, 'b'], 'c')) assert status is True assert expected_ret == ret # nested dict & list expected_ret = ['a', {1: None}, 'b', {1: None}, 'c'] status, ret = hubblestack.fdg.process.dict_convert_none( chained=['a', {1: ''}, 'b'], starting_seq=[{1: ''}, 'c']) assert status is True assert expected_ret == ret def test_print_string_returns_none_when_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ status, ret = hubblestack.fdg.process.print_string( starting_string=['foo', 'bar']) assert status is False assert ret is None expected_ret = '' status, ret = hubblestack.fdg.process.print_string( starting_string='') assert status is False assert expected_ret == ret def test_print_string_returns_correct_string(self): """ Test that given valid arguments, the function returns the correct string """ expected_ret = 'foo' status, ret = hubblestack.fdg.process.print_string( starting_string='foo', chained='bar') assert status is True assert expected_ret == ret expected_ret = "foo ['b', 'a', 'r']" status, ret = hubblestack.fdg.process.print_string( starting_string='foo {}', chained=['b', 'a', 'r']) assert status is True assert expected_ret == ret def test__sterilize_dict_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ ret = hubblestack.fdg.process._sterilize_dict(dictionary=[1, 2]) assert ret is None ret = hubblestack.fdg.process._sterilize_dict(dictionary={}) assert ret == {} ret = hubblestack.fdg.process._sterilize_dict(dictionary=12) assert ret is None def test__sterilize_dict_removes_none_values_if_nested_dict(self): """ Test that given valid arguments, the function correctly removes keys containing values of None """ # flat dict expected_ret = {2: 'a'} ret = hubblestack.fdg.process._sterilize_dict( {1: None, 2: 'a'}) assert expected_ret == ret # nested dicts expected_ret = {2: {3: {5: 'a'}, 7: 'b'}, 8: 'c', 9: {}} ret = hubblestack.fdg.process._sterilize_dict( {1: None, 2: {3: {4: None, 5: 'a'}, 6: None, 7: 'b'}, 8: 'c', 9: {10: None}}) assert expected_ret == ret # nested dicts & sequences expected_ret = {2: {3: [4, {}], 6: {7: ['b', {}]}}} ret = hubblestack.fdg.process._sterilize_dict( {1: None, 2: {3: [4, {5: None}], 6: {7: ('b', {9: None}), 8: None}}}) assert expected_ret == ret def test__sterilize_seq_returns_none_if_arguments_are_invalid(self): """ Test that given invalid arguments, the function returns None """ ret = hubblestack.fdg.process._sterilize_seq( {1: 'a', 2: ['b']}) assert ret is None ret = hubblestack.fdg.process._sterilize_seq(12) assert ret is None ret = hubblestack.fdg.process._sterilize_seq([]) assert ret == [] def 
test__sterilize_seq_removes_none_values_from_seq(self): """ Test that given valid arguments, the function finds nested dicts and removes keys with values of None """ # flat seq expected_ret = [1, 2, [1, 2], [1, 2]] ret = hubblestack.fdg.process._sterilize_seq( [1, 2, set([1, 2, 1]), (1, 2)]) assert expected_ret == ret # nested dicts & seq expected_ret = [{2: {3: [{5: 'a'}, [None, {7: 'b'}]], 8: 'c', 9: {}}}] ret = hubblestack.fdg.process._sterilize_seq( [{1: None, 2: {3: ({4: None, 5: 'a'}, [None, {6: None, 7: 'b'}]), 8: 'c', 9: {10: None}}}]) assert expected_ret == ret def test_remove_dict_none_returns_none_if_invalid_arguments(self): """ Test that given invalid arguments, the function returns None """ # invalid ``starting_seq`` status, ret = hubblestack.fdg.process.dict_remove_none( starting_seq=[1, 2, 3], chained={1: 'a', 2: 'b'}) assert status is False assert ret is None # invalid ``chained`` & valid ``starting_seq`` status, ret = hubblestack.fdg.process.dict_remove_none( starting_seq=[1, 2, 3], chained="123") assert status is False assert ret is None # invalid ``chained`` status, ret = hubblestack.fdg.process.dict_remove_none(chained="123") assert status is False assert ret is None def test_dict_remove_none_returns_valid_sequence(self): """ Test that given valid arguments, the function finds nested dicts and removes keys with values of None """ # flat dict expected_ret = {2: 'a', 4: 'b'} status, ret = hubblestack.fdg.process.dict_remove_none( chained={1: None, 2: 'a', 3: None, 4: 'b'}) assert status is True assert expected_ret == ret # flat seq expected_ret = [{}, {2: 'a'}, 5, None, {4: 'b'}] status, ret = hubblestack.fdg.process.dict_remove_none( chained=[{1: None}, {2: 'a', 3: None}], starting_seq=[5, None, {4: 'b'}]) assert status is True assert expected_ret == ret # nested sequences & dicts expected_ret = [{9: {11: [1, 2]}}, 11, {2: {3: [{5: 'a'}, [None, {7: 'b'}]], 8: 'c'}}] status, ret = hubblestack.fdg.process.dict_remove_none( starting_seq=[{1: None, 2: {3: ({4: None, 5: 'a'}, [None, {6: None, 7: 'b'}]), 8: 'c'}}], chained=[{9: {10: None, 11: set([1, 2, 1])}}, 11]) assert status is True assert expected_ret == ret # nested dicts & sequences expected_ret = {2: {3: [{5: 'a'}, [None, {7: 'b'}]], 8: 'c'}, 9: {11: [1, 2]}} status, ret = hubblestack.fdg.process.dict_remove_none( starting_seq={1: None, 2: {3: ({4: None, 5: 'a'}, [None, {6: None, 7: 'b'}]), 8: 'c'}}, chained={9: {10: None, 11: set([1, 2, 1])}, 11: None}) assert status is True assert expected_ret == ret def test_encode_base64_returns_none_if_invalid_arguments_type(self): """ Test that given invalid arguments, the function returns None """ # invalid `starting_string` status, ret = hubblestack.fdg.process.encode_base64( starting_string=123, chained="foo") assert status is False assert ret is None status, ret = hubblestack.fdg.process.encode_base64( starting_string=['a', 'c'], format_chained=False) assert status is False assert ret is None expected_ret = '' status, ret = hubblestack.fdg.process.encode_base64( starting_string='', format_chained=False) assert status is False assert expected_ret == ret def test_encode_base64_returns_string_if_valid_arguments(self): """ Test that given valid arguments, the function correctly encodes the string and returns it """ # format chained expected_ret = 'Zm9vIGJhcg==' status, ret = hubblestack.fdg.process.encode_base64( starting_string="foo {}", chained="bar") assert status is True assert expected_ret == ret # don't format chained expected_ret = 'Zm9v' status, ret = 
hubblestack.fdg.process.encode_base64( starting_string="foo", chained="bar") assert status is True assert expected_ret == ret # no chained expected_ret = 'Zm9vIHt9' status, ret = hubblestack.fdg.process.encode_base64( starting_string="foo {}", format_chained=False, chained="bar") assert status is True assert expected_ret == ret
16,002
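The test class in the row above fully pins down the comparator semantics of `hubblestack.fdg.process._compare`: six comparators ('gt', 'ge', 'lt', 'le', 'eq', 'ne') and an ArgumentValueError for anything else. A minimal sketch of an implementation that would satisfy those tests, built on the standard `operator` module — the real module may differ in detail:

import operator

from hubblestack.exceptions import ArgumentValueError  # same import as the tests

_COMPARATORS = {
    'gt': operator.gt, 'ge': operator.ge,
    'lt': operator.lt, 'le': operator.le,
    'eq': operator.eq, 'ne': operator.ne,
}

def _compare(comp, val1, val2):
    """Return the truth of ``val1 <comp> val2``; reject unknown comparators."""
    if comp not in _COMPARATORS:
        raise ArgumentValueError('invalid comparator: {0!r}'.format(comp))
    return _COMPARATORS[comp](val1, val2)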
5,220
/* Capstone testing regression */ /* By <NAME> <<EMAIL>>, 02-2019 */ #include "factory.h" char *get_detail_arm64(csh *handle, cs_mode mode, cs_insn *ins) { cs_arm64 *arm64; int i; cs_regs regs_read, regs_write; uint8_t regs_read_count, regs_write_count; uint8_t access; char *result; result = (char *)malloc(sizeof(char)); result[0] = '\0'; // detail can be NULL if SKIPDATA option is turned ON if (ins->detail == NULL) return result; arm64 = &(ins->detail->arm64); if (arm64->op_count) add_str(&result, " ; op_count: %u", arm64->op_count); for (i = 0; i < arm64->op_count; i++) { cs_arm64_op *op = &(arm64->operands[i]); switch(op->type) { default: break; case ARM64_OP_REG: add_str(&result, " ; operands[%u].type: REG = %s", i, cs_reg_name(*handle, op->reg)); break; case ARM64_OP_IMM: add_str(&result, " ; operands[%u].type: IMM = 0x%" PRIx64 "", i, op->imm); break; case ARM64_OP_FP: #if defined(_KERNEL_MODE) // Issue #681: Windows kernel does not support formatting float point add_str(&result, " ; operands[%u].type: FP = <float_point_unsupported>", i); #else add_str(&result, " ; operands[%u].type: FP = %f", i, op->fp); #endif break; case ARM64_OP_MEM: add_str(&result, " ; operands[%u].type: MEM", i); if (op->mem.base != ARM64_REG_INVALID) add_str(&result, " ; operands[%u].mem.base: REG = %s", i, cs_reg_name(*handle, op->mem.base)); if (op->mem.index != ARM64_REG_INVALID) add_str(&result, " ; operands[%u].mem.index: REG = %s", i, cs_reg_name(*handle, op->mem.index)); if (op->mem.disp != 0) add_str(&result, " ; operands[%u].mem.disp: 0x%x", i, op->mem.disp); break; case ARM64_OP_CIMM: add_str(&result, " ; operands[%u].type: C-IMM = %u", i, (int)op->imm); break; case ARM64_OP_REG_MRS: add_str(&result, " ; operands[%u].type: REG_MRS = 0x%x", i, op->reg); break; case ARM64_OP_REG_MSR: add_str(&result, " ; operands[%u].type: REG_MSR = 0x%x", i, op->reg); break; case ARM64_OP_PSTATE: add_str(&result, " ; operands[%u].type: PSTATE = 0x%x", i, op->pstate); break; case ARM64_OP_SYS: add_str(&result, " ; operands[%u].type: SYS = 0x%x", i, op->sys); break; case ARM64_OP_PREFETCH: add_str(&result, " ; operands[%u].type: PREFETCH = 0x%x", i, op->prefetch); break; case ARM64_OP_BARRIER: add_str(&result, " ; operands[%u].type: BARRIER = 0x%x", i, op->barrier); break; } access = op->access; switch(access) { default: break; case CS_AC_READ: add_str(&result, " ; operands[%u].access: READ", i); break; case CS_AC_WRITE: add_str(&result, " ; operands[%u].access: WRITE", i); break; case CS_AC_READ | CS_AC_WRITE: add_str(&result, " ; operands[%u].access: READ | WRITE", i); break; } if (op->shift.type != ARM64_SFT_INVALID && op->shift.value) add_str(&result, " ; Shift: type = %u, value = %u", op->shift.type, op->shift.value); if (op->ext != ARM64_EXT_INVALID) add_str(&result, " ; Ext: %u", op->ext); if (op->vas != ARM64_VAS_INVALID) add_str(&result, " ; Vector Arrangement Specifier: 0x%x", op->vas); if (op->vess != ARM64_VESS_INVALID) add_str(&result, " ; Vector Element Size Specifier: %u", op->vess); if (op->vector_index != -1) add_str(&result, " ; Vector Index: %u", op->vector_index); } if (arm64->update_flags) add_str(&result, " ; Update-flags: True"); if (arm64->writeback) add_str(&result, " ; Write-back: True"); if (arm64->cc) add_str(&result, " ; Code-condition: %u", arm64->cc); if (!cs_regs_access(*handle, ins, regs_read, &regs_read_count, regs_write, &regs_write_count)) { if (regs_read_count) { add_str(&result, " ; Registers read:"); for(i = 0; i < regs_read_count; i++) { add_str(&result, " %s", 
cs_reg_name(*handle, regs_read[i])); } } if (regs_write_count) { add_str(&result, " ; Registers modified:"); for(i = 0; i < regs_write_count; i++) { add_str(&result, " %s", cs_reg_name(*handle, regs_write[i])); } } } return result; }
1,915
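The C harness above walks `cs_insn.detail` by hand; Capstone's Python bindings expose the same operand detail once `detail` is enabled. A rough Python equivalent of the operand loop — a sketch that covers only the REG/IMM/MEM cases and will not match the C output byte-for-byte:

from capstone import Cs, CS_ARCH_ARM64, CS_MODE_ARM
from capstone.arm64 import ARM64_OP_IMM, ARM64_OP_MEM, ARM64_OP_REG

md = Cs(CS_ARCH_ARM64, CS_MODE_ARM)
md.detail = True  # without this, insn.operands is not populated

CODE = b"\x20\x04\x00\x91"  # add x0, x1, #1
for insn in md.disasm(CODE, 0x1000):
    print(f"{insn.mnemonic} {insn.op_str} ; op_count: {len(insn.operands)}")
    for i, op in enumerate(insn.operands):
        if op.type == ARM64_OP_REG:
            print(f"  operands[{i}].type: REG = {insn.reg_name(op.reg)}")
        elif op.type == ARM64_OP_IMM:
            print(f"  operands[{i}].type: IMM = {op.imm:#x}")
        elif op.type == ARM64_OP_MEM:
            print(f"  operands[{i}].type: MEM, base = {insn.reg_name(op.mem.base)}")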
1,384
<reponame>efeslab/llvmlite
#ifndef LLVMPY_CORE_H_
#define LLVMPY_CORE_H_

#include "llvm-c/Core.h"

#include <cstring>
#include <cstdlib>

#if defined(_MSC_VER)
#define HAVE_DECLSPEC_DLL
#endif

#if defined(HAVE_DECLSPEC_DLL)
#define API_EXPORT(RTYPE) __declspec(dllexport) RTYPE
#else
#define API_EXPORT(RTYPE) RTYPE
#endif

extern "C" {

API_EXPORT(const char *)
LLVMPY_CreateString(const char *msg);

API_EXPORT(const char *)
LLVMPY_CreateByteString(const char *buf, size_t len);

API_EXPORT(void)
LLVMPY_DisposeString(const char *msg);

API_EXPORT(LLVMContextRef)
LLVMPY_GetGlobalContext();

API_EXPORT(LLVMContextRef)
LLVMPY_ContextCreate();

} /* end extern "C" */

#endif /* LLVMPY_CORE_H_ */
317
763
package org.batfish.dataplane.rib; import javax.annotation.ParametersAreNonnullByDefault; import org.batfish.datamodel.AnnotatedRoute; import org.batfish.datamodel.KernelRoute; /** RIB for storing {@link KernelRoute}s, which are identified solely by their network. */ @ParametersAreNonnullByDefault public class KernelRib extends AnnotatedRib<KernelRoute> { public KernelRib() { super(); } @Override public int comparePreference(AnnotatedRoute<KernelRoute> lhs, AnnotatedRoute<KernelRoute> rhs) { return 0; } }
178
884
<reponame>rt112000/CDM<filename>schemaDocuments/core/operationsCommon/Tables/Finance/EInvoice/WorksheetLine/CFDIWithholdingComplInterest_MX.1.0.cdm.json { "jsonSchemaSemanticVersion": "1.0.0", "imports": [ { "corpusPath": "cdm:/foundations.1.1.cdm.json" }, { "corpusPath": "/core/operationsCommon/Common.1.0.cdm.json", "moniker": "base_Common" }, { "corpusPath": "/core/operationsCommon/DataEntityView.1.0.cdm.json", "moniker": "base_DataEntityView" }, { "corpusPath": "/core/operationsCommon/Tables/Finance/EInvoice/Transaction/CFDIWithholdingJour_MX.1.0.cdm.json" }, { "corpusPath": "/core/operationsCommon/Tables/Finance/Ledger/Main/CompanyInfo.1.0.cdm.json" } ], "definitions": [ { "entityName": "CFDIWithholdingComplInterest_MX", "extendsEntity": "base_Common/Common", "exhibitsTraits": [ { "traitReference": "is.CDM.entityVersion", "arguments": [ { "name": "versionNumber", "value": "1.0" } ] } ], "hasAttributes": [ { "name": "FinancialSystem", "dataType": "CFDIFinancialSystem_MX", "isNullable": true, "description": "" }, { "name": "InterestBelongsDerivedFinOp", "dataType": "CFDIInterestBelongsDerivedFinOp_MX", "isNullable": true, "description": "" }, { "name": "InterestCashedInTheCurrentPeriod", "dataType": "CFDIInterestCashedInTheCurrentPeriod_MX", "isNullable": true, "description": "" }, { "name": "InterestLossAmount", "dataType": "CFDIInterestLossAmount_MX", "isNullable": true, "description": "" }, { "name": "InterestNominalAmount", "dataType": "CFDIInterestNominalAmount_MX", "isNullable": true, "description": "" }, { "name": "InterestRealAmount", "dataType": "CFDIInterestRealAmount_MX", "isNullable": true, "description": "" }, { "name": "WithholdingJourRecId", "dataType": "CFDIWithholdingJourRecId_MX", "isNullable": true, "description": "" }, { "name": "DataAreaId", "dataType": "string", "isReadOnly": true }, { "entity": { "entityReference": "CFDIWithholdingJour_MX" }, "name": "Relationship_CFDIWithholdingJour_MXRelationship", "resolutionGuidance": { "entityByReference": { "allowReference": true } } }, { "entity": { "entityReference": "CompanyInfo" }, "name": "Relationship_CompanyRelationship", "resolutionGuidance": { "entityByReference": { "allowReference": true } } } ], "displayName": "Interests" }, { "dataTypeName": "CFDIFinancialSystem_MX", "extendsDataType": "integer" }, { "dataTypeName": "CFDIInterestBelongsDerivedFinOp_MX", "extendsDataType": "integer" }, { "dataTypeName": "CFDIInterestCashedInTheCurrentPeriod_MX", "extendsDataType": "integer" }, { "dataTypeName": "CFDIInterestLossAmount_MX", "extendsDataType": "decimal" }, { "dataTypeName": "CFDIInterestNominalAmount_MX", "extendsDataType": "decimal" }, { "dataTypeName": "CFDIInterestRealAmount_MX", "extendsDataType": "decimal" }, { "dataTypeName": "CFDIWithholdingJourRecId_MX", "extendsDataType": "bigInteger" } ] }
2,018
424
<gh_stars>100-1000 import os import sublime import sublime_plugin from ..lib import helpers from ..lib import omnisharp class OmniSharpAddFileToProjectEventListener(sublime_plugin.EventListener): def on_post_save(self, view): if not helpers.is_csharp(view): return omnisharp.get_response(view, '/addtoproject', self._handle_addtoproject) def _handle_addtoproject(self, data): print('file added to project') print(data)
181
450
<reponame>LiuLeif/onnc
//===- IStrStream.h -------------------------------------------------------===//
//
// The ONNC Project
//
// See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#ifndef ONNC_SUPPORT_INPUT_STRING_STREAM_H
#define ONNC_SUPPORT_INPUT_STRING_STREAM_H
#include <onnc/Support/IOSFwd.h>
#include <onnc/Support/IStream.h>
#include <onnc/Support/StringBuf.h>
#include <sstream> //< for std::basic_stringbuf

namespace onnc {

template<typename CharT, typename Traits, typename Alloc>
class BasicIStrStream : public BasicIStream<CharT, Traits>
{
public:
  typedef CharT char_type;
  typedef Traits traits_type;
  typedef Alloc allocator_type;
  typedef typename traits_type::int_type int_type;
  typedef typename traits_type::pos_type pos_type;
  typedef typename traits_type::off_type off_type;

  // Non-standard types:
  typedef std::basic_string<CharT, Traits, Alloc> string_type;
  // XXX: temporarily use std::basic_stringbuf instead of StringBuf
  typedef std::basic_stringbuf<CharT, Traits, Alloc> stringbuf_type;
  typedef BasicIStream<CharT, Traits> istream_type;

public:
  explicit BasicIStrStream(string_type& pStr);

  ~BasicIStrStream() { }

  stringbuf_type* rdbuf() const { return const_cast<stringbuf_type*>(&m_StringBuf); }

  /// str - get a copy of the underlying string
  string_type str() const { return m_StringBuf.str(); }

private:
  stringbuf_type m_StringBuf;
};

} // namespace of onnc

#include <onnc/Support/Bits/IStrStream.tcc>

#endif
566
652
<gh_stars>100-1000 """ Tests for the parts of jsonschema related to the :validator:`format` property. """ from jsonschema.tests.compat import mock, unittest from jsonschema import FormatError, ValidationError, FormatChecker from jsonschema.validators import Draft4Validator class TestFormatChecker(unittest.TestCase): def setUp(self): self.fn = mock.Mock() def test_it_can_validate_no_formats(self): checker = FormatChecker(formats=()) self.assertFalse(checker.checkers) def test_it_raises_a_key_error_for_unknown_formats(self): with self.assertRaises(KeyError): FormatChecker(formats=["o noes"]) def test_it_can_register_cls_checkers(self): with mock.patch.dict(FormatChecker.checkers, clear=True): FormatChecker.cls_checks("new")(self.fn) self.assertEqual(FormatChecker.checkers, {"new" : (self.fn, ())}) def test_it_can_register_checkers(self): checker = FormatChecker() checker.checks("new")(self.fn) self.assertEqual( checker.checkers, dict(FormatChecker.checkers, new=(self.fn, ())) ) def test_it_catches_registered_errors(self): checker = FormatChecker() cause = self.fn.side_effect = ValueError() checker.checks("foo", raises=ValueError)(self.fn) with self.assertRaises(FormatError) as cm: checker.check("bar", "foo") self.assertIs(cm.exception.cause, cause) self.assertIs(cm.exception.__cause__, cause) # Unregistered errors should not be caught self.fn.side_effect = AttributeError with self.assertRaises(AttributeError): checker.check("bar", "foo") def test_format_error_causes_become_validation_error_causes(self): checker = FormatChecker() checker.checks("foo", raises=ValueError)(self.fn) cause = self.fn.side_effect = ValueError() validator = Draft4Validator({"format" : "foo"}, format_checker=checker) with self.assertRaises(ValidationError) as cm: validator.validate("bar") self.assertIs(cm.exception.__cause__, cause)
918
339
<filename>shared/binutils.h
#pragma once

#include <windows.h>

namespace Afx {
namespace BinUtils {

struct MemRange
{
    static MemRange FromSize(DWORD address, DWORD size);

    // inclusive
    DWORD Start;
    // exclusive
    DWORD End;

    MemRange();
    MemRange(DWORD start, DWORD end);

    bool IsEmpty(void) const;

    MemRange And(const MemRange & range) const;
};

class ImageSectionsReader
{
public:
    ImageSectionsReader(HMODULE hModule);

    bool Eof(void);

    void Next(void);

    /// <param name="characteristicsFilter">Minimum bit mask that must be matched, should be a combination of IMAGE_SCN_*</param>
    void Next(DWORD characteristicsFilter);

    PIMAGE_SECTION_HEADER Get(void);

    MemRange GetMemRange(void);

    DWORD GetStartAddress(void);

    DWORD GetSize(void);

private:
    HMODULE m_hModule;
    PIMAGE_SECTION_HEADER m_Section;
    DWORD m_SectionsLeft;
};

/// <remarks>The memory specified by memRange must be readable.</remarks>
MemRange FindBytes(MemRange memRange, char const * pattern, DWORD patternSize);

/// <remarks>The memory specified by memRange must be readable.</remarks>
MemRange FindCString(MemRange memRange, char const * pattern);

/// <remarks>The memory specified by memRange must be readable.</remarks>
MemRange FindWCString(MemRange memRange, wchar_t const * pattern);

/// <remarks>The memory specified by memRange must be readable.</remarks>
/// <param name="hexBytePattern">
/// A pattern like &quot;00 de ?? be ef&quot;
/// The pattern is assumed to be valid; if it is not, nothing will crash, but results
/// can be unexpected.
/// </param>
MemRange FindPatternString(MemRange memRange, char const * hexBytePattern);

/// <returns>0 if not found, otherwise address of vtable</returns>
DWORD FindClassVtable(HMODULE hModule, const char * name, DWORD rttiBaseClassArrayOffset, DWORD completeObjectLocatorOffset);

} // namespace BinUtils
} // namespace Afx
667
8,054
#ifndef MARKDOWNTABLEHELPER_H #define MARKDOWNTABLEHELPER_H #include <QObject> #include "markdowntable.h" class QTimer; namespace vte { class VTextEditor; } namespace vnotex { class MarkdownTableHelper : public QObject { Q_OBJECT public: MarkdownTableHelper(vte::VTextEditor *p_editor, QObject *p_parent = nullptr); void insertTable(int p_bodyRow, int p_col, Alignment p_alignment); public slots: void updateTableBlocks(const QVector<vte::peg::TableBlock> &p_blocks); private: // Return the block index which contains the cursor. int currentCursorTableBlock(const QVector<vte::peg::TableBlock> &p_blocks) const; void formatTable(); bool isSmartTableEnabled() const; QTimer *getTimer(); vte::VTextEditor *m_editor = nullptr; // Use getTimer() to access. QTimer *m_timer = nullptr; vte::peg::TableBlock m_block; }; } #endif // MARKDOWNTABLEHELPER_H
410
884
{ "manifestName": "WorksheetLine", "entities": [ { "type": "LocalEntity", "entityName": "RetailLabelChangeJournalTrans", "entityPath": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans" } ], "jsonSchemaSemanticVersion": "1.0.0", "relationships": [ { "name": "", "fromEntity": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans", "fromEntityAttribute": "Relationship_InventTableRelationshipId", "toEntity": "/core/operationsCommon/Tables/SupplyChain/ProductInformationManagement/Main/InventTable.1.1.cdm.json/InventTable", "toEntityAttribute": "RecId" }, { "name": "", "fromEntity": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans", "fromEntityAttribute": "Relationship_RetailLabelChangeJournalTableRelationshipId", "toEntity": "/core/operationsCommon/Tables/Commerce/ChannelManagement/BrickAndMortarStore/WorksheetHeader/RetailLabelChangeJournalTable.1.0.cdm.json/RetailLabelChangeJournalTable", "toEntityAttribute": "RecId" }, { "name": "", "fromEntity": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans", "fromEntityAttribute": "Relationship_RetailStoreTableRelationshipId", "toEntity": "/core/operationsCommon/Tables/Commerce/ChannelManagement/BrickAndMortarStore/Main/RetailStoreTable.1.0.cdm.json/RetailStoreTable", "toEntityAttribute": "RecId" }, { "name": "", "fromEntity": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans", "fromEntityAttribute": "Relationship_RetailVariantIdRelationshipId", "toEntity": "/core/operationsCommon/Tables/SupplyChain/ProductInformationManagement/Main/InventDimCombination.1.1.cdm.json/InventDimCombination", "toEntityAttribute": "RecId" }, { "name": "", "fromEntity": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans", "fromEntityAttribute": "Relationship_CompanyRelationshipId", "toEntity": "/core/operationsCommon/Tables/Finance/Ledger/Main/CompanyInfo.1.1.cdm.json/CompanyInfo", "toEntityAttribute": "RecId" }, { "name": "", "fromEntity": "/core/operationsCommon/Tables/Commerce/InventoryAndAdvancedWarehouse/Group/RetailInventItemLabel.1.0.cdm.json/RetailInventItemLabel", "fromEntityAttribute": "Relationship_RetailLabelChangeJournalTransRelationshipId", "toEntity": "RetailLabelChangeJournalTrans.1.0.cdm.json/RetailLabelChangeJournalTrans", "toEntityAttribute": "RecId" } ] }
995
711
<reponame>jingetiema2100/MicroCommunity
package com.java110.api.bmo.smallWeChat.impl;

import com.alibaba.fastjson.JSONObject;
import com.java110.api.bmo.ApiBaseBMO;
import com.java110.api.bmo.smallWeChat.ISmallWechatAttrBMO;
import com.java110.core.context.DataFlowContext;
import com.java110.intf.store.ISmallWechatAttrInnerServiceSMO;
import com.java110.po.smallWechatAttr.SmallWechatAttrPo;
import com.java110.utils.constant.BusinessTypeConstant;
import com.java110.utils.util.BeanConvertUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service("smallWechatAttrBMOImpl")
public class SmallWechatAttrBMOImpl extends ApiBaseBMO implements ISmallWechatAttrBMO {

    @Autowired
    private ISmallWechatAttrInnerServiceSMO smallWechatAttrInnerServiceSMOImpl;

    /**
     * Add small WeChat attribute information.
     *
     * @param paramInJson     input parameters passed in by the API caller
     * @param dataFlowContext data flow context
     */
    public void addSmallWechatAttr(JSONObject paramInJson, DataFlowContext dataFlowContext) {
        paramInJson.put("attrId", "-1");
        SmallWechatAttrPo smallWechatAttrPo = BeanConvertUtil.covertBean(paramInJson, SmallWechatAttrPo.class);
        super.insert(dataFlowContext, smallWechatAttrPo, BusinessTypeConstant.BUSINESS_TYPE_SAVE_SMALL_WECHAT_ATTR);
    }

    /**
     * Update small WeChat attribute information.
     *
     * @param paramInJson     input parameters passed in by the API caller
     * @param dataFlowContext data flow context
     */
    public void updateSmallWechatAttr(JSONObject paramInJson, DataFlowContext dataFlowContext) {
        SmallWechatAttrPo smallWechatAttrPo = BeanConvertUtil.covertBean(paramInJson, SmallWechatAttrPo.class);
        super.update(dataFlowContext, smallWechatAttrPo, BusinessTypeConstant.BUSINESS_TYPE_UPDATE_SMALL_WECHAT_ATTR);
    }

    /**
     * Delete small WeChat attribute information.
     *
     * @param paramInJson     input parameters passed in by the API caller
     * @param dataFlowContext data flow context
     */
    public void deleteSmallWechatAttr(JSONObject paramInJson, DataFlowContext dataFlowContext) {
        SmallWechatAttrPo smallWechatAttrPo = BeanConvertUtil.covertBean(paramInJson, SmallWechatAttrPo.class);
        super.update(dataFlowContext, smallWechatAttrPo, BusinessTypeConstant.BUSINESS_TYPE_DELETE_SMALL_WECHAT_ATTR);
    }
}
1,085
3,508
<gh_stars>1000+ package com.fishercoder.solutions; import java.util.ArrayList; import java.util.List; /** * 1417. Reformat The String * * Given alphanumeric string s. (Alphanumeric string is a string consisting of lowercase English letters and digits). * You have to find a permutation of the string where no letter is followed by another letter and no digit is followed by another digit. * That is, no two adjacent characters have the same type. * Return the reformatted string or return an empty string if it is impossible to reformat the string. * * Example 1: * Input: s = "a0b1c2" * Output: "0a1b2c" * Explanation: No two adjacent characters have the same type in "0a1b2c". "a0b1c2", "0a1b2c", "0c2a1b" are also valid permutations. * * Example 2: * Input: s = "leetcode" * Output: "" * Explanation: "leetcode" has only characters so we cannot separate them by digits. * * Example 3: * Input: s = "1229857369" * Output: "" * Explanation: "1229857369" has only digits so we cannot separate them by characters. * * Example 4: * Input: s = "covid2019" * Output: "c2o0v1i9d" * * Example 5: * Input: s = "ab123" * Output: "1a2b3" * * Constraints: * 1 <= s.length <= 500 * s consists of only lowercase English letters and/or digits. * */ public class _1417 { public static class Solution1 { public String reformat(String s) { List<Character> characterList = new ArrayList<>(); List<Character> numberList = new ArrayList<>(); for (char c : s.toCharArray()) { if (Character.isAlphabetic(c)) { characterList.add(c); } else { numberList.add(c); } } if (Math.abs(characterList.size() - numberList.size()) > 1) { return ""; } else { StringBuilder sb = new StringBuilder(); if (characterList.size() > numberList.size()) { for (int i = 0; i < characterList.size() - 1; i++) { sb.append(characterList.get(i)); sb.append(numberList.get(i)); } sb.append(characterList.get(characterList.size() - 1)); } else if (characterList.size() == numberList.size()) { for (int i = 0; i < numberList.size(); i++) { sb.append(numberList.get(i)); sb.append(characterList.get(i)); } } else { for (int i = 0; i < numberList.size() - 1; i++) { sb.append(numberList.get(i)); sb.append(characterList.get(i)); } sb.append(numberList.get(numberList.size() - 1)); } return sb.toString(); } } } }
1,368
975
/*  Copyright (C) 2012  <NAME> <<EMAIL>>

    Distributed under the Boost Software License, Version 1.0.
        (See accompanying file LICENSE_1_0.txt or copy at
            http://www.boost.org/LICENSE_1_0.txt)
*/

#include "utils/test_results.h"
#include "utils/test_reporter.h"
#include "insn/tests.h"
#include <simdpp/simd.h>
#include <algorithm>
#include <cfenv>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <iomanip>
#include <vector>
#include <simdpp/dispatch/get_arch_linux_cpuinfo.h>
#include <simdpp/dispatch/get_arch_raw_cpuid.h>
#include <simdpp/dispatch/get_arch_string_list.h>

simdpp::Arch get_arch_from_system(bool is_simulator)
{
    (void) is_simulator;
    std::vector<simdpp::Arch> supported_archs;
#if SIMDPP_HAS_GET_ARCH_LINUX_CPUINFO
    if (!is_simulator)
        supported_archs.push_back(simdpp::get_arch_linux_cpuinfo());
#endif
#if SIMDPP_HAS_GET_ARCH_RAW_CPUID
    supported_archs.push_back(simdpp::get_arch_raw_cpuid());
#endif
    if (supported_archs.empty()) {
        std::cerr << "No architecture information could be retrieved. Testing not supported\n";
        std::exit(EXIT_FAILURE);
    }

    simdpp::Arch result = supported_archs.front();
    for (unsigned i = 1; i < supported_archs.size(); ++i) {
        if (supported_archs[i] != result) {
            std::cerr << "Different CPU architecture evaluators return different results\n"
                      << std::hex << (unsigned)result << " " << i << " "
                      << std::hex << (unsigned)supported_archs[i] << "\n";
            std::exit(EXIT_FAILURE);
        }
    }

    std::cerr << "Evaluated architecture bit set: " << std::hex << (unsigned) result << "\n" << std::dec;
    return result;
}

simdpp::Arch parse_arch_from_args(int argc, char* argv[])
{
    return simdpp::get_arch_string_list(argv + 1, argc - 1, "--arch_");
}

void parse_args(int argc, char* argv[], bool& is_simulator, bool& force_arch)
{
    is_simulator = false;
    force_arch = false;
    for (int i = 1; i < argc; ++i) {
        if (std::strcmp(argv[i], "--simulator") == 0)
            is_simulator = true;
        if (std::strcmp(argv[i], "--force_arch") == 0)
            force_arch = true;
    }
}

bool find_arch_null(const simdpp::detail::FnVersion& a)
{
    return a.arch_name && std::strcmp(a.arch_name, "arch_null") == 0;
}

void invoke_test_run_function(const simdpp::detail::FnVersion& fn,
                              TestResults& res, TestReporter& reporter,
                              const TestOptions& options)
{
    reinterpret_cast<void(*)(TestResults&, TestReporter&, const TestOptions&)>(fn.fun_ptr)(res, reporter, options);
}

/*  We test libsimdpp by comparing the results of the same computations done in
    different 'architectures'. That is, we build a list of results for each
    instruction set available plus the 'null' instruction set (simple,
    non-vectorized code). All tests are generated from the same source code,
    thus any differences are likely to be caused by bugs in the library.
*/ int main(int argc, char* argv[]) { bool is_simulator = false; bool force_arch = false; parse_args(argc, argv, is_simulator, force_arch); simdpp::Arch current_arch; if (force_arch) current_arch = parse_arch_from_args(argc, argv); else current_arch = get_arch_from_system(is_simulator); TestOptions options; options.is_simulator = is_simulator; TestReporter tr(std::cerr); const auto& arch_list = get_test_archs(); auto null_arch = std::find_if(arch_list.begin(), arch_list.end(), find_arch_null); if (null_arch == arch_list.end()) { tr.out() << "FATAL: NULL architecture not defined\n"; return EXIT_FAILURE; } set_round_to_nearest(); TestResults null_results(null_arch->arch_name); invoke_test_run_function(*null_arch, null_results, tr, options); for (auto it = arch_list.begin(); it != arch_list.end(); it++) { if (it->fun_ptr == NULL || it == null_arch) { continue; } if (!simdpp::test_arch_subset(current_arch, it->needed_arch)) { tr.out() << "Not testing: " << it->arch_name << std::endl; continue; } tr.out() << "Testing: " << it->arch_name << std::endl; TestResults results(it->arch_name); invoke_test_run_function(*it, results, tr, options); report_test_comparison(null_results, results, tr); } tr.report_summary(); return tr.success() ? EXIT_SUCCESS : EXIT_FAILURE; }
1,931
471
<reponame>madanagopaltcomcast/pxCore<gh_stars>100-1000 ///////////////////////////////////////////////////////////////////////////// // Name: wx/msw/calctrl.h // Purpose: wxCalendarCtrl control implementation for MSW // Author: <NAME> // Copyright: (C) 2008 <NAME> <<EMAIL>> // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// #ifndef _WX_MSW_CALCTRL_H_ #define _WX_MSW_CALCTRL_H_ class WXDLLIMPEXP_ADV wxCalendarCtrl : public wxCalendarCtrlBase { public: wxCalendarCtrl() { Init(); } wxCalendarCtrl(wxWindow *parent, wxWindowID id, const wxDateTime& date = wxDefaultDateTime, const wxPoint& pos = wxDefaultPosition, const wxSize& size = wxDefaultSize, long style = wxCAL_SHOW_HOLIDAYS, const wxString& name = wxCalendarNameStr) { Init(); Create(parent, id, date, pos, size, style, name); } bool Create(wxWindow *parent, wxWindowID id, const wxDateTime& date = wxDefaultDateTime, const wxPoint& pos = wxDefaultPosition, const wxSize& size = wxDefaultSize, long style = wxCAL_SHOW_HOLIDAYS, const wxString& name = wxCalendarNameStr); virtual bool SetDate(const wxDateTime& date); virtual wxDateTime GetDate() const; virtual bool SetDateRange(const wxDateTime& lowerdate = wxDefaultDateTime, const wxDateTime& upperdate = wxDefaultDateTime); virtual bool GetDateRange(wxDateTime *lowerdate, wxDateTime *upperdate) const; virtual bool EnableMonthChange(bool enable = true); virtual void Mark(size_t day, bool mark); virtual void SetHoliday(size_t day); virtual wxCalendarHitTestResult HitTest(const wxPoint& pos, wxDateTime *date = NULL, wxDateTime::WeekDay *wd = NULL); virtual void SetWindowStyleFlag(long style); protected: virtual wxSize DoGetBestSize() const; virtual WXDWORD MSWGetStyle(long style, WXDWORD *exstyle) const; virtual bool MSWOnNotify(int idCtrl, WXLPARAM lParam, WXLPARAM *result); void MSWOnClick(wxMouseEvent& event); void MSWOnDoubleClick(wxMouseEvent& event); private: void Init(); // bring the control in sync with m_marks void UpdateMarks(); // set first day of week in the control to correspond to our // wxCAL_MONDAY_FIRST flag void UpdateFirstDayOfWeek(); // reset holiday information virtual void ResetHolidayAttrs() { m_holidays = 0; } // redisplay holidays virtual void RefreshHolidays() { UpdateMarks(); } // current date, we need to store it instead of simply retrieving it from // the control as needed in order to be able to generate the correct events // from MSWOnNotify() wxDateTime m_date; // bit field containing the state (marked or not) of all days in the month wxUint32 m_marks; // the same but indicating whether a day is a holiday or not wxUint32 m_holidays; DECLARE_DYNAMIC_CLASS(wxCalendarCtrl) wxDECLARE_NO_COPY_CLASS(wxCalendarCtrl); }; #endif // _WX_MSW_CALCTRL_H_
1,387
1,160
<gh_stars>1000+
{
    "op_stroff" : {
        "+example" : """
                ins = ida_ua.insn_t()
                if ida_ua.decode_insn(ins, some_address):
                    path_len = 1
                    path = ida_pro.tid_array(path_len)
                    path[0] = ida_struct.get_struc_id("my_structure_t")
                    ida_bytes.op_stroff(ins, 0, path.cast(), path_len, 0)
"""
    }
}
163
32,544
<reponame>DBatOWL/tutorials<filename>core-java-modules/core-java-lang-4/src/main/java/com/baeldung/javadocmemberreference/Person.java package com.baeldung.javadocmemberreference; import com.baeldung.vehicle.Car; public class Person { Person() { } /** * Also, check the {@link #move() Move} method for more movement details. */ public void walk() { } /** * Check this {@link #move(String) Move} method for direction oriented movement. */ public void move() { } public void move(String direction) { } /** * Additionally, check this {@link Animal#run(String) Run} method for direction based run. */ public void run() { } /** * Also consider checking {@link com.baeldung.vehicle.Vehicle#Vehicle() Vehicle} constructor to initialize vehicle object. */ public void goToWork() { } /** * Have a look at {@link Car#getNumberOfSeats() SeatsAvailability} method for checking the available seats needed for driving. */ public void drive() { } }
395
432
package com.hccake.extend.pay.ali.enums;

import cn.hutool.core.util.StrUtil;
import com.fasterxml.jackson.annotation.JsonCreator;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.var;

/**
 * Trade status
 */
@Getter
@AllArgsConstructor
public enum TradeStatus {

    /**
     * Success
     */
    SUCCESS("TRADE_SUCCESS"),
    /**
     * Not yet paid
     */
    WAIT("WAIT_BUYER_PAY"),
    /**
     * Unpaid transaction closed after timeout, or fully refunded after payment
     */
    CLOSED("TRADE_CLOSED"),
    /**
     * Transaction finished; refunds are no longer possible
     */
    FINISHED("TRADE_FINISHED"),
    /**
     * Error. Check subCode and subMsg for details
     */
    ERROR(""),

    ;

    private final String str;

    @JsonCreator
    public static TradeStatus of(String status) {
        if (StrUtil.isBlank(status)) {
            return ERROR;
        }
        for (var e : values()) {
            if (e.getStr().equals(status)) {
                return e;
            }
        }
        return ERROR;
    }

}
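// Illustrative usage sketch (not part of the original file): of(...) maps a raw
// Alipay status string to an enum constant and falls back to ERROR for blank or
// unknown input. The sample literals below are hypothetical.
//
//   TradeStatus.of("TRADE_SUCCESS"); // -> SUCCESS
//   TradeStatus.of("UNKNOWN");       // -> ERROR
//   TradeStatus.of(null);            // -> ERROR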
437
766
""" Utils """ from typing import Dict from typing import Optional from typing import Sequence from typing import Tuple from typing import Union import torch from lightautoml.automl.base import AutoML from lightautoml.dataset.roles import ColumnRole from lightautoml.dataset.roles import TargetRole from lightautoml.dataset.roles import TreatmentRole from lightautoml.ml_algo.linear_sklearn import LinearLBFGS from lightautoml.pipelines.features.linear_pipeline import LinearFeatures from lightautoml.pipelines.ml.base import MLPipeline from lightautoml.reader.base import PandasToPandasReader from lightautoml.tasks import Task def create_linear_automl( task: Task, n_folds: int = 5, timeout: Optional[None] = None, n_reader_jobs: int = 1, cpu_limit: int = 4, # verbose: int = 0, random_state: int = 42, ): """Linear automl Args: base_task: task n_folds: number of folds timeout: Stub, not used. random_state: random_state Returns: automl: """ torch.set_num_threads(cpu_limit) reader = PandasToPandasReader(task, cv=n_folds, random_state=random_state, n_jobs=n_reader_jobs) pipe = LinearFeatures() model = LinearLBFGS() pipeline = MLPipeline([model], pre_selection=None, features_pipeline=pipe, post_selection=None) automl = AutoML(reader, [[pipeline]], skip_conn=False) # , verbose=0) return automl def _get_treatment_role( roles: Dict[Union[ColumnRole, str], Union[str, Sequence[str]]] ) -> Tuple[Union[TreatmentRole, str], str]: """Extract treatment pair (key/val) from roles Args: roles: Roles Returns: role, col: role, column name """ treatment_role: Optional[Union[TreatmentRole, str]] = None treatment_col: str for k, v in roles.items(): if isinstance(k, TreatmentRole) or (isinstance(k, str) and k == "treatment"): if not isinstance(v, str) and isinstance(v, Sequence): raise RuntimeError("Treatment column must be unique") else: treatment_role, treatment_col = k, v break if treatment_role is None: raise RuntimeError("Treatment role is absent") return treatment_role, treatment_col def _get_target_role( roles: Dict[Union[ColumnRole, str], Union[str, Sequence[str]]] ) -> Tuple[Union[TargetRole, str], str]: """Extract target pair (key/val) from roles Args: roles: Roles Returns: role, col: role, column name """ target_role: Optional[Union[TargetRole, str]] = None target_col: str for k, v in roles.items(): if isinstance(k, TargetRole) or (isinstance(k, str) and k == "target"): if isinstance(v, str): target_role, target_col = k, v break else: raise RuntimeError("Bad target column type") if target_role is None: raise RuntimeError("Target role is absent") return target_role, target_col
1,206
571
<reponame>ufora/ufora<gh_stars>100-1000 # Copyright 2015 Ufora Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class DummyExecutor(object): @property def remotely(self): return self def __enter__(self): pass def __exit__(self, *args): return True w = 1 y = 2 def f(x): return x + y class C(object): def __init__(self, x): self.x = x def g(self, arg): return self.x + arg executor = DummyExecutor() c = C(3) with executor.remotely: z = f(w) + c.g(4) * C(5).g(6)
404
1,233
package com.github.mustachejava.reflect; import java.util.List; /** * Simple specialization of Predicate */ public interface Guard { boolean apply(List<Object> input); }
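// Illustrative usage sketch (not part of the original file): Guard has a single
// abstract method, so it can be implemented with a lambda. The guard name and
// sample input below are hypothetical.
//
//   Guard nonNullFirstArg = input -> !input.isEmpty() && input.get(0) != null;
//   nonNullFirstArg.apply(java.util.Arrays.asList("x")); // evaluates to true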
51
348
{"nom":"Baudres","circ":"2ème circonscription","dpt":"Indre","inscrits":369,"abs":170,"votants":199,"blancs":19,"nuls":11,"exp":169,"res":[{"nuance":"LR","nom":"<NAME>","voix":93},{"nuance":"MDM","nom":"Mme <NAME>","voix":76}]}
91
965
CObList list; CAge* pa1; CAge* pa2; list.AddHead(pa1 = new CAge(21)); list.AddHead(pa2 = new CAge(40)); // List now contains (40, 21). ASSERT(*(CAge*)list.RemoveTail() == CAge(21)); // Old tail ASSERT(*(CAge*)list.GetTail() == CAge(40)); // New tail delete pa1; delete pa2; // Clean up memory.
128
892
<reponame>github/advisory-database { "schema_version": "1.2.0", "id": "GHSA-9qv9-388v-rqg7", "modified": "2022-05-13T01:15:24Z", "published": "2022-05-13T01:15:24Z", "aliases": [ "CVE-2011-0664" ], "details": "Microsoft .NET Framework 2.0 SP1 and SP2, 3.5 Gold and SP1, 3.5.1, and 4.0, and Silverlight 4 before 4.0.60531.0, does not properly validate arguments to unspecified networking API functions, which allows remote attackers to execute arbitrary code via (1) a crafted XAML browser application (aka XBAP), (2) a crafted ASP.NET application, (3) a crafted .NET Framework application, or (4) a crafted Silverlight application, aka \".NET Framework Array Offset Vulnerability.\"", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2011-0664" }, { "type": "WEB", "url": "https://docs.microsoft.com/en-us/security-updates/securitybulletins/2011/ms11-039" }, { "type": "WEB", "url": "https://oval.cisecurity.org/repository/search/definition/oval%3Aorg.mitre.oval%3Adef%3A12105" } ], "database_specific": { "cwe_ids": [ "CWE-20" ], "severity": "HIGH", "github_reviewed": false } }
525
3,434
<filename>jetcache-core/src/main/java/com/alicp/jetcache/support/JavaValueEncoder.java package com.alicp.jetcache.support; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.lang.ref.WeakReference; /** * Created on 2016/10/4. * * @author <a href="mailto:<EMAIL>">huangli</a> */ public class JavaValueEncoder extends AbstractValueEncoder { public static final JavaValueEncoder INSTANCE = new JavaValueEncoder(true); protected static int IDENTITY_NUMBER = 0x4A953A80; private static final int INIT_BUF_SIZE = 256; public JavaValueEncoder(boolean useIdentityNumber) { super(useIdentityNumber); } private static ThreadLocal<WeakReference<ByteArrayOutputStream>> threadLocal = ThreadLocal.withInitial(() -> new WeakReference<>(new ByteArrayOutputStream(INIT_BUF_SIZE))); @Override public byte[] apply(Object value) { try { WeakReference<ByteArrayOutputStream> ref = threadLocal.get(); ByteArrayOutputStream bos = ref.get(); if (bos == null) { bos = new ByteArrayOutputStream(INIT_BUF_SIZE); threadLocal.set(new WeakReference<>(bos)); } try { if (useIdentityNumber) { bos.write((IDENTITY_NUMBER >> 24) & 0xFF); bos.write((IDENTITY_NUMBER >> 16) & 0xFF); bos.write((IDENTITY_NUMBER >> 8) & 0xFF); bos.write(IDENTITY_NUMBER & 0xFF); } ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(value); oos.flush(); return bos.toByteArray(); } finally { bos.reset(); } } catch (IOException e) { StringBuilder sb = new StringBuilder("Java Encode error. "); sb.append("msg=").append(e.getMessage()); throw new CacheEncodeException(sb.toString(), e); } } }
931
363
/* Copyright 2012 predic8 GmbH, www.predic8.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.predic8.membrane.core.http.cookie; import com.predic8.membrane.core.Constants; import com.predic8.membrane.core.http.Header; import com.predic8.membrane.core.http.HeaderField; import com.predic8.membrane.core.http.Request; /** * Adapter between Tomcat classes ({@link ServerCookie} etc.) and Membrane * classes ({@link Request} etc.). */ public class MimeHeaders { private final HeaderField header[]; public MimeHeaders(Header header) { this.header = header.getAllHeaderFields(); } public int findHeader(String string, int pos) { while (true) { if (pos >= header.length) return -1; if (header[pos].getHeaderName().equals(string)) return pos; pos++; } } public MessageBytes getValue(int pos) { MessageBytes b = MessageBytes.newInstance(); byte buf[] = header[pos].getValue().getBytes(Constants.ISO_8859_1_CHARSET); b.setBytes(buf, 0, buf.length); return b; } }
499
488
<filename>projects/SATIrE/src/grato/GTextTransformation.h // Copyright 2005,2006,2007 <NAME>, <NAME> // $Id: GTextTransformation.h,v 1.2 2007-03-08 15:36:49 markus Exp $ // Author: <NAME> #ifndef GTEXTTRANSFORM_H #define GTEXTTRANSFORM_H #include "GTextBackEnd.h" class GTextTransformation : public GTextBackEnd { public: GTextTransformation() {} virtual ~GTextTransformation(); void transform(GGrammar* grammar) { _testGrammar=grammar; generate(grammar); printRegisteredAuxRules(); // postprocessing of registered auxiliary rules for generation } protected: //virtual void generateGrammarProlog(GGrammar* grammar); virtual void generateProductionRhsTerminal(GNonTerminal* _, GTerminal* rhsSymbol); protected: void printRegisteredAuxRules(); void registerAuxRule(string auxSymName,GSymbol* sym); list<pair<string,GSymbol*> > auxRuleList; private: GGrammar* _testGrammar; }; #endif
321
775
<gh_stars>100-1000 /* Mode: -*- C++ -*- */ // vim: set ai ts=4 sw=4 expandtab /* @BC * Copyright (c) 1993 * by Microelectronics and Computer Technology Corporation (MCC) * All Rights Reserved * * Permission to use, copy, modify, and distribute this software and its * documentation for any purpose and without fee is hereby granted, * provided that this notice be retained unaltered, and that the name of * MCC and its shareholders and participants shall not be used in * advertising or publicity pertaining to distribution of the software * without specific written prior permission. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. */ #if !defined(_RBL_Cstruct_h) #define _RBL_Cstruct_h 1 #include "rosette.h" #include "Ob.h" class GenericDescriptor : public Actor { STD_DECLS(GenericDescriptor); protected: GenericDescriptor(pExt); GenericDescriptor(int, pOb, pOb, pOb, pExt); virtual int traversePtrs(PSOb__PSOb); virtual int traversePtrs(SI__PSOb); virtual void traversePtrs(V__PSOb); public: /* This seems to be the base-level protocol provided by descriptors. * * (method (S-get base path) ...) * (method (S-desc base path) ...) * (method (S-deref base path) ...) * (method (select base r) ...) * (method (S-set base [val & r]) ...) * (method (S-tupleSet base [val & r]) ...) * (method (nth base [i & path]) ...) * */ uint32_t _offset, _align_to, _size; /* memory map */ Ob* mnemonic; /* was consed up from rosette heap */ Ob* imported; /* was returned by a foreign function or is a */ /* substructure of a critter returned by a ff */ Ob* freeStructOnGC; static GenericDescriptor* create(); virtual ~GenericDescriptor(); virtual Ob* sGet(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sDesc(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sDeref(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* select(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual Ob* sTupleSet(Ctxt* ctxt, uint32_t base, Tuple* val, Tuple* path, int pindex = 0); virtual Ob* nthBase(Ctxt* ctxt, uint32_t base, int i, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); virtual convertArgReturnPair convertActualArg(Ctxt*, Ob*); virtual Ob* convertActualRslt(Ctxt*, uint32_t); Ob* nullDescriptor(Ctxt*); virtual Ob* oprnSwitch(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); Ob* sBox(uint32_t off) { GenericDescriptor* rslt = (GenericDescriptor*)cloneTo(meta(), parent()); rslt->mbox = emptyMbox; rslt->_offset = off; rslt->imported = imported; rslt->freeStructOnGC = freeStructOnGC; /* should rslt be registered as a foreign ob? 
*/ return (rslt); } virtual uint32_t absoluteAddress(uint32_t base); void setAddrContents(uint32_t base, uint32_t val); }; class NullDescriptor : public GenericDescriptor { STD_DECLS(NullDescriptor); protected: NullDescriptor(pExt); public: static NullDescriptor* create(); virtual Ob* sGet(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sDesc(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sDeref(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* select(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual Ob* nthBase(Ctxt* ctxt, uint32_t base, int i, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); virtual Ob* isNullP(); virtual uint32_t absoluteAddress(uint32_t base); }; class AtomicDescriptor : public GenericDescriptor { STD_DECLS(AtomicDescriptor); protected: AtomicDescriptor(RblBool*, pExt); AtomicDescriptor(RblBool*, int, pOb, pOb, pOb, pExt); virtual int traversePtrs(PSOb__PSOb); virtual int traversePtrs(SI__PSOb); virtual void traversePtrs(V__PSOb); public: RblBool* _signed; static AtomicDescriptor* create(RblBool*); static AtomicDescriptor* create(); virtual Ob* sGet(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); virtual convertArgReturnPair convertActualArg(Ctxt*, Ob*); virtual Ob* convertActualRslt(Ctxt*, uint32_t); virtual uint32_t absoluteAddress(uint32_t base); }; class CStructure : public GenericDescriptor { STD_DECLS(CStructure); protected: CStructure(RblTable*, Tuple*, pExt); virtual int traversePtrs(PSOb__PSOb); virtual int traversePtrs(SI__PSOb); virtual void traversePtrs(V__PSOb); public: RblTable* _descs; Tuple* _fieldNames; static CStructure* create(RblTable*, Tuple*); static CStructure* create(); virtual Ob* select(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sTupleSet(Ctxt* ctxt, uint32_t base, Tuple* val, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); }; class CArray : public GenericDescriptor { STD_DECLS(CArray); protected: CArray(uint16_t, GenericDescriptor*, pExt); CArray(int s, pOb m, pOb p, pOb mbx, pExt, uint16_t, GenericDescriptor*); virtual int traversePtrs(PSOb__PSOb); virtual int traversePtrs(SI__PSOb); virtual void traversePtrs(V__PSOb); public: uint16_t _numElems; uint16_t filler_up_please; GenericDescriptor* _elemDesc; static CArray* create(uint16_t, GenericDescriptor*); static CArray* create(); virtual Ob* sTupleSet(Ctxt* ctxt, uint32_t base, Tuple* val, Tuple* path, int pindex = 0); virtual Ob* nthBase(Ctxt* ctxt, uint32_t base, int i, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); }; class CharArray : public CArray { STD_DECLS(CharArray); protected: CharArray(uint16_t, GenericDescriptor*, pExt); CharArray(int s, pOb m, pOb p, pOb mbx, pExt, uint16_t, GenericDescriptor*); public: static CharArray* create(uint16_t, GenericDescriptor*); static CharArray* create(); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); }; class CharArray0 : public CharArray { STD_DECLS(CharArray0); protected: CharArray0(uint16_t, GenericDescriptor*, pExt); public: static CharArray0* create(uint16_t, GenericDescriptor*); static CharArray0* 
create(); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); }; class CRef : public GenericDescriptor { STD_DECLS(CRef); protected: CRef(GenericDescriptor*, pExt); CRef(GenericDescriptor*, int, pOb, pOb, pOb, pExt); virtual int traversePtrs(PSOb__PSOb); virtual int traversePtrs(SI__PSOb); virtual void traversePtrs(V__PSOb); public: GenericDescriptor* _desc; static CRef* create(GenericDescriptor*); static CRef* create(); virtual Ob* sDeref(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual Ob* nthBase(Ctxt* ctxt, uint32_t base, int i, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); virtual convertArgReturnPair convertActualArg(Ctxt*, Ob*); virtual Ob* convertActualRslt(Ctxt*, uint32_t); }; class CharRef : public CRef { STD_DECLS(CharRef); protected: CharRef(GenericDescriptor*, pExt); CharRef(GenericDescriptor*, int, pOb, pOb, pOb, pExt); public: static CharRef* create(GenericDescriptor*); static CharRef* create(); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual convertArgReturnPair convertActualArg(Ctxt*, Ob*); }; class CRef0 : public CRef { STD_DECLS(CRef0); protected: CRef0(GenericDescriptor*, pExt); CRef0(GenericDescriptor*, int, pOb, pOb, pOb, pExt); public: static CRef0* create(); static CRef0* create(GenericDescriptor*); virtual Ob* sGet(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); }; class CharRef0 : public CRef0 { STD_DECLS(CharRef0); protected: CharRef0(pExt); public: static CharRef0* create(); virtual Ob* flatten(Ctxt*, uint32_t, RblTable*); virtual Ob* sSet(Ctxt* ctxt, uint32_t base, Ob* val, Tuple* path, int pindex = 0); virtual convertArgReturnPair convertActualArg(Ctxt*, Ob*); }; class CUnion : public GenericDescriptor { STD_DECLS(CUnion); protected: CUnion(RblTable*, Tuple*, pExt); virtual int traversePtrs(PSOb__PSOb); virtual int traversePtrs(SI__PSOb); virtual void traversePtrs(V__PSOb); public: RblTable* _descs; Tuple* _fieldNames; static CUnion* create(RblTable*, Tuple*); static CUnion* create(); virtual Ob* select(Ctxt* ctxt, uint32_t base, Tuple* path, int pindex = 0); virtual Ob* flatten(Ctxt* ctxt, uint32_t base, RblTable*); }; #endif
4,173
499
#pragma pack(push, 1) struct LocateObjectRequest { short has_response; short type; long key; short mode; char name[1]; }; struct LocateObjectResponse { short has_response; short success; short error_code; long key; }; struct FreeLockRequest { short has_response; short type; long key; }; struct FreeLockResponse { short has_response; short success; short error_code; }; struct CopyDirRequest { short has_response; short type; long key; }; struct CopyDirResponse { short has_response; short success; short error_code; long key; }; struct ParentRequest { short has_response; short type; long key; }; struct ParentResponse { short has_response; short success; short error_code; long key; }; struct ExamineObjectRequest { short has_response; short type; long key; }; struct ExamineObjectResponse { short has_response; short success; short error_code; short disk_key; short entry_type; int size; int protection; int date[3]; char data[1]; }; struct ExamineNextRequest { short has_response; short type; long key; short disk_key; }; struct ExamineNextResponse { short has_response; short success; short error_code; short disk_key; short entry_type; int size; int protection; int date[3]; char data[1]; }; struct FindXxxRequest { short has_response; short type; long key; char name[1]; }; struct FindXxxResponse { short has_response; short success; short error_code; long arg1; }; struct ReadRequest { short has_response; short type; long arg1; int address; int length; }; struct ReadResponse { short has_response; short success; short error_code; int actual; }; struct WriteRequest { short has_response; short type; long arg1; int address; int length; }; struct WriteResponse { short has_response; short success; short error_code; int actual; }; struct SeekRequest { short has_response; short type; long arg1; int new_pos; int mode; }; struct SeekResponse { short has_response; short success; short error_code; int old_pos; }; struct EndRequest { short has_response; short type; long arg1; }; struct EndResponse { short has_response; short success; short error_code; }; struct DeleteObjectRequest { short has_response; short type; long key; char name[1]; }; struct DeleteObjectResponse { short has_response; short success; short error_code; }; struct RenameObjectRequest { short has_response; short type; long key; long target_dir; unsigned char name_len; unsigned char new_name_len; }; struct RenameObjectResponse { short has_response; short success; short error_code; }; struct CreateDirRequest { short has_response; short type; long key; char name[1]; }; struct CreateDirResponse { short has_response; short success; short error_code; long key; }; struct SetProtectRequest { short has_response; short type; long key; long mask; char name[1]; }; struct SetProtectResponse { short has_response; short success; short error_code; }; struct SetCommentRequest { short has_response; short type; long key; unsigned char name_len; unsigned char comment_len; }; struct SetCommentResponse { short has_response; short success; short error_code; }; struct SameLockRequest { short has_response; short type; long key1; long key2; }; struct SameLockResponse { short has_response; short success; short error_code; }; struct ExamineFhRequest { short has_response; short type; long arg1; }; struct ExamineFhResponse { short has_response; short success; short error_code; short disk_key; short entry_type; int size; int protection; int date[3]; char data[1]; }; struct UnsupportedRequest { short has_response; short type; short dp_Type; }; struct UnsupportedResponse { short 
has_response; short success; short error_code; }; #pragma pack(pop)
1,386
326
<reponame>mbrenman/MBSimpleLoadingIndicator // // Podfile_Bridge_Header.h // // // Created by <NAME> on 1/8/15. // // #ifndef _Podfile_Bridge_Header_h #define _Podfile_Bridge_Header_h #import "MBLoadingIndicator.h" #endif
98
1,467
<filename>libs/common/array_size.hpp<gh_stars>1000+ /** * Copyright Soramitsu Co., Ltd. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0 */ #ifndef IROHA_COMMON_ARRAY_SIZE_HPP #define IROHA_COMMON_ARRAY_SIZE_HPP #ifdef IROHA_ARRAY_SIZE #error IROHA_ARRAY_SIZE already defined. #endif // IROHA_ARRAY_SIZE #ifndef IROHA_ARRAY_SIZE template <typename T, size_t N> char (&IrohaArraySizeHelper(T (&array)[N]))[N]; #define IROHA_ARRAY_SIZE(array) (sizeof(IrohaArraySizeHelper(array))) #endif // IROHA_ARRAY_SIZE #endif // IROHA_COMMON_ARRAY_SIZE_HPP
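// Illustrative usage sketch (not part of the original header): the macro yields
// the element count as a compile-time constant, so it can feed static_assert.
// The array name `sample` is a hypothetical example.
//
//   int sample[4] = {1, 2, 3, 4};
//   static_assert(IROHA_ARRAY_SIZE(sample) == 4, "size deduced at compile time");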
239
1,139
<reponame>ghiloufibelgacem/jornaldev<filename>Spring/Spring-PropertySource-Annotation/src/main/java/com/journaldev/spring/DBConfiguration.java package com.journaldev.spring; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; import org.springframework.context.annotation.PropertySources; import org.springframework.core.env.Environment; @Configuration //@PropertySources({ //@PropertySource("classpath:db.properties"), //@PropertySource(value = "classpath:root.properties", ignoreResourceNotFound=true)}) //@PropertySource("file:${HOME}/db.properties") @PropertySource("classpath:db.properties") @PropertySource(value = "classpath:root.properties", ignoreResourceNotFound=true) public class DBConfiguration { @Autowired Environment env; @Value("${APP_NAME_NOT_FOUND:Default}") private String defaultAppName; @Value("${HOME}") private String homeDir; @Bean public DBConnection getDBConnection() { printValues(null); System.out.println("Getting DBConnection Bean for App: "+env.getProperty("APP_NAME")); //DBConnection dbConnection = new DBConnection(env.getProperty("DB_DRIVER_CLASS"), env.getProperty("DB_URL"), env.getProperty("DB_USERNAME"), env.getProperty("DB_PASSWORD").toCharArray()); DBConnection dbConnection = new DBConnection(); return dbConnection; } public void printValues(@Value("test") String s) { System.out.println("Input Argument "+s); System.out.println("Home Directory = "+homeDir); System.out.println("Default App Name = "+defaultAppName); } }
575
372
/* * * (c) Copyright 1989 OPEN SOFTWARE FOUNDATION, INC. * (c) Copyright 1989 HEWLETT-PACKARD COMPANY * (c) Copyright 1989 DIGITAL EQUIPMENT CORPORATION * To anyone who acknowledges that this file is provided "AS IS" * without any express or implied warranty: * permission to use, copy, modify, and distribute this * file for any purpose is hereby granted without fee, provided that * the above copyright notices and this notice appears in all source * code copies, and that none of the names of Open Software * Foundation, Inc., Hewlett-Packard Company, or Digital Equipment * Corporation be used in advertising or publicity pertaining to * distribution of the software without specific, written prior * permission. Neither Open Software Foundation, Inc., Hewlett- * Packard Company, nor Digital Equipment Corporation makes any * representations about the suitability of this software for any * purpose. * */ /* */ #ifndef RPCDEPDBP_H #define RPCDEPDBP_H /* ** ** NAME: ** ** rpcdepdbp.h ** ** FACILITY: ** ** RPC Daemon ** ** ABSTRACT: ** ** RPCD Endpoint Database Mgmt - routines, etc. shared by modules ** which know more about epdb internals ** ** ** */ /* * Delete disk copy of entry and free associated * memory */ PRIVATE void epdb_delete_entry ( struct db *h, db_entry_p_t entp, error_status_t *status ); PRIVATE void sliv_init ( struct db *h, error_status_t *status ); #endif
542
2,151
<reponame>zipated/src<filename>chromeos/system/factory_ping_embargo_check.h<gh_stars>1000+ // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROMEOS_SYSTEM_FACTORY_PING_EMBARGO_CHECK_H_ #define CHROMEOS_SYSTEM_FACTORY_PING_EMBARGO_CHECK_H_ #include "base/time/time.h" #include "chromeos/chromeos_export.h" namespace chromeos { namespace system { class StatisticsProvider; // The RLZ embargo end date is considered invalid if it's more than this many // days in the future. constexpr base::TimeDelta kRlzEmbargoEndDateGarbageDateThreshold = base::TimeDelta::FromDays(14); enum class FactoryPingEmbargoState { // There is no correctly formatted factory ping embargo end date value in // VPD. kMissingOrMalformed, // There is a correctly formatted factory ping embargo end date value in VPD // which is too far in the future (indicating that the time source used in // the factory to write the embargo end date was not based on a not // synchronized clock). kInvalid, // The embargo period has not passed yet. kNotPassed, // The embargo period has passed. kPassed }; CHROMEOS_EXPORT FactoryPingEmbargoState GetFactoryPingEmbargoState(StatisticsProvider* statistics_provider); } // namespace system } // namespace chromeos #endif // CHROMEOS_SYSTEM_FACTORY_PING_EMBARGO_CHECK_H_
447
648
{"resourceType":"DataElement","id":"AuditEvent.entity.detail.type","meta":{"lastUpdated":"2017-04-19T07:44:43.294+10:00"},"url":"http://hl7.org/fhir/DataElement/AuditEvent.entity.detail.type","status":"draft","experimental":true,"stringency":"fully-specified","element":[{"id":"AuditEvent.entity.detail.type","path":"AuditEvent.entity.detail.type","short":"Name of the property","definition":"The type of extra detail provided in the value.","min":1,"max":"1","type":[{"code":"string"}],"mapping":[{"identity":"rim","map":".code"},{"identity":"dicom","map":"ParticipantObjectDetail.type"},{"identity":"w5","map":"context"}]}]}
185
1,875
/* * Copyright 2017 <NAME>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.teavm.classlib.java.util.stream.impl; import java.util.function.Predicate; import java.util.function.Supplier; public class TGenerateStream<T> extends TSimpleStreamImpl<T> { private Supplier<T> s; public TGenerateStream(Supplier<T> s) { this.s = s; } @Override public boolean next(Predicate<? super T> consumer) { while (consumer.test(s.get())) { // go on } return true; } }
360
442
#include <Bindings/obe/Events/Game/Game.hpp> #include <Engine/Engine.hpp> #include <Bindings/Config.hpp> namespace obe::Events::Game::Bindings { void LoadClassEnd(sol::state_view state) { sol::table GameNamespace = state["obe"]["Events"]["Game"].get<sol::table>(); sol::usertype<obe::Events::Game::End> bindEnd = GameNamespace.new_usertype<obe::Events::Game::End>( "End", sol::call_constructor, sol::default_constructor); bindEnd["id"] = sol::var(&obe::Events::Game::End::id); } void LoadClassRender(sol::state_view state) { sol::table GameNamespace = state["obe"]["Events"]["Game"].get<sol::table>(); sol::usertype<obe::Events::Game::Render> bindRender = GameNamespace.new_usertype<obe::Events::Game::Render>( "Render", sol::call_constructor, sol::default_constructor); bindRender["id"] = sol::var(&obe::Events::Game::Render::id); } void LoadClassStart(sol::state_view state) { sol::table GameNamespace = state["obe"]["Events"]["Game"].get<sol::table>(); sol::usertype<obe::Events::Game::Start> bindStart = GameNamespace.new_usertype<obe::Events::Game::Start>( "Start", sol::call_constructor, sol::default_constructor); bindStart["id"] = sol::var(&obe::Events::Game::Start::id); } void LoadClassUpdate(sol::state_view state) { sol::table GameNamespace = state["obe"]["Events"]["Game"].get<sol::table>(); sol::usertype<obe::Events::Game::Update> bindUpdate = GameNamespace.new_usertype<obe::Events::Game::Update>( "Update", sol::call_constructor, sol::default_constructor); bindUpdate["dt"] = &obe::Events::Game::Update::dt; bindUpdate["id"] = sol::var(&obe::Events::Game::Update::id); } };
783
1,150
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import math
import numpy as np


def compute_downsample_num(input_size, output_size):
    downsample_num = 0
    while input_size > output_size:
        input_size = math.ceil(float(input_size) / 2.0)
        downsample_num += 1

    if input_size != output_size:
        raise NotImplementedError(
            'output_size must be reachable from input_size by repeated halving!')
    return downsample_num


def check_points(count, points):
    if points is None:
        return False
    else:
        if isinstance(points, list):
            return (True if count in points else False)
        else:
            return (True if count == points else False)


def get_random_tokens(range_table):
    tokens = []
    for idx, max_value in enumerate(range_table):
        tokens_idx = int(np.floor(range_table[idx] * np.random.rand(1)))
        tokens.append(tokens_idx)
    return tokens
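# Illustrative usage sketch (not part of the original file): each loop iteration
# halves the size with ceiling division, so 224 -> 112 -> 56 -> 28 -> 14 -> 7
# takes five steps. The sample values below are hypothetical.
#
#   compute_downsample_num(224, 7)  # -> 5
#   check_points(3, [1, 3, 5])      # -> True
#   get_random_tokens([3, 5])       # e.g. [1, 4]; each token < its range value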
523
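The compute_downsample_num helper above counts how many ceil-halving steps take input_size down to output_size, and raises if the sizes never line up exactly. Here is the same arithmetic transcribed to Java as a sanity check; the names are mine, not PaddlePaddle's.

public class DownsampleCount {
    // Count ceil-halving steps from inputSize down to outputSize;
    // throws if the halving never lands exactly on outputSize.
    static int computeDownsampleNum(int inputSize, int outputSize) {
        int steps = 0;
        while (inputSize > outputSize) {
            inputSize = (inputSize + 1) / 2;  // integer ceil of inputSize / 2
            steps++;
        }
        if (inputSize != outputSize) {
            throw new UnsupportedOperationException(
                    "input_size cannot be downsampled to output_size");
        }
        return steps;
    }

    public static void main(String[] args) {
        // 224 -> 112 -> 56 -> 28 -> 14 -> 7: five halvings.
        System.out.println(computeDownsampleNum(224, 7));  // 5
    }
}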
399
<reponame>kosua20/Rendu #include "system/System.hpp" #include <nfd/nfd.h> #include <GLFW/glfw3.h> #ifndef _WIN32 # include <sys/stat.h> #else # undef APIENTRY # include <Windows.h> #endif // On Windows, we can notify both AMD and Nvidia drivers that we prefer discrete GPUs. #ifdef _WIN32 extern "C" { // See https://gpuopen.com/learn/amdpowerxpressrequesthighperformance/ _declspec(dllexport) DWORD AmdPowerXpressRequestHighPerformance = 0x00000001; // See https://docs.nvidia.com/gameworks/content/technologies/desktop/optimus.htm _declspec(dllexport) DWORD NvOptimusEnablement = 0x00000001; } #endif bool System::showPicker(Picker mode, const std::string & startDir, std::string & outPath, const std::string & extensions) { nfdchar_t * outPathRaw = nullptr; nfdresult_t result = NFD_CANCEL; outPath = ""; #ifdef _WIN32 (void)startDir; const std::string internalStartPath; #else const std::string internalStartPath = startDir; #endif if(mode == Picker::Load) { result = NFD_OpenDialog(extensions.empty() ? nullptr : extensions.c_str(), internalStartPath.c_str(), &outPathRaw); } else if(mode == Picker::Save) { result = NFD_SaveDialog(extensions.empty() ? nullptr : extensions.c_str(), internalStartPath.c_str(), &outPathRaw); } else if(mode == Picker::Directory) { result = NFD_PickFolder(internalStartPath.c_str(), &outPathRaw); } if(result == NFD_OKAY) { outPath = std::string(outPathRaw); free(outPathRaw); return true; } if(result == NFD_CANCEL) { // Cancelled by user, nothing to do. } else { // Real error. Log::Error() << "Unable to present system picker (" << std::string(NFD_GetError()) << ")." << std::endl; } free(outPathRaw); return false; } #ifdef _WIN32 bool System::createDirectory(const std::string & directory) { return CreateDirectoryW(widen(directory), nullptr) != 0; } #else bool System::createDirectory(const std::string & directory) { return mkdir(directory.c_str(), S_IRWXU | S_IRWXG | S_IRWXO) == 0; } #endif void System::ping() { Log::Info() << '\a' << std::endl; } double System::time(){ return glfwGetTime(); } std::string System::timestamp(){ const auto time = std::time(nullptr); #ifdef _WIN32 tm ltime = { 0,0,0,0,0,0,0,0,0 }; localtime_s(&ltime, &time); #else const tm ltime = *(std::localtime(&time)); #endif std::stringstream str; str << std::put_time(&ltime, "%Y_%m_%d_%H_%M_%S"); return str.str(); } #ifdef _WIN32 wchar_t * System::widen(const std::string & str) { const int size = MultiByteToWideChar(CP_UTF8, 0, str.c_str(), -1, nullptr, 0); WCHAR * arr = new WCHAR[size]; MultiByteToWideChar(CP_UTF8, 0, str.c_str(), -1, static_cast<LPWSTR>(arr), size); // \warn Will leak on Windows. return arr; } std::string System::narrow(wchar_t * str) { const int size = WideCharToMultiByte(CP_UTF8, 0, str, -1, nullptr, 0, nullptr, nullptr); std::string res(size - 1, 0); WideCharToMultiByte(CP_UTF8, 0, str, -1, &res[0], size, nullptr, nullptr); return res; } #else const char * System::widen(const std::string & str) { return str.c_str(); } std::string System::narrow(char * str) { return std::string(str); } #endif
1,228
18,621
<reponame>delorenzosoftware/superset<filename>superset/common/query_context_factory.py # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations from typing import Any, Dict, List, Optional, TYPE_CHECKING from superset import app, db from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType from superset.common.query_context import QueryContext from superset.common.query_object_factory import QueryObjectFactory from superset.connectors.connector_registry import ConnectorRegistry from superset.utils.core import DatasourceDict if TYPE_CHECKING: from superset.connectors.base.models import BaseDatasource config = app.config def create_query_object_factory() -> QueryObjectFactory: return QueryObjectFactory(config, ConnectorRegistry(), db.session) class QueryContextFactory: # pylint: disable=too-few-public-methods _query_object_factory: QueryObjectFactory def __init__(self) -> None: self._query_object_factory = create_query_object_factory() def create( self, *, datasource: DatasourceDict, queries: List[Dict[str, Any]], form_data: Optional[Dict[str, Any]] = None, result_type: Optional[ChartDataResultType] = None, result_format: Optional[ChartDataResultFormat] = None, force: bool = False, custom_cache_timeout: Optional[int] = None ) -> QueryContext: datasource_model_instance = None if datasource: datasource_model_instance = self._convert_to_model(datasource) result_type = result_type or ChartDataResultType.FULL result_format = result_format or ChartDataResultFormat.JSON queries_ = [ self._query_object_factory.create(result_type, **query_obj) for query_obj in queries ] cache_values = { "datasource": datasource, "queries": queries, "result_type": result_type, "result_format": result_format, } return QueryContext( datasource=datasource_model_instance, queries=queries_, form_data=form_data, result_type=result_type, result_format=result_format, force=force, custom_cache_timeout=custom_cache_timeout, cache_values=cache_values, ) # pylint: disable=no-self-use def _convert_to_model(self, datasource: DatasourceDict) -> BaseDatasource: return ConnectorRegistry.get_datasource( str(datasource["type"]), int(datasource["id"]), db.session )
1,233
713
<reponame>blueblueblue/infinispan package org.infinispan.filter; import org.infinispan.metadata.Metadata; /** * This interface is an optimization that can be used when a filter and converter are most efficiently used as the same * object composing the filtering and conversion in the same method invocation. * @author wburns * @since 7.0 */ public interface KeyValueFilterConverter<K, V, C> extends KeyValueFilter<K, V>, Converter<K, V, C> { /** * Will both filter the entry and if passed subsequently convert the value to a new value. A returned value of null * will symbolize the value not passing the filter, so ensure your conversion will not return null if you want this * entry to be returned. * @param key The key of the entry to filter * @param value The value of the entry to filter and then convert * @param metadata The metadata attached to the entry * @return The converted value or null if the filter didn't pass */ public C filterAndConvert(K key, V value, Metadata metadata); }
289
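The Javadoc above stresses that filterAndConvert signals rejection with null, so an accepted entry must never convert to null. A small standalone sketch of that combined filter-and-convert contract; it deliberately stands outside Infinispan's interface hierarchy (whose inherited methods are not shown here), and the key/value types are invented for illustration.

import java.util.Locale;

public class FilterConvertSketch {
    // Return null to drop the entry, otherwise return the converted value -
    // the same contract as KeyValueFilterConverter.filterAndConvert above.
    static String filterAndConvert(String key, Integer value) {
        if (value == null || value < 0) {
            return null;  // entry filtered out
        }
        return String.format(Locale.ROOT, "%s=%d", key, value);  // converted value
    }

    public static void main(String[] args) {
        System.out.println(filterAndConvert("a", 7));   // a=7
        System.out.println(filterAndConvert("b", -1));  // null -> dropped
    }
}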
583
#ifndef GRADIENTMATRIX_H #define GRADIENTMATRIX_H #include <QtGui> class GradientMatrix : public QMatrix { public: double height, width; }; #endif // GRADIENTMATRIX_H
73
1,144
<filename>backend/de.metas.adempiere.adempiere/base/src/main/java/org/compiere/apps/search/IUserQueryRestriction.java package org.compiere.apps.search; import org.compiere.model.MQuery.Operator; import com.google.common.collect.ImmutableMap; /* * #%L * de.metas.adempiere.adempiere.base * %% * Copyright (C) 2016 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ public interface IUserQueryRestriction { public static IUserQueryRestriction newInstance() { return new UserQueryRestriction(); } public static enum Join { AND("AND"), OR("OR"); private static final ImmutableMap<String, Join> codeToJoin = ImmutableMap.<String, Join> builder() .put(AND.getCode(), AND) .put(OR.getCode(), OR) .build(); private final String code; Join(final String code) { this.code = code; } public String getCode() { return code; } public static final Join forCodeOrAND(final String code) { final Join join = codeToJoin.get(code); return join != null ? join : AND; } } //@formatter:off void setJoin(Join join); Join getJoin(); //@formatter:on //@formatter:off IUserQueryField getSearchField(); void setSearchField(final IUserQueryField searchField); //@formatter:on //@formatter:off void setOperator(Operator operator); Operator getOperator(); //@formatter:on //@formatter:off Object getValue(); void setValue(Object value); //@formatter:on //@formatter:off Object getValueTo(); void setValueTo(Object valueTo); //@formatter:on /** @return true if restriction is empty (i.e. has nothing set) */ boolean isEmpty(); boolean isMandatory(); void setMandatory(boolean mandatory); boolean isInternalParameter(); void setInternalParameter(boolean internalParameter); }
785
452
<reponame>the-real-mrcs/firebase-admin-java /* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.firebase.database.core.view; import static com.google.firebase.database.TestHelpers.ck; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import com.google.firebase.database.snapshot.EmptyNode; import java.util.Map; import org.junit.Test; public class QueryParamsTest { @Test public void startAtNullIsSerializable() { QueryParams params = QueryParams.DEFAULT_PARAMS; params = params.startAt(EmptyNode.Empty(), ck("key")); Map<String, Object> serialized = params.getWireProtocolParams(); QueryParams parsed = QueryParams.fromQueryObject(serialized); assertEquals(params, parsed); assertTrue(params.hasStart()); } @Test public void endAtNullIsSerializable() { QueryParams params = QueryParams.DEFAULT_PARAMS; params = params.endAt(EmptyNode.Empty(), ck("key")); Map<String, Object> serialized = params.getWireProtocolParams(); QueryParams parsed = QueryParams.fromQueryObject(serialized); assertEquals(params, parsed); assertTrue(params.hasEnd()); } }
535
3,402
<reponame>ApacheSourceCode/kylin /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.dict.global; import org.apache.kylin.common.util.Bytes; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.Arrays; public class AppendDictSliceKey implements Comparable<AppendDictSliceKey> { static final AppendDictSliceKey START_KEY = AppendDictSliceKey.wrap(new byte[0]); byte[] key; public static AppendDictSliceKey wrap(byte[] key) { AppendDictSliceKey dictKey = new AppendDictSliceKey(); dictKey.key = key; return dictKey; } @Override public String toString() { return Bytes.toStringBinary(key); } @Override public int hashCode() { return Arrays.hashCode(key); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o instanceof AppendDictSliceKey) { AppendDictSliceKey that = (AppendDictSliceKey) o; return Arrays.equals(this.key, that.key); } return false; } @Override public int compareTo(AppendDictSliceKey that) { return Bytes.compareTo(key, that.key); } public void write(DataOutput out) throws IOException { out.writeInt(key.length); out.write(key); } public void readFields(DataInput in) throws IOException { key = new byte[in.readInt()]; in.readFully(key); } }
823
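AppendDictSliceKey.write/readFields above use a length-prefixed byte layout: an int length followed by the raw key bytes. Below is a self-contained round trip of that exact layout with java.io streams; only the class name is invented.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

public class SliceKeyRoundTrip {
    public static void main(String[] args) throws IOException {
        byte[] key = {1, 2, 3};

        // Same wire layout as AppendDictSliceKey.write: int length, then bytes.
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        out.writeInt(key.length);
        out.write(key);

        // Mirror of readFields: allocate from the length prefix, then readFully.
        DataInputStream in =
                new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        byte[] back = new byte[in.readInt()];
        in.readFully(back);

        System.out.println(Arrays.equals(key, back));  // true
    }
}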
2,847
from tests.testmodels import IntFields from tortoise import Tortoise from tortoise.contrib import test from tortoise.expressions import Case, F, Q, When from tortoise.functions import Coalesce class TestCaseWhen(test.TestCase): async def setUp(self): self.intfields = [await IntFields.create(intnum=val) for val in range(10)] self.db = Tortoise.get_connection("models") async def test_single_when(self): category = Case(When(intnum__gte=8, then="big"), default="default") sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN 'big' ELSE 'default' END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN \'big\' ELSE \'default\' END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_multi_when(self): category = Case( When(intnum__gte=8, then="big"), When(intnum__lte=2, then="small"), default="default" ) sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN 'big' WHEN `intnum`<=2 THEN 'small' ELSE 'default' END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN \'big\' WHEN "intnum"<=2 THEN \'small\' ELSE \'default\' END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_q_object_when(self): category = Case(When(Q(intnum__gt=2, intnum__lt=8), then="middle"), default="default") sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>2 AND `intnum`<8 THEN 'middle' ELSE 'default' END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">2 AND "intnum"<8 THEN \'middle\' ELSE \'default\' END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_F_then(self): category = Case(When(intnum__gte=8, then=F("intnum_null")), default="default") sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN `intnum_null` ELSE 'default' END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN "intnum_null" ELSE \'default\' END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_AE_then(self): # AE: ArithmeticExpression category = Case(When(intnum__gte=8, then=F("intnum") + 1), default="default") sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN `intnum`+1 ELSE 'default' END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN "intnum"+1 ELSE \'default\' END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_func_then(self): category = Case(When(intnum__gte=8, then=Coalesce("intnum_null", 10)), default="default") sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if 
dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN COALESCE(`intnum_null`,10) ELSE 'default' END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN COALESCE("intnum_null",10) ELSE \'default\' END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_F_default(self): category = Case(When(intnum__gte=8, then="big"), default=F("intnum_null")) sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN 'big' ELSE `intnum_null` END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN \'big\' ELSE "intnum_null" END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_AE_default(self): # AE: ArithmeticExpression category = Case(When(intnum__gte=8, then=8), default=F("intnum") + 1) sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN 8 ELSE `intnum`+1 END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN 8 ELSE "intnum"+1 END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_func_default(self): category = Case(When(intnum__gte=8, then=8), default=Coalesce("intnum_null", 10)) sql = IntFields.all().annotate(category=category).values("intnum", "category").sql() dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum`,CASE WHEN `intnum`>=8 THEN 8 ELSE COALESCE(`intnum_null`,10) END `category` FROM `intfields`" else: expected_sql = 'SELECT "intnum" "intnum",CASE WHEN "intnum">=8 THEN 8 ELSE COALESCE("intnum_null",10) END "category" FROM "intfields"' self.assertEqual(sql, expected_sql) async def test_case_when_in_where(self): category = Case( When(intnum__gte=8, then="big"), When(intnum__lte=2, then="small"), default="middle" ) sql = ( IntFields.all() .annotate(category=category) .filter(category__in=["big", "small"]) .values("intnum") .sql() ) dialect = self.db.schema_generator.DIALECT if dialect == "mysql": expected_sql = "SELECT `intnum` `intnum` FROM `intfields` WHERE CASE WHEN `intnum`>=8 THEN 'big' WHEN `intnum`<=2 THEN 'small' ELSE 'middle' END IN ('big','small')" else: expected_sql = "SELECT \"intnum\" \"intnum\" FROM \"intfields\" WHERE CASE WHEN \"intnum\">=8 THEN 'big' WHEN \"intnum\"<=2 THEN 'small' ELSE 'middle' END IN ('big','small')" self.assertEqual(sql, expected_sql)
3,020
593
package org.ananas.runner.kernel.paginate; import com.google.common.base.Preconditions; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.util.HashMap; import java.util.Map; import org.ananas.runner.kernel.common.VariableRender; import org.ananas.runner.kernel.errors.AnanasException; import org.ananas.runner.kernel.errors.ExceptionHandler; import org.ananas.runner.kernel.errors.ExceptionHandler.ErrorCode; import org.ananas.runner.kernel.model.Dataframe; import org.ananas.runner.kernel.model.Step; import org.ananas.runner.kernel.model.StepType; import org.apache.beam.sdk.schemas.Schema; import org.apache.beam.sdk.values.Row; import org.apache.commons.lang3.tuple.MutablePair; public class PaginatorFactory implements Paginator { private static final Map<String, Class<? extends AutoDetectedSchemaPaginator>> REGISTRY = new HashMap<>(); String id; String metadataId; String type; Map<String, Object> config; Dataframe dataframe; public static void register( String metadataId, Class<? extends AutoDetectedSchemaPaginator> paginatorClass) { REGISTRY.put(metadataId, paginatorClass); } private PaginatorFactory(String id, PaginationBody body) { this( id, body.metadataId, body.type, VariableRender.renderConfig(body.params, body.config), body.dataframe); } private PaginatorFactory( String id, String metadataId, String type, Map<String, Object> config, Dataframe dataframe) { this.id = id; this.type = type; this.metadataId = metadataId; this.config = config; this.dataframe = dataframe; } public static PaginatorFactory of(String id, PaginationBody body) { Preconditions.checkNotNull(body.config, "config cannot be null"); return new PaginatorFactory(id, body); } public static PaginatorFactory of( String id, String metadataId, String type, Map<String, Object> config, Dataframe dataframe) { Preconditions.checkNotNull(config, "config cannot be null"); return new PaginatorFactory(id, metadataId, type, config, dataframe); } public static PaginatorFactory of( String id, String metadataId, String type, Map<String, Object> config, Schema schema) { Preconditions.checkNotNull(config, "config cannot be null"); Dataframe dataframe = new Dataframe(); dataframe.schema = org.ananas.runner.kernel.schema.Schema.of(schema); return new PaginatorFactory(id, metadataId, type, config, dataframe); } public AutoDetectedSchemaPaginator buildPaginator() { if (!REGISTRY.containsKey(this.metadataId)) { throw new IllegalStateException("Unsupported source type '" + this.metadataId + "'"); } Class<? extends AutoDetectedSchemaPaginator> clazz = REGISTRY.get(this.metadataId); try { Constructor<? 
extends AutoDetectedSchemaPaginator> ctor =
          clazz.getDeclaredConstructor(String.class, String.class, Map.class, Schema.class);
      ctor.setAccessible(true);

      // get the schema here if the user chooses to use the schema from the dataframe
      boolean forceSchemaAutodetect =
          (Boolean) this.config.getOrDefault(Step.FORCE_AUTODETECT_SCHEMA, false);
      Schema schema = null;
      if (!forceSchemaAutodetect
          && (dataframe != null && dataframe.schema != null)
          && StepType.from(this.type)
              .equals(StepType.Connector)) { // only avoid autodetect for connectors
        schema = dataframe.schema.toBeamSchema();
        if (schema.getFieldCount() == 0) {
          schema = null;
        }
      }
      return ctor.newInstance(this.id, this.type, this.config, schema);
    } catch (InstantiationException | NoSuchMethodException | IllegalAccessException e) {
      throw new AnanasException(ExceptionHandler.ErrorCode.GENERAL, e.getLocalizedMessage());
    } catch (InvocationTargetException ex) {
      Throwable targetException = ex.getTargetException();
      if (targetException != null) {
        throw new AnanasException(
            ExceptionHandler.ErrorCode.GENERAL, targetException.getLocalizedMessage());
      } else {
        throw new AnanasException(ErrorCode.GENERAL);
      }
    }
  }

  @Override
  public MutablePair<Schema, Iterable<Row>> paginateRows(Integer page, Integer pageSize) {
    return buildPaginator().paginateRows(page, pageSize);
  }

  @Override
  public Dataframe paginate(Integer page, Integer pageSize) {
    return buildPaginator().paginate(page, pageSize);
  }
}
1,569
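PaginatorFactory.buildPaginator above resolves a class from a string-keyed registry, looks up a constructor reflectively, and calls setAccessible before newInstance. A toy sketch of that registry-plus-reflection pattern; the Widget types are invented for illustration and none of this is Ananas API.

import java.lang.reflect.Constructor;
import java.util.HashMap;
import java.util.Map;

public class RegistrySketch {
    abstract static class Widget {
        final String id;
        Widget(String id) { this.id = id; }
    }

    static class RoundWidget extends Widget {
        RoundWidget(String id) { super(id); }
    }

    // String-keyed type registry, mirroring PaginatorFactory.REGISTRY.
    static final Map<String, Class<? extends Widget>> REGISTRY = new HashMap<>();

    static Widget build(String type, String id) throws ReflectiveOperationException {
        Class<? extends Widget> clazz = REGISTRY.get(type);
        if (clazz == null) {
            throw new IllegalStateException("Unsupported type '" + type + "'");
        }
        // Reflective constructor lookup, as in buildPaginator above.
        Constructor<? extends Widget> ctor = clazz.getDeclaredConstructor(String.class);
        ctor.setAccessible(true);
        return ctor.newInstance(id);
    }

    public static void main(String[] args) throws ReflectiveOperationException {
        REGISTRY.put("round", RoundWidget.class);
        System.out.println(build("round", "w1").id);  // w1
    }
}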
2,236
//
//  TABComponentLayer+TABDropLayer.h
//  AnimatedDemo
//
//  Created by tigerAndBull on 2020/4/19.
//  Copyright © 2020 tigerAndBull. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "TABComponentLayer.h"

NS_ASSUME_NONNULL_BEGIN

@interface TABComponentLayer (TABDropAnimation)

/**
 * The index of this animation element in the Douban-style drop animation queue.
 */
@property (nonatomic, assign) NSInteger dropAnimationIndex;

/**
 * For multi-line animation elements, the starting index in the drop animation queue.
 */
@property (nonatomic, assign) NSInteger dropAnimationFromIndex;

/**
 * Interval between steps of the drop animation; defaults to 0.2.
 */
@property (nonatomic, assign) CGFloat dropAnimationStayTime;

/**
 * Whether to remove this element from the drop animation queue.
 */
@property (nonatomic, assign) BOOL removeOnDropAnimation;

@end

NS_ASSUME_NONNULL_END
383
328
<filename>multi_video_reid.py # coding=utf-8 # multi-camera reid # given sync camera group (will use homography to check spatial) # and consecutive camera group # for reid, use hungarian algo. import argparse import cv2 import json import os import numpy as np from tqdm import tqdm from sklearn.metrics.pairwise import euclidean_distances import lap # 0.4.0 #from scipy.optimize import linear_sum_assignment from utils import parse_camera_file, compute_c1_to_c2_homography from utils import parse_meva_clip_name from utils import valid_box, warp_points import torch from torchreid.feature_extractor import FeatureExtractor from torchreid.distance import compute_distance_matrix from enqueuer_thread import VideoEnqueuer from diva_io.video import VideoReader from moviepy.editor import VideoFileClip # outdoor cameras (10), ignoring "G300", "G301", #camera_list = ["G505", "G506", "G638", "G424", "G339", "G328", # "G341", "G436", "G336", "G340"] # for G339, after 03-07.11-10, it enters patrol mode (03-07.16-50) # only train set has G339 #exclude_list = {"G339": ("2018-03-07", "11-10-07")} """ "sync_groups": { "2018-03-11.16-40-08.16-45-08": [ "2018-03-11.16-40-08.16-45-08.hospital.G341", "2018-03-11.16-40-02.16-45-02.school.G424", "2018-03-11.16-40-01.16-45-00.school.G328", "2018-03-11.16-40-01.16-45-01.school.G336", "2018-03-11.16-40-01.16-45-01.bus.G506", .. ], ..., "consecutive_groups": [ [ "2018-03-11.16-40-08.16-45-08" ], [ "2018-03-11.16-30-02.16-35-02", "2018-03-11.16-35-01.16-40-01" ], ... """ parser = argparse.ArgumentParser() parser.add_argument("filepath", help="mot track result path") parser.add_argument("camera_group") parser.add_argument("camera_model_path") parser.add_argument("topdown_camera") parser.add_argument("video_path") parser.add_argument("newfilepath") parser.add_argument("--gpuid", default=0, type=int, help="gpu id") parser.add_argument("--vehicle_reid_model", default=None) parser.add_argument("--person_reid_model", default=None) parser.add_argument("--use_lijun_video_loader", action="store_true") parser.add_argument("--use_moviepy", action="store_true") parser.add_argument("--max_size", type=int, default=1920) parser.add_argument("--short_edge_size", type=int, default=1080) parser.add_argument("--use_avg_pool", action="store_true", help="use average pooling on each track's features") parser.add_argument("--feature_box_num", default=100, type=int, help="maximum box num to use for feature extraction, -1 " "means all") parser.add_argument("--feature_box_gap", default=20, type=int, help="interval when getting boxes") parser.add_argument("--spatial_dist_tol", default=50., type=float, help="pixel distance tolerance") camera_model = { "G505": "2018-03-05.13-20-01.13-25-00.bus.G505.krtd", "G506": "2018-03-05.13-15-00.13-20-00.bus.G506.krtd", "G638": "2018-03-07.13-15-01.13-20-01.school.G638.krtd", "G424": "2018-03-05.18-25-00.18-29-31.school.G424.krtd", "G339": "2018-03-05.11-15-00.11-20-00.school.G339.krtd", "G328": "2018-03-05.13-25-01.13-30-01.school.G328.krtd", "G341": "2018-03-05.15-55-00.16-00-00.hospital.G341.krtd", "G436": "2018-03-05.18-10-00.18-15-00.hospital.G436.krtd", "G336": "2018-03-06.15-05-02.15-10-02.school.G336.krtd", "G340": "2018-03-05.11-20-00.11-25-00.bus.G340.krtd" } def compute_homographys(topdown_camera, camera_path, camera_files): hs = {} c2_r, c2_t, c2_k = parse_camera_file(topdown_camera) for camera in camera_files: c1_r, c1_t, c1_k = parse_camera_file( os.path.join(camera_path, camera_files[camera])) homography = compute_c1_to_c2_homography(c1_r, c1_t, 
c1_k, c2_r, c2_t, c2_k) hs[camera] = homography return hs def compute_frame_offset(v1, v2, fps): date1, start_time1, end_time, location, camera = v1.split(".") date2, start_time2, end_time, location, camera = v2.split(".") assert date1 == date2 def time2sec(time_str): # hour-minutes-seconds hours, minutes, seconds = time_str.split("-") return float(hours)*60.*60. + float(minutes)*60. + float(seconds) time_offset = time2sec(start_time2) - time2sec(start_time1) return time_offset * fps def load_track_and_features(args, video_name, p_file, v_file, p_extractor, v_extractor, hs): date, hr_slot, camera = parse_meva_clip_name(video_name) # start loading video_frames first video_path = os.path.join(args.video_path, date, hr_slot, video_name + ".avi") if args.use_lijun_video_loader: vcap = VideoReader(video_path) frame_count = int(vcap.length) elif args.use_moviepy: vcap = VideoFileClip(video_path, audio=False) frame_count = int(vcap.fps * vcap.duration) # uh vcap = vcap.iter_frames() else: try: vcap = cv2.VideoCapture(video_path) if not vcap.isOpened(): raise Exception("cannot open %s" % video_path) except Exception as e: raise Exception("warning, cannot open %s" % video_path) # opencv 3/4 frame_count = vcap.get(cv2.CAP_PROP_FRAME_COUNT) # start reading frames into queues now video_queuer = VideoEnqueuer( args, vcap, frame_count, frame_gap=1, # no skipping frames prefetch=100, start=True, is_moviepy=args.use_moviepy, batch_size=1) get_frame_batches = video_queuer.get() def load_track_file(file_path, homography): """load a tracking file into dict of numpy arrays.""" # assuming sorted by frameid data = [] with open(file_path, "r") as f: for line in f: frame_idx, track_id, left, top, width, height, conf, _, _, _ = line.strip().split(",") data.append([frame_idx, track_id, left, top, width, height, conf]) if not data: return {} data = np.array(data, dtype="float32") # [N, 7] # compute topdown points foot_points_x = data[:, 2] + data[:, 4] / 2. # [N] foot_points_y = data[:, 3] + data[:, 5] foot_points = np.stack([foot_points_x, foot_points_y], axis=0) # [2, N] # [2, N] top_down_points = warp_points(foot_points, homography) top_down_points = np.transpose(top_down_points, [1, 0]) # [N, 2] # [N, 9] data = np.concatenate([data, top_down_points], axis=1) track_ids = np.unique(data[:, 1]).tolist() track_data = {} # [num_track, K, 9] for track_id in track_ids: track_data[track_id] = data[data[:, 1] == track_id, :] return track_data # track_id -> data p_tracks = load_track_file(p_file, hs[camera]) v_tracks = load_track_file(v_file, hs[camera]) # get each frame's boxes to extract frame_data = {} # frame_idx -> a list of boxes, def get_track_boxes(tracks, cat_name): for track_id in tracks: idxs = list(range(0, len(tracks[track_id]), args.feature_box_gap)) idxs = idxs[:args.feature_box_num] boxes = tracks[track_id][idxs, :] # [k, 7] for box_idx, box in enumerate(boxes): frame_idx = box[0] tlwh = box[2:6] if not frame_idx in frame_data: frame_data[frame_idx] = [] frame_data[frame_idx].append((tlwh, track_id, box_idx, cat_name)) get_track_boxes(p_tracks, "Person") get_track_boxes(v_tracks, "Vehicle") # 2. 
go through the video once and crop all the images to extract features
  # assuming no conflict between person/vehicle track ids
  p_track_to_feat = {}  # "track_id" => features
  v_track_to_feat = {}  # "track_id" => features
  for batch in tqdm(get_frame_batches, total=video_queuer.num_batches):
    image, scale, frame_idx = batch[0]
    image = image.astype("uint8")  # need uint8 type
    if frame_idx in frame_data:
      for tlwh, track_id, box_idx, cat_name in frame_data[frame_idx]:
        # check box valid
        if valid_box(tlwh, image):
          x, y, w, h = tlwh
          x, y, w, h = int(x), int(y), int(w), int(h)
          #print(x, y, w, h)
          #print(image[y:y+h, x:x+w])
          box_img = cv2.cvtColor(
              image[y:y+h, x:x+w], cv2.COLOR_BGR2RGB)
          if cat_name == "Person":
            if track_id not in p_track_to_feat:
              p_track_to_feat[track_id] = []
            p_track_to_feat[track_id].append(box_img)
          elif cat_name == "Vehicle":
            if track_id not in v_track_to_feat:
              v_track_to_feat[track_id] = []
            v_track_to_feat[track_id].append(box_img)

  # extract features
  def get_features(track_to_imgs, extractor):
    for track_id in track_to_imgs:
      box_imgs = track_to_imgs[track_id]
      track_to_imgs[track_id] = extractor(box_imgs).cpu().numpy()  # [K, 512]
      if args.use_avg_pool:
        # [1, 512]
        track_to_imgs[track_id] = np.mean(
            track_to_imgs[track_id], axis=0, keepdims=True)

  get_features(p_track_to_feat, p_extractor)
  get_features(v_track_to_feat, v_extractor)

  data = {}

  def gather_data(track_data, track_features, cat_name):
    data[cat_name] = {}
    for track_id in track_data:
      # ignore tracks with no valid boxes
      if track_id in track_features:
        data[cat_name][track_id] = (
            track_data[track_id], track_features[track_id])

  gather_data(p_tracks, p_track_to_feat, "Person")
  gather_data(v_tracks, v_track_to_feat, "Vehicle")

  return data


def compute_spatial_dist(tracks1, tracks2, frame_offset=0, tol=50,
                         ignore_pairs=[[], []]):
  # frame_offset: added to all frame indices in tracks2
  # tol: tolerance in pixels
  N = len(tracks1)
  M = len(tracks2)
  frame_offset = int(frame_offset)
  spatial_dist = np.ones((N, M), dtype="float") * 9999.
  for i, track_id1 in enumerate(sorted(tracks1.keys())):
    track1 = tracks1[track_id1][0]  # (K, 9)
    frame_to_points1 = {int(p[0]): p[-2:] for p in track1}
    frame_set1 = set([int(p[0]) for p in track1])
    for j, track_id2 in enumerate(sorted(tracks2.keys())):
      track2 = tracks2[track_id2][0]
      frame_to_points2 = {(int(p[0]) + frame_offset): p[-2:] for p in track2}
      frame_set2 = set([int(p[0]) + frame_offset for p in track2])
      intersected_frame_ids = list(frame_set1 & frame_set2)
      if intersected_frame_ids:
        # [K, 2]
        track1_points_to_compare = np.array(
            [frame_to_points1[fid] for fid in intersected_frame_ids])
        track2_points_to_compare = np.array(
            [frame_to_points2[fid] for fid in intersected_frame_ids])
        # pixel dist of the intersected frame part [K]
        dist = np.linalg.norm(
            track1_points_to_compare - track2_points_to_compare, axis=1)
        # check how many are above the tolerance
        # the tolerance should take into account the synchronization error
        # and the homography errors
        #good = [1. if d <= tol else 0. for d in dist]
        #spatial_dist[i, j] = np.sum(good)
        mean_dist = np.mean(dist)
        if mean_dist <= tol:
          spatial_dist[i, j] = mean_dist
        # TODO: the above does not consider intersected length

  # reset the ignore pairs dist to large
  for i, track_id1 in enumerate(sorted(tracks1.keys())):
    for j, track_id2 in enumerate(sorted(tracks2.keys())):
      if track_id1 in ignore_pairs[0] and track_id2 in ignore_pairs[1]:
        spatial_dist[i, j] = 9999.
  return spatial_dist


def compute_feature_dist(tracks1, tracks2, spatial_dist):
  """Compute squared l2 distance, save time on the sqrt op"""
  N = len(tracks1)
  M = len(tracks2)
  feature_dist = np.ones((N, M), dtype="float") * 999
  for i, track_id1 in enumerate(sorted(tracks1.keys())):
    track1 = tracks1[track_id1][1]  # features [K1, 512]
    for j, track_id2 in enumerate(sorted(tracks2.keys())):
      track2 = tracks2[track_id2][1]  # features [K2, 512]
      if spatial_dist[i, j] < 9999.:
        # [K1, K2]
        dist_mat = euclidean_distances(track1, track2, squared=True)
        min_dist = dist_mat.min()
        feature_dist[i, j] = min_dist
  return feature_dist


def get_cur_links(tracks1, tracks2, video_name1, video_name2,
                  global_track_ids, cat_name):
  # 1. get track-id pairs that are already in the same global track
  linked_pairs = [[], []]
  for gid in global_track_ids[cat_name]:
    track_id_set = global_track_ids[cat_name][gid]
    for track_id1 in tracks1:
      for track_id2 in tracks2:
        key1 = (video_name1, track_id1)
        key2 = (video_name2, track_id2)
        if key1 in track_id_set and key2 in track_id_set:
          linked_pairs[0].append(track_id1)
          linked_pairs[1].append(track_id2)

  # 2. get track-id pairs [at most NxM] that are in separate global tracks,
  # so we don't want to accidentally match them
  track1_ids_in_global = []
  track2_ids_in_global = []
  for track_id in tracks1:
    key = (video_name1, track_id)
    for gid in global_track_ids[cat_name]:
      track_id_set = global_track_ids[cat_name][gid]
      if key in track_id_set:
        track1_ids_in_global.append(track_id)
  for track_id in tracks2:
    key = (video_name2, track_id)
    for gid in global_track_ids[cat_name]:
      track_id_set = global_track_ids[cat_name][gid]
      if key in track_id_set:
        track2_ids_in_global.append(track_id)

  return linked_pairs, (track1_ids_in_global, track2_ids_in_global)


def create_or_merge_global_id(global_track_ids, cat_name,
                              video_name1, track_id1,
                              video_name2, track_id2):
  key1 = (video_name1, track_id1)
  key2 = (video_name2, track_id2)
  found = None
  for gid in global_track_ids[cat_name]:
    track_id_set = global_track_ids[cat_name][gid]
    if key1 in track_id_set or key2 in track_id_set:
      found = gid
      break
  if found is None:
    # global track ids start from 1
    new_gid = len(global_track_ids[cat_name]) + 1
    global_track_ids[cat_name][new_gid] = set([key1, key2])
  else:
    global_track_ids[cat_name][found].add(key1)
    global_track_ids[cat_name][found].add(key2)


def save_new_track(cat_name, track_data, global_track, out_dir, video_name):
  # save the global track id in the x,y,z slots of the MOT output line
  track_results = sorted(
      [b.tolist() for t in track_data for b in track_data[t][0]],
      key=lambda x: (x[0], x[1]))

  # make a reverse index first
  local_to_global_track_ids = {tid: gid for gid in global_track
                               for (vn, tid) in global_track[gid]
                               if vn == video_name}

  out_file_dir = os.path.join(out_dir, video_name + ".avi", cat_name)
  if not os.path.exists(out_file_dir):
    os.makedirs(out_file_dir)
  out_file = os.path.join(
      out_file_dir, video_name + ".txt")
  with open(out_file, "w") as fw:
    for row in track_results:
      # replace all local track_id with global track
      local_track_id = row[1]
      global_track_id = -1
      if local_track_id in local_to_global_track_ids:
        global_track_id = local_to_global_track_ids[local_track_id]
      line = "%d,%d,%.2f,%.2f,%.2f,%.2f,%.4f,%d,-1,-1" % (
          row[0], local_track_id, row[2], row[3], row[4], row[5], row[6],
          global_track_id)
      fw.write(line + "\n")


if __name__ == "__main__":
  args = parser.parse_args()

  np.set_printoptions(precision=2, suppress=True)

  if not os.path.exists(args.newfilepath):
    os.makedirs(args.newfilepath)

  if args.person_reid_model is None or args.vehicle_reid_model is None:
    raise Exception("Please provide models for person and vehicle!")

  # assuming your GPU can fit both models at once
  person_reid_extractor = FeatureExtractor(
      model_name="osnet_x1_0",
      model_path=args.person_reid_model,
      image_size=(256, 128),  # (h, w)
      device="cuda:%d" % args.gpuid
  )
  vehicle_reid_extractor = FeatureExtractor(
      model_name="resnet101",
      model_path=args.vehicle_reid_model,
      image_size=(128, 256),
      device="cuda:%d" % args.gpuid
  )
  print("Model loaded.")

  # compute homography first
  # camera -> H
  hs = compute_homographys(
      args.topdown_camera, args.camera_model_path, camera_model)

  with open(args.camera_group, "r") as f:
    camera_data = json.load(f)

  # reid among synchronized videos
  print("reid in sync groups...")
  for time_slot in tqdm(camera_data["sync_groups"]):
    global_track_ids = {
        "Person": {},
        "Vehicle": {},
    }  # id -> a set of (video_name, track_id)

    # 1. extract track data and features from each video
    # video_name -> object -> track_id -> boxes [N, 9] and features [<M, 512]
    tracks = {}
    for video_name in camera_data["sync_groups"][time_slot]:
      person_track_file = os.path.join(
          args.filepath, video_name + ".avi", "Person", video_name + ".txt")
      vehicle_track_file = os.path.join(
          args.filepath, video_name + ".avi", "Vehicle", video_name + ".txt")
      if not os.path.exists(person_track_file) or not os.path.exists(vehicle_track_file):
        tqdm.write("skipping %s since its track files do not exist" % video_name)
        continue

      # compute the top-down coordinates as well
      tracks[video_name] = load_track_and_features(
          args, video_name, person_track_file, vehicle_track_file,
          person_reid_extractor, vehicle_reid_extractor,
          hs)

    video_names = sorted(tracks.keys())
    for cat_name in ["Person", "Vehicle"]:
      # bubble compare
      for i in range(len(video_names) - 1):
        # compare to all other videos' tracks
        for j in range(i + 1, len(video_names)):
          tracks1 = tracks[video_names[i]][cat_name]
          tracks2 = tracks[video_names[j]][cat_name]
          # TODO: ignore short tracks?

          # some pairs in tracks1 and tracks2 might already be linked in the
          # global track from a previous a -> b, a -> c, so b -> c mapping;
          # check and remove the already linked tracks
          linked_pairs, dont_match_pairs = get_cur_links(
              tracks1, tracks2, video_names[i], video_names[j],
              global_track_ids, cat_name)

          tracks1 = {tid: tracks1[tid]
                     for tid in tracks1 if tid not in linked_pairs[0]}
          tracks2 = {tid: tracks2[tid]
                     for tid in tracks2 if tid not in linked_pairs[1]}
          if not tracks1 or not tracks2:
            continue

          # these time syncs are only accurate within 1-2 seconds
          frame_offset = compute_frame_offset(
              video_names[i], video_names[j], 30.0)

          # [N, M]
          # mean top-down pixel distance over the time-intersected frames
          spatial_dist = compute_spatial_dist(
              tracks1, tracks2, frame_offset, tol=args.spatial_dist_tol,
              ignore_pairs=dont_match_pairs)
          #cost, x, y = lap.lapjv(spatial_dist, extend_cost=True, cost_limit=998.)

          # ignoring large spatial dist items
          feat_dist = compute_feature_dist(tracks1, tracks2, spatial_dist)

          # minimize the total cost
          cost, x, y = lap.lapjv(feat_dist, extend_cost=True, cost_limit=998.)
tracks1_ids = sorted(tracks1.keys()) tracks2_ids = sorted(tracks2.keys()) """ print(video_names[i], video_names[j]) print(feat_dist) print(x, y) for ix, match_y in enumerate(x): if match_y >= 0: print("track 1 %s -> %s in track 2" % ( tracks1_ids[ix], tracks2_ids[match_y])) sys.exit() """ for ix, match_y in enumerate(x): if match_y >= 0: matched_track1_id = tracks1_ids[ix] matched_track2_id = tracks2_ids[match_y] create_or_merge_global_id( global_track_ids, cat_name, video_names[i], matched_track1_id, video_names[j], matched_track2_id) for cat_name in ["Person", "Vehicle"]: tqdm.write("group %s %s videos total %s %s track, %s got into %s global track" % ( time_slot, len(camera_data["sync_groups"][time_slot]), sum([len(tracks[vn][cat_name]) for vn in tracks]), cat_name, sum([len(global_track_ids[cat_name][gid]) for gid in global_track_ids[cat_name]]), len(global_track_ids[cat_name]))) # save the results for video_name in camera_data["sync_groups"][time_slot]: for cat_name in ["Person", "Vehicle"]: save_new_track( cat_name, tracks[video_name][cat_name], global_track_ids[cat_name], args.newfilepath, video_name) print("Done reid in sync group.") # TODO: multi-reid in consecutive camera groups
9,315
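compute_frame_offset in the script above turns two clips' HH-MM-SS start times into a frame delta at a fixed fps, which is then used to align the two cameras' track timelines. The same arithmetic in Java to make the convention concrete; parsing is reduced to just the start-time field, and the class name is invented.

public class FrameOffsetSketch {
    // "HH-MM-SS" -> seconds, as in the script's time2sec helper.
    static double timeToSec(String t) {
        String[] p = t.split("-");
        return Double.parseDouble(p[0]) * 3600.0
                + Double.parseDouble(p[1]) * 60.0
                + Double.parseDouble(p[2]);
    }

    // All frames of the second clip are shifted by this many frames.
    static double frameOffset(String start1, String start2, double fps) {
        return (timeToSec(start2) - timeToSec(start1)) * fps;
    }

    public static void main(String[] args) {
        // Clips starting at 16-40-08 and 16-40-02 at 30 fps: -180 frames.
        System.out.println(frameOffset("16-40-08", "16-40-02", 30.0));
    }
}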
2,519
// This file is made available under Elastic License 2.0.
// This file is based on code available under the Apache license here:
//   https://github.com/apache/incubator-doris/blob/master/fe/fe-core/src/main/java/org/apache/doris/persist/PrivInfo.java

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

package com.starrocks.persist;

import com.google.common.base.Strings;
import com.starrocks.analysis.ResourcePattern;
import com.starrocks.analysis.TablePattern;
import com.starrocks.analysis.UserIdentity;
import com.starrocks.catalog.Catalog;
import com.starrocks.common.FeMetaVersion;
import com.starrocks.common.StarRocksFEMetaVersion;
import com.starrocks.common.io.Text;
import com.starrocks.common.io.Writable;
import com.starrocks.mysql.privilege.Password;
import com.starrocks.mysql.privilege.PrivBitSet;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

public class PrivInfo implements Writable {
    private UserIdentity userIdent;
    private TablePattern tblPattern;
    private ResourcePattern resourcePattern;
    private PrivBitSet privs;
    private Password passwd;
    private String role;

    private PrivInfo() {

    }

    public PrivInfo(UserIdentity userIdent, PrivBitSet privs, Password passwd, String role) {
        this.userIdent = userIdent;
        this.tblPattern = null;
        this.resourcePattern = null;
        this.privs = privs;
        this.passwd = passwd;
        this.role = role;
    }

    public PrivInfo(UserIdentity userIdent, TablePattern tablePattern, PrivBitSet privs,
                    Password passwd, String role) {
        this.userIdent = userIdent;
        this.tblPattern = tablePattern;
        this.resourcePattern = null;
        this.privs = privs;
        this.passwd = passwd;
        this.role = role;
    }

    public PrivInfo(UserIdentity userIdent, ResourcePattern resourcePattern, PrivBitSet privs,
                    Password passwd, String role) {
        this.userIdent = userIdent;
        this.tblPattern = null;
        this.resourcePattern = resourcePattern;
        this.privs = privs;
        this.passwd = passwd;
        this.role = role;
    }

    public UserIdentity getUserIdent() {
        return userIdent;
    }

    public TablePattern getTblPattern() {
        return tblPattern;
    }

    public ResourcePattern getResourcePattern() {
        return resourcePattern;
    }

    public PrivBitSet getPrivs() {
        return privs;
    }

    public Password getPasswd() {
        return passwd;
    }

    public String getRole() {
        return role;
    }

    public static PrivInfo read(DataInput in) throws IOException {
        PrivInfo info = new PrivInfo();
        info.readFields(in);
        return info;
    }

    @Override
    public void write(DataOutput out) throws IOException {
        if (userIdent != null) {
            out.writeBoolean(true);
            userIdent.write(out);
        } else {
            out.writeBoolean(false);
        }

        if (tblPattern != null) {
            out.writeBoolean(true);
            tblPattern.write(out);
        } else {
            out.writeBoolean(false);
        }

        if (resourcePattern != null) {
            out.writeBoolean(true);
            resourcePattern.write(out);
        } else {
            out.writeBoolean(false);
        }

        if (privs != null) {
out.writeBoolean(true); privs.write(out); } else { out.writeBoolean(false); } if (passwd != null) { out.writeBoolean(true); passwd.write(out); } else { out.writeBoolean(false); } if (!Strings.isNullOrEmpty(role)) { out.writeBoolean(true); Text.writeString(out, role); } else { out.writeBoolean(false); } } public void readFields(DataInput in) throws IOException { if (in.readBoolean()) { userIdent = UserIdentity.read(in); } if (in.readBoolean()) { tblPattern = TablePattern.read(in); } if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_87) { if (in.readBoolean()) { resourcePattern = ResourcePattern.read(in); } } if (in.readBoolean()) { privs = PrivBitSet.read(in); } if (in.readBoolean()) { if (Catalog.getCurrentCatalogStarRocksJournalVersion() >= StarRocksFEMetaVersion.VERSION_2) { passwd = Password.read(in); } else { int passwordLen = in.readInt(); byte[] password = new byte[passwordLen]; in.readFully(password); passwd = new Password(password); } } if (in.readBoolean()) { role = Text.readString(in); } } }
2,380
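PrivInfo.write/readFields above guard every optional field with a presence boolean, so the reader knows whether a payload follows. Below is a standalone sketch of that presence-flag convention with a plain String payload instead of the StarRocks types; the class name is invented.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class OptionalFieldSketch {
    // Presence flag first, payload only if present - as in PrivInfo.write.
    static void writeOptional(DataOutputStream out, String value) throws IOException {
        if (value != null) {
            out.writeBoolean(true);
            out.writeUTF(value);
        } else {
            out.writeBoolean(false);
        }
    }

    // Mirror of PrivInfo.readFields: read the payload only if the flag is set.
    static String readOptional(DataInputStream in) throws IOException {
        return in.readBoolean() ? in.readUTF() : null;
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        writeOptional(out, "role_admin");
        writeOptional(out, null);

        DataInputStream in =
                new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
        System.out.println(readOptional(in));  // role_admin
        System.out.println(readOptional(in));  // null
    }
}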
594
/* PR tree-optimization/19828 */ typedef __SIZE_TYPE__ size_t; extern size_t strlen (const char *s); extern int strncmp (const char *s1, const char *s2, size_t n); extern void abort (void); const char *a[16] = { "a", "bc", "de", "fgh" }; int foo (char *x, const char *y, size_t n) { size_t i, j = 0; for (i = 0; i < n; i++) { if (strncmp (x + j, a[i], strlen (a[i])) != 0) return 2; j += strlen (a[i]); if (y) j += strlen (y); } return 0; } int main (void) { if (foo ("abcde", (const char *) 0, 3) != 0) abort (); return 0; }
285
777
# Copyright 2016 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'targets': [ { 'target_name': 'app', 'dependencies': [ '<(EXTERNS_GYP):chrome_extensions', 'store' ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'folder_node', 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], 'dependencies': [ '<(EXTERNS_GYP):chrome_extensions', 'store' ], }, { 'target_name': 'item', 'dependencies': [ '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:icon', '<(EXTERNS_GYP):chrome_extensions', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'list', 'dependencies': [ '<(DEPTH)/ui/webui/resources/cr_elements/cr_action_menu/compiled_resources2.gyp:cr_action_menu', '<(EXTERNS_GYP):bookmark_manager_private', '<(EXTERNS_GYP):chrome_extensions', 'item', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'sidebar', 'dependencies': [ 'folder_node', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, { 'target_name': 'store', 'dependencies': [ '<(EXTERNS_GYP):chrome_extensions' ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'] }, { 'target_name': 'toolbar', 'dependencies': [ '<(DEPTH)/ui/webui/resources/cr_elements/cr_action_menu/compiled_resources2.gyp:cr_action_menu', '<(DEPTH)/ui/webui/resources/cr_elements/cr_toolbar/compiled_resources2.gyp:cr_toolbar', ], 'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'], }, ] }
931
777
<filename>chrome/browser/permissions/grouped_permission_infobar_delegate_android.h // Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_PERMISSIONS_GROUPED_PERMISSION_INFOBAR_DELEGATE_ANDROID_H_ #define CHROME_BROWSER_PERMISSIONS_GROUPED_PERMISSION_INFOBAR_DELEGATE_ANDROID_H_ #include <memory> #include "base/callback.h" #include "components/content_settings/core/common/content_settings_types.h" #include "components/infobars/core/confirm_infobar_delegate.h" class GURL; class InfoBarService; class PermissionPromptAndroid; class PermissionRequest; // An InfoBar that displays a group of permission requests, each of which can be // allowed or blocked independently. // TODO(tsergeant): Expand this class so it can be used without subclassing. class GroupedPermissionInfoBarDelegate : public ConfirmInfoBarDelegate { public: // Public so we can have std::unique_ptr<GroupedPermissionInfoBarDelegate>. ~GroupedPermissionInfoBarDelegate() override; static infobars::InfoBar* Create( PermissionPromptAndroid* permission_prompt, InfoBarService* infobar_service, const GURL& requesting_origin, const std::vector<PermissionRequest*>& requests); bool persist() const { return persist_; } void set_persist(bool persist) { persist_ = persist; } size_t permission_count() const { return requests_.size(); } // Returns true if the infobar should display a toggle to allow users to // opt-out of persisting their accept/deny decision. bool ShouldShowPersistenceToggle() const; ContentSettingsType GetContentSettingType(size_t position) const; int GetIconIdForPermission(size_t position) const; // Message text to display for an individual permission at |position|. base::string16 GetMessageTextFragment(size_t position) const; // Toggle accept value for an individual permission at |position|. void ToggleAccept(size_t position, bool new_value); // ConfirmInfoBarDelegate: base::string16 GetMessageText() const override; bool Accept() override; bool Cancel() override; void InfoBarDismissed() override; void PermissionPromptDestroyed(); protected: bool GetAcceptState(size_t position); private: GroupedPermissionInfoBarDelegate( PermissionPromptAndroid* permission_prompt, const GURL& requesting_origin, const std::vector<PermissionRequest*>& requests); // ConfirmInfoBarDelegate: InfoBarIdentifier GetIdentifier() const override; Type GetInfoBarType() const override; int GetButtons() const override; base::string16 GetButtonLabel(InfoBarButton button) const override; const GURL requesting_origin_; const std::vector<PermissionRequest*> requests_; // Whether the accept/deny decision is persisted. bool persist_; PermissionPromptAndroid* permission_prompt_; DISALLOW_COPY_AND_ASSIGN(GroupedPermissionInfoBarDelegate); }; #endif // CHROME_BROWSER_PERMISSIONS_GROUPED_PERMISSION_INFOBAR_DELEGATE_ANDROID_H_
934
348
{"nom":"Poggio-Mezzana","circ":"2ème circonscription","dpt":"Haute-Corse","inscrits":457,"abs":232,"votants":225,"blancs":15,"nuls":5,"exp":205,"res":[{"nuance":"REG","nom":"<NAME>","voix":138},{"nuance":"REM","nom":"<NAME>","voix":67}]}
95
521
<filename>third_party/virtualbox/src/VBox/Frontends/VirtualBox/shaders/mainOverlayNoDiscard.c<gh_stars>100-1000 /* $Id: mainOverlayNoDiscard.c $ */ #extension GL_ARB_texture_rectangle : enable uniform sampler2DRect uDstTex; uniform vec4 uDstClr; void vboxCConv(); void main(void) { vec4 dstClr = texture2DRect(uDstTex, vec2(gl_TexCoord[1])); vec3 difClr = dstClr.rgb - uDstClr.rgb; if(any(greaterThan(difClr, vec3(0.01, 0.01, 0.01))) || any(lessThan(difClr, vec3(-0.01, -0.01, -0.01)))) { gl_FragColor = dstClr; } else { vboxCConv(); } }
297
484
// // TUITextRenderer+Accessibility.h // TwUI // // Created by <NAME> on 10/27/11. // Copyright (c) 2011 Maybe Apps, LLC. All rights reserved. // #import <Foundation/Foundation.h> #import "TUITextRenderer.h" @interface TUITextRenderer (Accessibility) @end
104
348
<reponame>Amit0617/OpenMS { // Use IntelliSense to learn about possible attributes. // Hover to view descriptions of existing attributes. "version": "0.2.0", "configurations": [] }
60
640
<reponame>ahjelm/z88dk<gh_stars>100-1000 /* * Enterprise 128 Mandelbrot demo * * Example on how to interface to EXOS with z88dk * this program initializes a video channel and * communicates to it via the ESCape sequences. * * To build: * zcc +enterprise -lm -create-app mandel.c */ #include <stdio.h> #include <math.h> #include <enterprise.h> float a,b,c,d,e,g,h,i,j; int x,y; int xmax,ymax; int k; float l,m,n,o,p; void main() { // Initialize a custom video mode exos_set_vmode(VM_HRG,CM_16,40,24); // CLRHOME exos_write_character(DEFAULT_VIDEO, 0x1A); xmax=210; ymax=210; a=-2.0; b=2.0; c=a; d=b; e=4.0; g=(b-a)/(float)xmax; h=(d-c)/(float)ymax; for(y=ymax; y>0; y--) { j=(float)y*h+c; for(x=xmax; x>0; x--) { i=(float)x*g+a; k=0; l=0.0; m=0.0; n=0.0; o=0.0; l110: k++; if (k<100) //Iterates { p=n-o+i; m=2.0*l*m+j; l=p; n=l*l; o=m*m; //printf ("%f ",e); if ((n+o)<e) goto l110; esccmd_cmd='I'; // INK colour esccmd_x=k; exos_write_block(DEFAULT_VIDEO, 3, esccmd); esccmd_cmd='s'; // set beam off exos_write_block(DEFAULT_VIDEO, 2, esccmd); esccmd_cmd='A'; // set beam position esccmd_x=x*4; esccmd_y=y*4; exos_write_block(DEFAULT_VIDEO, 6, esccmd); esccmd_cmd='S'; // set beam on exos_write_block(DEFAULT_VIDEO, 2, esccmd); } } } while (getk()!=10); }
772
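The goto loop in the Mandelbrot demo above is the classic escape-time test: iterate z = z^2 + c until |z|^2 exceeds 4 or the iteration cap (100 here) is reached. The same kernel in Java without the EXOS plotting calls; the bounds check and cap are copied from the program above.

public class EscapeTime {
    // Iterate z = z^2 + c; return steps until |z|^2 > 4, capped at 100.
    static int escapeCount(double re, double im) {
        double x = 0.0;
        double y = 0.0;
        int k = 0;
        while (k < 100 && x * x + y * y < 4.0) {
            double nx = x * x - y * y + re;
            y = 2.0 * x * y + im;
            x = nx;
            k++;
        }
        return k;
    }

    public static void main(String[] args) {
        System.out.println(escapeCount(0.0, 0.0));  // 100: stays bounded, in the set
        System.out.println(escapeCount(2.0, 2.0));  // 1: escapes immediately
    }
}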
1,602
<reponame>jhh67/chapel
/*

  This file is provided under a dual BSD/GPLv2 license.  When using or
  redistributing this file, you may do so under either license.

  GPL LICENSE SUMMARY

  Copyright(c) 2015 Intel Corporation.

  This program is free software; you can redistribute it and/or modify
  it under the terms of version 2 of the GNU General Public License as
  published by the Free Software Foundation.

  This program is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  General Public License for more details.

  Contact Information:
  Intel Corporation, www.intel.com

  BSD LICENSE

  Copyright(c) 2015 Intel Corporation.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:

    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in
      the documentation and/or other materials provided with the
      distribution.
    * Neither the name of Intel Corporation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

/* This file contains the hfi service routine interface used by the low */
/* level hfi protocol code. */

#include <sys/poll.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <stdint.h>
#include <stdlib.h>
#include <stddef.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <stdio.h>
#include <fcntl.h>
#include <malloc.h>
#include <time.h>

#include "opa_user.h"

/* keep track whether we disabled mmap in malloc */
int __hfi_malloc_no_mmap = 0;

const char *hfi_get_next_name(char **names)
{
    char *p, *start;

    p = start = *names;
    while (*p != '\0' && *p != '\n') {
        p++;
    }

    if (*p == '\n') {
        *p = '\0';
        p++;
        *names = p;
        return start;
    } else
        return NULL;
}

void hfi_release_names(char *namep)
{
    /* names were initialised in the data section before. Now
     * they are allocated when hfi_hfifs_read() is called. Allocation
     * for names is done only once at init time. Should we eventually
     * have a "stats_type_unregister" type of routine to explicitly
     * deallocate memory and free resources?
     */
#if 0
    if (namep != NULL)
        free(namep);
#endif
}

/*
 * Add a constructor function to disable mmap if asked to do so by the user
 */
static void init_mallopt_disable_mmap(void) __attribute__ ((constructor));

static void init_mallopt_disable_mmap(void)
{
    char *env = getenv("PSM3_DISABLE_MMAP_MALLOC");

    if (env && *env) {
        if (mallopt(M_MMAP_MAX, 0) && mallopt(M_TRIM_THRESHOLD, -1)) {
            __hfi_malloc_no_mmap = 1;
        }
    }

    return;
}

/* Convert Timeout value from usec to
 * timeout_mult where usec = 4.096usec * 2^timeout_mult
 */
uint8_t timeout_usec_to_mult(uint64_t timeout_us)
{
    /* all values are rounded up, comments reflect the exact value */
    if (timeout_us <= 4) return 0;                  /* 4.096 us */
    else if (timeout_us <= 8) return 1;             /* 8.192 us */
    else if (timeout_us <= 16) return 2;            /* 16.384 us */
    else if (timeout_us <= 32) return 3;            /* 32.768 us */
    else if (timeout_us <= 65) return 4;            /* 65.536 us */
    else if (timeout_us <= 131) return 5;           /* 131.072 us */
    else if (timeout_us <= 262) return 6;           /* 262.144 us */
    else if (timeout_us <= 524) return 7;           /* 524.288 us */
    else if (timeout_us <= 1048) return 8;          /* 1048.576 us */
    else if (timeout_us <= 2097) return 9;          /* 2.097 ms */
    else if (timeout_us <= 4194) return 10;         /* 4.194 ms */
    else if (timeout_us <= 8388) return 11;         /* 8.388 ms */
    else if (timeout_us <= 16777) return 12;        /* 16.777 ms */
    else if (timeout_us <= 33554) return 13;        /* 33.554 ms */
    else if (timeout_us <= 67108) return 14;        /* 67.1 ms */
    else if (timeout_us <= 134217) return 15;       /* 134.2 ms */
    else if (timeout_us <= 268435) return 16;       /* 268.4 ms */
    else if (timeout_us <= 536870) return 17;       /* 536.8 ms */
    else if (timeout_us <= 1073741) return 18;      /* 1.073 s */
    else if (timeout_us <= 2147483) return 19;      /* 2.148 s */
    else if (timeout_us <= 4294967) return 20;      /* 4.294 s */
    else if (timeout_us <= 8589934) return 21;      /* 8.589 s */
    else if (timeout_us <= 17179869) return 22;     /* 17.179 s */
    else if (timeout_us <= 34359738) return 23;     /* 34.359 s */
    else if (timeout_us <= 68719476) return 24;     /* 68.719 s */
    else if (timeout_us <= 137438953ll) return 25;  /* 2.2 minutes */
    else if (timeout_us <= 274877906ll) return 26;  /* 4.5 minutes */
    else if (timeout_us <= 549755813ll) return 27;  /* 9 minutes */
    else if (timeout_us <= 1099511628ll) return 28; /* 18 minutes */
    else if (timeout_us <= 2199023256ll) return 29; /* 0.6 hr */
    else if (timeout_us <= 4398046511ll) return 30; /* 1.2 hr */
    else return 31;                                 /* 2.4 hr */
}
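The if/else ladder in timeout_usec_to_mult above is a table form of mult = ceil(log2(timeout_us / 4.096)) clamped to [0, 31]. A closed-form Python sketch for illustration (not part of the PSM3 sources):

import math

def timeout_usec_to_mult(timeout_us):
    """usec = 4.096 * 2**mult, rounded up and clamped to the 5-bit field."""
    if timeout_us <= 4:
        return 0   # the table maps <= 4 us to mult 0
    return min(math.ceil(math.log2(timeout_us / 4.096)), 31)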
2,111
488
<reponame>NouamaneTazi/conv-emotion
import sys
import random
import logging
import numpy as np
import torch
import pickle


def set_seed(seed):
    """Sets random seed everywhere."""
    torch.manual_seed(seed)
    random.seed(seed)
    np.random.seed(seed)


def get_logger(level=logging.INFO):
    log = logging.getLogger(__name__)
    if log.handlers:
        return log
    log.setLevel(level)
    ch = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter(fmt='%(asctime)s %(message)s',
                                  datefmt='%m/%d/%Y %I:%M:%S')
    ch.setFormatter(formatter)
    log.addHandler(ch)
    return log


def save_pkl(obj, file):
    with open(file, "wb") as f:
        pickle.dump(obj, f)


def load_pkl(file):
    with open(file, "rb") as f:
        return pickle.load(f)
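A short usage sketch of the helpers above (assuming they are imported; the file name and payload are placeholders):

set_seed(42)                                  # reproducible torch/random/numpy state
log = get_logger()
log.info("training started")
save_pkl({"best_acc": 0.91}, "metrics.pkl")   # hypothetical payload and path
metrics = load_pkl("metrics.pkl")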
338
464
package dev.fiki.forgehax.api.events.render;

import com.mojang.blaze3d.matrix.MatrixStack;
import net.minecraft.client.gui.screen.Screen;
import net.minecraft.client.gui.screen.inventory.ContainerScreen;

public class GuiContainerRenderEvent extends GuiRenderEvent {
  GuiContainerRenderEvent(Screen gui, MatrixStack stack, int mouseX, int mouseY, float renderPartialTicks) {
    super(gui, stack, mouseX, mouseY, renderPartialTicks);
  }

  public ContainerScreen getContainerScreen() {
    return (ContainerScreen) getGui();
  }

  public static class Background extends GuiContainerRenderEvent {
    public Background(Screen gui, MatrixStack stack, int mouseX, int mouseY, float renderPartialTicks) {
      super(gui, stack, mouseX, mouseY, renderPartialTicks);
    }
  }
}
242
715
<reponame>kihyuks/objax
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unittests for Objax2Tf converter."""

import shutil
import tempfile
import unittest

import numpy as np

import objax
from objax.zoo.wide_resnet import WideResNet

import tensorflow as tf

BATCH_SIZE = 4
NCHANNELS = 3
NCLASSES = 10
IMAGE_SIZE = 32


class TestObjax2Tf(unittest.TestCase):

    def verify_converted_predict_op(self, objax_op, tf_op, shape):
        x1 = np.random.normal(size=shape)
        x2 = np.random.normal(size=shape)
        # due to differences in op implementations, there might be small numerical
        # differences between TF and Objax, thus comparing up to 1e-4 relative tolerance
        np.testing.assert_allclose(objax_op(x1),
                                   tf_op(tf.convert_to_tensor(x1, dtype=tf.float32)),
                                   rtol=1e-4)
        np.testing.assert_allclose(objax_op(x2),
                                   tf_op(tf.convert_to_tensor(x2, dtype=tf.float32)),
                                   rtol=1e-4)

    def test_convert_wrn(self):
        # Make a model
        model = WideResNet(NCHANNELS, NCLASSES, depth=4, width=1)
        # Prediction op without JIT
        predict_op = objax.nn.Sequential([objax.ForceArgs(model, training=False),
                                          objax.functional.softmax])
        predict_tf = objax.util.Objax2Tf(predict_op)
        # Compare results
        self.verify_converted_predict_op(predict_op, predict_tf,
                                         shape=(BATCH_SIZE, NCHANNELS, IMAGE_SIZE, IMAGE_SIZE))
        # Predict op with JIT
        predict_op_jit = objax.Jit(predict_op)
        predict_tf_jit = objax.util.Objax2Tf(predict_op_jit)
        # Compare results
        self.verify_converted_predict_op(predict_op_jit, predict_tf_jit,
                                         shape=(BATCH_SIZE, NCHANNELS, IMAGE_SIZE, IMAGE_SIZE))

    def test_savedmodel_wrn(self):
        model_dir = tempfile.mkdtemp()
        # Make a model and convert it to TF
        model = WideResNet(NCHANNELS, NCLASSES, depth=4, width=1)
        predict_op = objax.Jit(objax.nn.Sequential([objax.ForceArgs(model, training=False),
                                                    objax.functional.softmax]))
        predict_tf = objax.util.Objax2Tf(predict_op)
        # Save model
        input_shape = (BATCH_SIZE, NCHANNELS, IMAGE_SIZE, IMAGE_SIZE)
        tf.saved_model.save(
            predict_tf,
            model_dir,
            signatures=predict_tf.__call__.get_concrete_function(tf.TensorSpec(input_shape, tf.float32)))
        # Load model
        loaded_tf_model = tf.saved_model.load(model_dir)
        loaded_predict_tf_op = loaded_tf_model.signatures['serving_default']
        self.verify_converted_predict_op(predict_op,
                                         lambda x: loaded_predict_tf_op(x)['output_0'],
                                         shape=input_shape)
        self.verify_converted_predict_op(predict_op,
                                         lambda x: loaded_tf_model(x),
                                         shape=input_shape)
        # Cleanup
        shutil.rmtree(model_dir)


if __name__ == '__main__':
    unittest.main()
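Distilled from the tests above, the conversion pattern being exercised is simply the following (shapes and the WideResNet hyper-parameters here are illustrative):

model = WideResNet(3, 10, depth=4, width=1)
predict_op = objax.nn.Sequential([objax.ForceArgs(model, training=False),
                                  objax.functional.softmax])
predict_tf = objax.util.Objax2Tf(predict_op)          # Objax callable -> tf.Module
y = predict_tf(tf.zeros((4, 3, 32, 32), tf.float32))  # callable from TensorFlow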
1,602
790
from __future__ import unicode_literals

from django.db import models
from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class Place(models.Model):
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)

    def __str__(self):
        return "%s the place" % self.name


@python_2_unicode_compatible
class Restaurant(models.Model):
    place = models.OneToOneField(Place)
    serves_hot_dogs = models.BooleanField()
    serves_pizza = models.BooleanField()

    def __str__(self):
        return "%s the restaurant" % self.place.name


@python_2_unicode_compatible
class Bar(models.Model):
    place = models.OneToOneField(Place)
    serves_cocktails = models.BooleanField()

    def __str__(self):
        return "%s the bar" % self.place.name


class UndergroundBar(models.Model):
    place = models.OneToOneField(Place, null=True)
    serves_cocktails = models.BooleanField()


@python_2_unicode_compatible
class Favorites(models.Model):
    name = models.CharField(max_length=50)
    restaurants = models.ManyToManyField(Restaurant)

    def __str__(self):
        return "Favorites for %s" % self.name


class Target(models.Model):
    pass


class Pointer(models.Model):
    other = models.OneToOneField(Target, primary_key=True)


class Pointer2(models.Model):
    other = models.OneToOneField(Target)
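As a usage note, each OneToOneField above gives Place a lowercase reverse accessor named after the related model; a minimal sketch of that standard Django behaviour (values are illustrative):

p = Place.objects.create(name="Demon Dogs", address="944 W. Fullerton")
Restaurant.objects.create(place=p, serves_hot_dogs=True, serves_pizza=False)
assert p.restaurant.serves_hot_dogs      # reverse one-to-one access
Bar.objects.create(place=p, serves_cocktails=True)
assert p.bar.serves_cocktails            # each relation gets its own accessor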
497
1,016
/* * Copyright 2021 Red Hat, Inc. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.vertx.ext.web.client; import io.vertx.codegen.annotations.DataObject; import io.vertx.core.buffer.Buffer; import io.vertx.core.http.Http2Settings; import io.vertx.core.http.HttpMethod; import io.vertx.core.http.HttpVersion; import io.vertx.core.json.JsonObject; import io.vertx.core.net.JdkSSLEngineOptions; import io.vertx.core.net.JksOptions; import io.vertx.core.net.KeyCertOptions; import io.vertx.core.net.OpenSSLEngineOptions; import io.vertx.core.net.PemKeyCertOptions; import io.vertx.core.net.PemTrustOptions; import io.vertx.core.net.PfxOptions; import io.vertx.core.net.ProxyOptions; import io.vertx.core.net.SSLEngineOptions; import io.vertx.core.net.TrustOptions; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; /** * @author <a href="mailto:<EMAIL>"><NAME></a> */ @DataObject(generateConverter = true) public class CachingWebClientOptions extends WebClientOptions { public static final Set<Integer> DEFAULT_CACHED_STATUS_CODES = buildDefaultStatusCodes(); public static final Set<HttpMethod> DEFAULT_CACHED_METHODS = buildDefaultMethods(); private boolean enableVaryCaching = false; private Set<Integer> cachedStatusCodes = DEFAULT_CACHED_STATUS_CODES; private Set<HttpMethod> cachedMethods = DEFAULT_CACHED_METHODS; public CachingWebClientOptions() { } public CachingWebClientOptions(boolean enableVaryCaching) { this.enableVaryCaching = enableVaryCaching; } public CachingWebClientOptions(WebClientOptions other) { super(other); } public CachingWebClientOptions(JsonObject json) { super(json); CachingWebClientOptionsConverter.fromJson(json, this); } void init(CachingWebClientOptions other) { super.init(other); this.enableVaryCaching = other.enableVaryCaching; this.cachedStatusCodes = other.cachedStatusCodes; this.cachedMethods = other.cachedMethods; } /** * Convert to JSON * * @return the JSON */ public JsonObject toJson() { JsonObject json = super.toJson(); CachingWebClientOptionsConverter.toJson(this, json); return json; } /** * Configure the client cache behavior for {@code Vary} responses. * * @param enabled true to enable caching varying responses * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions setEnableVaryCaching(boolean enabled) { this.enableVaryCaching = enabled; return this; } /** * @return the set of status codes to consider cacheable. */ public Set<Integer> getCachedStatusCodes() { return cachedStatusCodes; } /** * Configure the status codes that can be cached. * * @param codes the cacheable status code numbers * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions setCachedStatusCodes(Set<Integer> codes) { this.cachedStatusCodes = codes; return this; } /** * Add a status code that is cacheable. 
* * @param code the additional code number * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions addCachedStatusCode(Integer code) { this.cachedStatusCodes.add(code); return this; } /** * Remove a status code that is cacheable. * * @param code the code number to remove * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions removeCachedStatusCode(Integer code) { this.cachedStatusCodes.remove(code); return this; } /** * @return the set of HTTP methods to consider cacheable. */ public Set<HttpMethod> getCachedMethods() { return cachedMethods; } /** * Configure the HTTP methods that can be cached. * * @param methods the HTTP methods to cache * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions setCachedMethods(Set<HttpMethod> methods) { this.cachedMethods = methods; return this; } /** * Add an HTTP method that is cacheable. * * @param method the method to add * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions addCachedMethod(HttpMethod method) { this.cachedMethods.add(method); return this; } /** * Remove an HTTP method that is cacheable. * * @param method the method to remove * @return a reference to this, so the API can be used fluently */ public CachingWebClientOptions removeCachedMethod(HttpMethod method) { this.cachedMethods.remove(method); return this; } /** * @return true if the client will cache responses with the {@code Vary} header, false otherwise */ public boolean isVaryCachingEnabled() { return enableVaryCaching; } @Override public CachingWebClientOptions setUserAgentEnabled(boolean userAgentEnabled) { return (CachingWebClientOptions) super.setUserAgentEnabled(userAgentEnabled); } @Override public CachingWebClientOptions setUserAgent(String userAgent) { return (CachingWebClientOptions) super.setUserAgent(userAgent); } @Override public CachingWebClientOptions setFollowRedirects(boolean followRedirects) { return (CachingWebClientOptions) super.setFollowRedirects(followRedirects); } @Override public CachingWebClientOptions setMaxRedirects(int maxRedirects) { return (CachingWebClientOptions) super.setMaxRedirects(maxRedirects); } @Override public CachingWebClientOptions setSendBufferSize(int sendBufferSize) { return (CachingWebClientOptions) super.setSendBufferSize(sendBufferSize); } @Override public CachingWebClientOptions setReceiveBufferSize(int receiveBufferSize) { return (CachingWebClientOptions) super.setReceiveBufferSize(receiveBufferSize); } @Override public CachingWebClientOptions setReuseAddress(boolean reuseAddress) { return (CachingWebClientOptions) super.setReuseAddress(reuseAddress); } @Override public CachingWebClientOptions setTrafficClass(int trafficClass) { return (CachingWebClientOptions) super.setTrafficClass(trafficClass); } @Override public CachingWebClientOptions setTcpNoDelay(boolean tcpNoDelay) { return (CachingWebClientOptions) super.setTcpNoDelay(tcpNoDelay); } @Override public CachingWebClientOptions setTcpKeepAlive(boolean tcpKeepAlive) { return (CachingWebClientOptions) super.setTcpKeepAlive(tcpKeepAlive); } @Override public CachingWebClientOptions setSoLinger(int soLinger) { return (CachingWebClientOptions) super.setSoLinger(soLinger); } @Override public CachingWebClientOptions setIdleTimeout(int idleTimeout) { return (CachingWebClientOptions) super.setIdleTimeout(idleTimeout); } @Override public CachingWebClientOptions setIdleTimeoutUnit(TimeUnit idleTimeoutUnit) { return 
(CachingWebClientOptions) super.setIdleTimeoutUnit(idleTimeoutUnit); } @Override public CachingWebClientOptions setSsl(boolean ssl) { return (CachingWebClientOptions) super.setSsl(ssl); } @Override public CachingWebClientOptions setKeyCertOptions(KeyCertOptions options) { return (CachingWebClientOptions) super.setKeyCertOptions(options); } @Override public CachingWebClientOptions setKeyStoreOptions(JksOptions options) { return (CachingWebClientOptions) super.setKeyStoreOptions(options); } @Override public CachingWebClientOptions setPfxKeyCertOptions(PfxOptions options) { return (CachingWebClientOptions) super.setPfxKeyCertOptions(options); } @Override public CachingWebClientOptions setTrustOptions(TrustOptions options) { return (CachingWebClientOptions) super.setTrustOptions(options); } @Override public CachingWebClientOptions setPemKeyCertOptions(PemKeyCertOptions options) { return (CachingWebClientOptions) super.setPemKeyCertOptions(options); } @Override public CachingWebClientOptions setTrustStoreOptions(JksOptions options) { return (CachingWebClientOptions) super.setTrustStoreOptions(options); } @Override public CachingWebClientOptions setPfxTrustOptions(PfxOptions options) { return (CachingWebClientOptions) super.setPfxTrustOptions(options); } @Override public CachingWebClientOptions setPemTrustOptions(PemTrustOptions options) { return (CachingWebClientOptions) super.setPemTrustOptions(options); } @Override public CachingWebClientOptions addEnabledCipherSuite(String suite) { return (CachingWebClientOptions) super.addEnabledCipherSuite(suite); } @Override public CachingWebClientOptions addCrlPath(String crlPath) throws NullPointerException { return (CachingWebClientOptions) super.addCrlPath(crlPath); } @Override public CachingWebClientOptions addCrlValue(Buffer crlValue) throws NullPointerException { return (CachingWebClientOptions) super.addCrlValue(crlValue); } @Override public CachingWebClientOptions setConnectTimeout(int connectTimeout) { return (CachingWebClientOptions) super.setConnectTimeout(connectTimeout); } @Override public CachingWebClientOptions setTrustAll(boolean trustAll) { return (CachingWebClientOptions) super.setTrustAll(trustAll); } @Override public CachingWebClientOptions setMaxPoolSize(int maxPoolSize) { return (CachingWebClientOptions) super.setMaxPoolSize(maxPoolSize); } @Override public CachingWebClientOptions setHttp2MultiplexingLimit(int limit) { return (CachingWebClientOptions) super.setHttp2MultiplexingLimit(limit); } @Override public CachingWebClientOptions setHttp2MaxPoolSize(int max) { return (CachingWebClientOptions) super.setHttp2MaxPoolSize(max); } @Override public CachingWebClientOptions setHttp2ConnectionWindowSize(int http2ConnectionWindowSize) { return (CachingWebClientOptions) super.setHttp2ConnectionWindowSize(http2ConnectionWindowSize); } @Override public CachingWebClientOptions setKeepAlive(boolean keepAlive) { return (CachingWebClientOptions) super.setKeepAlive(keepAlive); } @Override public CachingWebClientOptions setPipelining(boolean pipelining) { return (CachingWebClientOptions) super.setPipelining(pipelining); } @Override public CachingWebClientOptions setPipeliningLimit(int limit) { return (CachingWebClientOptions) super.setPipeliningLimit(limit); } @Override public CachingWebClientOptions setVerifyHost(boolean verifyHost) { return (CachingWebClientOptions) super.setVerifyHost(verifyHost); } @Override public CachingWebClientOptions setTryUseCompression(boolean tryUseCompression) { return (CachingWebClientOptions) 
super.setTryUseCompression(tryUseCompression); } @Override public CachingWebClientOptions setSendUnmaskedFrames(boolean sendUnmaskedFrames) { return (CachingWebClientOptions) super.setSendUnmaskedFrames(sendUnmaskedFrames); } @Override public CachingWebClientOptions setMaxWebSocketFrameSize(int maxWebsocketFrameSize) { return (CachingWebClientOptions) super.setMaxWebSocketFrameSize(maxWebsocketFrameSize); } @Override public CachingWebClientOptions setDefaultHost(String defaultHost) { return (CachingWebClientOptions) super.setDefaultHost(defaultHost); } @Override public CachingWebClientOptions setDefaultPort(int defaultPort) { return (CachingWebClientOptions) super.setDefaultPort(defaultPort); } @Override public CachingWebClientOptions setMaxChunkSize(int maxChunkSize) { return (CachingWebClientOptions) super.setMaxChunkSize(maxChunkSize); } @Override public CachingWebClientOptions setProtocolVersion(HttpVersion protocolVersion) { return (CachingWebClientOptions) super.setProtocolVersion(protocolVersion); } @Override public CachingWebClientOptions setMaxHeaderSize(int maxHeaderSize) { return (CachingWebClientOptions) super.setMaxHeaderSize(maxHeaderSize); } @Override public CachingWebClientOptions setMaxWaitQueueSize(int maxWaitQueueSize) { return (CachingWebClientOptions) super.setMaxWaitQueueSize(maxWaitQueueSize); } @Override public CachingWebClientOptions setUseAlpn(boolean useAlpn) { return (CachingWebClientOptions) super.setUseAlpn(useAlpn); } @Override public CachingWebClientOptions setSslEngineOptions(SSLEngineOptions sslEngineOptions) { return (CachingWebClientOptions) super.setSslEngineOptions(sslEngineOptions); } @Override public CachingWebClientOptions setJdkSslEngineOptions(JdkSSLEngineOptions sslEngineOptions) { return (CachingWebClientOptions) super.setJdkSslEngineOptions(sslEngineOptions); } @Override public CachingWebClientOptions setOpenSslEngineOptions(OpenSSLEngineOptions sslEngineOptions) { return (CachingWebClientOptions) super.setOpenSslEngineOptions(sslEngineOptions); } @Override public CachingWebClientOptions setHttp2ClearTextUpgrade(boolean value) { return (CachingWebClientOptions) super.setHttp2ClearTextUpgrade(value); } @Override public CachingWebClientOptions setAlpnVersions(List<HttpVersion> alpnVersions) { return (CachingWebClientOptions) super.setAlpnVersions(alpnVersions); } @Override public CachingWebClientOptions setMetricsName(String metricsName) { return (CachingWebClientOptions) super.setMetricsName(metricsName); } @Override public CachingWebClientOptions setProxyOptions(ProxyOptions proxyOptions) { return (CachingWebClientOptions) super.setProxyOptions(proxyOptions); } @Override public CachingWebClientOptions setLocalAddress(String localAddress) { return (CachingWebClientOptions) super.setLocalAddress(localAddress); } @Override public CachingWebClientOptions setLogActivity(boolean logEnabled) { return (CachingWebClientOptions) super.setLogActivity(logEnabled); } @Override public CachingWebClientOptions addEnabledSecureTransportProtocol(String protocol) { return (CachingWebClientOptions) super.addEnabledSecureTransportProtocol(protocol); } @Override public CachingWebClientOptions removeEnabledSecureTransportProtocol(String protocol) { return (CachingWebClientOptions) super.removeEnabledSecureTransportProtocol(protocol); } @Override public CachingWebClientOptions setEnabledSecureTransportProtocols(Set<String> enabledSecureTransportProtocols) { return (CachingWebClientOptions) super.setEnabledSecureTransportProtocols(enabledSecureTransportProtocols); } 
@Override public CachingWebClientOptions setReusePort(boolean reusePort) { return (CachingWebClientOptions) super.setReusePort(reusePort); } @Override public CachingWebClientOptions setTcpFastOpen(boolean tcpFastOpen) { return (CachingWebClientOptions) super.setTcpFastOpen(tcpFastOpen); } @Override public CachingWebClientOptions setTcpCork(boolean tcpCork) { return (CachingWebClientOptions) super.setTcpCork(tcpCork); } @Override public CachingWebClientOptions setTcpQuickAck(boolean tcpQuickAck) { return (CachingWebClientOptions) super.setTcpQuickAck(tcpQuickAck); } @Override public CachingWebClientOptions setHttp2KeepAliveTimeout(int keepAliveTimeout) { return (CachingWebClientOptions) super.setHttp2KeepAliveTimeout(keepAliveTimeout); } @Override public CachingWebClientOptions setForceSni(boolean forceSni) { return (CachingWebClientOptions) super.setForceSni(forceSni); } @Override public CachingWebClientOptions setDecoderInitialBufferSize(int decoderInitialBufferSize) { return (CachingWebClientOptions) super.setDecoderInitialBufferSize(decoderInitialBufferSize); } @Override public CachingWebClientOptions setPoolCleanerPeriod(int poolCleanerPeriod) { return (CachingWebClientOptions) super.setPoolCleanerPeriod(poolCleanerPeriod); } @Override public CachingWebClientOptions setKeepAliveTimeout(int keepAliveTimeout) { return (CachingWebClientOptions) super.setKeepAliveTimeout(keepAliveTimeout); } @Override public CachingWebClientOptions setMaxWebSocketMessageSize(int maxWebsocketMessageSize) { return (CachingWebClientOptions) super.setMaxWebSocketMessageSize(maxWebsocketMessageSize); } @Override public CachingWebClientOptions setMaxInitialLineLength(int maxInitialLineLength) { return (CachingWebClientOptions) super.setMaxInitialLineLength(maxInitialLineLength); } @Override public CachingWebClientOptions setInitialSettings(Http2Settings settings) { return (CachingWebClientOptions) super.setInitialSettings(settings); } @Override public CachingWebClientOptions setSslHandshakeTimeout(long sslHandshakeTimeout) { return (CachingWebClientOptions) super.setSslHandshakeTimeout(sslHandshakeTimeout); } @Override public CachingWebClientOptions setSslHandshakeTimeoutUnit(TimeUnit sslHandshakeTimeoutUnit) { return (CachingWebClientOptions) super.setSslHandshakeTimeoutUnit(sslHandshakeTimeoutUnit); } @Override public CachingWebClientOptions setTryUsePerFrameWebSocketCompression(boolean offer) { return (CachingWebClientOptions) super.setTryUsePerFrameWebSocketCompression(offer); } @Override public CachingWebClientOptions setTryUsePerMessageWebSocketCompression(boolean offer) { return (CachingWebClientOptions) super.setTryUsePerMessageWebSocketCompression(offer); } @Override public CachingWebClientOptions setWebSocketCompressionLevel(int compressionLevel) { return (CachingWebClientOptions) super.setWebSocketCompressionLevel(compressionLevel); } @Override public CachingWebClientOptions setWebSocketCompressionAllowClientNoContext(boolean offer) { return (CachingWebClientOptions) super.setWebSocketCompressionAllowClientNoContext(offer); } @Override public CachingWebClientOptions setWebSocketCompressionRequestServerNoContext(boolean offer) { return (CachingWebClientOptions) super.setWebSocketCompressionRequestServerNoContext(offer); } private static Set<Integer> buildDefaultStatusCodes() { Set<Integer> codes = new HashSet<>(3); Collections.addAll(codes, 200, 301, 404); return codes; } private static Set<HttpMethod> buildDefaultMethods() { Set<HttpMethod> methods = new HashSet<>(1); methods.add(HttpMethod.GET); 
return methods; } }
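The two sets configured by CachingWebClientOptions above feed a simple cacheability check; a language-neutral Python sketch of that decision only (illustrative, not the Vert.x implementation, and ignoring header-level rules such as Cache-Control and Vary):

DEFAULT_CACHED_STATUS_CODES = {200, 301, 404}   # from buildDefaultStatusCodes()
DEFAULT_CACHED_METHODS = {"GET"}                # from buildDefaultMethods()

def is_cacheable(method, status_code):
    return (method in DEFAULT_CACHED_METHODS
            and status_code in DEFAULT_CACHED_STATUS_CODES)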
5,815
1,799
<gh_stars>1000+ // Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #pragma once // clang-format off #define GEMM_SDOT_INT8_KERNEL \ "ldp q0, q1, [%[a_ptr]], #32\n" /* load a00,a01 to q0, q1*/ \ "ldp q4, q5, [%[b_ptr]], #32\n" /* load b0, b1 to q4, q5*/ \ "eor v8.16b, v8.16b, v8.16b\n" /* out0 = 0 */ \ "eor v9.16b, v9.16b, v9.16b\n" /* out1 = 0 */ \ "eor v10.16b, v10.16b, v10.16b\n" /* out2 = 0 */ \ "eor v11.16b, v11.16b, v11.16b\n" /* out3 = 0 */ \ "eor v12.16b, v12.16b, v12.16b\n" /* out4 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #64]\n" /* preload b*/ \ "eor v13.16b, v13.16b, v13.16b\n" /* out5 = 0 */ \ "prfm pldl1keep, [%[a_ptr], #64]\n" /* preload a*/ \ "eor v14.16b, v14.16b, v14.16b\n" /* out6 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #128]\n" /* preload b*/ \ "eor v15.16b, v15.16b, v15.16b\n" /* out7 = 0 */ \ "prfm pldl1keep, [%[a_ptr], #128]\n" /* preload a*/ \ "eor v16.16b, v16.16b, v16.16b\n" /* out8 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #192]\n" /* preload b*/ \ "eor v17.16b, v17.16b, v17.16b\n" /* out9 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #256]\n" /* preload b*/ \ "eor v18.16b, v18.16b, v18.16b\n" /* out10 = 0 */ \ "prfm pldl1keep, [%[a_ptr], #192]\n" /* preload a*/ \ "eor v19.16b, v19.16b, v19.16b\n" /* out11 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #320]\n" /* preload b*/ \ "eor v20.16b, v20.16b, v20.16b\n" /* out12 = 0 */ \ "prfm pldl1keep, [%[a_ptr], #256]\n" /* preload a*/ \ "eor v21.16b, v21.16b, v21.16b\n" /* out13 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #384]\n" /* preload b*/ \ "eor v22.16b, v22.16b, v22.16b\n" /* out14 = 0 */ \ "eor v23.16b, v23.16b, v23.16b\n" /* out15 = 0 */ \ "eor v24.16b, v24.16b, v24.16b\n" /* out16 = 0 */ \ "eor v25.16b, v25.16b, v25.16b\n" /* out17 = 0 */ \ "eor v26.16b, v26.16b, v26.16b\n" /* out18 = 0 */ \ "eor v27.16b, v27.16b, v27.16b\n" /* out19 = 0 */ \ "eor v28.16b, v28.16b, v28.16b\n" /* out20 = 0 */ \ "eor v29.16b, v29.16b, v29.16b\n" /* out21 = 0 */ \ "eor v30.16b, v30.16b, v30.16b\n" /* out22 = 0 */ \ "eor v31.16b, v31.16b, v31.16b\n" /* out23 = 0 */ \ "cbz %w[k], 2f\n" /* check loop count > 0 */ \ /* main loop, unrool 0*/ \ "1:\n" /* main loop */ \ "sdot v8.4s , v4.16b, v0.4b[0]\n" /* out0 = b0 * a00[0], b0 = q4 */ \ "sdot v11.4s , v4.16b, v0.4b[1]\n" /* out1 = b0 * a00[1], b0 = q4 */ \ "ldp q6, q7, [%[b_ptr]], #32\n" /* load b2, b0 to q6, q7 */ \ "sdot v14.4s, v4.16b, v0.4b[2]\n" /* out2 = b0 * a00[2], b0 = q4 */ \ "sdot v17.4s, v4.16b, v0.4b[3]\n" /* out3 = b0 * a00[3], b0 = q4 */ \ "ldp q2, q3, [%[a_ptr]], #32\n" /* load a10, a11 to q3, q4 */ \ "sdot v20.4s, v4.16b, v1.4b[0]\n" /* out4 = b0 * a01[0], b0 = q4 */ \ "sdot v23.4s, v4.16b, v1.4b[1]\n" /* out5 = b0 * a01[1], b0 = q4 */ \ "sdot v26.4s, v4.16b, v1.4b[2]\n" /* out6 = b0 * a01[2], b0 = q4 */ \ "sdot v29.4s, v4.16b, v1.4b[3]\n" /* out7 = b0 * a01[3], b0 = q4 */ \ "sdot v9.4s, v5.16b, v0.4b[0]\n" /* out8 = b1 * a00[0], b1 = q5 */ \ "sdot v12.4s, v5.16b, v0.4b[1]\n" /* out9 = b1 * a00[1], b1 = q5 */ \ "sdot v15.4s, v5.16b, v0.4b[2]\n" /* 
out10 = b1 * a00[2], b1 = q5*/ \ "sdot v18.4s, v5.16b, v0.4b[3]\n" /* out11 = b1 * a00[3], b1 = q5*/ \ "sdot v21.4s, v5.16b, v1.4b[0]\n" /* out12 = b1 * a01[0], b1 = q5*/ \ "sdot v24.4s, v5.16b, v1.4b[1]\n" /* out13 = b1 * a01[1], b1 = q5*/ \ "sdot v27.4s, v5.16b, v1.4b[2]\n" /* out14 = b1 * a01[2], b1 = q5*/ \ "sdot v30.4s, v5.16b, v1.4b[3]\n" /* out15 = b1 * a01[3], b1 = q5*/ \ "ldp q4, q5, [%[b_ptr]], #32\n" /* load b1, b2 to q4, q5 */ \ "sdot v10.4s, v6.16b, v0.4b[0]\n" /* out16 = b2 * a00[0], b2 = q6*/ \ "sdot v13.4s, v6.16b, v0.4b[1]\n" /* out17 = b2 * a00[1], b2 = q6*/ \ "prfm pldl1keep, [%[b_ptr], #384]\n" \ "sdot v16.4s, v6.16b, v0.4b[2]\n" /* out18 = b2 * a00[2], b2 = q6*/ \ "sdot v19.4s, v6.16b, v0.4b[3]\n" /* out19 = b2 * a00[3], b2 = q6*/ \ "sdot v22.4s, v6.16b, v1.4b[0]\n" /* out20 = b2 * a00[0], b2 = q6*/ \ "sdot v25.4s, v6.16b, v1.4b[1]\n" /* out21 = b2 * a00[1], b2 = q6*/ \ "sdot v28.4s, v6.16b, v1.4b[2]\n" /* out22 = b2 * a00[2], b2 = q6*/ \ "sdot v31.4s, v6.16b, v1.4b[3]\n" /* out23 = b2 * a00[3], b2 = q6*/ \ "ldp q0, q1, [%[a_ptr]], #32\n" /* load a00, a01 to q0, q1 */ \ /* unrool 1 */ \ "sdot v8.4s , v7.16b, v2.4b[0]\n" /* out0 = b0 * a10[0], b0 = q7 */ \ "sdot v11.4s , v7.16b, v2.4b[1]\n"/* out1 = b0 * a10[1], b0 = q7 */ \ "sdot v14.4s, v7.16b, v2.4b[2]\n" /* out2 = b0 * a10[2], b0 = q7 */ \ "prfm pldl1keep, [%[a_ptr], #256]\n" \ "sdot v17.4s, v7.16b, v2.4b[3]\n" /* out3 = b0 * a10[3], b0 = q7 */ \ "sdot v20.4s, v7.16b, v3.4b[0]\n" /* out4 = b0 * a11[0], b0 = q7 */ \ "sdot v23.4s, v7.16b, v3.4b[1]\n" /* out5 = b0 * a11[1], b0 = q7 */ \ "sdot v26.4s, v7.16b, v3.4b[2]\n" /* out6 = b0 * a11[2], b0 = q7 */ \ "sdot v29.4s, v7.16b, v3.4b[3]\n" /* out7 = b0 * a11[3], b0 = q7 */ \ "ldp q6, q7, [%[b_ptr]], #32\n" /* load b0, b1 to q6, q7 */ \ "sdot v9.4s, v4.16b, v2.4b[0]\n" /* out8 = b0 * a10[0], b1 = q4 */ \ "sdot v12.4s, v4.16b, v2.4b[1]\n" /* out9 = b0 * a10[1], b1 = q4 */ \ "sdot v15.4s, v4.16b, v2.4b[2]\n" /* out10 = b1 * a10[2], b1 = q4*/ \ "sdot v18.4s, v4.16b, v2.4b[3]\n" /* out11 = b1 * a10[3], b1 = q4*/ \ "sdot v21.4s, v4.16b, v3.4b[0]\n" /* out12 = b1 * a10[0], b1 = q4*/ \ "sdot v24.4s, v4.16b, v3.4b[1]\n" /* out13 = b1 * a10[1], b1 = q4*/ \ "sdot v27.4s, v4.16b, v3.4b[2]\n" /* out14 = b1 * a10[2], b1 = q4*/ \ "sdot v30.4s, v4.16b, v3.4b[3]\n" /* out15 = b1 * a10[3], b1 = q4*/ \ "sdot v10.4s, v5.16b, v2.4b[0]\n" /* out16 = b2 * a10[0], b2 = q5*/ \ "sdot v13.4s, v5.16b, v2.4b[1]\n" /* out17 = b2 * a10[0], b2 = q5*/ \ "sdot v16.4s, v5.16b, v2.4b[2]\n" /* out18 = b2 * a10[0], b2 = q5*/ \ "sdot v19.4s, v5.16b, v2.4b[3]\n" /* out19 = b2 * a10[0], b2 = q5*/ \ "sdot v22.4s, v5.16b, v3.4b[0]\n" /* out20 = b2 * a10[0], b2 = q5*/ \ "sdot v25.4s, v5.16b, v3.4b[1]\n" /* out21 = b2 * a10[0], b2 = q5*/ \ "sdot v28.4s, v5.16b, v3.4b[2]\n" /* out22 = b2 * a10[0], b2 = q5*/ \ "sdot v31.4s, v5.16b, v3.4b[3]\n" /* out23 = b2 * a10[0], b2 = q5*/ \ "ldp q4, q5, [%[b_ptr]], #32\n" /* load b2, b0 to q4, q5 */ \ /* unrool 2*/ \ "sdot v8.4s , v6.16b, v0.4b[0]\n" /* out0 = b0 * a00[0], b0 = q6 */ \ "sdot v11.4s , v6.16b, v0.4b[1]\n" /* out1 = b0 * a00[1], b0 = q6 */ \ "ldp q2, q3, [%[a_ptr]], #32\n" /* load a10, a11 to q3, q4*/ \ "sdot v14.4s, v6.16b, v0.4b[2]\n" /* out2 = b0 * a00[2], b0 = q6*/ \ "sdot v17.4s, v6.16b, v0.4b[3]\n" /* out3 = b0 * a00[3], b0 = q6*/ \ "sdot v20.4s, v6.16b, v1.4b[0]\n" /* out4 = b0 * a01[0], b0 = q6*/ \ "sdot v23.4s, v6.16b, v1.4b[1]\n" /* out5 = b0 * a01[1], b0 = q6*/ \ "sdot v26.4s, v6.16b, v1.4b[2]\n" /* out6 = b0 * a01[2], b0 = q6*/ \ "sdot v29.4s, v6.16b, 
v1.4b[3]\n" /* out7 = b0 * a01[3], b0 = q6*/ \ "sdot v9.4s, v7.16b, v0.4b[0]\n" /* out8 = b1 * a00[0], b1 = q7*/ \ "sdot v12.4s, v7.16b, v0.4b[1]\n" /* out9 = b1 * a00[1], b1 = q7*/ \ "prfm pldl1keep, [%[b_ptr], #384]\n" \ "sdot v15.4s, v7.16b, v0.4b[2]\n" /* out10 = b1 * a00[2], b1 = q7*/ \ "sdot v18.4s, v7.16b, v0.4b[3]\n" /* out11 = b1 * a00[3], b1 = q7*/ \ "sdot v21.4s, v7.16b, v1.4b[0]\n" /* out12 = b1 * a01[0], b1 = q7*/ \ "sdot v24.4s, v7.16b, v1.4b[1]\n" /* out13 = b1 * a01[1], b1 = q7*/ \ "sdot v27.4s, v7.16b, v1.4b[2]\n" /* out14 = b1 * a01[2], b1 = q7*/ \ "sdot v30.4s, v7.16b, v1.4b[3]\n" /* out15 = b1 * a01[3], b1 = q7*/ \ "ldp q6, q7, [%[b_ptr]], #32\n" /* load b1, b2 to q6, q7*/ \ "sdot v10.4s, v4.16b, v0.4b[0]\n" /* out16 = b2 * a00[0], b2 = q4*/ \ "sdot v13.4s, v4.16b, v0.4b[1]\n" /* out17 = b2 * a00[1], b2 = q4*/ \ "sdot v16.4s, v4.16b, v0.4b[2]\n" /* out18 = b2 * a00[2], b2 = q4*/ \ "sdot v19.4s, v4.16b, v0.4b[3]\n" /* out19 = b2 * a00[3], b2 = q4*/ \ "sdot v22.4s, v4.16b, v1.4b[0]\n" /* out20 = b2 * a00[0], b2 = q4*/ \ "sdot v25.4s, v4.16b, v1.4b[1]\n" /* out21 = b2 * a00[1], b2 = q4*/ \ "sdot v28.4s, v4.16b, v1.4b[2]\n" /* out22 = b2 * a00[2], b2 = q4*/ \ "sdot v31.4s, v4.16b, v1.4b[3]\n" /* out23 = b2 * a00[3], b2 = q4*/ \ "ldp q0, q1, [%[a_ptr]], #32\n" /* load a00, a01 to q0, q1*/ \ /* unrool 3*/ \ "sdot v8.4s , v5.16b, v2.4b[0]\n" /* out0 = b0 * a10[0], b0 = q5*/ \ "sdot v11.4s , v5.16b, v2.4b[1]\n" /* out1 = b0 * a10[1], b0 = q5*/ \ "sdot v14.4s, v5.16b, v2.4b[2]\n" /* out2 = b0 * a10[2], b0 = q5*/ \ "sdot v17.4s, v5.16b, v2.4b[3]\n" /* out3 = b0 * a10[3], b0 = q5*/ \ "sdot v20.4s, v5.16b, v3.4b[0]\n" /* out4 = b0 * a11[0], b0 = q5*/ \ "sdot v23.4s, v5.16b, v3.4b[1]\n" /* out5 = b0 * a11[1], b0 = q5*/ \ "sdot v26.4s, v5.16b, v3.4b[2]\n" /* out6 = b0 * a11[2], b0 = q5*/ \ "sdot v29.4s, v5.16b, v3.4b[3]\n" /* out7 = b0 * a11[3], b0 = q5*/ \ "ldp q4, q5, [%[b_ptr]], #32\n" /* load b0, b1 to q4, q5*/ \ "sdot v9.4s, v6.16b, v2.4b[0]\n" /* out8 = b0 * a10[0], b1 = q6*/ \ "sdot v12.4s, v6.16b, v2.4b[1]\n" /* out9 = b0 * a10[1], b1 = q6*/ \ "prfm pldl1keep, [%[a_ptr], #256]\n" \ "sdot v15.4s, v6.16b, v2.4b[2]\n" /* out10 = b1 * a10[2], b1 = q6*/ \ "sdot v18.4s, v6.16b, v2.4b[3]\n" /* out11 = b1 * a10[3], b1 = q6*/ \ "sdot v21.4s, v6.16b, v3.4b[0]\n" /* out12 = b1 * a10[0], b1 = q6*/ \ "sdot v24.4s, v6.16b, v3.4b[1]\n" /* out13 = b1 * a10[1], b1 = q6*/ \ "sdot v27.4s, v6.16b, v3.4b[2]\n" /* out14 = b1 * a10[2], b1 = q6*/ \ "prfm pldl1keep, [%[b_ptr], #384]\n" \ "sdot v30.4s, v6.16b, v3.4b[3]\n" /* out15 = b1 * a10[3], b1 = q6*/ \ "sdot v10.4s, v7.16b, v2.4b[0]\n" /* out16 = b2 * a10[0], b2 = q7*/ \ "sdot v13.4s, v7.16b, v2.4b[1]\n" /* out17 = b2 * a10[0], b2 = q7*/ \ "sdot v16.4s, v7.16b, v2.4b[2]\n" /* out18 = b2 * a10[0], b2 = q7*/ \ "sdot v19.4s, v7.16b, v2.4b[3]\n" /* out19 = b2 * a10[0], b2 = q7*/ \ "sdot v22.4s, v7.16b, v3.4b[0]\n" /* out20 = b2 * a10[0], b2 = q7*/ \ "sdot v25.4s, v7.16b, v3.4b[1]\n" /* out21 = b2 * a10[0], b2 = q7*/ \ "subs %w[k], %w[k], #1\n" /* loop count - 1*/ \ "sdot v28.4s, v7.16b, v3.4b[2]\n" /* out22 = b2 * a10[0], b2 = q7*/ \ "sdot v31.4s, v7.16b, v3.4b[3]\n" /* out23 = b2 * a10[0], b2 = q7*/ \ "bne 1b\n" /* Target to use when K is 1 or 2 */ \ "2:\n" /* process tail*/ \ "subs %w[tail], %w[tail], #1\n" /* tail--*/ \ "beq 3f\n" /*jump to tail = 1*/ \ /* final unrool 0, unrool 0, tail > 1*/ \ "sdot v8.4s , v4.16b, v0.4b[0]\n" /* out0 = b0 * a00[0], b0 = q4*/ \ "sdot v11.4s , v4.16b, v0.4b[1]\n" /* out1 = b0 * a00[1], b0 = q4*/ \ "ldp q6, q7, 
[%[b_ptr]], #32\n" /* load b2, b0 to q6, q7*/ \ "sdot v14.4s, v4.16b, v0.4b[2]\n" /* out2 = b0 * a00[2], b0 = q4*/ \ "sdot v17.4s, v4.16b, v0.4b[3]\n" /* out3 = b0 * a00[3], b0 = q4*/ \ "ldp q2, q3, [%[a_ptr]], #32\n" /* load a10, a11 to q2, q3*/ \ "sdot v20.4s, v4.16b, v1.4b[0]\n" /* out4 = b0 * a01[0], b0 = q4*/ \ "sdot v23.4s, v4.16b, v1.4b[1]\n" /* out5 = b0 * a01[1], b0 = q4*/ \ "sdot v26.4s, v4.16b, v1.4b[2]\n" /* out6 = b0 * a01[2], b0 = q4*/ \ "sdot v29.4s, v4.16b, v1.4b[3]\n" /* out7 = b0 * a01[3], b0 = q4*/ \ "subs %w[tail], %w[tail], #1\n" /* tail--*/ \ "sdot v9.4s, v5.16b, v0.4b[0]\n" /* out8 = b1 * a00[0], b1 = q5*/ \ "sdot v12.4s, v5.16b, v0.4b[1]\n" /* out9 = b1 * a00[1], b1 = q5*/ \ "sdot v15.4s, v5.16b, v0.4b[2]\n" /* out10 = b1 * a00[2], b1 = q5*/ \ "sdot v18.4s, v5.16b, v0.4b[3]\n" /* out11 = b1 * a00[3], b1 = q5*/ \ "sdot v21.4s, v5.16b, v1.4b[0]\n" /* out12 = b1 * a01[0], b1 = q5*/ \ "sdot v24.4s, v5.16b, v1.4b[1]\n" /* out13 = b1 * a01[1], b1 = q5*/ \ "sdot v27.4s, v5.16b, v1.4b[2]\n" /* out14 = b1 * a01[2], b1 = q5*/ \ "sdot v30.4s, v5.16b, v1.4b[3]\n" /* out15 = b1 * a01[3], b1 = q5*/ \ "ldp q4, q5, [%[b_ptr]], #32\n" /* load b1, b2 to q4, q5*/ \ "sdot v10.4s, v6.16b, v0.4b[0]\n" /* out16 = b2 * a00[0], b2 = q6*/ \ "sdot v13.4s, v6.16b, v0.4b[1]\n" /* out17 = b2 * a00[1], b2 = q6*/ \ "sdot v16.4s, v6.16b, v0.4b[2]\n" /* out18 = b2 * a00[2], b2 = q6*/ \ "sdot v19.4s, v6.16b, v0.4b[3]\n" /* out19 = b2 * a00[3], b2 = q6*/ \ "sdot v22.4s, v6.16b, v1.4b[0]\n" /* out20 = b2 * a00[0], b2 = q6*/ \ "sdot v25.4s, v6.16b, v1.4b[1]\n" /* out21 = b2 * a00[1], b2 = q6*/ \ "sdot v28.4s, v6.16b, v1.4b[2]\n" /* out22 = b2 * a00[2], b2 = q6*/ \ "sdot v31.4s, v6.16b, v1.4b[3]\n" /* out23 = b2 * a00[3], b2 = q6*/ \ "beq 4f\n" /*jump to tail = 2*/ \ /* unrool 1, tail > 2*/ \ "ldp q0, q1, [%[a_ptr]], #32\n" /* load a00, a01 to q0, q1*/ \ "sdot v8.4s , v7.16b, v2.4b[0]\n" /* out0 = b0 * a10[0], b0 = q7*/ \ "sdot v11.4s , v7.16b, v2.4b[1]\n" /* out1 = b0 * a10[1], b0 = q7*/ \ "sdot v14.4s, v7.16b, v2.4b[2]\n" /* out2 = b0 * a10[2], b0 = q7*/ \ "sdot v17.4s, v7.16b, v2.4b[3]\n" /* out3 = b0 * a10[3], b0 = q7*/ \ "sdot v20.4s, v7.16b, v3.4b[0]\n" /* out4 = b0 * a11[0], b0 = q7*/ \ "sdot v23.4s, v7.16b, v3.4b[1]\n" /* out5 = b0 * a11[1], b0 = q7*/ \ "sdot v26.4s, v7.16b, v3.4b[2]\n" /* out6 = b0 * a11[2], b0 = q7*/ \ "sdot v29.4s, v7.16b, v3.4b[3]\n" /* out7 = b0 * a11[3], b0 = q7*/ \ "ldp q6, q7, [%[b_ptr]], #32\n" /* load b0, b1 to q6, q7*/ \ "sdot v9.4s, v4.16b, v2.4b[0]\n" /* out8 = b0 * a10[0], b1 = q4*/ \ "sdot v12.4s, v4.16b, v2.4b[1]\n" /* out9 = b0 * a10[1], b1 = q4*/ \ "sdot v15.4s, v4.16b, v2.4b[2]\n" /* out10 = b1 * a10[2], b1 = q4*/ \ "sdot v18.4s, v4.16b, v2.4b[3]\n" /* out11 = b1 * a10[3], b1 = q4*/ \ "sdot v21.4s, v4.16b, v3.4b[0]\n" /* out12 = b1 * a10[0], b1 = q4*/ \ "sdot v24.4s, v4.16b, v3.4b[1]\n" /* out13 = b1 * a10[1], b1 = q4*/ \ "sdot v27.4s, v4.16b, v3.4b[2]\n" /* out14 = b1 * a10[2], b1 = q4*/ \ "sdot v30.4s, v4.16b, v3.4b[3]\n" /* out15 = b1 * a10[3], b1 = q4*/ \ "subs %w[tail], %w[tail], #1\n" /* tail--*/ \ "sdot v10.4s, v5.16b, v2.4b[0]\n" /* out16 = b2 * a10[0], b2 = q5*/ \ "sdot v13.4s, v5.16b, v2.4b[1]\n" /* out17 = b2 * a10[0], b2 = q5*/ \ "sdot v16.4s, v5.16b, v2.4b[2]\n" /* out18 = b2 * a10[0], b2 = q5*/ \ "sdot v19.4s, v5.16b, v2.4b[3]\n" /* out19 = b2 * a10[0], b2 = q5*/ \ "sdot v22.4s, v5.16b, v3.4b[0]\n" /* out20 = b2 * a10[0], b2 = q5*/ \ "sdot v25.4s, v5.16b, v3.4b[1]\n" /* out21 = b2 * a10[0], b2 = q5*/ \ "sdot v28.4s, v5.16b, v3.4b[2]\n" /* out22 = 
b2 * a10[0], b2 = q5*/ \ "sdot v31.4s, v5.16b, v3.4b[3]\n" /* out23 = b2 * a10[0], b2 = q5*/ \ "beq 5f\n" /*jump to tail = 3*/ \ /* unrool 2, tail = 4*/ \ "ldp q4, q5, [%[b_ptr]], #32\n" /* load b2, b0 to q4, q5*/ \ "sdot v8.4s , v6.16b, v0.4b[0]\n" /* out0 = b0 * a00[0], b0 = q6*/ \ "sdot v11.4s , v6.16b, v0.4b[1]\n" /* out1 = b0 * a00[1], b0 = q6*/ \ "ldp q2, q3, [%[a_ptr]], #32\n" /* load a10, a11 to q3, q4*/ \ "sdot v14.4s, v6.16b, v0.4b[2]\n" /* out2 = b0 * a00[2], b0 = q6*/ \ "sdot v17.4s, v6.16b, v0.4b[3]\n" /* out3 = b0 * a00[3], b0 = q6*/ \ "sdot v20.4s, v6.16b, v1.4b[0]\n" /* out4 = b0 * a01[0], b0 = q6*/ \ "sdot v23.4s, v6.16b, v1.4b[1]\n" /* out5 = b0 * a01[1], b0 = q6*/ \ "sdot v26.4s, v6.16b, v1.4b[2]\n" /* out6 = b0 * a01[2], b0 = q6*/ \ "sdot v29.4s, v6.16b, v1.4b[3]\n" /* out7 = b0 * a01[3], b0 = q6*/ \ "sdot v9.4s, v7.16b, v0.4b[0]\n" /* out8 = b1 * a00[0], b1 = q7*/ \ "sdot v12.4s, v7.16b, v0.4b[1]\n" /* out9 = b1 * a00[1], b1 = q7*/ \ "sdot v15.4s, v7.16b, v0.4b[2]\n" /* out10 = b1 * a00[2], b1 = q7*/ \ "sdot v18.4s, v7.16b, v0.4b[3]\n" /* out11 = b1 * a00[3], b1 = q7*/ \ "sdot v21.4s, v7.16b, v1.4b[0]\n" /* out12 = b1 * a01[0], b1 = q7*/ \ "sdot v24.4s, v7.16b, v1.4b[1]\n" /* out13 = b1 * a01[1], b1 = q7*/ \ "sdot v27.4s, v7.16b, v1.4b[2]\n" /* out14 = b1 * a01[2], b1 = q7*/ \ "sdot v30.4s, v7.16b, v1.4b[3]\n" /* out15 = b1 * a01[3], b1 = q7*/ \ "ldp q6, q7, [%[b_ptr]], #32\n" /* load b1, b2 to q6, q7*/ \ "sdot v10.4s, v4.16b, v0.4b[0]\n" /* out16 = b2 * a00[0], b2 = q4*/ \ "sdot v13.4s, v4.16b, v0.4b[1]\n" /* out17 = b2 * a00[1], b2 = q4*/ \ "sdot v16.4s, v4.16b, v0.4b[2]\n" /* out18 = b2 * a00[2], b2 = q4*/ \ "sdot v19.4s, v4.16b, v0.4b[3]\n" /* out19 = b2 * a00[3], b2 = q4*/ \ "sdot v22.4s, v4.16b, v1.4b[0]\n" /* out20 = b2 * a00[0], b2 = q4*/ \ "sdot v25.4s, v4.16b, v1.4b[1]\n" /* out21 = b2 * a00[1], b2 = q4*/ \ "sdot v28.4s, v4.16b, v1.4b[2]\n" /* out22 = b2 * a00[2], b2 = q4*/ \ "sdot v31.4s, v4.16b, v1.4b[3]\n" /* out23 = b2 * a00[3], b2 = q4*/ \ /* unrool 3, tail = 4*/ \ "sdot v8.4s , v5.16b, v2.4b[0]\n" /* out0 = b0 * a10[0], b0 = q5*/ \ "sdot v11.4s , v5.16b, v2.4b[1]\n" /* out1 = b0 * a10[1], b0 = q5*/ \ "sdot v14.4s, v5.16b, v2.4b[2]\n" /* out2 = b0 * a10[2], b0 = q5*/ \ "sdot v17.4s, v5.16b, v2.4b[3]\n" /* out3 = b0 * a10[3], b0 = q5*/ \ "sdot v20.4s, v5.16b, v3.4b[0]\n" /* out4 = b0 * a11[0], b0 = q5*/ \ "sdot v23.4s, v5.16b, v3.4b[1]\n" /* out5 = b0 * a11[1], b0 = q5*/ \ "sdot v26.4s, v5.16b, v3.4b[2]\n" /* out6 = b0 * a11[2], b0 = q5*/ \ "sdot v29.4s, v5.16b, v3.4b[3]\n" /* out7 = b0 * a11[3], b0 = q5*/ \ "sdot v9.4s, v6.16b, v2.4b[0]\n" /* out8 = b0 * a10[0], b1 = q6*/ \ "sdot v12.4s, v6.16b, v2.4b[1]\n" /* out9 = b1 * a10[1], b1 = q6*/ \ "sdot v15.4s, v6.16b, v2.4b[2]\n" /* out10 = b1 * a10[2], b1 = q6*/ \ "sdot v18.4s, v6.16b, v2.4b[3]\n" /* out11 = b1 * a10[3], b1 = q6*/ \ "sdot v21.4s, v6.16b, v3.4b[0]\n" /* out12 = b1 * a10[0], b1 = q6*/ \ "sdot v24.4s, v6.16b, v3.4b[1]\n" /* out13 = b1 * a10[1], b1 = q6*/ \ "sdot v27.4s, v6.16b, v3.4b[2]\n" /* out14 = b1 * a10[2], b1 = q6*/ \ "sdot v30.4s, v6.16b, v3.4b[3]\n" /* out15 = b1 * a10[3], b1 = q6*/ \ "sdot v10.4s, v7.16b, v2.4b[0]\n" /* out16 = b2 * a10[0], b2 = q7*/ \ "sdot v13.4s, v7.16b, v2.4b[1]\n" /* out17 = b2 * a10[0], b2 = q7*/ \ "sdot v16.4s, v7.16b, v2.4b[2]\n" /* out18 = b2 * a10[0], b2 = q7*/ \ "sdot v19.4s, v7.16b, v2.4b[3]\n" /* out19 = b2 * a10[0], b2 = q7*/ \ "sdot v22.4s, v7.16b, v3.4b[0]\n" /* out20 = b2 * a10[0], b2 = q7*/ \ "sdot v25.4s, v7.16b, v3.4b[1]\n" /* out21 = b2 * a10[0], 
b2 = q7*/ \ "sdot v28.4s, v7.16b, v3.4b[2]\n" /* out22 = b2 * a10[0], b2 = q7*/ \ "sdot v31.4s, v7.16b, v3.4b[3]\n" /* out23 = b2 * a10[0], b2 = q7*/ \ "b 11f\n" /* tails==1 final tail*/ \ "3: \n" /* tail=1*/ \ "ldr q6, [%[b_ptr]], #16\n" /* load b2 to q6*/ \ "sdot v8.4s , v4.16b, v0.4b[0]\n" /* out0 = b0 * a10[0], b0 = q5*/ \ "sdot v11.4s , v4.16b, v0.4b[1]\n" /* out1 = b0 * a10[1], b0 = q5*/ \ "sdot v14.4s, v4.16b, v0.4b[2]\n" /* out2 = b0 * a10[2], b0 = q5*/ \ "sdot v17.4s, v4.16b, v0.4b[3]\n" /* out3 = b0 * a10[3], b0 = q5*/ \ "sdot v20.4s, v4.16b, v1.4b[0]\n" /* out4 = b0 * a11[0], b0 = q5*/ \ "sdot v23.4s, v4.16b, v1.4b[1]\n" /* out5 = b0 * a11[1], b0 = q5*/ \ "sdot v26.4s, v4.16b, v1.4b[2]\n" /* out6 = b0 * a11[2], b0 = q5*/ \ "sdot v29.4s, v4.16b, v1.4b[3]\n" /* out7 = b0 * a11[3], b0 = q5*/ \ "sdot v9.4s, v5.16b, v0.4b[0]\n" /* out8 = b0 * a10[0], b1 = q6*/ \ "sdot v12.4s, v5.16b, v0.4b[1]\n" /* out9 = b1 * a10[1], b1 = q6*/ \ "sdot v15.4s, v5.16b, v0.4b[2]\n" /* out10 = b1 * a10[2], b1 = q6*/ \ "sdot v18.4s, v5.16b, v0.4b[3]\n" /* out11 = b1 * a10[3], b1 = q6*/ \ "sdot v21.4s, v5.16b, v1.4b[0]\n" /* out12 = b1 * a10[0], b1 = q6*/ \ "sdot v24.4s, v5.16b, v1.4b[1]\n" /* out13 = b1 * a10[1], b1 = q6*/ \ "sdot v27.4s, v5.16b, v1.4b[2]\n" /* out14 = b1 * a10[2], b1 = q6*/ \ "sdot v30.4s, v5.16b, v1.4b[3]\n" /* out15 = b1 * a10[3], b1 = q6*/ \ "sdot v10.4s, v6.16b, v0.4b[0]\n" /* out16 = b2 * a10[0], b2 = q7*/ \ "sdot v13.4s, v6.16b, v0.4b[1]\n" /* out17 = b2 * a10[0], b2 = q7*/ \ "sdot v16.4s, v6.16b, v0.4b[2]\n" /* out18 = b2 * a10[0], b2 = q7*/ \ "sdot v19.4s, v6.16b, v0.4b[3]\n" /* out19 = b2 * a10[0], b2 = q7*/ \ "sdot v22.4s, v6.16b, v1.4b[0]\n" /* out20 = b2 * a10[0], b2 = q7*/ \ "sdot v25.4s, v6.16b, v1.4b[1]\n" /* out21 = b2 * a10[0], b2 = q7*/ \ "sdot v28.4s, v6.16b, v1.4b[2]\n" /* out22 = b2 * a10[0], b2 = q7*/ \ "sdot v31.4s, v6.16b, v1.4b[3]\n" /* out23 = b2 * a10[0], b2 = q7*/ \ "b 11f\n" /* tails==2 final tail*/ \ "4:\n" /* tail = 2*/ \ "sdot v8.4s , v7.16b, v2.4b[0]\n" /* out0 = b0 * a10[0], b0 = q5*/ \ "sdot v11.4s , v7.16b, v2.4b[1]\n" /* out1 = b0 * a10[1], b0 = q5*/ \ "sdot v14.4s, v7.16b, v2.4b[2]\n" /* out2 = b0 * a10[2], b0 = q5*/ \ "sdot v17.4s, v7.16b, v2.4b[3]\n" /* out3 = b0 * a10[3], b0 = q5*/ \ "sdot v20.4s, v7.16b, v3.4b[0]\n" /* out4 = b0 * a11[0], b0 = q5*/ \ "sdot v23.4s, v7.16b, v3.4b[1]\n" /* out5 = b0 * a11[1], b0 = q5*/ \ "sdot v26.4s, v7.16b, v3.4b[2]\n" /* out6 = b0 * a11[2], b0 = q5*/ \ "sdot v29.4s, v7.16b, v3.4b[3]\n" /* out7 = b0 * a11[3], b0 = q5*/ \ "sdot v9.4s, v4.16b, v2.4b[0]\n" /* out8 = b0 * a10[0], b1 = q6*/ \ "sdot v12.4s, v4.16b, v2.4b[1]\n" /* out9 = b1 * a10[1], b1 = q6*/ \ "sdot v15.4s, v4.16b, v2.4b[2]\n" /* out10 = b1 * a10[2], b1 = q6*/ \ "sdot v18.4s, v4.16b, v2.4b[3]\n" /* out11 = b1 * a10[3], b1 = q6*/ \ "sdot v21.4s, v4.16b, v3.4b[0]\n" /* out12 = b1 * a10[0], b1 = q6*/ \ "sdot v24.4s, v4.16b, v3.4b[1]\n" /* out13 = b1 * a10[1], b1 = q6*/ \ "sdot v27.4s, v4.16b, v3.4b[2]\n" /* out14 = b1 * a10[2], b1 = q6*/ \ "sdot v30.4s, v4.16b, v3.4b[3]\n" /* out15 = b1 * a10[3], b1 = q6*/ \ "sdot v10.4s, v5.16b, v2.4b[0]\n" /* out16 = b2 * a10[0], b2 = q7*/ \ "sdot v13.4s, v5.16b, v2.4b[1]\n" /* out17 = b2 * a10[0], b2 = q7*/ \ "sdot v16.4s, v5.16b, v2.4b[2]\n" /* out18 = b2 * a10[0], b2 = q7*/ \ "sdot v19.4s, v5.16b, v2.4b[3]\n" /* out19 = b2 * a10[0], b2 = q7*/ \ "sdot v22.4s, v5.16b, v3.4b[0]\n" /* out20 = b2 * a10[0], b2 = q7*/ \ "sdot v25.4s, v5.16b, v3.4b[1]\n" /* out21 = b2 * a10[0], b2 = q7*/ \ "sdot v28.4s, v5.16b, v3.4b[2]\n" 
/* out22 = b2 * a10[0], b2 = q7*/ \ "sdot v31.4s, v5.16b, v3.4b[3]\n" /* out23 = b2 * a10[0], b2 = q7*/ \ "b 11f\n" /* tails==3 final tail*/ \ "5:\n" /* tail = 3*/ \ "ldr q4, [%[b_ptr]], #16\n" /* load b2, b0 to q4*/ \ "sdot v8.4s , v6.16b, v0.4b[0]\n" /* out0 = b0 * a10[0], b0 = q5*/ \ "sdot v11.4s , v6.16b, v0.4b[1]\n" /* out1 = b0 * a10[1], b0 = q5*/ \ "sdot v14.4s, v6.16b, v0.4b[2]\n" /* out2 = b0 * a10[2], b0 = q5*/ \ "sdot v17.4s, v6.16b, v0.4b[3]\n" /* out3 = b0 * a10[3], b0 = q5*/ \ "sdot v20.4s, v6.16b, v1.4b[0]\n" /* out4 = b0 * a11[0], b0 = q5*/ \ "sdot v23.4s, v6.16b, v1.4b[1]\n" /* out5 = b0 * a11[1], b0 = q5*/ \ "sdot v26.4s, v6.16b, v1.4b[2]\n" /* out6 = b0 * a11[2], b0 = q5*/ \ "sdot v29.4s, v6.16b, v1.4b[3]\n" /* out7 = b0 * a11[3], b0 = q5*/ \ "sdot v9.4s, v7.16b, v0.4b[0]\n" /* out8 = b0 * a10[0], b1 = q6*/ \ "sdot v12.4s, v7.16b, v0.4b[1]\n" /* out9 = b1 * a10[1], b1 = q6*/ \ "sdot v15.4s, v7.16b, v0.4b[2]\n" /* out10 = b1 * a10[2], b1 = q6*/ \ "sdot v18.4s, v7.16b, v0.4b[3]\n" /* out11 = b1 * a10[3], b1 = q6*/ \ "sdot v21.4s, v7.16b, v1.4b[0]\n" /* out12 = b1 * a10[0], b1 = q6*/ \ "sdot v24.4s, v7.16b, v1.4b[1]\n" /* out13 = b1 * a10[1], b1 = q6*/ \ "sdot v27.4s, v7.16b, v1.4b[2]\n" /* out14 = b1 * a10[2], b1 = q6*/ \ "sdot v30.4s, v7.16b, v1.4b[3]\n" /* out15 = b1 * a10[3], b1 = q6*/ \ "sdot v10.4s, v4.16b, v0.4b[0]\n" /* out16 = b2 * a10[0], b2 = q7*/ \ "sdot v13.4s, v4.16b, v0.4b[1]\n" /* out17 = b2 * a10[0], b2 = q7*/ \ "sdot v16.4s, v4.16b, v0.4b[2]\n" /* out18 = b2 * a10[0], b2 = q7*/ \ "sdot v19.4s, v4.16b, v0.4b[3]\n" /* out19 = b2 * a10[0], b2 = q7*/ \ "sdot v22.4s, v4.16b, v1.4b[0]\n" /* out20 = b2 * a10[0], b2 = q7*/ \ "sdot v25.4s, v4.16b, v1.4b[1]\n" /* out21 = b2 * a10[0], b2 = q7*/ \ "sdot v28.4s, v4.16b, v1.4b[2]\n" /* out22 = b2 * a10[0], b2 = q7*/ \ "sdot v31.4s, v4.16b, v1.4b[3]\n" /* out23 = b2 * a10[0], b2 = q7*/ \ "11: \n" /* end */ #define GEMM_SDOT_INT8_KERNEL_8x8 \ "prfm pldl1keep, [%[a_ptr], #64]\n" /* preload a*/ \ "eor v8.16b, v8.16b, v8.16b \n" /* out0 = 0 */ \ "eor v11.16b, v11.16b, v11.16b\n" /* out0 = 0 */ \ "eor v14.16b, v14.16b, v14.16b\n" /* out0 = 0 */ \ "eor v17.16b, v17.16b, v17.16b\n" /* out0 = 0 */ \ "eor v20.16b, v20.16b, v20.16b\n" /* out0 = 0 */ \ "eor v23.16b, v23.16b, v23.16b\n" /* out0 = 0 */ \ "eor v26.16b, v26.16b, v26.16b\n" /* out0 = 0 */ \ "eor v29.16b, v29.16b, v29.16b\n" /* out0 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #64]\n" /* preload b*/ \ "eor v9.16b, v9.16b, v9.16b \n" /* out0 = 0 */ \ "eor v12.16b, v12.16b, v12.16b\n" /* out0 = 0 */ \ "eor v15.16b, v15.16b, v15.16b\n" /* out0 = 0 */ \ "eor v18.16b, v18.16b, v18.16b\n" /* out0 = 0 */ \ "eor v21.16b, v21.16b, v21.16b\n" /* out0 = 0 */ \ "eor v24.16b, v24.16b, v24.16b\n" /* out0 = 0 */ \ "eor v27.16b, v27.16b, v27.16b\n" /* out0 = 0 */ \ "eor v30.16b, v30.16b, v30.16b\n" /* out0 = 0 */ \ "1:\n" \ "ldp q0, q1, [%[a_ptr]], #32\n" \ "ldp q4, q5, [%[b_ptr]], #32\n" \ "sdot v8.4s, v4.16b, v0.4b[0]\n" \ "sdot v11.4s, v4.16b, v0.4b[1]\n" \ "sdot v14.4s, v4.16b, v0.4b[2]\n" \ "sdot v17.4s, v4.16b, v0.4b[3]\n" \ "prfm pldl1keep, [%[a_ptr], #64]\n" /* preload a*/ \ "sdot v20.4s, v4.16b, v1.4b[0]\n" \ "sdot v23.4s, v4.16b, v1.4b[1]\n" \ "sdot v26.4s, v4.16b, v1.4b[2]\n" \ "sdot v29.4s, v4.16b, v1.4b[3]\n" \ "prfm pldl1keep, [%[a_ptr], #128]\n" /* preload b*/ \ "prfm pldl1keep, [%[b_ptr], #64]\n" /* preload b*/ \ "sdot v9.4s, v5.16b, v0.4b[0]\n" \ "sdot v12.4s, v5.16b, v0.4b[1]\n" \ "sdot v15.4s, v5.16b, v0.4b[2]\n" \ "sdot v18.4s, v5.16b, v0.4b[3]\n" \ "prfm pldl1keep, 
[%[b_ptr], #128]\n" /* preload b*/ \ "sdot v21.4s, v5.16b, v1.4b[0]\n" \ "sdot v24.4s, v5.16b, v1.4b[1]\n" \ "sdot v27.4s, v5.16b, v1.4b[2]\n" \ "sdot v30.4s, v5.16b, v1.4b[3]\n" \ "subs %w[k], %w[k], #1\n" \ "bne 1b\n" #define GEMM_SDOT_INT8_KERNEL_8x4 \ "prfm pldl1keep, [%[a_ptr], #64]\n" /* preload a*/ \ "eor v8.16b, v8.16b, v8.16b \n" /* out0 = 0 */ \ "eor v11.16b, v11.16b, v11.16b\n" /* out0 = 0 */ \ "eor v14.16b, v14.16b, v14.16b\n" /* out0 = 0 */ \ "eor v17.16b, v17.16b, v17.16b\n" /* out0 = 0 */ \ "prfm pldl1keep, [%[b_ptr], #32]\n" /* preload b*/ \ "eor v20.16b, v20.16b, v20.16b\n" /* out0 = 0 */ \ "eor v23.16b, v23.16b, v23.16b\n" /* out0 = 0 */ \ "eor v26.16b, v26.16b, v26.16b\n" /* out0 = 0 */ \ "eor v29.16b, v29.16b, v29.16b\n" /* out0 = 0 */ \ "1:\n" \ "ldp q0, q1, [%[a_ptr]], #32\n" \ "ldr q4, [%[b_ptr]], #16\n" \ "sdot v8.4s, v4.16b, v0.4b[0]\n" \ "sdot v11.4s, v4.16b, v0.4b[1]\n" \ "prfm pldl1keep, [%[a_ptr], #64]\n" /* preload a*/ \ "sdot v14.4s, v4.16b, v0.4b[2]\n" \ "sdot v17.4s, v4.16b, v0.4b[3]\n" \ "prfm pldl1keep, [%[a_ptr], #64]\n" /* preload a*/ \ "sdot v20.4s, v4.16b, v1.4b[0]\n" \ "sdot v23.4s, v4.16b, v1.4b[1]\n" \ "prfm pldl1keep, [%[b_ptr], #32]\n" /* preload b*/ \ "sdot v26.4s, v4.16b, v1.4b[2]\n" \ "sdot v29.4s, v4.16b, v1.4b[3]\n" \ "subs %w[k], %w[k], #1\n" \ "bne 1b\n"
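Each sdot instruction in the macros above performs four independent 4-way int8 dot products, accumulating into the four int32 lanes of the destination register. A NumPy reference for a single "sdot vD.4s, vN.16b, vM.4b[i]" (a reading aid for the assembly, not a model of the full 8x12 register tiling):

import numpy as np

def sdot(acc4, b16, a4):
    """acc4: (4,) int32 lanes of vD; b16: (16,) int8 of vN; a4: the selected 4b[i] quad of vM."""
    quads = b16.astype(np.int32).reshape(4, 4)   # four int8 quads in one 128-bit register
    return acc4 + quads @ a4.astype(np.int32)    # lane j += dot(quads[j], a4)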
22,634
1,350
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.datalakestore.models;

import com.azure.core.annotation.Fluent;
import com.azure.core.annotation.JsonFlatten;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

/** The parameters used to create a new virtual network rule. */
@JsonFlatten
@Fluent
public class CreateOrUpdateVirtualNetworkRuleParameters {
    @JsonIgnore
    private final ClientLogger logger = new ClientLogger(CreateOrUpdateVirtualNetworkRuleParameters.class);

    /*
     * The resource identifier for the subnet.
     */
    @JsonProperty(value = "properties.subnetId", required = true)
    private String subnetId;

    /**
     * Get the subnetId property: The resource identifier for the subnet.
     *
     * @return the subnetId value.
     */
    public String subnetId() {
        return this.subnetId;
    }

    /**
     * Set the subnetId property: The resource identifier for the subnet.
     *
     * @param subnetId the subnetId value to set.
     * @return the CreateOrUpdateVirtualNetworkRuleParameters object itself.
     */
    public CreateOrUpdateVirtualNetworkRuleParameters withSubnetId(String subnetId) {
        this.subnetId = subnetId;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        if (subnetId() == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException(
                        "Missing required property subnetId in model CreateOrUpdateVirtualNetworkRuleParameters"));
        }
    }
}
678
348
<filename>docs/data/leg-t2/037/03704142.json<gh_stars>100-1000 {"nom":"Maillé","circ":"4ème circonscription","dpt":"Indre-et-Loire","inscrits":482,"abs":265,"votants":217,"blancs":20,"nuls":3,"exp":194,"res":[{"nuance":"REM","nom":"<NAME>","voix":109},{"nuance":"LR","nom":"<NAME>","voix":85}]}
122
310
<filename>gear/hardware/d/das-keyboard-4.json<gh_stars>100-1000
{
  "name": "Das Keyboard 4",
  "description": "A mechanical keyboard.",
  "url": "https://www.daskeyboard.com/daskeyboard-4-ultimate/"
}
79
416
package org.springframework.roo.classpath; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.springframework.roo.metadata.MetadataIdentificationUtils; import org.springframework.roo.model.JavaType; import org.springframework.roo.project.LogicalPath; import org.springframework.roo.project.Path; /** * Produces metadata identification strings that represent a {@link JavaType} * located in a particular {@link LogicalPath}. * <p> * The metadata identification strings separate the path name from the fully * qualified type name via the presence of a question mark character ("?"). A * question mark is used given it is reserved by {@link Path}. TODO these * methods are not specific to physical types; either rename this class, move * them somewhere more generic, and/or make them more specific, e.g. hardcode * the "metadata class" arguments to that of physical types. * * @author <NAME> * @since 1.0 */ public final class PhysicalTypeIdentifierNamingUtils { private static final String PATH_SUFFIX = "?"; /** * Creates a metadata ID from the given inputs * * @param metadataClass the fully-qualified name of the metadata class * (required) * @param projectType the fully-qualified name of the user project type to * which the metadata relates (required) * @param path the path to that type within the project (required) * @return a non-blank ID */ public static String createIdentifier(final String metadataClass, final JavaType projectType, final LogicalPath path) { Validate.notNull(projectType, "Java type required"); Validate.notNull(path, "Path required"); return MetadataIdentificationUtils.create(metadataClass, path.getName() + PATH_SUFFIX + projectType.getFullyQualifiedTypeName()); } /** * Parses the instance key from the given metadata ID. * * @param metadataClass the fully-qualified name of the metadata type * (required) * @param metadataId the ID of the metadata instance (must identify an * instance of the given metadata class) * @return a non-blank key, as per * {@link MetadataIdentificationUtils#getMetadataInstance(String)} */ private static String getInstanceKey(final String metadataClass, final String metadataId) { Validate.isTrue(isValid(metadataClass, metadataId), "Metadata id '%s' is not a valid %s identifier", metadataId, metadataClass); return MetadataIdentificationUtils.getMetadataInstance(metadataId); } public static JavaType getJavaType(final String metadataIdentificationString) { Validate.isTrue(metadataIdentificationString.contains("#"), "Metadata identification string '%s' does not appear to be a valid identifier", metadataIdentificationString); final String instance = MetadataIdentificationUtils.getMetadataInstance(metadataIdentificationString); final int index = instance.indexOf("?"); return new JavaType(instance.substring(index + 1), getModuleFromIdentificationString(metadataIdentificationString)); } /** * Returns the user project type with which the given metadata ID is * associated. 
* * @param metadataClass the fully-qualified name of the metadata type * (required) * @param metadataId the ID of the metadata instance (must identify an * instance of the given metadata class) * @return a non-<code>null</code> type */ public static JavaType getJavaType(final String metadataClass, final String metadataId) { final String instanceKey = getInstanceKey(metadataClass, metadataId); return new JavaType(instanceKey.substring(instanceKey.indexOf(PATH_SUFFIX) + 1), getModuleFromIdentificationString(metadataId)); } /** * Returns the name of the project module that contains the metadata item * with the given id. * * @param metadataId must be a valid metadata instance id * @return a non-<code>null</code> module name (blank means the root or only * module) * @since 1.2.0 */ public static String getModule(final String metadataId) { return getPath(metadataId).getModule(); } /** * Returns the {@link LogicalPath} of the metadata item with the given id. * * @param metadataId must be a valid metadata instance id * @return a non-<code>null</code> path */ public static LogicalPath getPath(final String metadataId) { Validate.isTrue(MetadataIdentificationUtils.isIdentifyingInstance(metadataId), "Metadata id '%s' does not appear to be a valid identifier", metadataId); final String instanceKey = MetadataIdentificationUtils.getMetadataInstance(metadataId); final int index = instanceKey.indexOf("?"); return LogicalPath.getInstance(instanceKey.substring(0, index)); } /** * Parses the user project path from the given metadata ID. * * @param providesType the fully-qualified name of the metadata type * (required) * @param metadataIdentificationString the ID of the metadata instance (must identify an * instance of the given metadata class) * @return a non-<code>null</code> path */ public static LogicalPath getPath(final String providesType, final String metadataIdentificationString) { Validate .isTrue( isValid(providesType, metadataIdentificationString), "Metadata identification string '%s' does not appear to be a valid physical type identifier", metadataIdentificationString); final String instance = MetadataIdentificationUtils.getMetadataInstance(metadataIdentificationString); final int index = instance.indexOf("?"); return LogicalPath.getInstance(instance.substring(0, index)); } /** * Indicates whether the given metadata id appears to identify an instance * of the given metadata class. * * @param metadataClass the fully-qualified name of the expected metadata * type (can be blank) * @param metadataId the ID to evaluate (can be blank) * @return true only if the metadata ID appears to be valid */ public static boolean isValid(final String metadataClass, final String metadataId) { return MetadataIdentificationUtils.isIdentifyingInstance(metadataId) && MetadataIdentificationUtils.getMetadataClass(metadataId).equals(metadataClass) && MetadataIdentificationUtils.getMetadataInstance(metadataId).contains(PATH_SUFFIX); } /** * Constructor is private to prevent instantiation. * * @since 1.2.0 */ private PhysicalTypeIdentifierNamingUtils() {} /** * Extracts the module name from a metadata identification string. * * @param metadataId the metadata identification string * @return the module name, or an empty string (never null) if metadataId is not a valid metadata identification string */ public static final String getModuleFromIdentificationString(String metadataId) { return StringUtils.defaultString(StringUtils.substringBetween(metadataId, "#", ":"), ""); } }
2,169
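A short round-trip sketch for the utility above. The JavaType and LogicalPath factory signatures are assumed from the calls the file itself makes; the metadata class name and path literal are illustrative only.

    // Hypothetical metadata class and path; signatures taken from usage within the file.
    LogicalPath path = LogicalPath.getInstance("SRC_MAIN_JAVA");
    String metadataId = PhysicalTypeIdentifierNamingUtils.createIdentifier(
        "org.example.MyMetadata", new JavaType("com.example.Customer", ""), path);
    JavaType type = PhysicalTypeIdentifierNamingUtils.getJavaType("org.example.MyMetadata", metadataId);
    LogicalPath parsed = PhysicalTypeIdentifierNamingUtils.getPath(metadataId); // yields the same path back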
829
<filename>library/src/main/java/uk/co/alt236/bluetoothlelib/device/beacon/BeaconDevice.java package uk.co.alt236.bluetoothlelib.device.beacon; /** * A Bluetooth LE device that advertises itself as a beacon. */ public interface BeaconDevice { /** * @return the type of beacon this device advertises itself as */ BeaconType getBeaconType(); }
82
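A minimal implementation sketch of the interface above; the BeaconType enum constant is an assumption, since the enum itself is not shown here.

    public final class FixedTypeBeacon implements BeaconDevice {
        @Override
        public BeaconType getBeaconType() {
            return BeaconType.IBEACON; // assumed constant; substitute whatever BeaconType actually defines
        }
    }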
6,837
#!/usr/bin/python # -*- coding: utf-8 -*- # thumbor imaging service # https://github.com/thumbor/thumbor/wiki # Licensed under the MIT license: # http://www.opensource.org/licenses/mit-license # Copyright (c) 2011 globo.com <EMAIL> from os.path import exists from subprocess import PIPE, Popen from thumbor.optimizers import BaseOptimizer from thumbor.utils import logger class Optimizer(BaseOptimizer): def should_run(self, image_extension, image_buffer): if image_extension in [".jpg", ".jpeg"]: if self.context.config.JPEGTRAN_PATH is None or not exists( self.context.config.JPEGTRAN_PATH ): logger.warning( "jpegtran optimizer enabled but binary JPEGTRAN_PATH does not exist" ) return False return True return False def run_optimizer(self, image_extension, buffer): if not self.should_run(image_extension, buffer): return buffer if "strip_icc" in self.context.request.filters: copy_chunks = "comments" else: # have to copy everything to preserve icc profile copy_chunks = "all" command = [ self.context.config.JPEGTRAN_PATH, "-copy", copy_chunks, "-optimize", ] if self.context.config.PROGRESSIVE_JPEG: command += ["-progressive"] if self.context.config.JPEGTRAN_SCANS_FILE: if exists(self.context.config.JPEGTRAN_SCANS_FILE): command += ["-scans", self.context.config.JPEGTRAN_SCANS_FILE] else: logger.warning("jpegtran optimizer scans file does not exist") jpg_process = Popen(command, stdin=PIPE, stdout=PIPE, stderr=PIPE) output_stdout, output_stderr = jpg_process.communicate(buffer) if jpg_process.returncode != 0: logger.warning( "jpegtran finished with non-zero return code (%d): %s", jpg_process.returncode, output_stderr, ) return buffer return output_stdout
1,023
4,268
#!/usr/bin/env python # # Compatibility stub which now executes JS-based tooling. # # Should be Python2 and Python3 compatible. import os import sys import time import subprocess import optparse import yaml import tempfile def detect_nodejs(): try: cmd = [ 'nodejs', '-e', 'console.log("test")' ] res = subprocess.check_output(cmd) if res[:4] == 'test'.encode('utf-8'): return 'nodejs' except: pass try: cmd = [ 'node', '-e', 'console.log("test")' ] res = subprocess.check_output(cmd) if res[:4] == 'test'.encode('utf-8'): return 'node' except: pass return None def main(): sys.stderr.write('\n') sys.stderr.write('****************************************************************************\n') sys.stderr.write('*** Duktape python tooling is obsolete, migrate to JS-based tooling! ***\n') sys.stderr.write('*** This tool now internally invokes the JS-based tooling. ***\n') sys.stderr.write('*** Minimum Node.js version is 14.x. ***\n') sys.stderr.write('****************************************************************************\n') sys.stderr.write('\n') time.sleep(2) parser = optparse.OptionParser( usage='Usage: %prog [options]', description='Compatibility stub for JS-based tooling' ) # Forced options from multiple sources are gathered into a shared list # so that the override order remains the same as on the command line. force_options_yaml = [] def add_force_option_yaml(option, opt, value, parser): force_options_yaml.append(value) def add_force_option_file(option, opt, value, parser): with open(value, 'rb') as f: force_options_yaml.append(f.read()) def add_force_option_define(option, opt, value, parser): defname, eq, defval = value.partition('=') if not eq: doc = { defname: True } else: defname, paren, defargs = defname.partition('(') if not paren: doc = { defname: defval } else: doc = { defname: { 'verbatim': '#define %s%s%s %s' % (defname, paren, defargs, defval) } } force_options_yaml.append(yaml.safe_dump(doc)) def add_force_option_undefine(option, opt, value, parser): tmp = value.split('=') if len(tmp) == 1: doc = { tmp[0]: False } else: raise Exception('invalid option value: %r' % value) force_options_yaml.append(yaml.safe_dump(doc)) fixup_header_lines = [] def add_fixup_header_line(option, opt, value, parser): fixup_header_lines.append(value) def add_fixup_header_file(option, opt, value, parser): with open(value, 'rb') as f: for line in f: if line[-1] == '\n': line = line[:-1] fixup_header_lines.append(line) # Log level options. parser.add_option('--quiet', dest='quiet', action='store_true', default=False, help='Suppress info messages (show warnings)') parser.add_option('--verbose', dest='verbose', action='store_true', default=False, help='Show verbose debug messages') # Options for configure.py tool itself. 
parser.add_option('--source-directory', dest='source_directory', default=None, help='Directory with raw input sources (defaulted based on configure.py script path)') parser.add_option('--output-directory', dest='output_directory', default=None, help='Directory for output files (created automatically if it doesn\'t exist, reused if safe)') parser.add_option('--license-file', dest='license_file', default=None, help='Source for LICENSE.txt (defaulted based on configure.py script path)') parser.add_option('--authors-file', dest='authors_file', default=None, help='Source for AUTHORS.rst (defaulted based on configure.py script path)') parser.add_option('--git-commit', dest='git_commit', default=None, help='Force git commit hash') parser.add_option('--git-describe', dest='git_describe', default=None, help='Force git describe') parser.add_option('--git-branch', dest='git_branch', default=None, help='Force git branch name') parser.add_option('--duk-dist-meta', dest='duk_dist_meta', default=None, help='duk_dist_meta.json to read git commit etc info from') # Options for combining sources. parser.add_option('--separate-sources', dest='separate_sources', action='store_true', default=False, help='Output separate sources instead of combined source (default is combined)') parser.add_option('--line-directives', dest='line_directives', action='store_true', default=False, help='Output #line directives in combined source (default is false)') # Options forwarded to genbuiltins.py. parser.add_option('--rom-support', dest='rom_support', action='store_true', help='Add support for ROM strings/objects (increases duktape.c size considerably)') parser.add_option('--rom-auto-lightfunc', dest='rom_auto_lightfunc', action='store_true', default=False, help='Convert ROM built-in function properties into lightfuncs automatically whenever possible') parser.add_option('--user-builtin-metadata', dest='obsolete_builtin_metadata', default=None, help=optparse.SUPPRESS_HELP) parser.add_option('--builtin-file', dest='builtin_files', metavar='FILENAME', action='append', default=[], help='Built-in string/object YAML metadata to be applied over default built-ins (multiple files may be given, applied in sequence)') # Options for Unicode. parser.add_option('--unicode-data', dest='unicode_data', default=None, help='Provide custom UnicodeData.txt') parser.add_option('--special-casing', dest='special_casing', default=None, help='Provide custom SpecialCasing.txt') # Options for genconfig.py. 
parser.add_option('--config-metadata', dest='config_metadata', default=None, help='metadata directory (defaulted based on configure.py script path)') parser.add_option('--platform', dest='platform', default=None, help='platform (default is autodetect)') parser.add_option('--compiler', dest='compiler', default=None, help='compiler (default is autodetect)') parser.add_option('--architecture', dest='architecture', default=None, help='architecture (default is autodetect)') parser.add_option('--c99-types-only', dest='c99_types_only', action='store_true', default=False, help='assume C99 types, no legacy type detection') parser.add_option('--dll', dest='dll', action='store_true', default=False, help='dll build of Duktape, affects symbol visibility macros especially on Windows') parser.add_option('--support-feature-options', dest='support_feature_options', action='store_true', default=False, help=optparse.SUPPRESS_HELP) parser.add_option('--emit-legacy-feature-check', dest='emit_legacy_feature_check', action='store_true', default=False, help='emit preprocessor checks to reject legacy feature options (DUK_OPT_xxx)') parser.add_option('--emit-config-sanity-check', dest='emit_config_sanity_check', action='store_true', default=False, help='emit preprocessor checks for config option consistency (DUK_USE_xxx)') parser.add_option('--omit-removed-config-options', dest='omit_removed_config_options', action='store_true', default=False, help='omit removed config options from generated headers') parser.add_option('--omit-deprecated-config-options', dest='omit_deprecated_config_options', action='store_true', default=False, help='omit deprecated config options from generated headers') parser.add_option('--omit-unused-config-options', dest='omit_unused_config_options', action='store_true', default=False, help='omit unused config options from generated headers') parser.add_option('--add-active-defines-macro', dest='add_active_defines_macro', action='store_true', default=False, help='add DUK_ACTIVE_DEFINES macro, for development only') parser.add_option('--define', type='string', metavar='OPTION', dest='force_options_yaml', action='callback', callback=add_force_option_define, default=force_options_yaml, help='force #define option using a C compiler like syntax, e.g. "--define DUK_USE_DEEP_C_STACK" or "--define DUK_USE_TRACEBACK_DEPTH=10"') parser.add_option('-D', type='string', metavar='OPTION', dest='force_options_yaml', action='callback', callback=add_force_option_define, default=force_options_yaml, help='synonym for --define, e.g. "-DDUK_USE_DEEP_C_STACK" or "-DDUK_USE_TRACEBACK_DEPTH=10"') parser.add_option('--undefine', type='string', metavar='OPTION', dest='force_options_yaml', action='callback', callback=add_force_option_undefine, default=force_options_yaml, help='force #undef option using a C compiler like syntax, e.g. "--undefine DUK_USE_DEEP_C_STACK"') parser.add_option('-U', type='string', metavar='OPTION', dest='force_options_yaml', action='callback', callback=add_force_option_undefine, default=force_options_yaml, help='synonym for --undefine, e.g. "-UDUK_USE_DEEP_C_STACK"') parser.add_option('--option-yaml', type='string', metavar='YAML', dest='force_options_yaml', action='callback', callback=add_force_option_yaml, default=force_options_yaml, help='force option(s) using inline YAML (e.g. 
--option-yaml "DUK_USE_DEEP_C_STACK: true")') parser.add_option('--option-file', type='string', metavar='FILENAME', dest='force_options_yaml', action='callback', callback=add_force_option_file, default=force_options_yaml, help='YAML file(s) providing config option overrides') parser.add_option('--fixup-file', type='string', metavar='FILENAME', dest='fixup_header_lines', action='callback', callback=add_fixup_header_file, default=fixup_header_lines, help='C header snippet file(s) to be appended to generated header, useful for manual option fixups') parser.add_option('--fixup-line', type='string', metavar='LINE', dest='fixup_header_lines', action='callback', callback=add_fixup_header_line, default=fixup_header_lines, help='C header fixup line to be appended to generated header (e.g. --fixup-line "#define DUK_USE_FASTINT")') parser.add_option('--sanity-warning', dest='sanity_strict', action='store_false', default=True, help='emit a warning instead of #error for option sanity check issues') parser.add_option('--use-cpp-warning', dest='use_cpp_warning', action='store_true', default=False, help='emit a (non-portable) #warning when appropriate') parser.add_option('--nodejs-command', dest='nodejs_command', default=None, help='Force Node.js command name') entry_cwd = os.getcwd() script_path = sys.path[0] # http://stackoverflow.com/questions/4934806/how-can-i-find-scripts-directory-with-python (opts, args) = parser.parse_args() if len(args) > 0: raise Exception('unexpected arguments: %r' % args) if opts.obsolete_builtin_metadata is not None: raise Exception('--user-builtin-metadata has been removed, use --builtin-file instead') if opts.nodejs_command is None: nodejs_command = detect_nodejs() else: nodejs_command = opts.nodejs_command if nodejs_command is None: raise Exception('failed to detect Node.js, override with --nodejs-command') duktool_path = None for fn in [ os.path.join(script_path, 'duktool.js'), os.path.join(script_path, '..', 'src-tools', 'index.js'), os.path.join(script_path, '..', 'src-tools', 'duktool.js') ]: if os.path.isfile(fn): duktool_path = fn break if duktool_path is None: raise Exception('could not find duktool.js or src-tools/index.js') cmd = [ nodejs_command, duktool_path, 'configure' ] if opts.output_directory is not None: cmd += [ '--output-directory', opts.output_directory ] if opts.source_directory is not None: cmd += [ '--source-directory', opts.source_directory ] else: src_dir = os.path.join(script_path, '..', 'src-input') if os.path.isdir(src_dir) and os.path.isfile(os.path.join(src_dir, 'duktape.h.in')): cmd += [ '--source-directory', src_dir ] if opts.config_metadata is not None: cmd += [ '--config-directory', opts.config_metadata ] forced = {} for i in force_options_yaml: doc = yaml.safe_load(i) for k,v in doc.items(): forced[k] = v opts_fd, opts_fn = tempfile.mkstemp() with os.fdopen(opts_fd, 'wb') as f: f.write(yaml.safe_dump(forced).encode('utf-8')) cmd += [ '--option-file', opts_fn ] fixup_fd, fixup_fn = tempfile.mkstemp() with os.fdopen(fixup_fd, 'wb') as f: f.write(('\n'.join(fixup_header_lines) + '\n').encode('utf-8')) cmd += [ '--fixup-file', fixup_fn ] for i in opts.builtin_files: cmd += [ '--builtin-file', i ] if opts.line_directives: cmd += [ '--line-directives' ] if opts.platform is not None: cmd += [ '--platform', opts.platform ] if opts.compiler is not None: cmd += [ '--compiler', opts.compiler ] if opts.architecture is not None: cmd += [ '--architecture', opts.architecture ] if opts.dll: cmd += [ '--dll' ] if opts.c99_types_only: cmd += [ 
'--c99-types-only' ] sys.stderr.write('*** Executing JS-based tooling with command: ' + repr(cmd) + '\n\n') subprocess.check_call(cmd) if __name__ == '__main__': main()
5,010
1,155
//----------------------------------------------------------------------------- // boost-libs variant/test/variant_comparison_test.cpp source file // See http://www.boost.org for updates, documentation, and revision history. //----------------------------------------------------------------------------- // // Copyright (c) 2003 <NAME>, <NAME> // Copyright (c) 2014 <NAME> // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #include "boost/variant/variant.hpp" #include "boost/test/minimal.hpp" #include <iostream> #include <sstream> #include <string> #include <algorithm> #include <vector> #include "boost/detail/workaround.hpp" #if BOOST_WORKAROUND(__BORLANDC__, BOOST_TESTED_AT(0x0551)) # pragma warn -lvc // temporary used for parameter warning #endif template <typename T> void assert_equality_comparable( const T& x, const T& y, const T& z ) { // identity check BOOST_CHECK( !(&x == &y) || (x == y) ); BOOST_CHECK( !(&x == &z) || (x == z) ); BOOST_CHECK( !(&y == &z) || (y == z) ); BOOST_CHECK( !(&x == &y) || !(x != y) ); BOOST_CHECK( !(&x == &z) || !(x != z) ); BOOST_CHECK( !(&y == &z) || !(y != z) ); // reflexivity check BOOST_CHECK( (x == x) ); BOOST_CHECK( (y == y) ); BOOST_CHECK( (z == z) ); // symmetry check BOOST_CHECK( !(x == y) || (y == x) ); BOOST_CHECK( !(y == x) || (x == y) ); BOOST_CHECK( (x != y) || (y == x) ); BOOST_CHECK( (y != x) || (x == y) ); BOOST_CHECK( !(x == z) || (z == x) ); BOOST_CHECK( !(z == x) || (x == z) ); BOOST_CHECK( (x != z) || (z == x) ); BOOST_CHECK( (z != x) || (x == z) ); BOOST_CHECK( !(y == z) || (z == y) ); BOOST_CHECK( !(z == y) || (y == z) ); BOOST_CHECK( (y != z) || (z == y) ); BOOST_CHECK( (z != y) || (y == z) ); // transitivity check BOOST_CHECK( !(x == y && y == z) || (x == z) ); BOOST_CHECK( !(x == z && z == y) || (x == y) ); BOOST_CHECK( !(y == z && z == x) || (y == x) ); } template <typename T> void assert_less_than_comparable( const T& x, const T& y, const T& z ) { // irreflexivity check BOOST_CHECK( !(x < x) ); BOOST_CHECK( !(y < y) ); BOOST_CHECK( !(z < z) ); BOOST_CHECK( !(x > x) ); BOOST_CHECK( !(y > y) ); BOOST_CHECK( !(z > z) ); BOOST_CHECK( (x <= x) ); BOOST_CHECK( (y <= y) ); BOOST_CHECK( (z <= z) ); BOOST_CHECK( (x >= x) ); BOOST_CHECK( (y >= y) ); BOOST_CHECK( (z >= z) ); // transitivity check BOOST_CHECK( (x < y) ); BOOST_CHECK( (y < z) ); BOOST_CHECK( (x < z) ); BOOST_CHECK( (x <= y) ); BOOST_CHECK( (y <= z) ); BOOST_CHECK( (x <= z) ); BOOST_CHECK( (z > y) ); BOOST_CHECK( (y > x) ); BOOST_CHECK( (z > x) ); BOOST_CHECK( (z >= y) ); BOOST_CHECK( (y >= x) ); BOOST_CHECK( (z >= x) ); // antisymmetry check BOOST_CHECK( !(y < x) ); BOOST_CHECK( !(z < y) ); BOOST_CHECK( !(z < x) ); } template <typename It> std::string print_range(It first, It last) { std::ostringstream result; while (first != last) { result << *first << ' '; ++first; } return result.str(); } int test_main(int , char* []) { typedef boost::variant<int, std::string> var_t; var_t var1(3); var_t var2(5); var_t var3("goodbye"); var_t var4("hello"); assert_equality_comparable(var1, var1, var1); assert_equality_comparable(var_t(var1), var_t(var1), var_t(var1)); assert_equality_comparable(var1, var2, var3); assert_less_than_comparable(var1, var2, var3); assert_less_than_comparable(var2, var3, var4); std::vector<var_t> vec; vec.push_back( var3 ); vec.push_back( var2 ); vec.push_back( var4 ); vec.push_back( var1 ); std::sort(vec.begin(), vec.end()); std::string sort_result( 
print_range(vec.begin(), vec.end()) ); BOOST_CHECK( sort_result == "3 5 goodbye hello " ); // https://svn.boost.org/trac/boost/ticket/11751 int a = 0, b = 0; boost::variant< int& > v (a), u (b); BOOST_CHECK(v == u); return boost::exit_success; }
1,913
2,338
<reponame>mkinsner/llvm //===- Builder.cpp - Builder definitions ----------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// #include "mlir/TableGen/Builder.h" #include "llvm/TableGen/Error.h" #include "llvm/TableGen/Record.h" using namespace mlir; using namespace mlir::tblgen; //===----------------------------------------------------------------------===// // Builder::Parameter //===----------------------------------------------------------------------===// /// Return a string containing the C++ type of this parameter. StringRef Builder::Parameter::getCppType() const { if (const auto *stringInit = dyn_cast<llvm::StringInit>(def)) return stringInit->getValue(); const llvm::Record *record = cast<llvm::DefInit>(def)->getDef(); return record->getValueAsString("type"); } /// Return an optional string containing the default value to use for this /// parameter. Optional<StringRef> Builder::Parameter::getDefaultValue() const { if (isa<llvm::StringInit>(def)) return llvm::None; const llvm::Record *record = cast<llvm::DefInit>(def)->getDef(); Optional<StringRef> value = record->getValueAsOptionalString("defaultValue"); return value && !value->empty() ? value : llvm::None; } //===----------------------------------------------------------------------===// // Builder //===----------------------------------------------------------------------===// Builder::Builder(const llvm::Record *record, ArrayRef<llvm::SMLoc> loc) : def(record) { // Initialize the parameters of the builder. const llvm::DagInit *dag = def->getValueAsDag("dagParams"); auto *defInit = dyn_cast<llvm::DefInit>(dag->getOperator()); if (!defInit || !defInit->getDef()->getName().equals("ins")) PrintFatalError(def->getLoc(), "expected 'ins' in builders"); bool seenDefaultValue = false; for (unsigned i = 0, e = dag->getNumArgs(); i < e; ++i) { const llvm::StringInit *paramName = dag->getArgName(i); const llvm::Init *paramValue = dag->getArg(i); Parameter param(paramName ? paramName->getValue() : Optional<StringRef>(), paramValue); // Similarly to C++, once an argument with a default value is detected, the // following arguments must have default values as well. if (param.getDefaultValue()) { seenDefaultValue = true; } else if (seenDefaultValue) { PrintFatalError(loc, "expected an argument with default value after other " "arguments with default values"); } parameters.emplace_back(param); } } /// Return an optional string containing the body of the builder. Optional<StringRef> Builder::getBody() const { Optional<StringRef> body = def->getValueAsOptionalString("body"); return body && !body->empty() ? body : llvm::None; }
922
691
/* pbrt source code is Copyright(c) 1998-2016 <NAME>, <NAME>, and <NAME>. This file is part of pbrt. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #pragma once class Metal : public SimpleMaterial<MicrofacetReflection<TrowbridgeReitzDistribution<>, FresnelConductor>> { public: __device__ void ComputeScatteringFunctions( const CoreMaterial& params, const float2 uv, const bool allowMultipleLobes, const TransportMode mode ) override { // TODO: Bumpmapping const auto k = SampleCoreTexture( params.absorption, uv ); const auto eta = SampleCoreTexture( params.eta_rgb, uv ); const FresnelConductor frMf( make_float3( 1.f ), eta, k ); const auto urough = SampleCoreTexture( params.urough, uv ); const auto vrough = SampleCoreTexture( params.vrough, uv ); // NOTE: PBRT Doesn't make the optimization here to use a SpecularReflection like Glass does, // when u- and vrough are zero. This means a black output when the value is zero, and banding when near zero // (Unsure if this is a mathematic PBRT issue since the documentation says near-zero values should represent // a mirror). // While the Mirror material would be a good alternative, it doesn't have an ETA nor K value. This material // on the other hand doesn't allow specifying the reflection color. const TrowbridgeReitzDistribution<> distrib( urough, vrough ); bxdfs.emplace_back<MicrofacetReflection<TrowbridgeReitzDistribution<>, FresnelConductor>>( make_float3( 1.f ), distrib, frMf ); } };
916
1,264
<gh_stars>1000+ /* * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data.mongodb.core.messaging; import org.bson.Document; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.util.ClassUtils; /** * @author <NAME> * @author <NAME> * @since 2.1 */ class LazyMappingDelegatingMessage<S, T> implements Message<S, T> { private final Message<S, ?> delegate; private final Class<T> targetType; private final MongoConverter converter; LazyMappingDelegatingMessage(Message<S, ?> delegate, Class<T> targetType, MongoConverter converter) { this.delegate = delegate; this.targetType = targetType; this.converter = converter; } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.messaging.Message#getRaw() */ @Override public S getRaw() { return delegate.getRaw(); } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.messaging.Message#getBody() */ @Override public T getBody() { if (delegate.getBody() == null || targetType.equals(delegate.getBody().getClass())) { return targetType.cast(delegate.getBody()); } Object messageBody = delegate.getBody(); if (ClassUtils.isAssignable(Document.class, messageBody.getClass())) { return converter.read(targetType, (Document) messageBody); } if (converter.getConversionService().canConvert(messageBody.getClass(), targetType)) { return converter.getConversionService().convert(messageBody, targetType); } throw new IllegalArgumentException( String.format("No converter found capable of converting %s to %s", messageBody.getClass(), targetType)); } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.messaging.Message#getProperties() */ @Override public MessageProperties getProperties() { return delegate.getProperties(); } public String toString() { return "LazyMappingDelegatingMessage(delegate=" + this.delegate + ", targetType=" + this.targetType + ")"; } }
833
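A same-package usage sketch for the class above (it and its constructor are package-private). Person is a hypothetical mapped type, and the raw message and converter are assumed to be supplied by the surrounding change-stream setup.

    static Person readBody(Message<Document, ?> raw, MongoConverter converter) {
        // Wrap the raw message; the Document body is converted only when getBody() is called.
        Message<Document, Person> mapped =
            new LazyMappingDelegatingMessage<>(raw, Person.class, converter);
        return mapped.getBody();
    }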
724
<filename>vega/algorithms/nas/fis/autogate_s1_trainer_callback.py<gh_stars>100-1000 # -*- coding: utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved. # This program is free software; you can redistribute it and/or modify # it under the terms of the MIT License. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # MIT License for more details. """AutoGate top-k version Stage1 TrainerCallback.""" import logging from vega.common import ClassFactory, ClassType from vega.common import FileOps from vega.algorithms.nas.fis.ctr_trainer_callback import CtrTrainerCallback logger = logging.getLogger(__name__) @ClassFactory.register(ClassType.CALLBACK) class AutoGateS1TrainerCallback(CtrTrainerCallback): """AutoGateS1TrainerCallback module.""" def __init__(self): """Construct AutoGateS1TrainerCallback class.""" super(CtrTrainerCallback, self).__init__() self.best_score = 0 logging.info("init autogate s1 trainer callback") def after_valid(self, logs=None): """Call after_valid of the managed callbacks.""" self.model = self.trainer.model feature_interaction_score = self.model.get_feature_interaction_score() print('get feature_interaction_score', feature_interaction_score) curr_auc = float(self.trainer.valid_metrics.results['auc']) if curr_auc > self.best_score: best_config = { 'score': curr_auc, 'feature_interaction_score': feature_interaction_score } logging.info("BEST CONFIG IS\n{}".format(best_config)) pickle_result_file = FileOps.join_path( self.trainer.local_output_path, 'best_config.pickle') logging.info("Saved to {}".format(pickle_result_file)) FileOps.dump_pickle(best_config, pickle_result_file) self.best_score = curr_auc
786
515
package com.bj58.argo.thirdparty.jetty; import java.nio.ByteBuffer; import java.util.AbstractMap; import java.util.Collections; import java.util.Comparator; import java.util.Map; import java.util.Set; import java.util.TreeMap; /* ------------------------------------------------------------ */ /** Map implementation optimized for String keys. * This String Map has been optimized for mapping small sets of * Strings where the most frequently accessed Strings have been put to * the map first. * * It also has the benefit that it can look up entries by substring or * sections of char and byte arrays. This can prevent many String * objects from being created just to look up in the map. * * This map is NOT synchronized. */ public class StringMap<O> extends AbstractMap<String,O> { private final TreeMap<Object, O> _map; public static final boolean CASE_INSENSITIVE=true; /* ------------------------------------------------------------ */ private final boolean _caseInsensitive; /* ------------------------------------------------------------ */ /** Constructor. */ public StringMap() { this(false); } /* ------------------------------------------------------------ */ /** Constructor. * @param ignoreCase true to compare keys case-insensitively */ public StringMap(final boolean ignoreCase) { _caseInsensitive=ignoreCase; _map = new TreeMap<Object,O>(new Comparator<Object>() { @Override public int compare(Object o1, Object o2) { String s1=(o1 instanceof String)?(String)o1:null; ByteBuffer b1=(o1 instanceof ByteBuffer)?(ByteBuffer)o1:null; if (s1==null && b1==null) s1=o1.toString(); String s2=(String)o2; int n1 = s1==null?b1.remaining():s1.length(); int n2 = s2.length(); int min = Math.min(n1, n2); for (int i = 0; i < min; i++) { char c1 = s1==null?(char)b1.get(b1.position()+i):s1.charAt(i); char c2 = s2.charAt(i); if (c1 != c2) { if (ignoreCase) { c1 = Character.toUpperCase(c1); c2 = Character.toUpperCase(c2); if (c1 != c2) { c1 = Character.toLowerCase(c1); c2 = Character.toLowerCase(c2); if (c1 != c2) { // No overflow because of numeric promotion return c1 - c2; } } } else return c1 - c2; } } return n1 - n2; } }); } /* ------------------------------------------------------------ */ public boolean isIgnoreCase() { return _caseInsensitive; } /* ------------------------------------------------------------ */ @Override public O put(String key, O value) { return _map.put(key,value); } /* ------------------------------------------------------------ */ @Override public O get(Object key) { return _map.get(key); } /* ------------------------------------------------------------ */ public O get(String key) { return _map.get(key); } /* ------------------------------------------------------------ */ public O get(String key,int offset,int length) { return _map.get(key.substring(offset,offset+length)); } /* ------------------------------------------------------------ */ public O get(ByteBuffer buffer) { return _map.get(buffer); } /* ------------------------------------------------------------ */ @Override public O remove(Object key) { return _map.remove(key); } /* ------------------------------------------------------------ */ public O remove(String key) { return _map.remove(key); } /* ------------------------------------------------------------ */ @Override public Set<Map.Entry<String,O>> entrySet() { Object o=_map.entrySet(); return Collections.unmodifiableSet((Set<Map.Entry<String,O>>)o); } /* ------------------------------------------------------------ */ @Override public int size() { return _map.size(); } /* 
------------------------------------------------------------ */ @Override public boolean isEmpty() { return _map.isEmpty(); } /* ------------------------------------------------------------ */ @Override public boolean containsKey(Object key) { return _map.containsKey(key); } /* ------------------------------------------------------------ */ @Override public void clear() { _map.clear(); } }
2,257
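A usage sketch for the map above, showing the case-insensitive mode and the ByteBuffer lookup path that avoids allocating a String just for the lookup; only APIs defined in the class and the JDK are used.

    StringMap<Integer> headers = new StringMap<>(true); // case-insensitive keys
    headers.put("Content-Length", 42);
    Integer a = headers.get("content-length"); // 42, despite the different case
    Integer b = headers.get(java.nio.ByteBuffer.wrap(
        "CONTENT-LENGTH".getBytes(java.nio.charset.StandardCharsets.US_ASCII))); // also 42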
342
package sentinelgroup.io.sentinel.ui.fragment; import android.os.Bundle; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import sentinelgroup.io.sentinel.R; /** * A simple {@link Fragment} subclass. * Use the {@link VpnMapFragment#newInstance} factory method to * create an instance of this fragment. */ public class VpnMapFragment extends Fragment { public VpnMapFragment() { // Required empty public constructor } /** * Use this factory method to create a new instance of * this fragment. * * @return A new instance of fragment VpnMapFragment. */ public static VpnMapFragment newInstance() { return new VpnMapFragment(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment return inflater.inflate(R.layout.fragment_map, container, false); } }
435
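A host-activity sketch for attaching the fragment above from an AppCompatActivity; the container view ID is hypothetical.

    // Inside an AppCompatActivity; R.id.fragment_container is a made-up container ID.
    getSupportFragmentManager()
            .beginTransaction()
            .replace(R.id.fragment_container, VpnMapFragment.newInstance())
            .commit();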
575
// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/platform/scheduler/main_thread/deadline_task_runner.h" #include <memory> #include "base/bind.h" #include "base/test/task_environment.h" #include "base/time/tick_clock.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" namespace blink { namespace scheduler { class DeadlineTaskRunnerTest : public testing::Test { public: DeadlineTaskRunnerTest() : task_environment_( base::test::TaskEnvironment::TimeSource::MOCK_TIME, base::test::TaskEnvironment::ThreadPoolExecutionMode::QUEUED) {} ~DeadlineTaskRunnerTest() override = default; void SetUp() override { deadline_task_runner_.reset(new DeadlineTaskRunner( base::BindRepeating(&DeadlineTaskRunnerTest::TestTask, base::Unretained(this)), task_environment_.GetMainThreadTaskRunner())); run_times_.clear(); } base::TimeTicks Now() { return task_environment_.GetMockTickClock()->NowTicks(); } void TestTask() { run_times_.push_back(Now()); } base::test::TaskEnvironment task_environment_; std::unique_ptr<DeadlineTaskRunner> deadline_task_runner_; std::vector<base::TimeTicks> run_times_; }; TEST_F(DeadlineTaskRunnerTest, RunOnce) { base::TimeTicks start_time = Now(); base::TimeDelta delay = base::TimeDelta::FromMilliseconds(10); deadline_task_runner_->SetDeadline(FROM_HERE, delay, Now()); task_environment_.FastForwardUntilNoTasksRemain(); EXPECT_THAT(run_times_, testing::ElementsAre(start_time + delay)); } TEST_F(DeadlineTaskRunnerTest, RunTwice) { base::TimeDelta delay1 = base::TimeDelta::FromMilliseconds(10); base::TimeTicks deadline1 = Now() + delay1; deadline_task_runner_->SetDeadline(FROM_HERE, delay1, Now()); task_environment_.FastForwardUntilNoTasksRemain(); base::TimeDelta delay2 = base::TimeDelta::FromMilliseconds(100); base::TimeTicks deadline2 = Now() + delay2; deadline_task_runner_->SetDeadline(FROM_HERE, delay2, Now()); task_environment_.FastForwardUntilNoTasksRemain(); EXPECT_THAT(run_times_, testing::ElementsAre(deadline1, deadline2)); } TEST_F(DeadlineTaskRunnerTest, EarlierDeadlinesTakePrecedence) { base::TimeTicks start_time = Now(); base::TimeDelta delay1 = base::TimeDelta::FromMilliseconds(1); base::TimeDelta delay10 = base::TimeDelta::FromMilliseconds(10); base::TimeDelta delay100 = base::TimeDelta::FromMilliseconds(100); deadline_task_runner_->SetDeadline(FROM_HERE, delay100, Now()); deadline_task_runner_->SetDeadline(FROM_HERE, delay10, Now()); deadline_task_runner_->SetDeadline(FROM_HERE, delay1, Now()); task_environment_.FastForwardUntilNoTasksRemain(); EXPECT_THAT(run_times_, testing::ElementsAre(start_time + delay1)); } TEST_F(DeadlineTaskRunnerTest, LaterDeadlinesIgnored) { base::TimeTicks start_time = Now(); base::TimeDelta delay100 = base::TimeDelta::FromMilliseconds(100); base::TimeDelta delay10000 = base::TimeDelta::FromMilliseconds(10000); deadline_task_runner_->SetDeadline(FROM_HERE, delay100, Now()); deadline_task_runner_->SetDeadline(FROM_HERE, delay10000, Now()); task_environment_.FastForwardUntilNoTasksRemain(); EXPECT_THAT(run_times_, testing::ElementsAre(start_time + delay100)); } TEST_F(DeadlineTaskRunnerTest, DeleteDeadlineTaskRunnerAfterPosting) { deadline_task_runner_->SetDeadline( FROM_HERE, base::TimeDelta::FromMilliseconds(10), Now()); // Deleting the pending task should cancel it. 
deadline_task_runner_.reset(nullptr); task_environment_.FastForwardUntilNoTasksRemain(); EXPECT_TRUE(run_times_.empty()); } } // namespace scheduler } // namespace blink
1,319
312
#include <occa/internal/bin/occa.hpp> int main(const int argc, const char **argv) { occa::bin::buildOccaCommand().run(argc, argv); return 0; }
61
696
/* * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.collect; import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; import java.lang.reflect.InvocationTargetException; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.BiPredicate; import java.util.function.BinaryOperator; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import java.util.function.UnaryOperator; import com.google.common.base.Throwables; import com.opengamma.strata.collect.function.CheckedBiConsumer; import com.opengamma.strata.collect.function.CheckedBiFunction; import com.opengamma.strata.collect.function.CheckedBiPredicate; import com.opengamma.strata.collect.function.CheckedBinaryOperator; import com.opengamma.strata.collect.function.CheckedConsumer; import com.opengamma.strata.collect.function.CheckedFunction; import com.opengamma.strata.collect.function.CheckedPredicate; import com.opengamma.strata.collect.function.CheckedRunnable; import com.opengamma.strata.collect.function.CheckedSupplier; import com.opengamma.strata.collect.function.CheckedUnaryOperator; /** * Static utility methods that convert checked exceptions to unchecked. * <p> * Two {@code wrap()} methods are provided that can wrap an arbitrary piece of logic * and convert checked exceptions to unchecked. * <p> * A number of other methods are provided that allow a lambda block to be decorated * to avoid handling checked exceptions. * For example, the method {@link File#getCanonicalFile()} throws an {@link IOException} * which can be handled as follows: * <pre> * stream.map(Unchecked.function(file -&gt; file.getCanonicalFile()) * </pre> * <p> * Each method accepts a functional interface that is defined to throw {@link Throwable}. * Catching {@code Throwable} means that any method can be wrapped. * Any {@code InvocationTargetException} is extracted and processed recursively. * Any {@link IOException} is converted to an {@link UncheckedIOException}. * Any {@link ReflectiveOperationException} is converted to an {@link UncheckedReflectiveOperationException}. * Any {@link Error} or {@link RuntimeException} is re-thrown without alteration. * Any other exception is wrapped in a {@link RuntimeException}. */ public final class Unchecked { /** * Restricted constructor. */ private Unchecked() { } //------------------------------------------------------------------------- /** * Wraps a block of code, converting checked exceptions to unchecked. * <pre> * Unchecked.wrap(() -&gt; { * // any code that throws a checked exception * } * </pre> * <p> * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param block the code block to wrap * @throws UncheckedIOException if an IO exception occurs * @throws RuntimeException if an exception occurs */ public static void wrap(CheckedRunnable block) { try { block.run(); } catch (Throwable ex) { throw propagate(ex); } } /** * Wraps a block of code, converting checked exceptions to unchecked. * <pre> * Unchecked.wrap(() -&gt; { * // any code that throws a checked exception * } * </pre> * <p> * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. 
* * @param <T> the type of the result * @param block the code block to wrap * @return the result of invoking the block * @throws UncheckedIOException if an IO exception occurs * @throws RuntimeException if an exception occurs */ public static <T> T wrap(CheckedSupplier<T> block) { try { return block.get(); } catch (Throwable ex) { throw propagate(ex); } } //------------------------------------------------------------------------- /** * Converts checked exceptions to unchecked based on the {@code Runnable} interface. * <p> * This wraps the specified runnable returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param runnable the runnable to be decorated * @return the runnable instance that handles checked exceptions */ public static Runnable runnable(CheckedRunnable runnable) { return () -> { try { runnable.run(); } catch (Throwable ex) { throw propagate(ex); } }; } //------------------------------------------------------------------------- /** * Converts checked exceptions to unchecked based on the {@code Function} interface. * <p> * This wraps the specified function returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the input type of the function * @param <R> the return type of the function * @param function the function to be decorated * @return the function instance that handles checked exceptions */ public static <T, R> Function<T, R> function(CheckedFunction<T, R> function) { return (t) -> { try { return function.apply(t); } catch (Throwable ex) { throw propagate(ex); } }; } /** * Converts checked exceptions to unchecked based on the {@code BiFunction} interface. * <p> * This wraps the specified function returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the first input type of the function * @param <U> the second input type of the function * @param <R> the return type of the function * @param function the function to be decorated * @return the function instance that handles checked exceptions */ public static <T, U, R> BiFunction<T, U, R> biFunction(CheckedBiFunction<T, U, R> function) { return (t, u) -> { try { return function.apply(t, u); } catch (Throwable ex) { throw propagate(ex); } }; } //------------------------------------------------------------------------- /** * Converts checked exceptions to unchecked based on the {@code UnaryOperator} interface. * <p> * This wraps the specified operator returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the type of the operator * @param function the function to be decorated * @return the function instance that handles checked exceptions */ public static <T> UnaryOperator<T> unaryOperator(CheckedUnaryOperator<T> function) { return (t) -> { try { return function.apply(t); } catch (Throwable ex) { throw propagate(ex); } }; } /** * Converts checked exceptions to unchecked based on the {@code BinaryOperator} interface. * <p> * This wraps the specified operator returning an instance that handles checked exceptions. 
* If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the type of the operator * @param function the function to be decorated * @return the function instance that handles checked exceptions */ public static <T> BinaryOperator<T> binaryOperator(CheckedBinaryOperator<T> function) { return (t, u) -> { try { return function.apply(t, u); } catch (Throwable ex) { throw propagate(ex); } }; } //------------------------------------------------------------------------- /** * Converts checked exceptions to unchecked based on the {@code Predicate} interface. * <p> * This wraps the specified predicate returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the type of the predicate * @param predicate the predicate to be decorated * @return the predicate instance that handles checked exceptions */ public static <T> Predicate<T> predicate(CheckedPredicate<T> predicate) { return (t) -> { try { return predicate.test(t); } catch (Throwable ex) { throw propagate(ex); } }; } /** * Converts checked exceptions to unchecked based on the {@code BiPredicate} interface. * <p> * This wraps the specified predicate returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the first type of the predicate * @param <U> the second type of the predicate * @param predicate the predicate to be decorated * @return the predicate instance that handles checked exceptions */ public static <T, U> BiPredicate<T, U> biPredicate(CheckedBiPredicate<T, U> predicate) { return (t, u) -> { try { return predicate.test(t, u); } catch (Throwable ex) { throw propagate(ex); } }; } //------------------------------------------------------------------------- /** * Converts checked exceptions to unchecked based on the {@code Consumer} interface. * <p> * This wraps the specified consumer returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the type of the consumer * @param consumer the consumer to be decorated * @return the consumer instance that handles checked exceptions */ public static <T> Consumer<T> consumer(CheckedConsumer<T> consumer) { return (t) -> { try { consumer.accept(t); } catch (Throwable ex) { throw propagate(ex); } }; } /** * Converts checked exceptions to unchecked based on the {@code BiConsumer} interface. * <p> * This wraps the specified consumer returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <T> the first type of the consumer * @param <U> the second type of the consumer * @param consumer the consumer to be decorated * @return the consumer instance that handles checked exceptions */ public static <T, U> BiConsumer<T, U> biConsumer(CheckedBiConsumer<T, U> consumer) { return (t, u) -> { try { consumer.accept(t, u); } catch (Throwable ex) { throw propagate(ex); } }; } //------------------------------------------------------------------------- /** * Converts checked exceptions to unchecked based on the {@code Supplier} interface. 
* <p> * This wraps the specified supplier returning an instance that handles checked exceptions. * If a checked exception is thrown it is converted to an {@link UncheckedIOException} * or {@link RuntimeException} as appropriate. * * @param <R> the result type of the supplier * @param supplier the supplier to be decorated * @return the supplier instance that handles checked exceptions */ public static <R> Supplier<R> supplier(CheckedSupplier<R> supplier) { return () -> { try { return supplier.get(); } catch (Throwable ex) { throw propagate(ex); } }; } /** * Propagates {@code throwable} as-is if possible, or by wrapping in a {@code RuntimeException} if not. * <ul> * <li>If {@code throwable} is an {@code InvocationTargetException} the cause is extracted and processed recursively.</li> * <li>If {@code throwable} is an {@code Error} or {@code RuntimeException} it is propagated as-is.</li> * <li>If {@code throwable} is an {@code IOException} it is wrapped in {@code UncheckedIOException} and thrown.</li> * <li>If {@code throwable} is an {@code ReflectiveOperationException} it is wrapped in * {@code UncheckedReflectiveOperationException} and thrown.</li> * <li>Otherwise {@code throwable} is wrapped in a {@code RuntimeException} and thrown.</li> * </ul> * This method always throws an exception. The return type is a convenience to satisfy the type system * when the enclosing method returns a value. For example: * <pre> * T foo() { * try { * return methodWithCheckedException(); * } catch (Exception e) { * throw Unchecked.propagate(e); * } * } * </pre> * * @param throwable the {@code Throwable} to propagate * @return nothing; this method always throws an exception */ public static RuntimeException propagate(Throwable throwable) { if (throwable instanceof InvocationTargetException) { throw propagate(((InvocationTargetException) throwable).getCause()); } else if (throwable instanceof IOException) { throw new UncheckedIOException((IOException) throwable); } else if (throwable instanceof ReflectiveOperationException) { throw new UncheckedReflectiveOperationException((ReflectiveOperationException) throwable); } else { Throwables.throwIfUnchecked(throwable); throw new RuntimeException(throwable); } } }
4,361
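A usage sketch for the decorators above, mirroring the File.getCanonicalFile() example from the class javadoc; the file names are illustrative.

    import java.io.File;
    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;
    import com.opengamma.strata.collect.Unchecked;

    public class UncheckedDemo {
        public static void main(String[] args) {
            // getCanonicalFile() throws IOException; the decorator rethrows it as UncheckedIOException.
            List<File> canonical = Stream.of(new File("a.txt"), new File("b.txt"))
                .map(Unchecked.function(File::getCanonicalFile))
                .collect(Collectors.toList());
            System.out.println(canonical);
        }
    }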
6,717
<reponame>crossmob/WinObjC //****************************************************************************** // // Copyright (c) 2016 Microsoft Corporation. All rights reserved. // // This code is licensed under the MIT License (MIT). // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // //****************************************************************************** #pragma once #import <Twitter/TwitterExport.h> #import <Foundation/NSObject.h> @class NSData; @class NSHTTPURLResponse; @class NSError; @class NSURL; @class NSDictionary; @class ACAccount; @class NSString; @class NSURLRequest; enum TWRequestMethod { TWRequestMethodGET, TWRequestMethodPOST, TWRequestMethodDELETE }; typedef enum TWRequestMethod TWRequestMethod; typedef void (^TWRequestHandler)(NSData* responseData, NSHTTPURLResponse* urlResponse, NSError* error); TWITTER_EXPORT_CLASS @interface TWRequest : NSObject <NSObject> - (id)initWithURL:(NSURL*)url parameters:(NSDictionary*)parameters requestMethod:(TWRequestMethod)requestMethod STUB_METHOD; @property (retain, nonatomic) ACAccount* account STUB_PROPERTY; @property (readonly, nonatomic) TWRequestMethod requestMethod STUB_PROPERTY; @property (readonly, nonatomic) NSURL* URL STUB_PROPERTY; @property (readonly, nonatomic) NSDictionary* parameters STUB_PROPERTY; - (void)addMultiPartData:(NSData*)data withName:(NSString*)name type:(NSString*)type STUB_METHOD; - (void)performRequestWithHandler:(TWRequestHandler)handler STUB_METHOD; - (NSURLRequest*)signedURLRequest STUB_METHOD; @end
570
839
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cxf.transport.websocket.jetty;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.servlet.ServletOutputStream;
import javax.servlet.WriteListener;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;

import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.transport.websocket.WebSocketConstants;
import org.apache.cxf.transport.websocket.WebSocketUtils;

/**
 *
 */
public class WebSocketVirtualServletResponse implements HttpServletResponse {
    private static final Logger LOG = LogUtils.getL7dLogger(WebSocketVirtualServletResponse.class);
    private WebSocketServletHolder webSocketHolder;
    private Map<String, String> responseHeaders;
    private ServletOutputStream outputStream;

    public WebSocketVirtualServletResponse(WebSocketServletHolder websocket) {
        this.webSocketHolder = websocket;
        this.responseHeaders = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        this.outputStream = createOutputStream();
    }

    @Override
    public void flushBuffer() throws IOException {
        LOG.log(Level.FINE, "flushBuffer()");
        outputStream.flush();
    }

    @Override
    public int getBufferSize() {
        LOG.log(Level.FINE, "getBufferSize()");
        return 0;
    }

    @Override
    public String getCharacterEncoding() {
        LOG.log(Level.FINE, "getCharacterEncoding()");
        return null;
    }

    @Override
    public String getContentType() {
        LOG.log(Level.FINE, "getContentType()");
        return responseHeaders.get("Content-Type");
    }

    @Override
    public Locale getLocale() {
        LOG.log(Level.FINE, "getLocale()");
        return null;
    }

    @Override
    public ServletOutputStream getOutputStream() throws IOException {
        return outputStream;
    }

    @Override
    public PrintWriter getWriter() throws IOException {
        LOG.log(Level.FINE, "getWriter()");
        return new PrintWriter(getOutputStream());
    }

    @Override
    public boolean isCommitted() {
        return false;
    }

    @Override
    public void reset() {
    }

    @Override
    public void resetBuffer() {
        LOG.log(Level.FINE, "resetBuffer()");
    }

    @Override
    public void setBufferSize(int size) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setBufferSize({0})", size);
        }
    }

    @Override
    public void setCharacterEncoding(String charset) {
        // TODO
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setCharacterEncoding({0})", charset);
        }
    }

    @Override
    public void setContentLength(int len) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setContentLength({0})", len);
        }
        responseHeaders.put("Content-Length", Integer.toString(len));
    }

    @Override
    public void setContentType(String type) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setContentType({0})", type);
        }
        responseHeaders.put("Content-Type", type);
    }

    @Override
    public void setLocale(Locale loc) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setLocale({0})", loc);
        }
    }

    @Override
    public void addCookie(Cookie cookie) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "addCookie({0})", cookie);
        }
    }

    @Override
    public void addDateHeader(String name, long date) {
        // TODO
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "addDateHeader({0}, {1})", new Object[]{name, date});
        }
    }

    @Override
    public void addHeader(String name, String value) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "addHeader({0}, {1})", new Object[]{name, value});
        }
        responseHeaders.put(name, value);
    }

    @Override
    public void addIntHeader(String name, int value) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "addIntHeader({0}, {1})", new Object[]{name, value});
        }
        responseHeaders.put(name, Integer.toString(value));
    }

    @Override
    public boolean containsHeader(String name) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "containsHeader({0})", name);
        }
        return responseHeaders.containsKey(name);
    }

    @Override
    public String encodeRedirectURL(String url) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "encodeRedirectURL({0})", url);
        }
        return null;
    }

    @Override
    public String encodeRedirectUrl(String url) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "encodeRedirectUrl({0})", url);
        }
        return null;
    }

    @Override
    public String encodeURL(String url) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "encodeURL({0})", url);
        }
        return null;
    }

    @Override
    public String encodeUrl(String url) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "encodeUrl({0})", url);
        }
        return null;
    }

    @Override
    public String getHeader(String name) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "getHeader({0})", name);
        }
        return null;
    }

    @Override
    public Collection<String> getHeaderNames() {
        LOG.log(Level.FINE, "getHeaderNames()");
        return null;
    }

    @Override
    public Collection<String> getHeaders(String name) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "getHeaders({0})", name);
        }
        return null;
    }

    @Override
    public int getStatus() {
        LOG.log(Level.FINE, "getStatus()");
        String v = responseHeaders.get(WebSocketUtils.SC_KEY);
        return v == null ? 200 : Integer.parseInt(v);
    }

    @Override
    public void sendError(int sc) throws IOException {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "sendError({0})", sc);
        }
        responseHeaders.put(WebSocketUtils.SC_KEY, Integer.toString(sc));
        byte[] data = WebSocketUtils.buildResponse(responseHeaders, null, 0, 0);
        webSocketHolder.write(data, 0, data.length);
    }

    @Override
    public void sendError(int sc, String msg) throws IOException {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "sendError({0}, {1})", new Object[]{sc, msg});
        }
        responseHeaders.put(WebSocketUtils.SC_KEY, Integer.toString(sc));
        byte[] data = WebSocketUtils.buildResponse(responseHeaders, null, 0, 0);
        webSocketHolder.write(data, 0, data.length);
    }

    @Override
    public void sendRedirect(String location) throws IOException {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "sendRedirect({0})", location);
        }
    }

    @Override
    public void setDateHeader(String name, long date) {
        // ignore
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setDateHeader({0}, {1})", new Object[]{name, date});
        }
    }

    @Override
    public void setHeader(String name, String value) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setHeader({0}, {1})", new Object[]{name, value});
        }
        responseHeaders.put(name, value);
    }

    @Override
    public void setIntHeader(String name, int value) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setIntHeader({0}, {1})", new Object[]{name, value});
        }
    }

    @Override
    public void setStatus(int sc) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setStatus({0})", sc);
        }
        responseHeaders.put(WebSocketUtils.SC_KEY, Integer.toString(sc));
    }

    @Override
    public void setStatus(int sc, String sm) {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "setStatus({0}, {1})", new Object[]{sc, sm});
        }
        responseHeaders.put(WebSocketUtils.SC_KEY, Integer.toString(sc));
    }

    private ServletOutputStream createOutputStream() {
        //REVISIT
        // This output buffering is needed as the server side websocket does
        // not support the fragment transmission mode when sending back large data.
        // This buffering is only used for the response to the initial service
        // invocation; the subsequently pushed data are sent back to the socket
        // unbuffered as individual websocket messages.
        // things to consider:
        // - provide a size limit if we use this buffering
        // - add a chunking mode to the cxf websocket's binding.
        //CHECKSTYLE:OFF
        return new ServletOutputStream() {
            private InternalByteArrayOutputStream buffer = new InternalByteArrayOutputStream();

            @Override
            public void write(int b) throws IOException {
                byte[] data = new byte[1];
                data[0] = (byte)b;
                write(data, 0, 1);
            }

            @Override
            public void write(byte[] data) throws IOException {
                write(data, 0, data.length);
            }

            @Override
            public void write(byte[] data, int offset, int length) throws IOException {
                if (responseHeaders.get(WebSocketUtils.FLUSHED_KEY) == null) {
                    // buffer the data until it gets flushed
                    buffer.write(data, offset, length);
                } else {
                    // unbuffered write to the socket
                    String respid = responseHeaders.get(WebSocketConstants.DEFAULT_RESPONSE_ID_KEY);
                    byte[] headers = respid != null
                        ? WebSocketUtils.buildHeaderLine(WebSocketConstants.DEFAULT_RESPONSE_ID_KEY, respid) : null;
                    data = WebSocketUtils.buildResponse(headers, data, offset, length);
                    webSocketHolder.write(data, 0, data.length);
                }
            }

            public void close() throws IOException {
                if (responseHeaders.get(WebSocketUtils.FLUSHED_KEY) == null) {
                    byte[] data = WebSocketUtils.buildResponse(responseHeaders, buffer.getBytes(), 0, buffer.size());
                    webSocketHolder.write(data, 0, data.length);
                    responseHeaders.put(WebSocketUtils.FLUSHED_KEY, "true");
                }
                super.close();
            }

            @Override
            public boolean isReady() {
                throw new UnsupportedOperationException();
            }

            @Override
            public void setWriteListener(WriteListener arg0) {
                throw new UnsupportedOperationException();
            }
        };
        //CHECKSTYLE:ON
    }

    private static class InternalByteArrayOutputStream extends ByteArrayOutputStream {
        // exposes the internal buffer directly so the flush can avoid a copy
        public byte[] getBytes() {
            return buf;
        }
    }

    @Override
    public void setContentLengthLong(long arg0) {
        throw new UnsupportedOperationException();
    }
}
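// Illustrative only: a sketch of how the transport is expected to drive this
// response object. The holder and payload below are hypothetical, not part of
// this file; the flush-on-close behavior is taken from createOutputStream() above.
//
//   WebSocketVirtualServletResponse response = new WebSocketVirtualServletResponse(holder);
//   response.setStatus(200);
//   response.setContentType("application/json");
//   ServletOutputStream out = response.getOutputStream();
//   out.write(payload);   // buffered until the stream is closed
//   out.close();          // sends headers plus buffered body as one websocket message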
5,237
1,600
<gh_stars>1000+
../../../../chapter-04/recipe-02/cxx-example/sum_integers.hpp
33
1,284
<reponame>javalover123/akhq
package org.akhq.modules.schemaregistry;

import com.fasterxml.jackson.databind.ObjectMapper;
import io.confluent.kafka.schemaregistry.json.JsonSchema;
import org.akhq.configs.SchemaRegistryType;
import org.akhq.utils.Album;
import org.akhq.utils.ResourceTestUtil;
import org.json.JSONObject;
import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import static org.junit.jupiter.api.Assertions.*;

class JsonSchemaSerializerTest {

    @Test
    public void serializeJsonStringWithMagicByteAndSchemaId() throws IOException {
        JsonSchema jsonSchema = createJsonSchema("json_schema/album.json");
        JsonSchemaSerializer jsonSchemaSerializer = JsonSchemaSerializer.newInstance(1, jsonSchema, SchemaRegistryType.CONFLUENT);
        Album objectSatisfyingJsonSchema = new Album("title", List.of("artist_1", "artist_2"), 1989, List.of("song_1", "song_2"));
        String recordAsJsonString = new ObjectMapper().writeValueAsString(objectSatisfyingJsonSchema);

        byte[] serialize = jsonSchemaSerializer.serialize(recordAsJsonString);

        assertEquals(SchemaRegistryType.CONFLUENT.getMagicByte(), serialize[0]);
    }

    @Test
    public void failsWhenObjectDoesNotAdhereToSchema() throws IOException {
        JsonSchema jsonSchema = createJsonSchema("json_schema/album.json");
        JsonSchemaSerializer jsonSchemaSerializer = JsonSchemaSerializer.newInstance(1, jsonSchema, SchemaRegistryType.CONFLUENT);
        JSONObject notSchemaValidObject = new JSONObject(Collections.singletonMap("any_property", "property value"));

        try {
            jsonSchemaSerializer.serialize(notSchemaValidObject.toString());
            fail("Exception should be thrown");
        } catch (Exception e) {
            assertEquals(IllegalArgumentException.class, e.getClass());
        }
    }

    private JsonSchema createJsonSchema(String resourcePath) throws IOException {
        String schemaAsString = ResourceTestUtil.resourceAsString(resourcePath);
        return new JsonSchema(schemaAsString);
    }
}
785
344
<reponame>cclauss/archai
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

""" Script to prepare food101 dataset for pytorch dataloader.

This script assumes that one has downloaded and extracted the full food101
dataset from ETHZ. Invoke the script as
$ python food101.py --dataroot /path/to/food101.
It will create 'train' and 'test' folders inside the root folder filled with
the official train and test splits. The folder structure in 'train' and 'test'
respects that needed for pytorch torchvision.datasets.ImageFolder to work.
"""

import os
import pdb
import time
import argparse
import tempfile
import shutil
import pathlib
from collections import defaultdict

from torchvision.datasets.utils import download_and_extract_archive
from torch.utils.model_zoo import tqdm
from PIL import Image

from archai.common import utils


def copy_file_list(file_list, src_dir, dest_dir):
    with tqdm(total=len(file_list)) as pbar:
        for i, filename in enumerate(file_list):
            filename = filename.strip()
            if filename:
                # convert / to os-specific dir separator
                filename_parts = (filename + '.jpg').split('/')
                target = os.path.join(dest_dir, *filename_parts)
                if not os.path.isfile(target):
                    utils.copy_file(os.path.join(src_dir, *filename_parts), target)
            pbar.update(1)


def prepare_data(dataroot: str) -> None:
    meta_path = os.path.join(dataroot, 'food-101', 'meta')
    images_path = os.path.join(dataroot, 'food-101', 'images')
    train_path = os.path.join(dataroot, 'food-101', 'train')
    test_path = os.path.join(dataroot, 'food-101', 'test')

    train_list = pathlib.Path(os.path.join(meta_path, 'train.txt')).read_text().splitlines()
    test_list = pathlib.Path(os.path.join(meta_path, 'test.txt')).read_text().splitlines()
    class_list = pathlib.Path(os.path.join(meta_path, 'classes.txt')).read_text().splitlines()

    for class_name in class_list:
        class_name = class_name.strip()
        if class_name:
            os.makedirs(os.path.join(train_path, class_name), exist_ok=True)
            os.makedirs(os.path.join(test_path, class_name), exist_ok=True)

    copy_file_list(train_list, images_path, train_path)
    copy_file_list(test_list, images_path, test_path)


def download(dataroot: str):
    DOWNLOAD_URL = 'http://data.vision.ee.ethz.ch/cvl/food-101.tar.gz'
    download_and_extract_archive(DOWNLOAD_URL, tempfile.tempdir, extract_root=dataroot, remove_finished=True)


if __name__ == '__main__':
    # download()
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataroot', type=str, default='d:\\datasets',
                        help='root directory where food-101 folder exist (downloaded and extracted from ETHZ)')
    args = parser.parse_args()

    prepare_data(args.dataroot)
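# Illustrative only: expected invocation and resulting layout, assuming the
# ETHZ archive was already downloaded and extracted under the given root.
#
#   $ python food101.py --dataroot /path/to/datasets
#
# afterwards:
#   /path/to/datasets/food-101/train/<class_name>/*.jpg
#   /path/to/datasets/food-101/test/<class_name>/*.jpg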
1,209
339
<gh_stars>100-1000
'''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.

Created on Jan 1, 2011

@author: <NAME>
@contact: <EMAIL>
@summary: Contains tutorial for backtester and report.
'''

# Python imports
import os
import cPickle
import sys
import datetime as dt

# 3rd Party Imports
import numpy.core.multiarray
import pandas as pand

# QSTK imports
import qstksim as qs
from qstkutil import qsdateutil as du
from qstkutil import DataAccess as da


def quickSim(alloc, historic, start_cash):
    """
    @summary Quickly back tests an allocation for certain historical data,
             using a starting fund value
    @param alloc: DataMatrix containing timestamps to test as indices and
                  Symbols to test as columns, with _CASH symbol as the last
                  column
    @param historic: Historic dataframe of equity prices
    @param start_cash: integer specifying initial fund value
    @return funds: TimeSeries with fund values for each day in the back test
    @rtype TimeSeries
    """
    from inspect import currentframe, getframeinfo
    frameinfo = getframeinfo(currentframe())
    raise DeprecationWarning('Please use qstksim.tradesim instead, or' +
                             ' comment me out in %s, line %i'
                             % (frameinfo.filename, frameinfo.lineno))

    # original quick simulator
    # not designed to handle shorts

    # check each row in alloc
    for row in range(0, len(alloc.values[:, 0])):
        if(abs(alloc.values[row, :].sum() - 1) > .0001):
            print "warning, alloc row " + str(row) + \
                " does not sum to one, rebalancing"
            # if no allocation, all in cash
            if(alloc.values[row, :].sum() == 0):
                alloc.values[row, -1] = 1
            else:
                alloc.values[row, :] = alloc.values[row, :] \
                    / alloc.values[row, :].sum()

    # add cash column
    historic['_CASH'] = 1
    closest = historic[historic.index <= alloc.index[0]].ix[:]
    # start shares/fund out as 100% cash
    fund_ts = pand.Series([start_cash], index=[closest.index[0]])
    shares = (alloc.ix[0:1] * 0.0)
    shares['_CASH'] = start_cash

    # compute all trades in the allocation frame
    for row_index, row in alloc.iterrows():
        trade_price = historic.ix[row_index:].ix[0:1]
        trade_date = trade_price.index[0]

        # get stock prices on all the days up until this trade
        to_calculate = historic[(historic.index <= trade_date) &
                                (historic.index > fund_ts.index[-1])]
        # multiply prices by our current shares
        values_by_stock = to_calculate * shares.ix[-1]
        # calculate total value and append to our fund history
        fund_ts = fund_ts.append(values_by_stock.sum(axis=1))

        # Get new shares values
        shares = (row * fund_ts.ix[-1]) / trade_price

    return fund_ts


def _compute_short(arr):
    ''' Computes total value of negative positions '''
    tally = 0
    for i in range(0, len(arr) - 1):
        if arr[i] < 0:
            tally = tally + arr[i]
    return abs(tally)


def _compute_long(arr):
    ''' Computes total value of positive positions '''
    tally = 0
    for i in range(0, len(arr) - 1):
        if arr[i] > 0:
            tally = tally + arr[i]
    return tally


def _compute_leverage(arr, fundval):
    ''' Computes percent leverage '''
    if fundval == 0:
        return 0
    return (_compute_long(arr) - _compute_short(arr)) / fundval


def shortingQuickSim(alloc, historic, start_cash, leverage):
    '''
    shortingQuickSim designed to handle shorts, keeps track of leverage,
    keeping it within parameterized value; ignores alloc cash column
    '''
    del alloc['_CASH']

    # fix invalid days
    historic = historic.fillna(method='backfill')

    # compute first trade
    closest = historic[historic.index <= alloc.index[0]]
    fund_ts = pand.Series([start_cash], index=[closest.index[-1]])
    shares = alloc.values[0, :] * fund_ts.values[-1] / closest.values[-1, :]
    cash_values = pand.DataMatrix([shares * closest.values[-1, :]],
                                  index=[closest.index[-1]])

    # compute all trades
    for i in range(1, len(alloc.values[:, 0])):
        # check leverage
        # TODO Find out what to use for fundval below...
        this_leverage = _compute_leverage(alloc.values[0, :], start_cash)
        if this_leverage > leverage:
            print 'Warning, leverage of ', this_leverage, \
                ' reached, exceeds leverage limit of ', leverage, '\n'

        # get closest date(previous date)
        closest = historic[historic.index <= alloc.index[i]]
        # for loop to calculate fund daily (without rebalancing)
        for date in closest[closest.index > fund_ts.index[-1]].index:
            # compute and record total fund value (Sum(closest close * stocks))
            fund_ts = fund_ts.append(
                pand.Series([(closest.xs(date) * shares).sum()], index=[date]))
            cash_values = cash_values.append(
                pand.DataMatrix([shares * closest.xs(date)], index=[date]))

        # distribute fund in accordance with alloc
        shares = alloc.values[i, :] * (fund_ts.values[-1]
                                       / closest.xs(closest.index[-1]))

    # compute fund value for rest of historic data with final share distribution
    for date in historic[historic.index > alloc.index[-1]].index:
        if date in closest.index:
            fund_ts = fund_ts.append(
                pand.Series([(closest.xs(date) * shares).sum()], index=[date]))

    # return fund record
    return fund_ts


def alloc_backtest(alloc, start):
    """
    @summary: Back tests an allocation from a pickle file. Uses a starting
              portfolio value of start.
    @param alloc: Name of allocation pickle file. Pickle file contains a
                  DataMatrix with timestamps as indexes and stock symbols as
                  columns, with the last column being the _CASH symbol,
                  indicating how much of the allocation is in cash.
    @param start: integer specifying the starting value of the portfolio
    @return funds: List of fund values indicating the value of the portfolio
                   throughout the back test.
    @rtype timeSeries
    """
    # read in alloc table from command line arguments
    alloc_input_file = open(alloc, "r")
    alloc = cPickle.load(alloc_input_file)

    # Get the data from the data store
    dataobj = da.DataAccess('Norgate')
    startday = alloc.index[0] - dt.timedelta(days=10)
    endday = alloc.index[-1]

    # Get desired timestamps
    timeofday = dt.timedelta(hours=16)
    timestamps = du.getNYSEdays(startday, endday, timeofday)
    historic = dataobj.get_data(timestamps, list(alloc.columns[0:-1]), "close")

    # backtest
    [fund, leverage, commissions, slippage] = qs.tradesim(alloc, historic,
        int(start), 1, True, 0.02, 5, 0.02)

    return [fund, leverage, commissions, slippage]


def strat_backtest1(strat, start, end, num, diff, startval):
    """
    @summary: Back tests a strategy defined in a python script that takes in
              a start and end date along with a starting value, a set number
              of times.
    @param strat: filename of python script strategy
    @param start: starting date in a datetime object
    @param end: ending date in a datetime object
    @param num: number of tests to perform
    @param diff: offset in days of the tests
    @param startval: starting value of fund during back tests
    @return fundsmatrix: Datamatrix of fund values returned from each test
    @rtype datamatrix
    """
    fundsmatrix = []
    startdates = du.getNextNNYSEdays(start, num * diff, dt.timedelta(hours=16))
    enddates = du.getNextNNYSEdays(end, num * diff, dt.timedelta(hours=16))
    for i in range(0, num):
        os.system('python ' + strat + ' ' + startdates[i].strftime("%m-%d-%Y")
                  + ' ' + enddates[i].strftime("%m-%d-%Y") + ' temp_alloc.pkl')
        # collect the result of each run rather than returning early, so that
        # every test contributes a row to fundsmatrix as documented above
        funds = alloc_backtest('temp_alloc.pkl', startval)
        fundsmatrix.append(funds)
    return fundsmatrix


def strat_backtest2(strat, start, end, diff, dur, startval):
    """
    @summary: Back tests a strategy defined in a python script that takes in
              a start and end date along with a starting value over a given
              period.
    @param strat: filename of python script strategy
    @param start: starting date in a datetime object
    @param end: ending date in a datetime object
    @param diff: offset in days of the tests
    @param dur: length of a test
    @param startval: starting value of fund during back tests
    @return fundsmatrix: Datamatrix of fund values returned from each test
    @rtype datamatrix
    """
    fundsmatrix = []
    startdates = du.getNYSEdays(start, end, dt.timedelta(hours=16))
    for i in range(0, len(startdates), diff):
        if(i + dur >= len(startdates)):
            enddate = startdates[-1]
        else:
            enddate = startdates[i + dur]
        os.system('python ' + strat + ' ' + startdates[i].strftime("%m-%d-%Y")
                  + ' ' + enddate.strftime("%m-%d-%Y") + ' temp_alloc.pkl')
        funds = alloc_backtest('temp_alloc.pkl', startval)
        fundsmatrix.append(funds)
    return fundsmatrix


def run_main():
    ''' Main program '''
    #
    # CmdlnQuickSim
    #
    # A function which runs a quick sim on an allocation provided via
    # command line, along with a starting cash value
    #
    # Allocation Backtest:
    # python quickSim.py -a allocation_file start_value output_file
    # python quickSim.py -a 'alloc_file.pkl' 1000 'fund_output.pkl'
    #
    # Strategy backtest:
    # python quickSim.py -s strategy start end start_value output_file
    # python quickSim.py -s 'strategy.py' '2/2/2007' '2/2/2009' 1000
    #     'fund_output.pkl'
    #
    # Robust backtest:
    # python quickSim.py -r strategy start end days_between duration
    #     start_value output
    # python quickSim.py -r 'strategy.py' '1-1-2004' '1-1-2007' 7 28 10000
    #     'out.pkl'
    #
    # <NAME>
    #

    if(sys.argv[1] == '-a'):
        funds = alloc_backtest(sys.argv[2], sys.argv[3])
        output = open(sys.argv[4], "w")
        cPickle.dump(funds, output)
    elif(sys.argv[1] == '-s'):
        t = map(int, sys.argv[3].split('-'))
        startday = dt.datetime(t[2], t[0], t[1])
        t = map(int, sys.argv[4].split('-'))
        endday = dt.datetime(t[2], t[0], t[1])
        fundsmatrix = strat_backtest1(sys.argv[2], startday, endday, 1, 0,
                                      int(sys.argv[5]))
        output = open(sys.argv[6], "w")
        cPickle.dump(fundsmatrix, output)
    elif(sys.argv[1] == '-r'):
        t = map(int, sys.argv[3].split('-'))
        startday = dt.datetime(t[2], t[0], t[1])
        t = map(int, sys.argv[4].split('-'))
        endday = dt.datetime(t[2], t[0], t[1])
        fundsmatrix = strat_backtest2(sys.argv[2], startday, endday,
                                      int(sys.argv[5]), int(sys.argv[6]),
                                      int(sys.argv[7]))
        output = open(sys.argv[8], "w")
        cPickle.dump(fundsmatrix, output)
    else:
        print 'invalid command line call'
        print 'use python quickSim.py -a alloc_pkl start_value output_pkl'
        print 'or python quickSim.py -s strategy start_date end_date' + \
            ' start_value output_pkl'
        print 'or python quickSim.py -r strategy start_date end_date' + \
            ' test_offset_in_days duration start_value output_pkl'


if __name__ == "__main__":
    run_main()
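# Illustrative only: the shape of an allocation frame these back tests expect.
# The symbols and dates below are hypothetical; the structural requirements
# (timestamps as the index, one column per symbol, '_CASH' as the last column,
# rows summing to one) come from the code above.
#
#   dates = [dt.datetime(2008, 1, 2, 16), dt.datetime(2008, 1, 9, 16)]
#   alloc = pand.DataMatrix([[0.5, 0.3, 0.2], [0.0, 0.0, 1.0]],
#                           index=dates, columns=['AAPL', 'GOOG', '_CASH'])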
5,219
392
<reponame>logzio/apollo
package io.logz.apollo.helpers;

import io.logz.apollo.ApolloApplication;
import io.logz.apollo.clients.ApolloTestAdminClient;
import io.logz.apollo.clients.ApolloTestClient;
import io.logz.apollo.configuration.ApiConfiguration;
import io.logz.apollo.configuration.ApolloConfiguration;
import io.logz.apollo.configuration.DatabaseConfiguration;
import io.logz.apollo.configuration.KubernetesConfiguration;
import io.logz.apollo.configuration.ScmConfiguration;
import io.logz.apollo.configuration.SlaveConfiguration;
import io.logz.apollo.configuration.WebsocketConfiguration;
import io.logz.apollo.kubernetes.KubernetesMonitor;
import io.logz.apollo.kubernetes.KubernetesHealth;
import io.logz.apollo.scm.GithubConnector;
import org.apache.commons.lang3.StringUtils;
import org.conf4j.core.ConfigurationProvider;
import org.jetbrains.annotations.NotNull;

import javax.script.ScriptException;
import java.io.IOException;
import java.sql.SQLException;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;

public class StandaloneApollo {

    private static StandaloneApollo instance;
    private static String hostname = "localhost";
    private static String protocol = "http";

    private static final String DATA_SOURCE_CLASS_NAME = "org.mariadb.jdbc.MariaDbDataSource";

    private final ApolloApplication apolloApplication;
    private final KubernetesMonitor kubernetesMonitor;
    private final KubernetesHealth kubernetesHealth;
    private final GithubConnector githubConnector;
    private final DatabaseConfiguration databaseConfiguration;
    private ApolloConfiguration apolloConfiguration;

    private StandaloneApollo() throws ScriptException, SQLException, IOException {
        System.setProperty(KubernetesMonitor.LOCAL_RUN_PROPERTY, "true");
        System.setProperty(KubernetesHealth.LOCAL_RUN_PROPERTY, "true");

        // Start DB and match configuration
        ApolloMySQL apolloMySQL = new ApolloMySQL();
        databaseConfiguration = new DatabaseConfiguration(
                apolloMySQL.getMappedPort(),
                apolloMySQL.getContainerIpAddress(),
                apolloMySQL.getUsername(),
                apolloMySQL.getPassword(),
                apolloMySQL.getSchema(),
                DATA_SOURCE_CLASS_NAME
        );

        apolloConfiguration = createApolloConfiguration(null, false, "", false);

        // Start apollo
        apolloApplication = new ApolloApplication(createConfigurationProvider(apolloConfiguration));
        apolloApplication.start();

        // Get Kubernetes monitor and health, they are stopped by default in tests
        // because usually we will want to inject a mock first
        kubernetesMonitor = apolloApplication.getInjector().getInstance(KubernetesMonitor.class);
        kubernetesHealth = apolloApplication.getInjector().getInstance(KubernetesHealth.class);

        Runtime.getRuntime().addShutdownHook(new Thread(apolloApplication::shutdown));

        githubConnector = new GithubConnector(apolloConfiguration);
    }

    @NotNull
    private ApolloConfiguration createApolloConfiguration(String slaveId, boolean isSlave,
                                                          String slaveCsvEnvironments, boolean disableApiServer) {
        return new ApolloConfiguration(
                new ApiConfiguration(Common.getAvailablePort(), "0.0.0.0", "secret", disableApiServer),
                databaseConfiguration,
                new KubernetesConfiguration(1, 1),
                new ScmConfiguration(StringUtils.EMPTY, StringUtils.EMPTY),
                new WebsocketConfiguration(Common.getAvailablePort(), 5),
                new SlaveConfiguration(slaveId, 1, isSlave, slaveCsvEnvironments)
        );
    }

    public static StandaloneApollo getOrCreateServer() throws ScriptException, IOException, SQLException {
        if (instance == null) {
            instance = new StandaloneApollo();
        }
        return instance;
    }

    public ApolloApplication createAndStartSlave(String slaveId, List<Integer> environmentIds,
                                                 boolean disableApiServer) {
        if (instance == null) {
            throw new RuntimeException("Can't create slave without master first");
        }

        ApolloConfiguration apolloConfiguration = createApolloConfiguration(slaveId, true,
                environmentIds.stream().map(Object::toString).collect(Collectors.joining(",")),
                disableApiServer);
        ApolloApplication apolloApplication = new ApolloApplication(createConfigurationProvider(apolloConfiguration));
        apolloApplication.start();
        return apolloApplication;
    }

    public GithubConnector getGithubConnector() {
        return githubConnector;
    }

    public void startKubernetesMonitor() {
        System.setProperty(KubernetesMonitor.LOCAL_RUN_PROPERTY, "false");
        kubernetesMonitor.start();
    }

    public void stopKubernetesMonitor() {
        System.setProperty(KubernetesMonitor.LOCAL_RUN_PROPERTY, "true");
        kubernetesMonitor.stop();
    }

    public void startKubernetesHealth() {
        System.setProperty(KubernetesHealth.LOCAL_RUN_PROPERTY, "false");
        kubernetesHealth.start();
    }

    public void stopKubernetesHealth() {
        kubernetesHealth.stop();
        System.setProperty(KubernetesHealth.LOCAL_RUN_PROPERTY, "true");
    }

    public ApolloTestClient createTestClient() {
        return new ApolloTestClient(ModelsGenerator.createRegularUser(), hostname,
                apolloConfiguration.getApi().getPort(), protocol);
    }

    public <T> T getInstance(Class<T> clazz) {
        return apolloApplication.getInjector().getInstance(clazz);
    }

    public ApolloTestAdminClient createTestAdminClient() {
        return new ApolloTestAdminClient(hostname, apolloConfiguration.getApi().getPort(), protocol);
    }

    private <T> ConfigurationProvider<T> createConfigurationProvider(T configuration) {
        return new ConfigurationProvider<T>() {
            @Override
            public T get() {
                return configuration;
            }

            @Override
            public <C> ConfigurationProvider<C> createConfigurationProvider(Function<T, C> extractor) {
                return null;
            }

            @Override
            public void registerChangeListener(BiConsumer<T, T> listener) {}
        };
    }
}
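// Illustrative only: typical test-side usage of this helper; the surrounding
// test class is hypothetical and not part of this file.
//
//   StandaloneApollo standaloneApollo = StandaloneApollo.getOrCreateServer();
//   ApolloTestClient apolloTestClient = standaloneApollo.createTestClient();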
2,302
1,350
<reponame>Shashi-rk/azure-sdk-for-java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.batchai.models;

import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;

/** Defines values for DeallocationOption. */
public final class DeallocationOption extends ExpandableStringEnum<DeallocationOption> {
    /** Static value requeue for DeallocationOption. */
    public static final DeallocationOption REQUEUE = fromString("requeue");

    /** Static value terminate for DeallocationOption. */
    public static final DeallocationOption TERMINATE = fromString("terminate");

    /** Static value waitforjobcompletion for DeallocationOption. */
    public static final DeallocationOption WAITFORJOBCOMPLETION = fromString("waitforjobcompletion");

    /**
     * Creates or finds a DeallocationOption from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding DeallocationOption.
     */
    @JsonCreator
    public static DeallocationOption fromString(String name) {
        return fromString(name, DeallocationOption.class);
    }

    /** @return known DeallocationOption values. */
    public static Collection<DeallocationOption> values() {
        return values(DeallocationOption.class);
    }
}
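// Illustrative only: obtaining values from this expandable enum; the variable
// names below are hypothetical.
//
//   DeallocationOption option = DeallocationOption.fromString("requeue");
//   Collection<DeallocationOption> known = DeallocationOption.values();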
450