Dataset preview columns: max_stars_count (int64, ranging 301 to 224k), text (string, lengths 6 to 1.05M), token_count (int64, ranging 3 to 727k). Each row below is listed as max_stars_count, text, token_count.

max_stars_count: 14,668
<filename>components/autofill_assistant/browser/service/cup.h<gh_stars>1000+
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_AUTOFILL_ASSISTANT_BROWSER_SERVICE_CUP_H_
#define COMPONENTS_AUTOFILL_ASSISTANT_BROWSER_SERVICE_CUP_H_

#include "components/autofill_assistant/browser/service/rpc_type.h"
#include "third_party/abseil-cpp/absl/types/optional.h"

namespace autofill_assistant {
namespace cup {

// Whether |PackAndSignRequest| should be called before the request is
// submitted. Can be |false| because signing is disabled via feature flag,
// or given message type doesn't support CUP signing.
bool ShouldSignRequests(RpcType rpc_type);

// Whether |UnpackResponse| should be called on the response from the service
// call. Can be false because verification is disabled via feature flag or
// |ShouldSignRequest| returns |false|.
bool ShouldVerifyResponses(RpcType rpc_type);

class CUP {
 public:
  virtual ~CUP() = default;

  // Generates a new |request| where |original_request| is packed and signed in
  // its |cup_data| field.
  virtual std::string PackAndSignRequest(
      const std::string& original_request) = 0;

  // Generates a new |response| where |original_response| is unpacked from
  // the |cup_data| field.
  virtual absl::optional<std::string> UnpackResponse(
      const std::string& original_response) = 0;

 protected:
  CUP() = default;
};

}  // namespace cup
}  // namespace autofill_assistant

#endif  // COMPONENTS_AUTOFILL_ASSISTANT_BROWSER_SERVICE_CUP_H_
token_count: 523
max_stars_count: 2,728
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE: client_sample_async.py
DESCRIPTION:
    These samples demonstrate creating a client and requesting a token.

USAGE:
    python client_sample_async.py
    Set the environment variables with your own values before running the sample:
    1) MIXEDREALITY_ACCOUNT_DOMAIN - the Mixed Reality account domain.
    2) MIXEDREALITY_ACCOUNT_ID - the Mixed Reality account identifier.
    3) MIXEDREALITY_ACCOUNT_KEY - the Mixed Reality account primary or secondary key.
"""

import os
import asyncio


class ClientSamplesAsync(object):
    from azure.core.credentials import AzureKeyCredential

    account_domain = os.environ.get("MIXEDREALITY_ACCOUNT_DOMAIN", None)
    if not account_domain:
        raise ValueError("Set the MIXEDREALITY_ACCOUNT_DOMAIN environment variable before running this sample.")

    account_id = os.environ.get("MIXEDREALITY_ACCOUNT_ID", None)
    if not account_id:
        raise ValueError("Set the MIXEDREALITY_ACCOUNT_ID environment variable before running this sample.")

    account_key = os.environ.get("MIXEDREALITY_ACCOUNT_KEY", None)
    if not account_key:
        raise ValueError("Set the MIXEDREALITY_ACCOUNT_KEY environment variable before running this sample.")

    key_credential = AzureKeyCredential(account_key)

    def create_client(self):
        # [START create_client]
        from azure.mixedreality.authentication.aio import MixedRealityStsClient

        client = MixedRealityStsClient(self.account_id, self.account_domain, self.key_credential)
        # [END create_client]
        print("client created")

    async def get_token(self):
        from azure.mixedreality.authentication.aio import MixedRealityStsClient

        client = MixedRealityStsClient(self.account_id, self.account_domain, self.key_credential)

        async with client:
            # [START get_token]
            access_token = await client.get_token()
            # [END get_token]
            print("token retrieved: " + access_token.token)


async def main():
    sample = ClientSamplesAsync()
    sample.create_client()
    await sample.get_token()


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())
token_count: 828
max_stars_count: 3,073
from apps.analyzer.models import Category, FeatureCategory from django.db.models.aggregates import Sum import math class Classifier: def __init__(self, user, feed, phrases): self.user = user self.feed = feed self.phrases = phrases def get_features(self, doc): found = {} for phrase in self.phrases: if phrase in doc: if phrase in found: found[phrase] += 1 else: found[phrase] = 1 return found def increment_feature(self, feature, category): count = self.feature_count(feature,category) if count==0: fc = FeatureCategory(user=self.user, feed=self.feed, feature=feature, category=category, count=1) fc.save() else: fc = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) fc.count = count + 1 fc.save() def feature_count(self, feature, category): if isinstance(category, Category): category = category.category try: feature_count = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) except FeatureCategory.DoesNotExist: return 0 else: return float(feature_count.count) def increment_category(self,category): count = self.category_count(category) if count==0: category = Category(user=self.user, feed=self.feed, category=category, count=1) category.save() else: category = Category.objects.get(user=self.user, feed=self.feed, category=category) category.count = count+1 category.save() def category_count(self, category): if not isinstance(category, Category): try: category_count = Category.objects.get(user=self.user, feed=self.feed, category=category) except Category.DoesNotExist: return 0 else: category_count = category return float(category_count.count) def categories(self): categories = Category.objects.all() return categories def totalcount(self): categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum('count')) return categories['sum'] def train(self, item, category): features = self.get_features(item) # Increment the count for every feature with this category for feature in features: self.increment_feature(feature, category) # Increment the count for this category self.increment_category(category) def feature_probability(self, feature, category): if self.category_count(category) == 0: return 0 # The total number of times this feature appeared in this # category divided by the total number of items in this category return self.feature_count(feature, category) / self.category_count(category) def weighted_probability(self, feature, category, prf, weight=1.0, ap=0.5): # Calculate current probability basic_prob = prf(feature, category) # Count the number of times this feature has appeared in all categories totals = sum([self.feature_count(feature, c) for c in self.categories()]) # Calculate the weighted average bp = ((weight*ap) + (totals*basic_prob)) / (weight+totals) print(feature, category, basic_prob, totals, bp) return bp class FisherClassifier(Classifier): def __init__(self, user, feed, phrases): Classifier.__init__(self, user, feed, phrases) self.minimums = {} def category_probability(self, feature, category): # The frequency of this feature in this category clf = self.feature_probability(feature, category) if clf==0: return 0 # The frequency of this feature in all the categories freqsum = sum([self.feature_probability(feature, category) for c in self.categories()]) # The probability is the frequency in this category divided by # the overall frequency p = clf / freqsum return p def fisher_probability(self, item, category): # Multiply all the probabilities together p = .5 features = 
self.get_features(item) if features: p = 1 for feature in features: p *= (self.weighted_probability(feature, category, self.category_probability)) # Take the natural log and multiply by -2 fscore = -2*math.log(p) # Use the inverse chi2 function to get a probability return self.invchi2(fscore,len(features)*2) def invchi2(self, chi, df): m = chi / 2.0 sum = term = math.exp(-m) for i in range(1, df//2): term *= m / i sum += term return min(sum, 1.0) def setminimum(self, category, min): self.minimums[category] = min def getminimum(self, category): if category not in self.minimums: return 0 return self.minimums[category] def classify(self,item,default=None): # Loop through looking for the best result best = default max = 0.0 print(self.categories(), item) for category in self.categories(): p=self.fisher_probability(item, category) # Make sure it exceeds its minimum if p > self.getminimum(category) and p > max: best = category max = p return best
token_count: 2,523
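The FisherClassifier above combines per-feature probabilities by multiplying them, taking -2*ln(p), and mapping the result through an inverse chi-square with 2n degrees of freedom. A minimal, self-contained sketch of just that combination step, independent of the Django models; the probability values fed in are made-up numbers for illustration:

import math

def inv_chi2(chi, df):
    # Upper-tail chi-square probability for an even number of degrees of
    # freedom, mirroring FisherClassifier.invchi2 above.
    m = chi / 2.0
    total = term = math.exp(-m)
    for i in range(1, df // 2):
        term *= m / i
        total += term
    return min(total, 1.0)

def fisher_combine(probabilities):
    # Multiply the per-feature probabilities, then apply the inverse chi-square.
    p = 1.0
    for prob in probabilities:
        p *= prob
    fscore = -2 * math.log(p)
    return inv_chi2(fscore, len(probabilities) * 2)

# Hypothetical per-feature category probabilities for one item; a value
# closer to 1.0 means stronger evidence for the category, matching how
# classify() above picks the category with the largest score.
print(fisher_combine([0.7, 0.8, 0.6]))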
max_stars_count: 2,151
<reponame>kniefliu/WindowsSamples
//
// Copyright (c) 2002-2014 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//

// SurfaceImpl.cpp: Implementation of Surface stub method class

#include "libANGLE/renderer/SurfaceImpl.h"

namespace rx
{

SurfaceImpl::SurfaceImpl(const egl::SurfaceState &state) : mState(state)
{
}

SurfaceImpl::~SurfaceImpl()
{
}

egl::Error SurfaceImpl::swapWithDamage(const gl::Context *context, EGLint *rects, EGLint n_rects)
{
    UNREACHABLE();
    return egl::EglBadSurface() << "swapWithDamage implementation missing.";
}

}  // namespace rx
token_count: 231
max_stars_count: 1,561
<filename>tests/api/controllers/emitter_receiver_enable_disable/emitter_receiver_enable_disable.c /* * Description: Test Emitters and Receivers with mixed transmission type. * This file contains some tests of the emission/reception of data: * - disabled / ensabled receivers * - communicate only with device with the same transmission type * - communicate on different channels */ #include <webots/emitter.h> #include <webots/receiver.h> #include <webots/robot.h> #include "../../../lib/ts_assertion.h" #include "../../../lib/ts_utils.h" #define TIME_STEP 32 int main(int argc, char **argv) { ts_setup(argv[1]); WbDeviceTag emitter, receiver; int queueLength; const char *buffer; int robotID = -1; // messages const char *msgString0 = "Hello"; const char *msgString1 = "Radio"; const char *msgString2 = "Serial"; const char *msgString3 = "Infra-Red"; if (strcmp(wb_robot_get_name(), "robot0") == 0) { emitter = wb_robot_get_device("emitter0"); // serial, channel 2 receiver = wb_robot_get_device("receiver0"); // radio, channel -1 robotID = 0; } else if (strcmp(wb_robot_get_name(), "robot1") == 0) { emitter = wb_robot_get_device("emitter1"); // infra-red, channel -1 robotID = 1; } else if (strcmp(wb_robot_get_name(), "robot2") == 0) { emitter = wb_robot_get_device("emitter2"); // radio, channel -1 robotID = 2; } else if (strcmp(wb_robot_get_name(), "robot3") == 0) { receiver = wb_robot_get_device("receiver3"); // serial, channel 2 robotID = 3; } else if (strcmp(wb_robot_get_name(), "robot4") == 0) { receiver = wb_robot_get_device("receiver4"); // infra-red, channel 2 robotID = 4; } // TEST 1: receivers disabled // emitter0 -> msgString0 (serial, channel 2) // emitter1 -> msgString0 (infra-red, channel -1) // emitter2 -> msgString0 (radio, channel -1) wb_robot_step(TIME_STEP); switch (robotID) { case 0: wb_emitter_send(emitter, msgString0, strlen(msgString0) + 1); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Radio receiver should receive %d not %d packets when disabled", 0, queueLength); break; case 1: wb_emitter_send(emitter, msgString0, strlen(msgString0) + 1); wb_robot_step(TIME_STEP); break; case 2: wb_emitter_send(emitter, msgString0, strlen(msgString0) + 1); wb_robot_step(TIME_STEP); break; case 3: wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Serial receiver should receive %d not %d packets when disabled", 0, queueLength); break; case 4: wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Infra-red receiver should receive %d not %d packets when disabled", 0, queueLength); break; default: break; } // TEST 2: receivers enabled // emitter0 -> msgString2 (serial, channel 2) // emitter1 -> msgString3 (infra-red, channel -1) // emitter2 -> msgString1 (radio, channel -1) wb_robot_step(TIME_STEP); switch (robotID) { case 0: wb_receiver_enable(receiver, TIME_STEP); wb_emitter_send(emitter, msgString2, strlen(msgString2) + 1); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 1, "Radio receiver should receive %d not %d packets when enabled", 1, queueLength); buffer = wb_receiver_get_data(receiver); ts_assert_string_equal(buffer, msgString1, "Radio receiver should receive a packet containing \"%s\" not \"%s\"", msgString1, buffer); wb_receiver_next_packet(receiver); wb_robot_step(TIME_STEP); break; case 1: wb_emitter_send(emitter, msgString3, 
strlen(msgString3) + 1); wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; case 2: wb_emitter_send(emitter, msgString1, strlen(msgString1) + 1); wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; case 3: wb_receiver_enable(receiver, 2 * TIME_STEP); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Serial receiver should receive %d not %d packets one step after being enabled", 0, queueLength); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 1, "Serial receiver should receive %d not %d packets two steps after being enabled", 1, queueLength); buffer = wb_receiver_get_data(receiver); ts_assert_string_equal(buffer, msgString2, "Serial receiver should receive a packet containing \"%s\" not \"%s\"", msgString2, buffer); wb_receiver_next_packet(receiver); break; case 4: wb_receiver_enable(receiver, TIME_STEP); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 1, "Infra-red receiver should receive %d not %d packets when enabled", 1, queueLength); buffer = wb_receiver_get_data(receiver); ts_assert_string_equal(buffer, msgString3, "Infra-red receiver should receive a packet containing \"%s\" not \"%s\"", msgString3, buffer); wb_receiver_next_packet(receiver); wb_robot_step(TIME_STEP); break; default: break; } // TEST 3: Radio receiver channel = 0; serial receiver = 10; infra-red receiver disabled // emitter0 -> msgString2 (serial, channel 2) // emitter1 -> msgString3 (infra-red, channel -1) // emitter2 -> msgString1 (radio, channel -1) wb_robot_step(TIME_STEP); switch (robotID) { case 0: wb_receiver_set_channel(receiver, 0); wb_emitter_send(emitter, msgString2, strlen(msgString2) + 1); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 1, "Radio receiver should receive %d not %d packets when channel set to 0", 1, queueLength); buffer = wb_receiver_get_data(receiver); ts_assert_string_equal(buffer, msgString1, "Radio receiver should receive a packet containing \"%s\" not \"%s\" when channel set to 0", msgString1, buffer); wb_receiver_next_packet(receiver); wb_robot_step(TIME_STEP); break; case 1: wb_emitter_send(emitter, msgString3, strlen(msgString3) + 1); wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; case 2: wb_emitter_send(emitter, msgString1, strlen(msgString1) + 1); wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; case 3: wb_receiver_set_channel(receiver, 10); wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Serial receiver should receive %d not %d packets when channel set to 10", 0, queueLength); wb_receiver_set_channel(receiver, 2); break; case 4: wb_receiver_disable(receiver); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Infra-red receiver should receive %d not %d packets when re-disabled", 0, queueLength); wb_robot_step(TIME_STEP); break; default: break; } // TEST 4: Radio receiver channel = 0; serial receiver = 2; infra-red receiver disabled // emitter2 -> msgString2 (serial, channel 2) // emitter2 -> msgString1 (radio, channel 0) wb_robot_step(TIME_STEP); switch (robotID) { case 0: wb_emitter_send(emitter, msgString2, strlen(msgString2) + 1); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); 
ts_assert_int_equal(queueLength, 1, "Radio receiver should receive %d not %d packets when channel set to 0", 1, queueLength); buffer = wb_receiver_get_data(receiver); ts_assert_string_equal(buffer, msgString1, "Radio receiver should receive a packet containing \"%s\" not \"%s\" when channel set to 0", msgString1, buffer); wb_receiver_next_packet(receiver); wb_robot_step(TIME_STEP); break; case 1: wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; case 2: wb_emitter_set_channel(emitter, 0); wb_emitter_send(emitter, msgString1, strlen(msgString1) + 1); wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; case 3: queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Serial receiver should receive %d not %d packets during same step when the message is sent", 0, queueLength); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal(queueLength, 0, "Serial receiver should receive %d not %d packets one step after the message is sent", 0, queueLength); wb_robot_step(TIME_STEP); queueLength = wb_receiver_get_queue_length(receiver); ts_assert_int_equal( queueLength, 1, "Serial receiver should receive %d not %d packets two steps after the message is sent", 1, queueLength); wb_receiver_next_packet(receiver); break; case 4: wb_robot_step(TIME_STEP); wb_robot_step(TIME_STEP); break; default: break; } wb_robot_step(robotID * TIME_STEP); // avoid collision of simultaneous success messages on stdout ts_send_success(); return EXIT_SUCCESS; }
token_count: 4,534
max_stars_count: 2,753
<reponame>ShankarNara/shogun
import os
import sys

insertions=0
deletions=0
files=0

FROMVER=""
if len(sys.argv)>1:
    FROMVER=sys.argv[1]
TOVER=""
if len(sys.argv)>2:
    TOVER=sys.argv[2]

TMPNAME=os.tmpnam()

VER=""
if len(FROMVER)>0:
    VER=FROMVER+'..'
if len(TOVER)>0:
    if len(VER)==0:
        VER='..'
    VER=VER+TOVER

os.system('git log --oneline --shortstat %s >%s' % (VER,TMPNAME))

for line in file(TMPNAME).readlines():
    if line.find('file') == -1:
        continue
    if line.find('changed') == -1:
        continue
    if line.find('insertion') == -1 and line.find('deletion') == -1:
        continue

    entries=line.split(',')
    for e in entries:
        if e.find('file') != -1:
            files+=int(e.strip().split(' ')[0])
        elif e.find('insertion') != -1:
            insertions+=int(e.strip().split(' ')[0])
        elif e.find('deletion') != -1:
            deletions+=int(e.strip().split(' ')[0])

print "Files changed: %d" % files
print "Insertions: %d" % insertions
print "Deletions: %d" % deletions

os.unlink(TMPNAME)
token_count: 439
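The script above is Python 2 and shells out through a temporary file to sum the `--shortstat` lines of `git log`. A rough Python 3 sketch of the same parsing idea using subprocess instead of a temp file; the revision-range handling is simplified and the line parsing deliberately mirrors the original:

import subprocess
import sys

def diff_stats(rev_range=""):
    # Shortstat lines look like "3 files changed, 10 insertions(+), 2 deletions(-)".
    cmd = ["git", "log", "--oneline", "--shortstat"]
    if rev_range:
        cmd.append(rev_range)
    out = subprocess.run(cmd, capture_output=True, text=True, check=True).stdout
    files = insertions = deletions = 0
    for line in out.splitlines():
        if "file" not in line or "changed" not in line:
            continue
        if "insertion" not in line and "deletion" not in line:
            continue
        for entry in line.split(","):
            if "file" in entry:
                files += int(entry.strip().split(" ")[0])
            elif "insertion" in entry:
                insertions += int(entry.strip().split(" ")[0])
            elif "deletion" in entry:
                deletions += int(entry.strip().split(" ")[0])
    return files, insertions, deletions

if __name__ == "__main__":
    rng = sys.argv[1] if len(sys.argv) > 1 else ""
    f, i, d = diff_stats(rng)
    print("Files changed: %d\nInsertions: %d\nDeletions: %d" % (f, i, d))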
max_stars_count: 841
<filename>resteasy-core/src/main/java/org/jboss/resteasy/core/ResourceMethodInvoker.java package org.jboss.resteasy.core; import org.jboss.resteasy.annotations.Stream; import org.jboss.resteasy.core.interception.jaxrs.PostMatchContainerRequestContext; import org.jboss.resteasy.core.providerfactory.ResteasyProviderFactoryImpl; import org.jboss.resteasy.core.registry.SegmentNode; import org.jboss.resteasy.plugins.server.resourcefactory.JndiComponentResourceFactory; import org.jboss.resteasy.plugins.server.resourcefactory.SingletonResource; import org.jboss.resteasy.resteasy_jaxrs.i18n.LogMessages; import org.jboss.resteasy.specimpl.BuiltResponse; import org.jboss.resteasy.specimpl.BuiltResponseEntityNotBacked; import org.jboss.resteasy.specimpl.ResteasyUriInfo; import org.jboss.resteasy.spi.ApplicationException; import org.jboss.resteasy.spi.AsyncResponseProvider; import org.jboss.resteasy.spi.AsyncStreamProvider; import org.jboss.resteasy.spi.Failure; import org.jboss.resteasy.spi.HttpRequest; import org.jboss.resteasy.spi.HttpResponse; import org.jboss.resteasy.spi.InjectorFactory; import org.jboss.resteasy.spi.MethodInjector; import org.jboss.resteasy.spi.ResourceFactory; import org.jboss.resteasy.spi.ResourceInvoker; import org.jboss.resteasy.spi.ResteasyAsynchronousResponse; import org.jboss.resteasy.spi.ResteasyProviderFactory; import org.jboss.resteasy.spi.UnhandledException; import org.jboss.resteasy.spi.ValueInjector; import org.jboss.resteasy.spi.interception.JaxrsInterceptorRegistry; import org.jboss.resteasy.spi.interception.JaxrsInterceptorRegistryListener; import org.jboss.resteasy.spi.metadata.MethodParameter; import org.jboss.resteasy.spi.metadata.Parameter; import org.jboss.resteasy.spi.metadata.ResourceMethod; import org.jboss.resteasy.spi.statistics.MethodStatisticsLogger; import org.jboss.resteasy.spi.validation.GeneralValidator; import org.jboss.resteasy.spi.validation.GeneralValidatorCDI; import org.jboss.resteasy.statistics.StatisticsControllerImpl; import org.jboss.resteasy.tracing.RESTEasyTracingLogger; import org.jboss.resteasy.util.DynamicFeatureContextDelegate; import jakarta.ws.rs.ProcessingException; import jakarta.ws.rs.Produces; import jakarta.ws.rs.RuntimeType; import jakarta.ws.rs.container.ContainerRequestFilter; import jakarta.ws.rs.container.ContainerResponseFilter; import jakarta.ws.rs.container.DynamicFeature; import jakarta.ws.rs.container.ResourceInfo; import jakarta.ws.rs.core.Configuration; import jakarta.ws.rs.core.GenericEntity; import jakarta.ws.rs.core.MediaType; import jakarta.ws.rs.core.Response; import jakarta.ws.rs.ext.ContextResolver; import jakarta.ws.rs.ext.WriterInterceptor; import jakarta.ws.rs.sse.SseEventSink; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.lang.reflect.Type; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionException; import java.util.concurrent.CompletionStage; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; /** * @author <a href="mailto:<EMAIL>"><NAME></a> * @version $Revision: 1 $ */ public class ResourceMethodInvoker implements ResourceInvoker, JaxrsInterceptorRegistryListener { protected MethodInjector methodInjector; protected InjectorFactory injector; protected ResourceFactory resource; protected ResteasyProviderFactory parentProviderFactory; protected ResteasyProviderFactory resourceMethodProviderFactory; protected 
ResourceMethod method; protected Annotation[] methodAnnotations; protected ContainerRequestFilter[] requestFilters; protected ContainerResponseFilter[] responseFilters; protected WriterInterceptor[] writerInterceptors; protected ConcurrentHashMap<String, AtomicLong> stats = new ConcurrentHashMap<String, AtomicLong>(); protected GeneralValidator validator; protected boolean isValidatable; protected boolean methodIsValidatable; @SuppressWarnings("rawtypes") protected AsyncResponseProvider asyncResponseProvider; @SuppressWarnings("rawtypes") AsyncStreamProvider asyncStreamProvider; protected boolean isSse; protected boolean isAsyncStreamProvider; protected ResourceInfo resourceInfo; protected boolean expectsBody; protected final boolean hasProduces; protected MethodStatisticsLogger methodStatisticsLogger; public ResourceMethodInvoker(final ResourceMethod method, final InjectorFactory injector, final ResourceFactory resource, final ResteasyProviderFactory providerFactory) { this.injector = injector; this.resource = resource; this.parentProviderFactory = providerFactory; this.method = method; this.methodAnnotations = this.method.getAnnotatedMethod().getAnnotations(); methodStatisticsLogger = StatisticsControllerImpl.EMPTY; resourceInfo = new ResourceInfo() { @Override public Method getResourceMethod() { return ResourceMethodInvoker.this.method.getMethod(); } @Override public Class<?> getResourceClass() { return ResourceMethodInvoker.this.method.getResourceClass().getClazz(); } }; Set<DynamicFeature> serverDynamicFeatures = providerFactory.getServerDynamicFeatures(); if (serverDynamicFeatures != null && !serverDynamicFeatures.isEmpty()) { this.resourceMethodProviderFactory = new ResteasyProviderFactoryImpl(RuntimeType.SERVER, providerFactory); for (DynamicFeature feature : serverDynamicFeatures) { feature.configure(resourceInfo, new DynamicFeatureContextDelegate(resourceMethodProviderFactory)); } ((ResteasyProviderFactoryImpl)this.resourceMethodProviderFactory).lockSnapshots(); } else { // if no dynamic features, we don't need to copy the parent. 
this.resourceMethodProviderFactory = providerFactory; } this.methodInjector = injector.createMethodInjector(method, resourceMethodProviderFactory); // hack for when message contentType == null // and @Consumes is on the class expectsBody = this.methodInjector.expectsBody(); requestFilters = resourceMethodProviderFactory.getContainerRequestFilterRegistry().postMatch(method.getResourceClass().getClazz(), method.getAnnotatedMethod()); responseFilters = resourceMethodProviderFactory.getContainerResponseFilterRegistry().postMatch(method.getResourceClass().getClazz(), method.getAnnotatedMethod()); writerInterceptors = resourceMethodProviderFactory.getServerWriterInterceptorRegistry().postMatch(method.getResourceClass().getClazz(), method.getAnnotatedMethod()); // register with parent to listen for redeploy events providerFactory.getContainerRequestFilterRegistry().getListeners().add(this); providerFactory.getContainerResponseFilterRegistry().getListeners().add(this); providerFactory.getServerWriterInterceptorRegistry().getListeners().add(this); ContextResolver<GeneralValidator> resolver = providerFactory.getContextResolver(GeneralValidator.class, MediaType.WILDCARD_TYPE); if (resolver != null) { validator = providerFactory.getContextResolver(GeneralValidator.class, MediaType.WILDCARD_TYPE).getContext(null); } if (validator != null) { Class<?> clazz = null; if (resource != null && resource.getScannableClass() != null) { clazz = resource.getScannableClass(); } else { clazz = getMethod().getDeclaringClass(); } if (resource instanceof JndiComponentResourceFactory) { isValidatable = true; } else { if (validator instanceof GeneralValidatorCDI) { isValidatable = GeneralValidatorCDI.class.cast(validator).isValidatable(clazz, injector); } else { isValidatable = validator.isValidatable(clazz); } } methodIsValidatable = validator.isMethodValidatable(getMethod()); } asyncResponseProvider = resourceMethodProviderFactory.getAsyncResponseProvider(method.getReturnType()); if(asyncResponseProvider == null){ asyncStreamProvider = resourceMethodProviderFactory.getAsyncStreamProvider(method.getReturnType()); } if (asyncStreamProvider != null) { for (Annotation annotation : method.getAnnotatedMethod().getAnnotations()) { if (annotation.annotationType() == Stream.class) { Stream stream = (Stream)annotation; if (stream.value() == Stream.MODE.GENERAL) { this.isAsyncStreamProvider = true; } } } } if (isSseResourceMethod(method)) { isSse = true; method.markAsynchronous(); } hasProduces = method.getMethod().isAnnotationPresent(Produces.class) || method.getMethod().getClass().isAnnotationPresent(Produces.class); } @Override public boolean hasProduces() { return hasProduces; } // spec section 9.3 Server API: // A resource method that injects an SseEventSink and // produces the media type text/event-stream is an SSE resource method. 
private boolean isSseResourceMethod(ResourceMethod resourceMethod) { // First exclusive condition to be a SSE resource method is to only // produce text/event-stream MediaType[] producedMediaTypes = resourceMethod.getProduces(); boolean onlyProduceServerSentEventsMediaType = producedMediaTypes != null && producedMediaTypes.length == 1 && MediaType.SERVER_SENT_EVENTS_TYPE.isCompatible(producedMediaTypes[0]); if (!onlyProduceServerSentEventsMediaType) { return false; } // Second condition to be a SSE resource method is to be injected with a // SseEventSink parameter MethodParameter[] resourceMethodParameters = resourceMethod.getParams(); if (resourceMethodParameters != null) { for (MethodParameter resourceMethodParameter : resourceMethodParameters) { if (Parameter.ParamType.CONTEXT.equals(resourceMethodParameter.getParamType()) && SseEventSink.class.equals(resourceMethodParameter.getType())) { return true; } } } // Resteasy specific: // Or the given application should register a // org.jboss.resteasy.spi.AsyncStreamProvider compatible with resource // method return type and the resource method must not be annotated with // any org.jboss.resteasy.annotations.Stream annotation if (asyncStreamProvider != null) { for (Annotation annotation : resourceMethod.getAnnotatedMethod().getAnnotations()) { if (annotation.annotationType() == Stream.class) { return false; } } return true; } return false; } public void cleanup() { parentProviderFactory.getContainerRequestFilterRegistry().getListeners().remove(this); parentProviderFactory.getContainerResponseFilterRegistry().getListeners().remove(this); parentProviderFactory.getServerWriterInterceptorRegistry().getListeners().remove(this); for (ValueInjector param : methodInjector.getParams()) { if (param instanceof MessageBodyParameterInjector) { parentProviderFactory.getServerReaderInterceptorRegistry().getListeners().remove(param); } } } @Override public void registryUpdated(JaxrsInterceptorRegistry registry, JaxrsInterceptorRegistry.InterceptorFactory factory) { if (registry.getIntf().equals(WriterInterceptor.class)) { JaxrsInterceptorRegistry<WriterInterceptor> serverWriterInterceptorRegistry = this.resourceMethodProviderFactory .getServerWriterInterceptorRegistry(); //Check to prevent StackOverflowError if (registry != serverWriterInterceptorRegistry) { serverWriterInterceptorRegistry.register(factory); } this.writerInterceptors = serverWriterInterceptorRegistry.postMatch(this.method.getResourceClass().getClazz(), this.method.getAnnotatedMethod()); } else if (registry.getIntf().equals(ContainerRequestFilter.class)) { JaxrsInterceptorRegistry<ContainerRequestFilter> containerRequestFilterRegistry = this.resourceMethodProviderFactory .getContainerRequestFilterRegistry(); //Check to prevent StackOverflowError if (registry != containerRequestFilterRegistry) { containerRequestFilterRegistry.register(factory); } this.requestFilters = containerRequestFilterRegistry.postMatch(this.method.getResourceClass().getClazz(), this.method.getAnnotatedMethod()); } else if (registry.getIntf().equals(ContainerResponseFilter.class)) { JaxrsInterceptorRegistry<ContainerResponseFilter> containerResponseFilterRegistry = this.resourceMethodProviderFactory .getContainerResponseFilterRegistry(); //Check to prevent StackOverflowError if (registry != containerResponseFilterRegistry) { containerResponseFilterRegistry.register(factory); } this.responseFilters = containerResponseFilterRegistry.postMatch(this.method.getResourceClass().getClazz(), this.method.getAnnotatedMethod()); } } 
protected void incrementMethodCount(String httpMethod) { AtomicLong stat = stats.get(httpMethod); if (stat == null) { stat = new AtomicLong(); AtomicLong old = stats.putIfAbsent(httpMethod, stat); if (old != null) stat = old; } stat.incrementAndGet(); } /** * Key is httpMethod called. * * @return statistics map */ public Map<String, AtomicLong> getStats() { return stats; } public ContainerRequestFilter[] getRequestFilters() { return requestFilters; } public ContainerResponseFilter[] getResponseFilters() { return responseFilters; } public WriterInterceptor[] getWriterInterceptors() { return writerInterceptors; } public Type getGenericReturnType() { return method.getGenericReturnType(); } public Class<?> getResourceClass() { return method.getResourceClass().getClazz(); } public Class<?> getReturnType() { return method.getReturnType(); } public Annotation[] getMethodAnnotations() { return methodAnnotations; } @Override public Method getMethod() { return method.getMethod(); } public CompletionStage<Object> invokeDryRun(HttpRequest request, HttpResponse response) { Object resource = this.resource.createResource(request, response, resourceMethodProviderFactory); if (resource instanceof CompletionStage) { @SuppressWarnings("unchecked") CompletionStage<Object> stage = (CompletionStage<Object>)resource; return stage .thenCompose(target -> invokeDryRun(request, response, target)); } return invokeDryRun(request, response, resource); } public BuiltResponse invoke(HttpRequest request, HttpResponse response) { Object resource = this.resource.createResource(request, response, resourceMethodProviderFactory); if (resource instanceof CompletionStage) { @SuppressWarnings("unchecked") CompletionStage<Object> stage = (CompletionStage<Object>)resource; return stage .thenApply(target -> invoke(request, response, target)).toCompletableFuture().getNow(null); } return invoke(request, response, resource); } public CompletionStage<Object> invokeDryRun(HttpRequest request, HttpResponse response, Object target) { request.setAttribute(ResourceMethodInvoker.class.getName(), this); incrementMethodCount(request.getHttpMethod()); ResteasyUriInfo uriInfo = (ResteasyUriInfo) request.getUri(); if (method.getPath() != null) { uriInfo.pushMatchedURI(uriInfo.getMatchingPath()); } uriInfo.pushCurrentResource(target); return invokeOnTargetDryRun(request, response, target); } public BuiltResponse invoke(HttpRequest request, HttpResponse response, Object target) { request.setAttribute(ResourceMethodInvoker.class.getName(), this); incrementMethodCount(request.getHttpMethod()); ResteasyUriInfo uriInfo = (ResteasyUriInfo) request.getUri(); if (method.getPath() != null) { uriInfo.pushMatchedURI(uriInfo.getMatchingPath()); } uriInfo.pushCurrentResource(target); return invokeOnTarget(request, response, target); } @SuppressWarnings("unchecked") protected CompletionStage<Object> invokeOnTargetDryRun(HttpRequest request, HttpResponse response, Object target) { ResteasyContext.pushContext(ResourceInfo.class, resourceInfo); // we don't pop so writer interceptors can get at this ResteasyContext.pushContext(Configuration.class, resourceMethodProviderFactory); try { Object rtn = internalInvokeOnTarget(request, response, target); if (rtn != null && rtn instanceof CompletionStage) { return (CompletionStage<Object>)rtn; } else { return CompletableFuture.completedFuture(rtn); } } catch (Failure failure) { throw failure; } catch (ApplicationException appException) { throw appException; } catch (RuntimeException ex) { throw new ProcessingException(ex); 
} } protected BuiltResponse invokeOnTarget(HttpRequest request, HttpResponse response, Object target) { final RESTEasyTracingLogger tracingLogger = RESTEasyTracingLogger.getInstance(request); final long timestamp = tracingLogger.timestamp("METHOD_INVOKE"); final long msTimeStamp = methodStatisticsLogger.timestamp(); try { ResteasyContext.pushContext(ResourceInfo.class, resourceInfo); // we don't pop so writer interceptors can get at this ResteasyContext.pushContext(Configuration.class, resourceMethodProviderFactory); if (requestFilters != null && requestFilters.length > 0) { PostMatchContainerRequestContext requestContext = new PostMatchContainerRequestContext(request, this, requestFilters, () -> invokeOnTargetAfterFilter(request, response, target)); // let it handle the continuation return requestContext.filter(); } else { return invokeOnTargetAfterFilter(request, response, target); } } finally { methodStatisticsLogger.duration(msTimeStamp); if (resource instanceof SingletonResource) { tracingLogger.logDuration("METHOD_INVOKE", timestamp, ((SingletonResource) resource).traceInfo(), method.getMethod()); } else { tracingLogger.logDuration("METHOD_INVOKE", timestamp, resource, method.getMethod()); } } } protected BuiltResponse invokeOnTargetAfterFilter(HttpRequest request, HttpResponse response, Object target) { if (validator != null) { if (isValidatable) { validator.validate(request, target); } if (methodIsValidatable) { request.setAttribute(GeneralValidator.class.getName(), validator); } else if (isValidatable) { validator.checkViolations(request); } } final AsyncResponseConsumer asyncResponseConsumer; if (asyncResponseProvider != null) { asyncResponseConsumer = AsyncResponseConsumer.makeAsyncResponseConsumer(this, asyncResponseProvider); } else if (asyncStreamProvider != null) { asyncResponseConsumer = AsyncResponseConsumer.makeAsyncResponseConsumer(this, asyncStreamProvider); } else { asyncResponseConsumer = null; } try { Object ret = internalInvokeOnTarget(request, response, target); if (ret != null && ret instanceof CompletionStage) { @SuppressWarnings("unchecked") CompletionStage<Object> retStage = (CompletionStage<Object>)ret; CompletionStage<BuiltResponse> stage = retStage .thenApply(rtn -> afterInvoke(request, asyncResponseConsumer, rtn)); // if async isn't finished, return null. 
Container will assume that its a suspended request return stage.toCompletableFuture().getNow(null); } else { return afterInvoke(request, asyncResponseConsumer, CompletionStageHolder.resolve(ret)); } } catch (CompletionException ex) { if(ex.getCause() instanceof RuntimeException) return handleInvocationException(asyncResponseConsumer, request, (RuntimeException) ex.getCause()); SynchronousDispatcher.rethrow(ex.getCause()); // never reached return null; } catch (RuntimeException ex) { return handleInvocationException(asyncResponseConsumer, request, ex); } } private BuiltResponse afterInvoke(HttpRequest request, AsyncResponseConsumer asyncResponseConsumer, Object rtn) { if(asyncResponseConsumer != null) { asyncResponseConsumer.subscribe(rtn); return null; } if (request.getAsyncContext().isSuspended()) { if(method.isAsynchronous()) return null; // resume a sync request that got turned async by filters initializeAsync(request.getAsyncContext().getAsyncResponse()); request.getAsyncContext().getAsyncResponse().resume(rtn); return null; } if (request.wasForwarded()) { return null; } if (!contextOutputStreamWrittenTo() && (rtn == null || method.getReturnType().equals(void.class))) { BuiltResponse build = (BuiltResponse) Response.noContent().build(); build.addMethodAnnotations(getMethodAnnotations()); return build; } if (Response.class.isAssignableFrom(method.getReturnType()) || rtn instanceof Response) { if (!(rtn instanceof BuiltResponse)) { Response r = (Response)rtn; Headers<Object> metadata = new Headers<Object>(); metadata.putAll(r.getMetadata()); rtn = new BuiltResponseEntityNotBacked(r.getStatus(), r.getStatusInfo().getReasonPhrase(), metadata, r.getEntity(), null); } BuiltResponse rtn1 = (BuiltResponse) rtn; rtn1.addMethodAnnotations(getMethodAnnotations()); if (rtn1.getGenericType() == null) { if (getMethod().getReturnType().equals(Response.class)) { rtn1.setGenericType(rtn1.getEntityClass()); } else { rtn1.setGenericType(method.getGenericReturnType()); } } return rtn1; } Response.ResponseBuilder builder = Response.ok(rtn); BuiltResponse jaxrsResponse = (BuiltResponse)builder.build(); if (jaxrsResponse.getGenericType() == null) { if (getMethod().getReturnType().equals(Response.class)) { jaxrsResponse.setGenericType(jaxrsResponse.getEntityClass()); } else { jaxrsResponse.setGenericType(method.getGenericReturnType()); } } jaxrsResponse.addMethodAnnotations(getMethodAnnotations()); return jaxrsResponse; } private BuiltResponse handleInvocationException(AsyncResponseConsumer asyncStreamResponseConsumer, HttpRequest request, RuntimeException ex) { if (asyncStreamResponseConsumer != null) { // WARNING: this can throw if the exception is not mapped by the user, in // which case we haven't completed the connection and called the callbacks try { AsyncResponseConsumer consumer = asyncStreamResponseConsumer; asyncStreamResponseConsumer.internalResume(ex, t -> consumer.complete(ex)); } catch(UnhandledException x) { // make sure we call the callbacks before throwing to the container request.getAsyncContext().getAsyncResponse().completionCallbacks(ex); throw x; } return null; } else if (request.getAsyncContext().isSuspended()) { try { request.getAsyncContext().getAsyncResponse().resume(ex); } catch (Exception e) { LogMessages.LOGGER.errorResumingFailedAsynchOperation(e); } return null; } else { throw ex; } } @SuppressWarnings("unchecked") private Object internalInvokeOnTarget(HttpRequest request, HttpResponse response, Object target) throws Failure, ApplicationException { PostResourceMethodInvokers 
postResourceMethodInvokers = ResteasyContext.getContextData(PostResourceMethodInvokers.class); try { Object methodResponse = this.methodInjector.invoke(request, response, target); CompletionStage<Object> stage = null; if (methodResponse != null && methodResponse instanceof CompletionStage) { stage = (CompletionStage<Object>)methodResponse; return stage .handle((ret, exception) -> { // on success if (exception == null && postResourceMethodInvokers != null) { postResourceMethodInvokers.getInvokers().forEach(e -> e.invoke()); } // finally if (postResourceMethodInvokers != null) { postResourceMethodInvokers.clear(); } if (exception != null) { SynchronousDispatcher.rethrow(exception); // never reached return null; } return ret; }); } else { // on success if (postResourceMethodInvokers != null) { postResourceMethodInvokers.getInvokers().forEach(e -> e.invoke()); } // finally if (postResourceMethodInvokers != null) { postResourceMethodInvokers.clear(); } return methodResponse; } } catch (RuntimeException failure) { if (postResourceMethodInvokers != null) { postResourceMethodInvokers.clear(); } throw failure; } } public void initializeAsync(ResteasyAsynchronousResponse asyncResponse) { asyncResponse.setAnnotations(method.getAnnotatedMethod().getAnnotations()); asyncResponse.setWriterInterceptors(getWriterInterceptors()); asyncResponse.setResponseFilters(getResponseFilters()); if (asyncResponse instanceof ResourceMethodInvokerAwareResponse) { ((ResourceMethodInvokerAwareResponse)asyncResponse).setMethod(this); } } public boolean doesProduce(List<? extends MediaType> accepts) { if (accepts == null || accepts.size() == 0) { //System.out.println("**** no accepts " +" method: " + method); return true; } if (method.getProduces().length == 0) { //System.out.println("**** no produces " +" method: " + method); return true; } for (MediaType accept : accepts) { for (MediaType type : method.getProduces()) { if (type.isCompatible(accept)) { return true; } } } return false; } public boolean doesConsume(MediaType contentType) { boolean matches = false; if (method.getConsumes().length == 0 || (contentType == null && !expectsBody)) return true; if (contentType == null) { contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE; } for (MediaType type : method.getConsumes()) { if (type.isCompatible(contentType)) { matches = true; break; } } return matches; } public MediaType resolveContentType(HttpRequest in, Object entity) { MediaType chosen = (MediaType)in.getAttribute(SegmentNode.RESTEASY_CHOSEN_ACCEPT); if (chosen != null && !chosen.equals(MediaType.WILDCARD_TYPE)) { return chosen; } List<MediaType> accepts = in.getHttpHeaders().getAcceptableMediaTypes(); if (accepts == null || accepts.size() == 0) { if (method.getProduces().length == 0) return MediaType.WILDCARD_TYPE; else return method.getProduces()[0]; } if (method.getProduces().length == 0) { return resolveContentTypeByAccept(accepts, entity); } for (MediaType accept : accepts) { for (MediaType type : method.getProduces()) { if (type.isCompatible(accept)) return type; } } return MediaType.WILDCARD_TYPE; } protected MediaType resolveContentTypeByAccept(List<MediaType> accepts, Object entity) { if (accepts == null || accepts.size() == 0 || entity == null) { return MediaType.WILDCARD_TYPE; } Class<?> clazz = entity.getClass(); Type type = this.method.getGenericReturnType(); if (entity instanceof GenericEntity) { GenericEntity<?> gen = (GenericEntity<?>) entity; clazz = gen.getRawType(); type = gen.getType(); } for (MediaType accept : accepts) { if 
(resourceMethodProviderFactory.getMessageBodyWriter(clazz, type, method.getAnnotatedMethod().getAnnotations(), accept) != null) { return accept; } } return MediaType.WILDCARD_TYPE; } /** * Checks if any bytes were written to a @Context HttpServletResponse * @see ContextParameterInjector for details * Fix for RESTEASY-1721 */ private boolean contextOutputStreamWrittenTo() { for (ValueInjector vi : methodInjector.getParams()) { if (vi instanceof ContextParameterInjector) { return ((ContextParameterInjector) vi).isOutputStreamWasWritten(); } } return false; } public Set<String> getHttpMethods() { return method.getHttpMethods(); } public MediaType[] getProduces() { return method.getProduces(); } public MediaType[] getConsumes() { return method.getConsumes(); } public boolean isSse() { return isSse; } public boolean isAsyncStreamProvider() { return isAsyncStreamProvider; } public void markMethodAsAsync() { method.markAsynchronous(); } public void setMethodStatisticsLogger(MethodStatisticsLogger msLogger) { methodStatisticsLogger = msLogger; } public MethodStatisticsLogger getMethodStatisticsLogger() { return methodStatisticsLogger; } }
token_count: 13,275
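Much of ResourceMethodInvoker above is bookkeeping around filters and async handling; the content-negotiation core (doesProduce, doesConsume, resolveContentType) reduces to compatibility checks between the request's Accept/Content-Type headers and the method's @Produces/@Consumes declarations. A simplified, framework-free sketch of that matching logic; media types here are plain "type/subtype" strings rather than the real JAX-RS MediaType class, so this is an illustration, not the RESTEasy implementation:

def compatible(a, b):
    # Two media types are compatible when each part matches or is a wildcard.
    at, asub = a.split("/")
    bt, bsub = b.split("/")
    return (at == "*" or bt == "*" or at == bt) and (asub == "*" or bsub == "*" or asub == bsub)

def does_produce(produces, accepts):
    # Mirrors the "empty list means anything goes" behaviour above.
    if not accepts or not produces:
        return True
    return any(compatible(p, a) for a in accepts for p in produces)

def resolve_content_type(produces, accepts):
    if not accepts:
        return produces[0] if produces else "*/*"
    for a in accepts:
        for p in produces:
            if compatible(p, a):
                return p
    return "*/*"

print(does_produce(["application/json"], ["text/html", "application/*"]))                      # True
print(resolve_content_type(["application/xml", "application/json"], ["application/json"]))     # application/json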
max_stars_count: 884
{ "manifestName": "SyncAgentCommon", "explanation": "CDM standard entities for 'SyncAgentCommon'", "entities": [ { "type": "LocalEntity", "entityName": "Contact", "entityPath": "Contact.1.0.cdm.json/Contact" }, { "type": "LocalEntity", "entityName": "AttributeValueMap", "entityPath": "AttributeValueMap.1.0.cdm.json/AttributeValueMap" }, { "type": "LocalEntity", "entityName": "AzureFHIRResource", "entityPath": "AzureFHIRResource.1.0.cdm.json/AzureFHIRResource" }, { "type": "LocalEntity", "entityName": "SyncAgentLog", "entityPath": "SyncAgentLog.1.0.cdm.json/SyncAgentLog" }, { "type": "LocalEntity", "entityName": "UpdateServiceAttributeMap", "entityPath": "UpdateServiceAttributeMap.1.0.cdm.json/UpdateServiceAttributeMap" }, { "type": "LocalEntity", "entityName": "UpdateServiceEntityMap", "entityPath": "UpdateServiceEntityMap.1.0.cdm.json/UpdateServiceEntityMap" } ], "jsonSchemaSemanticVersion": "1.0.0", "$schema": "CdmManifest.cdm.json" }
token_count: 480
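The manifest above simply maps entity names to the paths of their .cdm.json definitions. A small sketch that lists those mappings with the standard json module, assuming the manifest has been saved locally under a hypothetical filename:

import json

# "SyncAgentCommon.manifest.cdm.json" is an assumed local filename for illustration.
with open("SyncAgentCommon.manifest.cdm.json", encoding="utf-8") as f:
    manifest = json.load(f)

print(manifest["manifestName"])
for entity in manifest["entities"]:
    # Each entry points at the entity definition inside its own .cdm.json document.
    print(entity["entityName"] + ": " + entity["entityPath"])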
max_stars_count: 1,909
package org.knowm.xchange.globitex.dto;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.Serializable;
import java.util.List;

public class GlobitexErrors implements Serializable {

  @JsonProperty("errors")
  private final List<GlobitexError> errors;

  public GlobitexErrors(@JsonProperty("errors") List<GlobitexError> errors) {
    this.errors = errors;
  }

  public List<GlobitexError> getErrors() {
    return errors;
  }

  @Override
  public String toString() {
    return "GlobitexErrors{" + "errors=" + errors + '}';
  }
}
token_count: 197
max_stars_count: 787
class Solution {
private:
    // Consume `cnt` continuation bytes, i.e. bytes of the form 10xxxxxx (byte >> 6 == 2).
    bool validSuffix(vector<int> &data, int &i, int cnt) {
        while (cnt && i < data.size() && (data[i] >> 6) == 2)
            ++i, --cnt;
        return cnt == 0;
    }

public:
    bool validUtf8(vector<int>& data) {
        for (int i = 0; i < data.size(); ) {
            int n = data[i++];
            if (n >> 7 == 0) {
                // 0xxxxxxx: single-byte character, nothing more to check.
            } else if (n >> 5 == 6) {           // 110xxxxx: two-byte character
                if (!validSuffix(data, i, 1)) return false;
            } else if (n >> 4 == 14) {          // 1110xxxx: three-byte character
                if (!validSuffix(data, i, 2)) return false;
            } else if (n >> 3 == 30) {          // 11110xxx: four-byte character
                if (!validSuffix(data, i, 3)) return false;
            } else {
                return false;                   // invalid leading byte (10xxxxxx or 11111xxx)
            }
        }
        return true;
    }
};
token_count: 402
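The same leading-byte checks, written out in Python for reference: a one-byte character starts with 0xxxxxxx, and an n-byte character (n = 2..4) starts with n ones followed by a zero and must be followed by n-1 continuation bytes of the form 10xxxxxx. Like the C++ solution above, this sketch only validates the byte-level structure, not overlong encodings or code-point ranges:

def valid_utf8(data):
    i = 0
    while i < len(data):
        byte = data[i]
        i += 1
        if byte >> 7 == 0:              # 0xxxxxxx
            continuations = 0
        elif byte >> 5 == 0b110:        # 110xxxxx
            continuations = 1
        elif byte >> 4 == 0b1110:       # 1110xxxx
            continuations = 2
        elif byte >> 3 == 0b11110:      # 11110xxx
            continuations = 3
        else:
            return False
        # Every continuation byte must look like 10xxxxxx.
        for _ in range(continuations):
            if i >= len(data) or data[i] >> 6 != 0b10:
                return False
            i += 1
    return True

print(valid_utf8([197, 130, 1]))   # True: 110xxxxx 10xxxxxx, then 0xxxxxxx
print(valid_utf8([235, 140, 4]))   # False: 1110xxxx expects two continuation bytes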
max_stars_count: 357
/* * Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, without * warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the * License for the specific language governing permissions and limitations * under the License. */ package com.vmware.identity.rest.idm.data.attributes; import com.vmware.identity.rest.idm.data.SchemaObjectMappingDTO; /** * The {@code UserAttributeId} enum contains a set of known attribute mappings for use with * {@link SchemaObjectMappingDTO}. These attributes are tied to the {@link ObjectClass#User} * object class and should be taken from {@link UserAttributeId#getAttributeName()}. * * @author <NAME> */ public enum UserAttributeId { /** * Attribute containing the user's account name. * For example: {@code sAMAccountName} in Active Directory or {@code uid} in OpenLDAP. */ AccountName("UserAttributeAccountName"), /** * Attribute containing the user's common name. * For example: {@code cn} in Active Directory or OpenLDAP. */ CommonName("UserAttributeCommonName"), /** * Attribute containing the user's last name (family name). * For example: {@code sn} in Active Directory or OpenLDAP. */ LastName("UserAttributeLastName"), /** * Attribute containing the user's first name (given name). * For example: {@code givenName} in Active Directory or OpenLDAP. */ FirstName("UserAttributeFirstName"), /** * Attribute containing the user's description. * For example: {@code description} in Active Directory or OpenLDAP. */ Description("UserAttributeDescription"), /** * Attribute containing the user's display name. * For example: {@code displayName} in Active Directory or OpenLDAP. */ DisplayName("UserAttributeDisplayName"), /** * Attribute containing the user's email address. * For example: {@code mail} in Active Directory or OpenLDAP. */ Email("UserAttributeEmail"), /** * Attribute containing the user's object identifier. * For example: {@code objectSid} in Active Directory or {@code entryUUID} in OpenLDAP. */ ObjectId("UserAttributeObjectId"), /** * Attribute containing the user's User Principal Name (UPN). * For example: {@code userPrincipalName} in Active Directory. * * @see <a href="https://www.ietf.org/rfc/rfc0822.txt"> * RFC 882 - Standard for the Format of ARPA Internet Messages * </a> */ PrincipalName("UserAttributePrincipalName"), /** * Attribute containing the user's account control flags. * For example: {@code userAccountControl} in Active Directory or OpenLDAP. */ AcountControl("UserAttributeAcountControl"), /** * Attribute containing the distinguished names of the groups to which the user belongs. * For example: {@code memberOf} in Active Directory. */ MemberOf("UserAttributeMemberOf"), /** * Attribute containing the relative identifier (RID) of the primary group for the user. * For example: {@code primaryGroupID} in Active Directory. */ PrimaryGroupId("UserAttributePrimaryGroupId"), /** * Attribute containing the date and time (UTC) at which the user account was locked out. * For example: {@code lockoutTime} in Active Directory. */ LockoutTime("UserAttributeLockoutTime"), /** * Attribute containing the password settings applied to the user. 
* For example: {@code msDS-ResultantPSO} in Active Directory. */ PasswordSettingsObject("UserAttributePasswordSettingsObject"), /** * Attribute containing the date and time (UTC) that at which the user's * password was last set. * For example: {@code pwdLastSet} in Active Directory. */ PasswordLastSet("UserAttributePwdLastSet"), /** * The user's attribute which is referenced by {@link GroupAttributeId#MembersList}. * For example: {@code dn} in Active Directory or OpenLDAP. */ GroupMembersListLink("UserAttributeGroupMembersListLink"); private String attributeName; /** * Construct a {@code UserAttributeId} with an attribute name. * * @param attributeName the name of the attribute in IDM. */ private UserAttributeId(String attributeName) { this.attributeName = attributeName; } /** * Get the underlying attribute name. * * @return the attribute name. */ public String getAttributeName() { return this.attributeName; } }
token_count: 1,598
max_stars_count: 1,133
/* ------------------------------------------------------------------ */ /* Decimal Floating Point Abstraction Layer (DFPAL) */ /* dfpaltypes.h */ /* ------------------------------------------------------------------ */ /* (c) Copyright IBM Corporation, 2007. All rights reserved. */ /* */ /* This software is made available under the terms of the */ /* ICU License -- ICU 1.8.1 and later. */ /* */ /* The description and User's Guide ("The DFPAL C Library") for */ /* this software is called dfpalugaio.html. This document is */ /* included in this package and also available at: */ /* http://www2.hursley.ibm.com/decimal */ /* */ /* Author: */ /* <NAME> (<EMAIL>) */ /* */ /* Please send comments, suggestions, and corrections to the */ /* the following email address: */ /* <EMAIL> */ /* */ /* Major contribution: */ /* <NAME> (<EMAIL>) */ /* <NAME> (<EMAIL>) */ /* ------------------------------------------------------------------ */ #if !defined (__DFPALTYPES_H__) #define __DFPALTYPES_H__ #if defined(DFPAL_USE_INTTYPES_H) #include <inttypes.h> #elif defined(DFPAL_LOCAL_INTTYPES) /* If your platform does not support inttypes.h or stdint.h, */ /* update following datatype definitions. Following datatypes */ /* are correctly defined for the Windows platform. */ typedef unsigned char uint8_t; typedef signed short int16_t; typedef unsigned short uint16_t; typedef signed int int32_t; typedef unsigned int uint32_t; typedef signed __int64 int64_t; typedef unsigned __int64 uint64_t; #else /* default, stdint.h is the most common case */ #include <stdint.h> #endif #if !defined(FALSE) #define TRUE 1 #define FALSE 0 #endif #if !defined(DECIMAL32) typedef struct{ uint8_t b[4]; } decimal32; #endif #if !defined(DECIMAL64) typedef struct{ uint8_t b[8]; } decimal64; #endif #if !defined(DECIMAL128) typedef struct{ uint8_t b[16]; } decimal128; #endif enum dfpalExeMode { AUTO, /* detect automatically */ PPCHW, /* POWER hardware DFP */ DNSW /* decNumber DFP */ }; typedef uint32_t dfpaltrap_t; typedef uint32_t dfpalflag_t; typedef uint16_t dfpalrnd_t; #if !defined(DFPAL_NO_HW_DFP) typedef long double dfp_quad; #else typedef int64_t dfp_quad; #endif #endif /* #if !defined (__DFPALTYPES_H__) */
token_count: 1,735
max_stars_count: 1,909
<reponame>grmkris/XChange
package org.knowm.xchange.btcmarkets;

import org.knowm.xchange.Exchange;
import org.knowm.xchange.ExchangeFactory;
import org.knowm.xchange.ExchangeSpecification;
import org.knowm.xchange.utils.AuthUtils;

public class ExchangeUtils {

  public static Exchange createExchangeFromProperties() {
    ExchangeSpecification exSpec = new ExchangeSpecification(BTCMarketsExchange.class);
    AuthUtils.setApiAndSecretKey(exSpec);
    return ExchangeFactory.INSTANCE.createExchange(exSpec);
  }
}
token_count: 170
max_stars_count: 1,809
<gh_stars>1000+ package com.d_project.qrcode.web; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.HashMap; import java.util.Map; /** * GIFイメージ(B/W) * @author <NAME> */ class GIFImage { private int width; private int height; private int[] data; public GIFImage(int width, int height) { this.width = width; this.height = height; this.data = new int[width * height]; } public void setPixel(int x, int y, int pixel) { if (x < 0 || width <= x) throw new IllegalArgumentException(); if (y < 0 || height <= y) throw new IllegalArgumentException(); data[y * width + x] = pixel; } public int getPixel(int x, int y) { if (x < 0 || width <= x) throw new IllegalArgumentException(); if (y < 0 || height <= y) throw new IllegalArgumentException(); return data[y * width + x]; } public void write(OutputStream out) throws IOException { //--------------------------------- // GIF Signature out.write("GIF87a".getBytes("ISO-8859-1") ); //--------------------------------- // Screen Descriptor write(out, width); write(out, height); out.write(0x80); // 2bit out.write(0); out.write(0); //--------------------------------- // Global Color Map // black out.write(0x00); out.write(0x00); out.write(0x00); // white out.write(0xff); out.write(0xff); out.write(0xff); //--------------------------------- // Image Descriptor out.write(','); write(out, 0); write(out, 0); write(out, width); write(out, height); out.write(0); //--------------------------------- // Local Color Map //--------------------------------- // Raster Data int lzwMinCodeSize = 2; byte[] raster = getLZWRaster(lzwMinCodeSize); out.write(lzwMinCodeSize); int offset = 0; while (raster.length - offset > 255) { out.write(255); out.write(raster, offset, 255); offset += 255; } out.write(raster.length - offset); out.write(raster, offset, raster.length - offset); out.write(0x00); //--------------------------------- // GIF Terminator out.write(';'); } private byte[] getLZWRaster(int lzwMinCodeSize) throws IOException { int clearCode = 1 << lzwMinCodeSize; int endCode = (1 << lzwMinCodeSize) + 1; int bitLength = lzwMinCodeSize + 1; // Setup LZWTable LZWTable table = new LZWTable(); for (int i = 0; i < clearCode; i++) { table.add(String.valueOf( (char)i) ); } table.add(String.valueOf( (char)clearCode) ); table.add(String.valueOf( (char)endCode) ); ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); BitOutputStream bitOut = new BitOutputStream(byteOut); try { // clear code bitOut.write(clearCode, bitLength); int dataIndex = 0; String s = String.valueOf( (char)data[dataIndex++]); while (dataIndex < data.length) { char c = (char)data[dataIndex++]; if (table.contains(s + c) ) { s = s + c; } else { bitOut.write(table.indexOf(s), bitLength); if (table.size() < 0xfff) { if (table.size() == (1 << bitLength) ) { bitLength++; } table.add(s + c); } s = String.valueOf(c); } } bitOut.write(table.indexOf(s), bitLength); // end code bitOut.write(endCode, bitLength); } finally { bitOut.close(); } return byteOut.toByteArray(); } private static void write(OutputStream out, int i) throws IOException { out.write(i & 0xff); out.write( (i >>> 8) & 0xff); } private static class LZWTable { private Map<String,Integer> map; public LZWTable() { map = new HashMap<String,Integer>(); } public void add(String key) { if (contains(key) ) { throw new IllegalArgumentException("dup key:" + key); } map.put(key, map.size() ); } public int size() { return map.size(); } public int indexOf(String key) { return ( (Integer)map.get(key) 
).intValue(); } public boolean contains(String key) { return map.containsKey(key); } } private static class BitOutputStream { private OutputStream out; private int bitLength; private int bitBuffer; public BitOutputStream(OutputStream out) { this.out = out; this.bitLength = 0; } public void write(int data, int length) throws IOException{ if ( (data >>> length) != 0) { throw new IOException("length over"); } while (bitLength + length >= 8) { out.write(0xff & ( (data << bitLength) | bitBuffer) ); length -= (8 - bitLength); data >>>= (8 - bitLength); bitBuffer = 0; bitLength = 0; } bitBuffer = (data << bitLength) | bitBuffer; bitLength = bitLength + length; } public void flush() throws IOException { if (bitLength > 0) { out.write(bitBuffer); } out.flush(); } public void close() throws IOException { flush(); out.close(); } } }
3,454
1,043
<reponame>JeffMuchine/micro-server<filename>micro-application-register/src/test/java/com/oath/micro/server/application/registry/RegisterEntryTest.java package com.oath.micro.server.application.registry; import static junit.framework.TestCase.assertFalse; import static org.junit.Assert.assertTrue; import java.util.Date; import java.util.Map; import org.junit.Before; import org.junit.Test; import com.oath.micro.server.rest.jackson.JacksonUtil; public class RegisterEntryTest { RegisterEntry entry; @Before public void setUp() throws Exception { entry = RegisterEntry.builder() .port(8080) .hostname("host") .module("module") .context("context") .time(new Date()) .uuid("1") .target("target") .externalPort(9090) .build(); Map<String, String> manifest = entry.getManifest(); manifest.put("Implementation-revision", "a2edfe4bc"); manifest.put("Implementation-Version", "version"); manifest.put("Implementation-Timestamp", "2017_1201"); } @Test public void test() { assertTrue(JacksonUtil.serializeToJson(entry).contains("\"context\":\"context")); } @Test public void matches() throws Exception { RegisterEntry re = new RegisterEntry(); re.getManifest().clear(); assertFalse(entry.matches(re)); re = RegisterEntry.builder().port(8080).externalPort(-1).build(); re.getManifest().clear(); assertTrue(entry.matches(re)); re = RegisterEntry.builder().port(8080).externalPort(9090).build(); re.getManifest().clear(); assertTrue(entry.matches(re)); re = RegisterEntry.builder().port(8080).hostname("host").externalPort(9090).build(); re.getManifest().clear(); assertTrue(entry.matches(re)); re = RegisterEntry.builder().port(8080).hostname("host1").externalPort(9090).build(); re.getManifest().clear(); assertFalse(entry.matches(re)); re = RegisterEntry.builder().port(8080).hostname("host").externalPort(9090).build(); re.getManifest().clear(); re.getManifest().put("Implementation-revision", "a2edfe4bc"); assertTrue(entry.matches(re)); re = RegisterEntry.builder().port(8080).hostname("host").externalPort(9090).build(); re.getManifest().clear(); re.getManifest().put("Implementation-Version", "version1"); assertFalse(entry.matches(re)); } }
1,026
3,655
<gh_stars>1000+ import paddlex as pdx train_analysis = pdx.datasets.analysis.Seg( data_dir='dataset/remote_sensing_seg', file_list='dataset/remote_sensing_seg/train.txt', label_list='dataset/remote_sensing_seg/labels.txt') train_analysis.analysis()
109
387
<filename>src/DlibDotNet.Native/dlib/svm/kernel/polynomial_kernel.h #ifndef _CPP_SVM_KERNEL_POLYNOMIAL_KERNEL_H_ #define _CPP_SVM_KERNEL_POLYNOMIAL_KERNEL_H_ #include "../../export.h" #include <dlib/svm/kernel.h> #include "../../template.h" #include "../../shared.h" using namespace dlib; #pragma region template #define polynomial_kernel_new_template(__TYPE__, error, __ELEMENT_TYPE__, __ROWS__, __COLUMNS__, ...) \ *ret = new dlib::polynomial_kernel<dlib::matrix<__TYPE__, __ROWS__, __COLUMNS__>>();\ #define polynomial_kernel_delete_template(__TYPE__, error, __ELEMENT_TYPE__, __ROWS__, __COLUMNS__, ...) \ auto k = static_cast<dlib::polynomial_kernel<dlib::matrix<__TYPE__, __ROWS__, __COLUMNS__>>*>(kernel);\ delete k;\ #pragma endregion #pragma region polynomial_kernel DLLEXPORT int polynomial_kernel_new(matrix_element_type type, const int templateRows, const int templateColumns, void** ret) { int error = ERR_OK; matrix_numeric_template(type, error, matrix_template_size_column1or0_template, polynomial_kernel_new_template, templateRows, templateColumns, ret); return error; } DLLEXPORT void polynomial_kernel_delete(matrix_element_type type, void* kernel, const int templateRows, const int templateColumns) { int error = ERR_OK; matrix_numeric_template(type, error, matrix_template_size_column1or0_template, polynomial_kernel_delete_template, templateRows, templateColumns, matrix); } #pragma endregion polynomial_kernel #endif
1,185
1,921
import random
from copy import deepcopy


class Node:
    def __init__(self, val):
        self.val = val
        self.prev = None
        self.next = None

    def __hash__(self):
        return hash(self.val)

    def __eq__(self, other):
        return self.val == other.val

    def __repr__(self):
        return str(self.val)


class LRUCache:
    def __init__(self, size):
        self.head = Node(None)
        self.tail = Node(None)
        self.head.next = self.tail
        self.tail.prev = self.head
        self.size = size
        self.recent_nodes = dict()

    def use(self, val):
        if val in self.recent_nodes:
            # Already cached: unlink the node so it can be re-appended
            # at the most-recently-used end of the list.
            used_node = self.recent_nodes[val]
            used_node.prev.next = used_node.next
            used_node.next.prev = used_node.prev
        elif len(self.recent_nodes) == self.size:
            # Cache full: evict the least-recently-used node, which sits
            # right after the head sentinel, before inserting the new one.
            used_node = Node(val)
            lru_node = self.head.next
            del self.recent_nodes[lru_node.val]
            self.head.next = lru_node.next
            lru_node.next.prev = self.head
        else:
            used_node = Node(val)

        before_tail = self.tail.prev
        before_tail.next = used_node
        used_node.next = self.tail
        used_node.prev = before_tail
        self.tail.prev = used_node
        self.recent_nodes[val] = used_node


def count_playlists(song_ids, cache, plays_left):
    if plays_left == 0:
        return 1

    total = 0
    for song_id in song_ids:
        if song_id in cache.recent_nodes:
            continue
        new_cache = deepcopy(cache)
        new_cache.use(song_id)
        total += count_playlists(song_ids, new_cache, plays_left - 1)

    return total


def get_valid_playlists(plays, songs, buffer):
    song_ids = set(range(songs))
    lru_cache = LRUCache(buffer)
    total = count_playlists(song_ids, lru_cache, plays)
    return total


# Tests
assert get_valid_playlists(6, 4, 2) > get_valid_playlists(6, 4, 3)
843
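A quick, hand-checkable sanity check for the playlist counter above (a usage sketch only, not part of the original file; the module name in the import is assumed): with 3 songs, 3 plays and a buffer of 2, no song may repeat until two other songs have been played, so the valid playlists are exactly the 3 * 2 * 1 = 6 orderings of the three distinct songs.

# Usage sketch; "playlists" is a hypothetical module name for the file above.
from playlists import get_valid_playlists

assert get_valid_playlists(3, 3, 2) == 6  # every permutation of the 3 songs is valid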
1,562
<gh_stars>1000+ // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "sandboxed_api/sandbox2/util/minielf.h" #include <cstdint> #include <string> #include <vector> #include "gmock/gmock.h" #include "gtest/gtest.h" #include "absl/algorithm/container.h" #include "sandboxed_api/sandbox2/util/maps_parser.h" #include "sandboxed_api/testing.h" #include "sandboxed_api/util/file_helpers.h" #include "sandboxed_api/util/status_matchers.h" extern "C" void ExportedFunctionName() { // Don't do anything - used to generate a symbol. } namespace file = ::sapi::file; using ::sapi::GetTestSourcePath; using ::sapi::IsOk; using ::testing::Eq; using ::testing::IsTrue; using ::testing::Ne; using ::testing::StrEq; namespace sandbox2 { namespace { TEST(MinielfTest, Chrome70) { SAPI_ASSERT_OK_AND_ASSIGN( ElfFile elf, ElfFile::ParseFromFile( GetTestSourcePath("sandbox2/util/testdata/chrome_grte_header"), ElfFile::kGetInterpreter)); EXPECT_THAT(elf.interpreter(), StrEq("/usr/grte/v4/ld64")); } TEST(MinielfTest, SymbolResolutionWorks) { SAPI_ASSERT_OK_AND_ASSIGN( ElfFile elf, ElfFile::ParseFromFile("/proc/self/exe", ElfFile::kLoadSymbols)); ASSERT_THAT(elf.position_independent(), IsTrue()); // Load /proc/self/maps to take ASLR into account. std::string maps_buffer; ASSERT_THAT( file::GetContents("/proc/self/maps", &maps_buffer, file::Defaults()), IsOk()); SAPI_ASSERT_OK_AND_ASSIGN(std::vector<MapsEntry> maps, ParseProcMaps(maps_buffer)); // Find maps entry that covers this entry. uint64_t function_address = reinterpret_cast<uint64_t>(ExportedFunctionName); auto function_entry = absl::c_find_if(maps, [function_address](const MapsEntry& entry) { return entry.start <= function_address && entry.end > function_address; }); ASSERT_THAT(function_entry, Ne(maps.end())); function_address -= function_entry->start; auto function_symbol = absl::c_find_if(elf.symbols(), [](const ElfFile::Symbol& symbol) { return symbol.name == "ExportedFunctionName"; }); ASSERT_THAT(function_symbol, Ne(elf.symbols().end())); EXPECT_THAT(function_symbol->address, Eq(function_address)); } TEST(MinielfTest, ImportedLibraries) { SAPI_ASSERT_OK_AND_ASSIGN( ElfFile elf, ElfFile::ParseFromFile( GetTestSourcePath("sandbox2/util/testdata/hello_world"), ElfFile::kLoadImportedLibraries)); std::vector<std::string> imported_libraries = {"libc.so.6"}; EXPECT_THAT(elf.imported_libraries(), Eq(imported_libraries)); } } // namespace } // namespace sandbox2
1,209
1,470
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-12-24 09:23 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [("challenges", "0004_challenge_is_disabled")] operations = [ migrations.CreateModel( name="TestEnvironment", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(auto_now_add=True)), ("modified_at", models.DateTimeField(auto_now=True)), ("name", models.CharField(max_length=100)), ("description", models.TextField()), ("leaderboard_public", models.BooleanField(default=False)), ( "start_date", models.DateTimeField( blank=True, null=True, verbose_name="Start Date (UTC)" ), ), ( "end_date", models.DateTimeField( blank=True, null=True, verbose_name="End Date (UTC)" ), ), ("test_annotation", models.FileField(upload_to=b"")), ], options={"db_table": "challenge_test_env"}, ), migrations.RemoveField(model_name="phase", name="challenge"), migrations.AddField( model_name="challenge", name="evaluation_script", field=models.FileField(default=False, upload_to=b""), ), migrations.DeleteModel(name="Phase"), migrations.AddField( model_name="testenvironment", name="challenge", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="challenges.Challenge", ), ), ]
1,203
3,600
<reponame>duxinyu03/moco<filename>moco-core/src/main/java/com/github/dreamhead/moco/RestServer.java<gh_stars>1000+ package com.github.dreamhead.moco; public interface RestServer extends HttpServer { void resource(String name, RestSetting setting, RestSetting... settings); }
93
14,668
// Copyright (c) 2010 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef BASE_THIRD_PARTY_SYMBOLIZE_CONFIG_H_ #define BASE_THIRD_PARTY_SYMBOLIZE_CONFIG_H_ #define GOOGLE_GLOG_DLL_DECL #define GOOGLE_NAMESPACE google #define _START_GOOGLE_NAMESPACE_ namespace google { #define _END_GOOGLE_NAMESPACE_ } #endif // BASE_THIRD_PARTY_SYMBOLIZE_CONFIG_H_
180
841
""" This is the TestRunner module which executes the test programs. """ import os import sys import time import binary_runner import console from blade_util import environ_add_path from blade_util import md5sum from test_scheduler import TestScheduler def _get_ignore_set(): """ """ ignore_env_vars = [ # shell variables 'PWD', '<PASSWORD>', 'SHLVL', 'LC_ALL', 'TST_HACK_BASH_SESSION_ID', # CI variables 'BUILD_URL', 'BUILD_TAG', 'SVN_REVISION', 'BUILD_ID', 'EXECUTOR_NUMBER', 'START_USER', 'EXECUTOR_NUMBER', 'NODE_NAME', 'NODE_LABELS', 'IF_PKG', 'BUILD_NUMBER', 'HUDSON_COOKIE', # ssh variables 'SSH_CLIENT', 'SSH2_CLIENT', # vim variables 'VIM', 'MYVIMRC', 'VIMRUNTIME'] for i in range(30): ignore_env_vars.append('SVN_REVISION_%d' % i) return frozenset(ignore_env_vars) env_ignore_set = _get_ignore_set() def _diff_env(a, b): """Return difference of two environments dict""" seta = set([(k, a[k]) for k in a]) setb = set([(k, b[k]) for k in b]) return (dict(seta - setb), dict(setb - seta)) class TestRunner(binary_runner.BinaryRunner): """TestRunner. """ def __init__(self, targets, options, target_database, direct_targets): """Init method. """ binary_runner.BinaryRunner.__init__(self, targets, options, target_database) self.direct_targets = direct_targets self.inctest_md5_file = '.blade.test.stamp' self.tests_detail_file = './blade_tests_detail' self.inctest_run_list = [] self.last_test_stamp = {} self.last_test_stamp['md5'] = {} self.test_stamp = {} self.test_stamp['md5'] = {} self.valid_inctest_time_interval = 86400 self.tests_run_map = {} self.run_all_reason = '' self.title_str = '=' * 13 self.skipped_tests = [] if not self.options.fulltest: if os.path.exists(self.inctest_md5_file): try: self.last_test_stamp = eval(open(self.inctest_md5_file).read()) except (IOError, SyntaxError): console.warning('error loading incremental test history, will run full test') self.run_all_reason = 'NO_HISTORY' self.test_stamp['testarg'] = md5sum(str(self.options.args)) env_keys = os.environ.keys() env_keys = list(set(env_keys).difference(env_ignore_set)) env_keys.sort() last_test_stamp = {} for env_key in env_keys: last_test_stamp[env_key] = os.environ[env_key] self.test_stamp['env'] = last_test_stamp self.test_stamp['inctest_time'] = time.time() if not self.options.fulltest: if self.test_stamp['testarg'] != ( self.last_test_stamp.get('testarg', None)): self.run_all_reason = 'ARGUMENT' console.info('all tests will run due to test arguments changed') new_env = self.test_stamp['env'] old_env = self.last_test_stamp.get('env', {}) if isinstance(old_env, str): # For old test record old_env = {} if new_env != old_env: self.run_all_reason = 'ENVIRONMENT' console.info('all tests will run due to test environments changed:') (new, old) = _diff_env(new_env, old_env) if new: console.info('new environments: %s' % new) if old: console.info('old environments: %s' % old) this_time = int(round(self.test_stamp['inctest_time'])) last_time = int(round(self.last_test_stamp.get('inctest_time', 0))) interval = this_time - last_time if interval >= self.valid_inctest_time_interval or interval < 0: self.run_all_reason = 'STALE' console.info('all tests will run due to all passed tests are invalid now') else: self.run_all_reason = 'FULLTEST' def _get_test_target_md5sum(self, target): """Get test target md5sum. 
""" related_file_list = [] related_file_data_list = [] test_file_name = os.path.abspath(self._executable(target)) if os.path.exists(test_file_name): related_file_list.append(test_file_name) if target.data['dynamic_link']: target_key = (target.path, target.name) for dep in self.target_database[target_key].expanded_deps: dep_target = self.target_database[dep] if 'cc_library' in dep_target.type: lib_name = 'lib%s.so' % dep_target.name lib_path = os.path.join(self.build_dir, dep_target.path, lib_name) abs_lib_path = os.path.abspath(lib_path) if os.path.exists(abs_lib_path): related_file_list.append(abs_lib_path) for i in target.data['testdata']: if isinstance(i, tuple): data_target = i[0] else: data_target = i if '..' in data_target: continue if data_target.startswith('//'): data_target = data_target[2:] data_target_path = os.path.abspath(data_target) else: data_target_path = os.path.abspath('%s/%s' % ( target.path, data_target)) if os.path.exists(data_target_path): related_file_data_list.append(data_target_path) related_file_list.sort() related_file_data_list.sort() test_target_str = '' test_target_data_str = '' for f in related_file_list: mtime = os.path.getmtime(f) ctime = os.path.getctime(f) test_target_str += str(mtime) + str(ctime) for f in related_file_data_list: mtime = os.path.getmtime(f) ctime = os.path.getctime(f) test_target_data_str += str(mtime) + str(ctime) return md5sum(test_target_str), md5sum(test_target_data_str) def _generate_inctest_run_list(self): """Get incremental test run list. """ for target in self.targets.values(): if target.type != 'cc_test': continue target_key = (target.path, target.name) test_file_name = os.path.abspath(self._executable(target)) self.test_stamp['md5'][test_file_name] = self._get_test_target_md5sum(target) if self.run_all_reason: self.tests_run_map[target_key] = { 'runfile': test_file_name, 'result': '', 'reason': self.run_all_reason, 'costtime': 0} continue if target_key in self.direct_targets: self.inctest_run_list.append(target) self.tests_run_map[target_key] = { 'runfile': test_file_name, 'result': '', 'reason': 'EXPLICIT', 'costtime': 0} continue old_md5sum = self.last_test_stamp['md5'].get(test_file_name, None) new_md5sum = self.test_stamp['md5'][test_file_name] if new_md5sum != old_md5sum: self.inctest_run_list.append(target) reason = '' if isinstance(old_md5sum, tuple): if old_md5sum == (0, 0): reason = 'LAST_FAILED' else: if new_md5sum[0] != old_md5sum[0]: reason = 'BINARY' else: reason = 'TESTDATA' else: reason = 'STALE' self.tests_run_map[target_key] = { 'runfile': test_file_name, 'result': '', 'reason': reason, 'costtime': 0} # Append old md5sum that not existed into new old_keys = set(self.last_test_stamp['md5'].keys()) new_keys = set(self.test_stamp['md5'].keys()) diff_keys = old_keys.difference(new_keys) for key in list(diff_keys): self.test_stamp['md5'][key] = self.last_test_stamp['md5'][key] def _check_inctest_md5sum_file(self): """check the md5sum file size, remove it when it is too large. It is 2G by default. """ if os.path.exists(self.inctest_md5_file): if os.path.getsize(self.inctest_md5_file) > 2 * 1024 * 1024 * 1024: console.warning('Will remove the md5sum file for incremental ' 'test for it is oversized') os.remove(self.inctest_md5_file) def _write_test_history(self): """write md5sum to file. """ f = open(self.inctest_md5_file, 'w') print >> f, str(self.test_stamp) f.close() self._check_inctest_md5sum_file() def _write_tests_detail_map(self): """write the tests detail map for further use. 
""" f = open(self.tests_detail_file, 'w') print >> f, str(self.tests_run_map) f.close() def _show_tests_detail(self): """show the tests detail after scheduling them. """ sort_buf = [] for key in self.tests_run_map: costtime = self.tests_run_map.get(key, {}).get('costtime', 0) sort_buf.append((key, costtime)) sort_buf.sort(key=lambda x: x[1]) if self.tests_run_map: console.info('%s Testing detail %s' % (self.title_str, self.title_str)) for key, costtime in sort_buf: reason = self.tests_run_map.get(key, {}).get('reason', 'UNKNOWN') result = self.tests_run_map.get(key, {}).get('result', 'INTERRUPTED') if 'SIG' in result: result = 'with %s' % result console.info('%s:%s triggered by %s, exit(%s), cost %.2f s' % ( key[0], key[1], reason, result, costtime), prefix=False) def _finish_tests(self): """finish some work before return from runner. """ self._write_test_history() if self.options.show_details: self._write_tests_detail_map() if not self.run_all_reason: self._show_skipped_tests_detail() self._show_skipped_tests_summary() self._show_tests_detail() elif not self.run_all_reason: self._show_skipped_tests_summary() def _show_skipped_tests_detail(self): """show tests skipped. """ if not self.skipped_tests: return self.skipped_tests.sort() console.info('skipped tests') for target_key in self.skipped_tests: print '%s:%s' % (target_key[0], target_key[1]) def _show_skipped_tests_summary(self): """show tests skipped summary. """ console.info('%d tests skipped when doing incremental test' % len(self.skipped_tests)) console.info('to run all tests, please specify --full-test argument') def run(self): """Run all the cc_test target programs. """ failed_targets = [] self._generate_inctest_run_list() tests_run_list = [] for target in self.targets.values(): if target.type != 'cc_test': continue if (not self.run_all_reason) and target not in self.inctest_run_list: if not target.data.get('always_run'): self.skipped_tests.append((target.path, target.name)) continue self._prepare_env(target) cmd = [os.path.abspath(self._executable(target))] cmd += self.options.args sys.stdout.flush() # make sure output before scons if redirected test_env = dict(os.environ) environ_add_path(test_env, 'LD_LIBRARY_PATH', self._runfiles_dir(target)) if console.color_enabled: test_env['GTEST_COLOR'] = 'yes' else: test_env['GTEST_COLOR'] = 'no' test_env['GTEST_OUTPUT'] = 'xml' test_env['HEAPCHECK'] = target.data.get('heap_check', '') tests_run_list.append((target, self._runfiles_dir(target), test_env, cmd)) concurrent_jobs = 0 concurrent_jobs = self.options.test_jobs scheduler = TestScheduler(tests_run_list, concurrent_jobs, self.tests_run_map) scheduler.schedule_jobs() self._clean_env() console.info('%s Testing Summary %s' % (self.title_str, self.title_str)) console.info('Run %d test targets' % scheduler.num_of_run_tests) failed_targets = scheduler.failed_targets if failed_targets: console.error('%d tests failed:' % len(failed_targets)) for target in failed_targets: print '%s:%s, exit code: %s' % ( target.path, target.name, target.data['test_exit_code']) test_file_name = os.path.abspath(self._executable(target)) # Do not skip failed test by default if test_file_name in self.test_stamp['md5']: self.test_stamp['md5'][test_file_name] = (0, 0) console.info('%d tests passed' % ( scheduler.num_of_run_tests - len(failed_targets))) self._finish_tests() return 1 else: console.info('All tests passed!') self._finish_tests() return 0
7,389
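The _diff_env helper in the module above drives the "test environments changed" message; a minimal illustration (assuming the function is imported from that module, whose import path is a guess here) of how it splits the differing entries into new-only and old-only dictionaries:

# Illustration of _diff_env; the module path is assumed for the example.
from test_runner import _diff_env

old_env = {'PATH': '/usr/bin', 'CC': 'gcc'}
new_env = {'PATH': '/usr/bin', 'CC': 'clang', 'LANG': 'C'}
new_only, old_only = _diff_env(new_env, old_env)
assert new_only == {'CC': 'clang', 'LANG': 'C'}
assert old_only == {'CC': 'gcc'}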
1,042
{ "name": "@doabit/semantic-ui-sass", "version": "2.4.5", "description": "semantic-ui-sass npm package", "main": "src/semantic-ui.js", "files": [ "src/**/*" ], "scripts": { "prepublishOnly": "rm -rf src && yarn copy:js && yarn copy:css && yarn copy:assets && node ./script.js", "copy:js": "cp -R app/assets/javascripts/semantic-ui src", "copy:css": "cp -R app/assets/stylesheets/semantic-ui src/scss", "copy:assets": "mkdir src/assets && cp -R app/assets/fonts/semantic-ui src/assets/fonts && cp -R app/assets/images/semantic-ui src/assets/images" }, "devDependencies": {}, "repository": { "type": "git", "url": "git+https://github.com/doabit/semantic-ui-sass.git" }, "keywords": [ "semantic-ui-sass" ], "author": "doabit", "license": "MIT", "bugs": { "url": "https://github.com/doabit/semantic-ui-sass/issues" }, "homepage": "https://github.com/doabit/semantic-ui-sass#readme" }
403
515
package com.bj58.argo.thirdparty.jetty;

/* ------------------------------------------------------------ */
/**
 * UTF-8 StringBuffer.
 *
 * This class wraps a standard {@link java.lang.StringBuffer} and provides methods to append
 * UTF-8 encoded bytes, that are converted into characters.
 *
 * This class is stateful and up to 4 calls to {@link #append(byte)} may be needed before
 * a character is appended to the string buffer.
 *
 * The UTF-8 decoding is done by this class and no additional buffers or Readers are used.
 * The UTF-8 code was inspired by http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
 */
public class Utf8StringBuffer extends Utf8Appendable
{
    final StringBuffer _buffer;

    public Utf8StringBuffer()
    {
        super(new StringBuffer());
        _buffer = (StringBuffer)_appendable;
    }

    public Utf8StringBuffer(int capacity)
    {
        super(new StringBuffer(capacity));
        _buffer = (StringBuffer)_appendable;
    }

    @Override
    public int length()
    {
        return _buffer.length();
    }

    @Override
    public void reset()
    {
        super.reset();
        _buffer.setLength(0);
    }

    public StringBuffer getStringBuffer()
    {
        checkState();
        return _buffer;
    }

    @Override
    public String toString()
    {
        checkState();
        return _buffer.toString();
    }
}
492
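The Jetty-derived buffer above decodes UTF-8 incrementally, holding on to the bytes of a partial multi-byte sequence (up to 4 calls to append(byte)) before a character is emitted. The same idea can be illustrated with Python's standard incremental decoder; this is a conceptual sketch only, not the Java API:

# Conceptual illustration using Python's codecs module, not the Jetty class.
import codecs

decoder = codecs.getincrementaldecoder('utf-8')()
out = ''
for b in '\u20ac'.encode('utf-8'):        # EURO SIGN is the 3 bytes 0xE2 0x82 0xAC
    out += decoder.decode(bytes([b]))     # returns '' until the sequence completes
assert out == '\u20ac'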
2,177
# -*- coding: utf-8 -*- import re from pprint import pprint import sys def parse(text): tag = None task = text m = re.match(r'#(?P<tag>[^ ]+)$', text, re.IGNORECASE) if m is not None: tag = m.group('tag') task = "" else: m = re.match(r'#(?P<tag>[^ ]+?) ', text, re.IGNORECASE) if m is not None: tag = m.group('tag') task = text.replace('#' + tag + ' ', "") else: m = re.match(r'.*? #(?P<tag>[^# ]+?)$', text, re.IGNORECASE) if m is not None: tag = m.group('tag') task = text.replace(' #' + tag, "") return { 'tag' : tag, 'task': task, 'rating' : None, 'pinned' : False } def showparse(): pprint(parse(sys.argv[1])) if __name__ == "__main__": showparse()
341
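A short usage sketch for the tag parser above (the module name in the import is assumed): the first regex handles a bare leading tag, the second a leading tag followed by text, and the third a trailing tag.

# Usage sketch; "tagparse" is a hypothetical module name for the script above.
from tagparse import parse

assert parse('#inbox') == {'tag': 'inbox', 'task': '', 'rating': None, 'pinned': False}
assert parse('buy milk #groceries') == {'tag': 'groceries', 'task': 'buy milk',
                                        'rating': None, 'pinned': False}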
521
/* ERASE for dynamic arrays of objects: destruct elements and free the array if array is static (fixed length) then perform a clear operation only */ #include "fb.h" FBCALL int fb_ArrayEraseObj( FBARRAY *array, FB_DEFCTOR ctor, FB_DEFCTOR dtor ) { if( array->flags & FBARRAY_FLAGS_FIXED_LEN ) { fb_ArrayClearObj( array, ctor, dtor ); } else { if( dtor ) fb_ArrayDestructObj( array, dtor ); fb_ArrayErase( array ); } return fb_ErrorSetNum( FB_RTERROR_OK ); }
184
8,027
<filename>test/com/facebook/buck/core/rules/impl/PathReferenceRule.java /* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.core.rules.impl; import com.facebook.buck.core.build.buildable.context.BuildableContext; import com.facebook.buck.core.build.context.BuildContext; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.rules.BuildRule; import com.facebook.buck.core.sourcepath.ExplicitBuildTargetSourcePath; import com.facebook.buck.core.sourcepath.SourcePath; import com.facebook.buck.io.filesystem.ProjectFilesystem; import com.facebook.buck.step.Step; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSortedSet; import java.nio.file.Path; import java.util.SortedSet; /** * A build rule that returns a specific {@link SourcePath} as its output. The specific path is * provided during initialization. */ public class PathReferenceRule extends AbstractBuildRule { private final Path source; public PathReferenceRule( BuildTarget buildTarget, ProjectFilesystem projectFilesystem, Path source) { super(buildTarget, projectFilesystem); this.source = source; } @Override public SortedSet<BuildRule> getBuildDeps() { return ImmutableSortedSet.of(); } @Override public ImmutableList<Step> getBuildSteps( BuildContext context, BuildableContext buildableContext) { return ImmutableList.of(); } @Override public SourcePath getSourcePathToOutput() { return ExplicitBuildTargetSourcePath.of(getBuildTarget(), source); } }
616
6,240
<reponame>lukec611/closure-compiler
/*
 * Copyright 2020 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import java.io.File;
import java.text.MessageFormat;

/** A utility to abstract platform specific logic so it could be super-sourced for Web. */
final class Platform {

  static String getFileSeperator() {
    return File.separator;
  }

  static boolean isThreadInterrupted() {
    // Note that this clears interruption. Thread.isInterrupted should be preferred instead.
    return Thread.interrupted();
  }

  static long freeMemory() {
    return Runtime.getRuntime().freeMemory();
  }

  static long totalMemory() {
    return Runtime.getRuntime().totalMemory();
  }

  static String formatMessage(String message, String... arguments) {
    // Note that MessageFormat removes single quotes, in many cases including intended ones.
    // Consider moving to a simpler formatting version like the Web one.
    return MessageFormat.format(message, (Object[]) arguments);
  }

  private Platform() {}
}
426
1,444
package mage.cards.w; import java.util.UUID; import mage.abilities.effects.common.search.SearchLibraryPutOnLibraryEffect; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.filter.StaticFilters; import mage.target.common.TargetCardInLibrary; /** * * @author Plopman */ public final class WorldlyTutor extends CardImpl { public WorldlyTutor(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.INSTANT}, "{G}"); // Search your library for a creature card and reveal that card. Shuffle your library, then put the card on top of it. this.getSpellAbility().addEffect(new SearchLibraryPutOnLibraryEffect(new TargetCardInLibrary(StaticFilters.FILTER_CARD_CREATURE), true, true)); } private WorldlyTutor(final WorldlyTutor card) { super(card); } @Override public WorldlyTutor copy() { return new WorldlyTutor(this); } }
333
2,151
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_CHROMEOS_LOGIN_LOGIN_MANAGER_TEST_H_ #define CHROME_BROWSER_CHROMEOS_LOGIN_LOGIN_MANAGER_TEST_H_ #include <string> #include "base/macros.h" #include "chrome/browser/chromeos/login/mixin_based_browser_test.h" #include "chrome/browser/chromeos/login/test/https_forwarder.h" #include "chrome/browser/chromeos/login/test/js_checker.h" #include "google_apis/gaia/fake_gaia.h" class AccountId; namespace content { class WebContents; } // namespace content namespace chromeos { class UserContext; // Base class for Chrome OS out-of-box/login WebUI tests. // If no special configuration is done launches out-of-box WebUI. // To launch login UI use PRE_* test that will register user(s) and mark // out-of-box as completed. // Guarantees that WebUI has been initialized by waiting for // NOTIFICATION_LOGIN_OR_LOCK_WEBUI_VISIBLE notification. class LoginManagerTest : public MixinBasedBrowserTest { public: explicit LoginManagerTest(bool should_launch_browser); ~LoginManagerTest() override; // Overridden from InProcessBrowserTest. void TearDownOnMainThread() override; void SetUpCommandLine(base::CommandLine* command_line) override; void SetUpOnMainThread() override; void SetUp() override; // Registers the user with the given |user_id| on the device. // This method should be called in PRE_* test. // TODO(dzhioev): Add the ability to register users without a PRE_* test. void RegisterUser(const AccountId& account_id); // Set expected credentials for next login attempt. void SetExpectedCredentials(const UserContext& user_context); // Tries to login with the credentials in |user_context|. The return value // indicates whether the login attempt succeeded. bool TryToLogin(const UserContext& user_context); // Tries to add the user identified and authenticated by |user_context| to the // session. The return value indicates whether the attempt succeeded. This // method does the same as TryToLogin() but doesn't verify that the new user // has become the active user. bool AddUserToSession(const UserContext& user_context); // Log in user with |user_id|. User should be registered using RegisterUser(). void LoginUser(const AccountId& account_id); // Add user with |user_id| to session. void AddUser(const AccountId& user_id); // Executes given JS |expression| in |web_contents_| and checks // that it is true. void JSExpect(const std::string& expression); content::WebContents* web_contents() { return web_contents_; } test::JSChecker& js_checker() { return js_checker_; } static std::string GetGaiaIDForUserID(const std::string& user_id); // For obviously consumer users (that have e.g. @gmail.com e-mail) policy // fetching code is skipped. This code is executed only for users that may be // enterprise users. Thus if you derive from this class and don't need // policies, please use @gmail.com e-mail for login. But if you need policies // for your test, you must use e-mail addresses that a) have a potentially // enterprise domain and b) have been registered with |fake_gaia_|. // For your convenience, the e-mail addresses for users that have been set up // in this way are provided below. 
static const char kEnterpriseUser1[]; static const char kEnterpriseUser1GaiaId[]; static const char kEnterpriseUser2[]; static const char kEnterpriseUser2GaiaId[]; protected: FakeGaia fake_gaia_; HTTPSForwarder gaia_https_forwarder_; private: void InitializeWebContents(); void set_web_contents(content::WebContents* web_contents) { web_contents_ = web_contents; } bool should_launch_browser_; content::WebContents* web_contents_; test::JSChecker js_checker_; DISALLOW_COPY_AND_ASSIGN(LoginManagerTest); }; } // namespace chromeos #endif // CHROME_BROWSER_CHROMEOS_LOGIN_LOGIN_MANAGER_TEST_H_
1,221
310
<reponame>dreeves/usesthis { "name": "Lumix DMC-GH1", "description": "A 14 megapixel micro four thirds camera.", "url": "https://en.wikipedia.org/wiki/Panasonic_Lumix_DMC-GH1" }
75
5,169
{ "name": "MEVHorizontalContacts", "version": "0.1.0", "summary": "A horizontal collection view layout.", "description": "TODO: Add long description of the pod here.", "homepage": "https://github.com/manuelescrig/MEVHorizontalContacts", "license": "MIT", "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/manuelescrig/MEVHorizontalContacts.git", "tag": "0.1.0" }, "social_media_url": "https://twitter.com/manuelescrig", "platforms": { "ios": "8.0" }, "requires_arc": true, "source_files": "MEVHorizontalContacts/Classes/**/*", "frameworks": "UIKit" }
243
852
/** measure branch sizes * * */ #include "PerfTools/EdmEvent/interface/EdmEventSize.h" #include <boost/program_options.hpp> #include <string> #include <iostream> #include <fstream> #include <TROOT.h> #include <TSystem.h> #include <TError.h> #include "FWCore/FWLite/interface/FWLiteEnabler.h" static const char* const kHelpOpt = "help"; static const char* const kHelpCommandOpt = "help,h"; static const char* const kDataFileOpt = "data-file"; static const char* const kDataFileCommandOpt = "data-file,d"; static const char* const kTreeNameOpt = "tree-name"; static const char* const kTreeNameCommandOpt = "tree-name,n"; static const char* const kOutputOpt = "output"; static const char* const kOutputCommandOpt = "output,o"; static const char* const kAutoLoadOpt = "auto-loader"; static const char* const kAutoLoadCommandOpt = "auto-loader,a"; static const char* const kPlotOpt = "plot"; static const char* const kPlotCommandOpt = "plot,p"; static const char* const kSavePlotOpt = "save-plot"; static const char* const kSavePlotCommandOpt = "save-plot,s"; static const char* const kPlotTopOpt = "plot-top"; static const char* const kPlotTopCommandOpt = "plot-top,t"; static const char* const kVerboseOpt = "verbose"; static const char* const kVerboseCommandOpt = "verbose,v"; static const char* const kAlphabeticOrderOpt = "alphabetic-order"; static const char* const kAlphabeticOrderCommandOpt = "alphabetic-order,A"; static const char* const kFormatNamesOpt = "format-names"; static const char* const kFormatNamesCommandOpt = "format-names,F"; int main(int argc, char* argv[]) { using namespace boost::program_options; using namespace std; string programName(argv[0]); string descString(programName); descString += " [options] "; descString += "data_file \nAllowed options"; options_description desc(descString); // clang-format off desc.add_options()(kHelpCommandOpt, "produce help message")(kAutoLoadCommandOpt, "automatic library loading (avoid root warnings)")( kDataFileCommandOpt, value<string>(), "data file")( kTreeNameCommandOpt, value<string>(), "tree name (default \"Events\")")( kOutputCommandOpt, value<string>(), "output file")(kAlphabeticOrderCommandOpt, "sort by alphabetic order (default: sort by size)")( kFormatNamesCommandOpt, "format product name as \"product:label (type)\" (default: use full branch name)")( kPlotCommandOpt, value<string>(), "produce a summary plot")( kPlotTopCommandOpt, value<int>(), "plot only the <arg> top size branches")( kSavePlotCommandOpt, value<string>(), "save plot into root file <arg>")(kVerboseCommandOpt, "verbose printout"); // clang-format on positional_options_description p; p.add(kDataFileOpt, -1); variables_map vm; try { store(command_line_parser(argc, argv).options(desc).positional(p).run(), vm); notify(vm); } catch (const error&) { return 7000; } if (vm.count(kHelpOpt)) { cout << desc << std::endl; return 0; } if (!vm.count(kDataFileOpt)) { cerr << programName << ": no data file given" << endl; return 7001; } gROOT->SetBatch(); if (vm.count(kAutoLoadOpt) != 0) { gSystem->Load("libFWCoreFWLite"); FWLiteEnabler::enable(); } else gErrorIgnoreLevel = kError; bool verbose = vm.count(kVerboseOpt) > 0; std::string fileName = vm[kDataFileOpt].as<string>(); std::string treeName = "Events"; if (vm.count(kTreeNameOpt)) treeName = vm[kTreeNameOpt].as<string>(); perftools::EdmEventSize me; try { me.parseFile(fileName, treeName); } catch (perftools::EdmEventSize::Error const& error) { std::cerr << programName << ":" << error.descr << std::endl; return error.code; } if 
(vm.count(kFormatNamesOpt)) me.formatNames(); if (vm.count(kAlphabeticOrderOpt)) me.sortAlpha(); if (verbose) { std::cout << std::endl; me.dump(std::cout); std::cout << std::endl; } if (vm.count(kOutputOpt)) { std::ofstream of(vm[kOutputOpt].as<std::string>().c_str()); me.dump(of); of << std::endl; } bool plot = (vm.count(kPlotOpt) > 0); bool save = (vm.count(kSavePlotOpt) > 0); if (plot || save) { std::string plotName; std::string histName; if (plot) plotName = vm[kPlotOpt].as<string>(); if (save) histName = vm[kSavePlotOpt].as<string>(); int top = 0; if (vm.count(kPlotTopOpt) > 0) top = vm[kPlotTopOpt].as<int>(); me.produceHistos(plotName, histName, top); } return 0; }
1,920
2,671
from .inplace_abn import bn
10
2,338
<filename>clang-tools-extra/clang-tidy/bugprone/TooSmallLoopVariableCheck.cpp<gh_stars>1000+ //===--- TooSmallLoopVariableCheck.cpp - clang-tidy -----------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// #include "TooSmallLoopVariableCheck.h" #include "clang/AST/ASTContext.h" #include "clang/ASTMatchers/ASTMatchFinder.h" using namespace clang::ast_matchers; namespace clang { namespace tidy { namespace bugprone { static constexpr llvm::StringLiteral LoopName = llvm::StringLiteral("forLoopName"); static constexpr llvm::StringLiteral LoopVarName = llvm::StringLiteral("loopVar"); static constexpr llvm::StringLiteral LoopVarCastName = llvm::StringLiteral("loopVarCast"); static constexpr llvm::StringLiteral LoopUpperBoundName = llvm::StringLiteral("loopUpperBound"); static constexpr llvm::StringLiteral LoopIncrementName = llvm::StringLiteral("loopIncrement"); TooSmallLoopVariableCheck::TooSmallLoopVariableCheck(StringRef Name, ClangTidyContext *Context) : ClangTidyCheck(Name, Context), MagnitudeBitsUpperLimit(Options.get("MagnitudeBitsUpperLimit", 16U)) {} void TooSmallLoopVariableCheck::storeOptions( ClangTidyOptions::OptionMap &Opts) { Options.store(Opts, "MagnitudeBitsUpperLimit", MagnitudeBitsUpperLimit); } /// The matcher for loops with suspicious integer loop variable. /// /// In this general example, assuming 'j' and 'k' are of integral type: /// \code /// for (...; j < 3 + 2; ++k) { ... } /// \endcode /// The following string identifiers are bound to these parts of the AST: /// LoopVarName: 'j' (as a VarDecl) /// LoopVarCastName: 'j' (after implicit conversion) /// LoopUpperBoundName: '3 + 2' (as an Expr) /// LoopIncrementName: 'k' (as an Expr) /// LoopName: The entire for loop (as a ForStmt) /// void TooSmallLoopVariableCheck::registerMatchers(MatchFinder *Finder) { StatementMatcher LoopVarMatcher = expr( ignoringParenImpCasts(declRefExpr(to(varDecl(hasType(isInteger())))))) .bind(LoopVarName); // We need to catch only those comparisons which contain any integer cast. StatementMatcher LoopVarConversionMatcher = traverse( TK_AsIs, implicitCastExpr(hasImplicitDestinationType(isInteger()), has(ignoringParenImpCasts(LoopVarMatcher))) .bind(LoopVarCastName)); // We are interested in only those cases when the loop bound is a variable // value (not const, enum, etc.). StatementMatcher LoopBoundMatcher = expr(ignoringParenImpCasts(allOf(hasType(isInteger()), unless(integerLiteral()), unless(hasType(isConstQualified())), unless(hasType(enumType()))))) .bind(LoopUpperBoundName); // We use the loop increment expression only to make sure we found the right // loop variable. StatementMatcher IncrementMatcher = expr(ignoringParenImpCasts(hasType(isInteger()))).bind(LoopIncrementName); Finder->addMatcher( forStmt( hasCondition(anyOf( binaryOperator(hasOperatorName("<"), hasLHS(LoopVarConversionMatcher), hasRHS(LoopBoundMatcher)), binaryOperator(hasOperatorName("<="), hasLHS(LoopVarConversionMatcher), hasRHS(LoopBoundMatcher)), binaryOperator(hasOperatorName(">"), hasLHS(LoopBoundMatcher), hasRHS(LoopVarConversionMatcher)), binaryOperator(hasOperatorName(">="), hasLHS(LoopBoundMatcher), hasRHS(LoopVarConversionMatcher)))), hasIncrement(IncrementMatcher)) .bind(LoopName), this); } /// Returns the magnitude bits of an integer type. 
static unsigned calcMagnitudeBits(const ASTContext &Context, const QualType &IntExprType) { assert(IntExprType->isIntegerType()); return IntExprType->isUnsignedIntegerType() ? Context.getIntWidth(IntExprType) : Context.getIntWidth(IntExprType) - 1; } /// Calculate the upper bound expression's magnitude bits, but ignore /// constant like values to reduce false positives. static unsigned calcUpperBoundMagnitudeBits(const ASTContext &Context, const Expr *UpperBound, const QualType &UpperBoundType) { // Ignore casting caused by constant values inside a binary operator. // We are interested in variable values' magnitude bits. if (const auto *BinOperator = dyn_cast<BinaryOperator>(UpperBound)) { const Expr *RHSE = BinOperator->getRHS()->IgnoreParenImpCasts(); const Expr *LHSE = BinOperator->getLHS()->IgnoreParenImpCasts(); QualType RHSEType = RHSE->getType(); QualType LHSEType = LHSE->getType(); if (!RHSEType->isIntegerType() || !LHSEType->isIntegerType()) return 0; bool RHSEIsConstantValue = RHSEType->isEnumeralType() || RHSEType.isConstQualified() || isa<IntegerLiteral>(RHSE); bool LHSEIsConstantValue = LHSEType->isEnumeralType() || LHSEType.isConstQualified() || isa<IntegerLiteral>(LHSE); // Avoid false positives produced by two constant values. if (RHSEIsConstantValue && LHSEIsConstantValue) return 0; if (RHSEIsConstantValue) return calcMagnitudeBits(Context, LHSEType); if (LHSEIsConstantValue) return calcMagnitudeBits(Context, RHSEType); return std::max(calcMagnitudeBits(Context, LHSEType), calcMagnitudeBits(Context, RHSEType)); } return calcMagnitudeBits(Context, UpperBoundType); } void TooSmallLoopVariableCheck::check(const MatchFinder::MatchResult &Result) { const auto *LoopVar = Result.Nodes.getNodeAs<Expr>(LoopVarName); const auto *UpperBound = Result.Nodes.getNodeAs<Expr>(LoopUpperBoundName)->IgnoreParenImpCasts(); const auto *LoopIncrement = Result.Nodes.getNodeAs<Expr>(LoopIncrementName)->IgnoreParenImpCasts(); // We matched the loop variable incorrectly. if (LoopVar->getType() != LoopIncrement->getType()) return; QualType LoopVarType = LoopVar->getType(); QualType UpperBoundType = UpperBound->getType(); ASTContext &Context = *Result.Context; unsigned LoopVarMagnitudeBits = calcMagnitudeBits(Context, LoopVarType); unsigned UpperBoundMagnitudeBits = calcUpperBoundMagnitudeBits(Context, UpperBound, UpperBoundType); if (UpperBoundMagnitudeBits == 0) return; if (LoopVarMagnitudeBits > MagnitudeBitsUpperLimit) return; if (LoopVarMagnitudeBits < UpperBoundMagnitudeBits) diag(LoopVar->getBeginLoc(), "loop variable has narrower type %0 than " "iteration's upper bound %1") << LoopVarType << UpperBoundType; } } // namespace bugprone } // namespace tidy } // namespace clang
2,978
551
{ "image": "https://user-images.githubusercontent.com/91655303/142618933-d2dead94-c735-47ac-92d8-025c23a2c07d.png", "issueId": 851, "name": "<NAME>", "username": "krishguptadev" }
88
384
<gh_stars>100-1000
package yifeiyuan.practice.practicedemos.viewdrager;

import android.content.Context;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.FrameLayout;

/**
 * Created by alanchen on 15/7/29.
 */
public class SwipeBackFrameLayout extends FrameLayout {

    public static final String TAG = SwipeBackFrameLayout.class.getSimpleName();

    private ViewDragHelper mDragHelper;

    public SwipeBackFrameLayout(Context context) {
        this(context, null);
    }

    public SwipeBackFrameLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public SwipeBackFrameLayout(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    private int mLastdx;

    private void init() {
        mDragHelper = ViewDragHelper.create(this, 1f, new ViewDragHelper.Callback() {
            @Override
            public boolean tryCaptureView(View child, int pointerId) {
                return false;
            }

            @Override
            public void onEdgeTouched(int edgeFlags, int pointerId) {
                super.onEdgeTouched(edgeFlags, pointerId);
                mDragHelper.captureChildView(mContentView, pointerId);
            }

            @Override
            public int getViewHorizontalDragRange(View child) {
                return 1;
            }

            @Override
            public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
                super.onViewPositionChanged(changedView, left, top, dx, dy);
                Log.d(TAG, "onViewPositionChanged() called with left = [" + left + "], top = [" + top + "], dx = [" + dx + "], dy = [" + dy + "]");
                //0.0 - 1.0
                //Notice: this offset could be exposed through a callback interface so all kinds of fancy effects can be built on it
                float alpha = (float) (left*1.0/mDividerWidth);
                mDividerView.setAlpha(alpha);
            }

            @Override
            public int clampViewPositionHorizontal(View child, int left, int dx) {
//                Log.d(TAG, "clampViewPositionHorizontal() called with dx = [" + dx + "]");
                mLastdx = dx;
                int newLeft = Math.min(mDividerWidth, Math.max(left,0));
                return newLeft;
            }

            @Override
            public void onViewReleased(View releasedChild, float xvel, float yvel) {
                // dx > 0 means the user wants to dismiss the view
                if (mLastdx>0){
                    if (mDividerWidth != releasedChild.getLeft()) {
                        mDragHelper.settleCapturedViewAt(mDividerWidth, releasedChild.getTop());
                        invalidate();
                    } else {
                        if (mCallback != null) {
                            mCallback.onShouldFinish();
                        }
                    }
                }else{
                    if (mDividerWidth != 0) {
                        mDragHelper.settleCapturedViewAt(0, releasedChild.getTop());
                        invalidate();
                    }
                }
            }

            @Override
            public void onViewDragStateChanged(int state) {
                super.onViewDragStateChanged(state);
                if (mDragHelper.getViewDragState()==ViewDragHelper.STATE_IDLE&&mCallback != null&&mDividerWidth==mContentView.getLeft()&&mLastdx>0) {
                    mCallback.onShouldFinish();
                }
            }
        });
        mDragHelper.setEdgeTrackingEnabled(ViewDragHelper.EDGE_LEFT);
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        return mDragHelper.shouldInterceptTouchEvent(ev);
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        mDragHelper.processTouchEvent(event);
        return true;
    }

    private View mDividerView;
    private View mContentView;

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        mDividerView = getChildAt(0);
        mDividerView.setAlpha(0f);
        mContentView = getChildAt(1);
    }

    private int mDividerWidth;

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        mDividerWidth = mDividerView.getWidth();
    }

    //Notice: this is also called once right after the view is initialized
    @Override
    public void computeScroll() {
        super.computeScroll();
//        Log.d(TAG, "computeScroll() called with " + "");
        if (mDragHelper.continueSettling(true)) {
            invalidate();
        }
    }

    public void setCallback(Callback mCallback){
        this.mCallback = mCallback;
    }

    private Callback mCallback;

    public interface Callback{
        void onShouldFinish();
    }
}
2,377
2,542
<gh_stars>1000+ // ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #include "Ra.Stdafx.h" using namespace std; using namespace Common; using namespace Reliability; using namespace ReconfigurationAgentComponent; ReplicaCloseMode const ReplicaCloseMode::None(ReplicaCloseModeName::None); ReplicaCloseMode const ReplicaCloseMode::Close(ReplicaCloseModeName::Close); ReplicaCloseMode const ReplicaCloseMode::Drop(ReplicaCloseModeName::Drop); ReplicaCloseMode const ReplicaCloseMode::DeactivateNode(ReplicaCloseModeName::DeactivateNode); ReplicaCloseMode const ReplicaCloseMode::Abort(ReplicaCloseModeName::Abort); ReplicaCloseMode const ReplicaCloseMode::Restart(ReplicaCloseModeName::Restart); ReplicaCloseMode const ReplicaCloseMode::Delete(ReplicaCloseModeName::Delete); ReplicaCloseMode const ReplicaCloseMode::Deactivate(ReplicaCloseModeName::Deactivate); ReplicaCloseMode const ReplicaCloseMode::ForceAbort(ReplicaCloseModeName::ForceAbort); ReplicaCloseMode const ReplicaCloseMode::ForceDelete(ReplicaCloseModeName::ForceDelete); ReplicaCloseMode const ReplicaCloseMode::QueuedDelete(ReplicaCloseModeName::QueuedDelete); ReplicaCloseMode const ReplicaCloseMode::AppHostDown(ReplicaCloseModeName::AppHostDown); ReplicaCloseMode const ReplicaCloseMode::Obliterate(ReplicaCloseModeName::Obliterate); void ReplicaCloseMode::WriteTo(Common::TextWriter& w, Common::FormatOptions const &) const { w << name_; } std::string ReplicaCloseMode::AddField(Common::TraceEvent & traceEvent, std::string const & name) { string format = "{0}"; size_t index = 0; traceEvent.AddEventField<ReplicaCloseModeName::Trace>(format, name + ".name", index); return format; } void ReplicaCloseMode::FillEventData(Common::TraceEventContext & context) const { context.WriteCopy<uint>(static_cast<uint>(name_)); }
574
348
<gh_stars>100-1000 {"nom":"Porchères","circ":"11ème circonscription","dpt":"Gironde","inscrits":714,"abs":371,"votants":343,"blancs":21,"nuls":8,"exp":314,"res":[{"nuance":"REM","nom":"<NAME>","voix":174},{"nuance":"FN","nom":"<NAME>","voix":140}]}
100
372
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.retail.v2alpha.model; /** * The input config source for products. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Retail API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudRetailV2alphaProductInputConfig extends com.google.api.client.json.GenericJson { /** * BigQuery input source. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaBigQuerySource bigQuerySource; /** * Google Cloud Storage location for the input content. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaGcsSource gcsSource; /** * The Inline source for the input content for products. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudRetailV2alphaProductInlineSource productInlineSource; /** * BigQuery input source. * @return value or {@code null} for none */ public GoogleCloudRetailV2alphaBigQuerySource getBigQuerySource() { return bigQuerySource; } /** * BigQuery input source. * @param bigQuerySource bigQuerySource or {@code null} for none */ public GoogleCloudRetailV2alphaProductInputConfig setBigQuerySource(GoogleCloudRetailV2alphaBigQuerySource bigQuerySource) { this.bigQuerySource = bigQuerySource; return this; } /** * Google Cloud Storage location for the input content. * @return value or {@code null} for none */ public GoogleCloudRetailV2alphaGcsSource getGcsSource() { return gcsSource; } /** * Google Cloud Storage location for the input content. * @param gcsSource gcsSource or {@code null} for none */ public GoogleCloudRetailV2alphaProductInputConfig setGcsSource(GoogleCloudRetailV2alphaGcsSource gcsSource) { this.gcsSource = gcsSource; return this; } /** * The Inline source for the input content for products. * @return value or {@code null} for none */ public GoogleCloudRetailV2alphaProductInlineSource getProductInlineSource() { return productInlineSource; } /** * The Inline source for the input content for products. 
* @param productInlineSource productInlineSource or {@code null} for none */ public GoogleCloudRetailV2alphaProductInputConfig setProductInlineSource(GoogleCloudRetailV2alphaProductInlineSource productInlineSource) { this.productInlineSource = productInlineSource; return this; } @Override public GoogleCloudRetailV2alphaProductInputConfig set(String fieldName, Object value) { return (GoogleCloudRetailV2alphaProductInputConfig) super.set(fieldName, value); } @Override public GoogleCloudRetailV2alphaProductInputConfig clone() { return (GoogleCloudRetailV2alphaProductInputConfig) super.clone(); } }
1,164
837
/********************************************************************* * Software License Agreement (BSD License) * * Copyright (c) 2019, University of Stuttgart * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of the University of Stuttgart nor the names * of its contributors may be used to endorse or promote products * derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. *********************************************************************/ /* Author: <NAME> */ const double edgeWidth = 0.1; const double runtime_limit = 10; const double memory_limit = 4096 * 4096; const int run_count = 10; unsigned int curDim = 8; int numberPlanners = 0; #include "QuotientSpacePlanningCommon.h" #include <ompl/base/spaces/RealVectorStateSpace.h> #include <ompl/geometric/planners/informedtrees/BITstar.h> #include <ompl/geometric/planners/est/BiEST.h> #include <ompl/geometric/planners/est/EST.h> #include <ompl/geometric/planners/est/ProjEST.h> #include <ompl/geometric/planners/fmt/BFMT.h> #include <ompl/geometric/planners/fmt/FMT.h> #include <ompl/geometric/planners/kpiece/BKPIECE1.h> #include <ompl/geometric/planners/kpiece/KPIECE1.h> #include <ompl/geometric/planners/kpiece/LBKPIECE1.h> #include <ompl/geometric/planners/pdst/PDST.h> // #include <ompl/geometric/planners/prm/LazyPRM.h> //TODO: segfault? 
#include <ompl/geometric/planners/prm/LazyPRMstar.h> #include <ompl/geometric/planners/prm/PRM.h> #include <ompl/geometric/planners/prm/PRMstar.h> #include <ompl/geometric/planners/prm/SPARS.h> #include <ompl/geometric/planners/prm/SPARStwo.h> #include <ompl/geometric/planners/quotientspace/QRRT.h> #include <ompl/geometric/planners/rrt/BiTRRT.h> #include <ompl/geometric/planners/rrt/InformedRRTstar.h> #include <ompl/geometric/planners/rrt/LazyRRT.h> #include <ompl/geometric/planners/rrt/LBTRRT.h> #include <ompl/geometric/planners/rrt/RRTConnect.h> #include <ompl/geometric/planners/rrt/RRT.h> #include <ompl/geometric/planners/rrt/RRTsharp.h> #include <ompl/geometric/planners/rrt/RRTstar.h> #include <ompl/geometric/planners/rrt/RRTXstatic.h> #include <ompl/geometric/planners/rrt/SORRTstar.h> #include <ompl/geometric/planners/rrt/TRRT.h> #include <ompl/geometric/planners/sbl/pSBL.h> #include <ompl/geometric/planners/sbl/SBL.h> #include <ompl/geometric/planners/sst/SST.h> #include <ompl/geometric/planners/stride/STRIDE.h> #include <ompl/tools/benchmark/Benchmark.h> #include <ompl/util/String.h> #include <boost/math/constants/constants.hpp> #include <boost/range/irange.hpp> #include <boost/range/algorithm_ext/push_back.hpp> #include <boost/format.hpp> #include <fstream> // Only states near some edges of a hypercube are valid. The valid edges form a // narrow passage from (0,...,0) to (1,...,1). A state s is valid if there exists // a k s.t. (a) 0<=s[k]<=1, (b) for all i<k s[i]<=edgeWidth, and (c) for all i>k // s[i]>=1-edgewidth. class HyperCubeValidityChecker : public ob::StateValidityChecker { public: HyperCubeValidityChecker(const ob::SpaceInformationPtr &si, int dimension) : ob::StateValidityChecker(si), dimension_(dimension) { si->setStateValidityCheckingResolution(0.001); } bool isValid(const ob::State *state) const override { const auto *s = static_cast<const ob::RealVectorStateSpace::StateType *>(state); bool foundMaxDim = false; for (int i = dimension_ - 1; i >= 0; i--) if (!foundMaxDim) { if ((*s)[i] > edgeWidth) foundMaxDim = true; } else if ((*s)[i] < (1. - edgeWidth)) return false; return true; } protected: int dimension_; }; static unsigned int numberRuns{0}; void PostRunEvent(const ob::PlannerPtr &planner, ot::Benchmark::RunProperties &run) { static unsigned int pid = 0; ob::SpaceInformationPtr si = planner->getSpaceInformation(); ob::ProblemDefinitionPtr pdef = planner->getProblemDefinition(); unsigned int states = boost::lexical_cast<int>(run["graph states INTEGER"]); double time = boost::lexical_cast<double>(run["time REAL"]); double memory = boost::lexical_cast<double>(run["memory REAL"]); bool solved = (time < runtime_limit); std::cout << "Run " << pid << "/" << numberRuns << " [" << planner->getName() << "] " << (solved ? "solved" : "FAILED") << "(time: " << time << ", states: " << states << ", memory: " << memory << ")" << std::endl; std::cout << std::string(80, '-') << std::endl; pid++; } // Note: Number of all simplifications is // unsigned int numberSimplifications = std::pow(2, curDim - 1); // But here we will only create three simplifications, the trivial one, the // discrete one and a two-step simplifications, which we found worked well in // this experiment. You can experiment with finding better simplifications. 
// std::cout << "dimension: " << curDim << " simplifications:" << numberSimplifications << std::endl; std::vector<std::vector<int>> getHypercubeAdmissibleProjections(int dim) { std::vector<std::vector<int>> projections; // trivial: just configuration space // discrete: use all admissible projections std::vector<int> trivial{dim}; std::vector<int> discrete; boost::push_back(discrete, boost::irange(2, dim + 1)); std::vector<int> twoStep; boost::push_back(twoStep, boost::irange(2, dim + 1, 2)); if (twoStep.back() != dim) twoStep.push_back(dim); projections.push_back(trivial); projections.push_back(discrete); projections.push_back(twoStep); auto last = std::unique(projections.begin(), projections.end()); projections.erase(last, projections.end()); // std::cout << "Projections for dim " << dim << std::endl; // for(unsigned int k = 0; k < projections.size(); k++){ // std::vector<int> pk = projections.at(k); // std::cout << k << ": "; // for(unsigned int j = 0; j < pk.size(); j++){ // std::cout << pk.at(j) << (j<pk.size()-1?",":""); // } // std::cout << std::endl; // } return projections; } void addPlanner(ompl::tools::Benchmark &benchmark, const ompl::base::PlannerPtr &planner, double range) { ompl::base::ParamSet &params = planner->params(); if (params.hasParam(std::string("range"))) params.setParam(std::string("range"), ompl::toString(range)); benchmark.addPlanner(planner); numberPlanners++; } ob::PlannerPtr GetQRRT(std::vector<int> sequenceLinks, ob::SpaceInformationPtr si) { // ompl::msg::setLogLevel(ompl::msg::LOG_DEV2); std::vector<ob::SpaceInformationPtr> si_vec; for (unsigned int k = 0; k < sequenceLinks.size() - 1; k++) { int links = sequenceLinks.at(k); auto spaceK(std::make_shared<ompl::base::RealVectorStateSpace>(links)); ompl::base::RealVectorBounds bounds(links); bounds.setLow(0.); bounds.setHigh(1.); spaceK->setBounds(bounds); auto siK = std::make_shared<ob::SpaceInformation>(spaceK); siK->setStateValidityChecker(std::make_shared<HyperCubeValidityChecker>(siK, links)); spaceK->setup(); si_vec.push_back(siK); } si_vec.push_back(si); auto planner = std::make_shared<og::QRRT>(si_vec); std::string qName = "QuotientSpaceRRT["; for (unsigned int k = 0; k < sequenceLinks.size() - 1; k++) { int links = sequenceLinks.at(k); qName += std::to_string(links) + ","; } qName += std::to_string(si->getStateDimension()); qName += "]"; std::cout << qName << std::endl; planner->setName(qName); return planner; } int main(int argc, char **argv) { if (argc > 1) { curDim = std::atoi(argv[1]); } numberPlanners = 0; double range = edgeWidth * 0.5; auto space(std::make_shared<ompl::base::RealVectorStateSpace>(curDim)); ompl::base::RealVectorBounds bounds(curDim); ompl::geometric::SimpleSetup ss(space); ob::SpaceInformationPtr si = ss.getSpaceInformation(); ob::ProblemDefinitionPtr pdef = ss.getProblemDefinition(); ompl::base::ScopedState<> start(space), goal(space); bounds.setLow(0.); bounds.setHigh(1.); space->setBounds(bounds); ss.setStateValidityChecker(std::make_shared<HyperCubeValidityChecker>(si, curDim)); for (unsigned int i = 0; i < curDim; ++i) { start[i] = 0.; goal[i] = 1.; } ss.setStartAndGoalStates(start, goal); ot::Benchmark benchmark(ss, "HyperCube"); benchmark.addExperimentParameter("num_dims", "INTEGER", std::to_string(curDim)); //############################################################################ // Load All Planner //############################################################################ std::vector<std::vector<int>> admissibleProjections = 
getHypercubeAdmissibleProjections(curDim); for (unsigned int k = 0; k < admissibleProjections.size(); k++) { std::vector<int> proj = admissibleProjections.at(k); ob::PlannerPtr quotientSpacePlannerK = GetQRRT(proj, si); addPlanner(benchmark, quotientSpacePlannerK, range); } addPlanner(benchmark, std::make_shared<og::BITstar>(si), range); addPlanner(benchmark, std::make_shared<og::EST>(si), range); addPlanner(benchmark, std::make_shared<og::BiEST>(si), range); addPlanner(benchmark, std::make_shared<og::ProjEST>(si), range); addPlanner(benchmark, std::make_shared<og::FMT>(si), range); addPlanner(benchmark, std::make_shared<og::BFMT>(si), range); addPlanner(benchmark, std::make_shared<og::KPIECE1>(si), range); addPlanner(benchmark, std::make_shared<og::BKPIECE1>(si), range); addPlanner(benchmark, std::make_shared<og::LBKPIECE1>(si), range); addPlanner(benchmark, std::make_shared<og::PDST>(si), range); addPlanner(benchmark, std::make_shared<og::PRM>(si), range); addPlanner(benchmark, std::make_shared<og::PRMstar>(si), range); addPlanner(benchmark, std::make_shared<og::LazyPRMstar>(si), range); addPlanner(benchmark, std::make_shared<og::SPARS>(si), range); addPlanner(benchmark, std::make_shared<og::SPARStwo>(si), range); addPlanner(benchmark, std::make_shared<og::RRT>(si), range); addPlanner(benchmark, std::make_shared<og::RRTConnect>(si), range); addPlanner(benchmark, std::make_shared<og::RRTsharp>(si), range); addPlanner(benchmark, std::make_shared<og::RRTstar>(si), range); addPlanner(benchmark, std::make_shared<og::RRTXstatic>(si), range); addPlanner(benchmark, std::make_shared<og::LazyRRT>(si), range); addPlanner(benchmark, std::make_shared<og::InformedRRTstar>(si), range); addPlanner(benchmark, std::make_shared<og::TRRT>(si), range); addPlanner(benchmark, std::make_shared<og::BiTRRT>(si), range); addPlanner(benchmark, std::make_shared<og::LBTRRT>(si), range); addPlanner(benchmark, std::make_shared<og::SORRTstar>(si), range); addPlanner(benchmark, std::make_shared<og::SBL>(si), range); addPlanner(benchmark, std::make_shared<og::SST>(si), range); addPlanner(benchmark, std::make_shared<og::STRIDE>(si), range); //############################################################################ printEstimatedTimeToCompletion(numberPlanners, run_count, runtime_limit); ot::Benchmark::Request request; request.maxTime = runtime_limit; request.maxMem = memory_limit; request.runCount = run_count; request.simplify = false; request.displayProgress = false; numberRuns = numberPlanners * run_count; benchmark.setPostRunEvent(std::bind(&PostRunEvent, std::placeholders::_1, std::placeholders::_2)); benchmark.benchmark(request); benchmark.saveResultsToFile(boost::str(boost::format("hypercube_%i.log") % curDim).c_str()); printBenchmarkResults(benchmark); return 0; }
4,938
2,151
/* * This file is part of Wireless Display Software for Linux OS * * Copyright (C) 2014 Intel Corporation. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA */ #ifndef MIRAC_GST_SINK_HPP #define MIRAC_GST_SINK_HPP #include <gst/gst.h> #include <string> class MiracGstSink { public: MiracGstSink(std::string hostname, int port); ~MiracGstSink (); void Play(); void Pause(); bool IsPaused() const; void Teardown(); int sink_udp_port(); private: bool IsInState(GstState state) const; GstElement* gst_elem; guint bus_watch_id; }; #endif
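A minimal, hypothetical usage sketch for the MiracGstSink class declared above. The header file name, address, port value, and call order are assumptions for illustration only; they are not taken from the project's own examples.

#include <iostream>
#include "mirac-gst-sink.hpp" // assumed header file name for MiracGstSink

int main()
{
    // Construct the GStreamer-based WFD sink for a given host/port pair.
    MiracGstSink sink("127.0.0.1", 0); // assumption: 0 lets the sink choose its own UDP port

    std::cout << "sink UDP port: " << sink.sink_udp_port() << std::endl;

    sink.Play();            // start rendering the incoming stream
    if (sink.IsPaused())
        sink.Play();        // resume if the pipeline reported PAUSED

    sink.Teardown();        // stop and release the pipeline
    return 0;
}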
411
1,467
{ "version": "2.13.64", "date": "2020-07-29", "entries": [ { "type": "feature", "category": "Amazon Elastic Compute Cloud", "description": "Adding support to target EC2 On-Demand Capacity Reservations within an AWS Resource Group to launch EC2 instances." }, { "type": "feature", "category": "Amazon EC2 Container Registry", "description": "This release adds support for encrypting the contents of your Amazon ECR repository with customer master keys (CMKs) stored in AWS Key Management Service." }, { "type": "feature", "category": "AWS Resource Groups", "description": "Resource Groups released a new feature that enables you to create a group with an associated configuration that specifies how other AWS services interact with the group. There are two new operations `GroupResources` and `UngroupResources` to work on a group with a configuration. In this release, you can associate EC2 Capacity Reservations with a resource group. Resource Groups also added a new request parameter `Group` to replace `GroupName` for all existing operations." }, { "type": "feature", "category": "AWS Cloud Map", "description": "Added new attribute AWS_EC2_INSTANCE_ID for RegisterInstance API" }, { "type": "feature", "category": "Amazon GuardDuty", "description": "GuardDuty now supports S3 Data Events as a configurable data source type. This feature expands GuardDuty's monitoring scope to include S3 data plane operations, such as GetObject and PutObject. This data source is optional and can be enabled or disabled at anytime. Accounts already using GuardDuty must first enable the new feature to use it; new accounts will be enabled by default. GuardDuty masters can configure this data source for individual member accounts and GuardDuty masters associated through AWS Organizations can automatically enable the data source in member accounts." }, { "type": "feature", "category": "Amazon Kinesis Firehose", "description": "This release includes a new Kinesis Data Firehose feature that supports data delivery to Https endpoint and to partners. You can now use Kinesis Data Firehose to ingest real-time data and deliver to Https endpoint and partners in a serverless, reliable, and salable manner." }, { "type": "feature", "category": "AWS SDK for Java v2", "description": "Updated service endpoint metadata." } ] }
908
1,040
<reponame>wenhao-fly/LTScrollView<filename>OCExample/OCExample/LTPageView/Controller/LTPageViewTestOneVC.h // // LTPageViewTestOneVC.h // OCExample // // Created by 高刘通 on 2018/6/28. // Copyright © 2018 LT. All rights reserved. // #import <UIKit/UIKit.h> @interface LTPageViewTestOneVC : UIViewController @property(assign, nonatomic) NSInteger totalCount; @end

143
310
{ "name": "Wealthfront", "description": "An automated investment service.", "url": "https://www.wealthfront.com/" }
39
580
// test that perf of benchmark is better than perf of baseline_benchmark (or suite baseline if nullptr) times q // returns false on fail bool test_regression(const picobench::report& report, const char* suite_name, const char* benchmark, double q = 1, const char* baseline_benchmark = nullptr) { auto suite = report.find_suite(suite_name); if (!suite) throw std::runtime_error("Can't find suite"); auto bl = baseline_benchmark ? suite->find_benchmark(baseline_benchmark) : suite->find_baseline(); if (!bl) throw std::runtime_error("Can't find baseline"); auto bm = suite->find_benchmark(benchmark); if (!bm) throw std::runtime_error("Can't find benchmark"); if (bl->data.size() != bm->data.size()) throw std::runtime_error("Can't compare benchmarks"); if (!baseline_benchmark) baseline_benchmark = "baseline"; bool success = true; for (size_t i = 0; i < bl->data.size(); ++i) { auto& bld = bl->data[i]; auto& bmd = bm->data[i]; if (bld.dimension != bmd.dimension) throw std::runtime_error("Can't compare benchmark dimensions"); if (double(bld.total_time_ns) * q <= double(bmd.total_time_ns)) { cerr << benchmark << " is not faster than " << baseline_benchmark << " x " << q << " @ " << bld.dimension << "\n"; success = false; } } return success; }
575
3,673
// ---------------------------------------------------------------------------- // - Open3D: www.open3d.org - // ---------------------------------------------------------------------------- // The MIT License (MIT) // // Copyright (c) 2018-2021 www.open3d.org // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. // ---------------------------------------------------------------------------- #pragma once #include <vector> #include "open3d/core/Tensor.h" #include "open3d/core/nns/FaissIndex.h" #include "open3d/core/nns/FixedRadiusIndex.h" #include "open3d/core/nns/KnnIndex.h" #include "open3d/core/nns/NanoFlannIndex.h" #include "open3d/utility/Optional.h" namespace open3d { namespace core { namespace nns { /// \class NearestNeighborSearch /// /// \brief A Class for nearest neighbor search. class NearestNeighborSearch { public: /// Constructor. /// /// \param dataset_points Dataset points for constructing search index. Must /// be 2D, with shape {n, d}. NearestNeighborSearch(const Tensor &dataset_points) : dataset_points_(dataset_points){}; ~NearestNeighborSearch(); NearestNeighborSearch(const NearestNeighborSearch &) = delete; NearestNeighborSearch &operator=(const NearestNeighborSearch &) = delete; public: /// Set index for knn search. /// /// \return Returns true if building index success, otherwise false. bool KnnIndex(); /// Set index for multi-radius search. /// /// \return Returns true if building index success, otherwise false. bool MultiRadiusIndex(); /// Set index for fixed-radius search. /// /// \param radius optional radius parameter. required for gpu fixed radius /// index. \return Returns true if building index success, otherwise false. bool FixedRadiusIndex(utility::optional<double> radius = {}); /// Set index for hybrid search. /// /// \return Returns true if building index success, otherwise false. bool HybridIndex(utility::optional<double> radius = {}); /// Perform knn search. /// /// \param query_points Query points. Must be 2D, with shape {n, d}. /// \param knn Number of neighbors to search per query point. /// \return Pair of Tensors, (indices, distances): /// - indices: Tensor of shape {n, knn}, with dtype Int32. /// - distances: Tensor of shape {n, knn}, same dtype with query_points. /// The distances are squared L2 distances. std::pair<Tensor, Tensor> KnnSearch(const Tensor &query_points, int knn); /// Perform fixed radius search. All query points share the same radius. /// /// \param query_points Data points for querying. Must be 2D, with shape {n, /// d}. 
/// \param radius Radius. /// \return Tuple of Tensors, (indices, distances, num_neighbors): /// - indices: Tensor of shape {total_number_of_neighbors,}, with dtype /// Int32. /// - distances: Tensor of shape {total_number_of_neighbors,}, same dtype /// with query_points. The distances are squared L2 distances. /// - num_neighbors: Tensor of shape {n,}, with dtype Int32. std::tuple<Tensor, Tensor, Tensor> FixedRadiusSearch( const Tensor &query_points, double radius, bool sort = true); /// Perform multi-radius search. Each query point has an independent radius. /// /// \param query_points Query points. Must be 2D, with shape {n, d}. /// \param radii Radii of query points. Each query point has one radius. /// Must be 1D, with shape {n,}. /// \return Tuple of Tensors, (indices, distances, num_neighbors): /// - indices: Tensor of shape {total_number_of_neighbors,}, with dtype /// Int32. /// - distances: Tensor of shape {total_number_of_neighbors,}, same dtype /// with query_points. The distances are squared L2 distances. /// - num_neighbors: Tensor of shape {n,}, with dtype Int32. std::tuple<Tensor, Tensor, Tensor> MultiRadiusSearch( const Tensor &query_points, const Tensor &radii); /// Perform hybrid search. /// /// \param query_points Data points for querying. Must be 2D, with shape {n, /// d}. /// \param radius Radius. /// \param max_knn Maximum number of neighbors to search per query. /// \return Tuple of Tensors, (indices, distances, counts): /// - indices: Tensor of shape {n, knn}, with dtype Int32. /// - distances: Tensor of shape {n, knn}, with same dtype with /// query_points. The distances are squared L2 distances. /// - counts: Counts of neighbors for each query point. [Tensor /// of shape {n}, with dtype Int32]. std::tuple<Tensor, Tensor, Tensor> HybridSearch(const Tensor &query_points, double radius, int max_knn); private: bool SetIndex(); /// Assert a Tensor is not a CUDA tensor. This will be removed in the future. void AssertNotCUDA(const Tensor &t) const; protected: std::unique_ptr<NanoFlannIndex> nanoflann_index_; std::unique_ptr<FaissIndex> faiss_index_; std::unique_ptr<nns::FixedRadiusIndex> fixed_radius_index_; std::unique_ptr<nns::KnnIndex> knn_index_; const Tensor dataset_points_; }; } // namespace nns } // namespace core } // namespace open3d
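A hedged usage sketch for the NearestNeighborSearch interface declared above: build a knn index over a small Float32 dataset and query it. Tensor::Init and Tensor::ToString are assumed to be available in the matching Open3D core API of the same era; the point values are arbitrary.

#include <iostream>
#include <utility>
#include "open3d/core/Tensor.h"
#include "open3d/core/nns/NearestNeighborSearch.h"

int main()
{
    using namespace open3d::core;

    // Dataset of 4 points in 3D and a single query point, both Float32.
    Tensor dataset = Tensor::Init<float>(
            {{0.f, 0.f, 0.f}, {1.f, 0.f, 0.f}, {0.f, 1.f, 0.f}, {1.f, 1.f, 1.f}});
    Tensor query = Tensor::Init<float>({{0.1f, 0.1f, 0.1f}});

    nns::NearestNeighborSearch search(dataset);
    if (!search.KnnIndex()) {
        std::cerr << "failed to build knn index" << std::endl;
        return 1;
    }

    // indices: shape {1, 2} Int32; distances: shape {1, 2} Float32 (squared L2).
    std::pair<Tensor, Tensor> result = search.KnnSearch(query, /*knn=*/2);
    std::cout << result.first.ToString() << "\n" << result.second.ToString() << std::endl;
    return 0;
}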
2,151
1,275
<reponame>JimMadge/aws-fpga<filename>shared/lib/fileprovider.py # Amazon FPGA Hardware Development Kit # # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. # # Licensed under the Amazon Software License (the "License"). You may not use # this file except in compliance with the License. A copy of the License is # located at # # http://aws.amazon.com/asl/ # # or in the "license" file accompanying this file. This file is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or # implied. See the License for the specific language governing permissions and # limitations under the License. # External Modules from git import Git import logging import os from os.path import basename, dirname, realpath, relpath import re import sys import traceback try: import aws_fpga_test_utils from aws_fpga_test_utils.AwsFpgaTestBase import AwsFpgaTestBase import aws_fpga_utils except ImportError as e: traceback.print_tb(sys.exc_info()[2]) print "error: {}\nMake sure to source shared/tests/bin/setup_test_env.sh".format(sys.exc_info()[1]) sys.exit(1) logger = aws_fpga_utils.get_logger(__name__) class FileProvider(object): ''' Find all files starting at a directory in a git repo. ''' def __init__(self,): # All exclude paths will be relative to the repo directory. self.repo_dir = aws_fpga_test_utils.get_git_repo_root(dirname(realpath(__file__))) self.git = Git(self.repo_dir) self.exclude_files = [] self.exclude_file_regexps = [] self.exclude_paths = [] self.exclude_path_regexps = [] def set_exclude_files(self, files): self.exclude_files = files self.exclude_file_regexps = [] for file in files: self.exclude_file_regexps.append(re.compile(file)) def set_exclude_paths(self, paths): self.exclude_paths = paths self.exclude_path_regexps = [] for path in paths: self.exclude_path_regexps.append(re.compile(path)) def get_files(self, directory_to_search): directory_to_search = os.path.join(self.repo_dir, directory_to_search) if not os.path.exists(directory_to_search): logger.error("Directory doesn't exist: {}".format(directory_to_search)) return None file_list = [] for root, dirs, files in os.walk(directory_to_search, topdown=True): relative_root = relpath(root, self.repo_dir) # Skip excluded paths excluded = False for exclude_path_re in self.exclude_path_regexps: if exclude_path_re.match(relative_root): excluded = True break; if excluded: logger.debug("Excluded {}".format(relative_root)) continue for dir in dirs: relative_dir = relpath(os.path.join(root, dir), self.repo_dir) excluded = False for exclude_path_re in self.exclude_path_regexps: if exclude_path_re.match(relative_dir): excluded = True break; if excluded: logger.debug("Excluded {}".format(relative_dir)) dirs.remove(dir) continue # Exclude files for filename in files: relative_filename = os.path.join(relative_root, filename) # Ignore files not in the repo rval = self.git.ls_files(relative_filename) if not rval: logger.debug("Excluded {}".format(relative_filename)) continue excluded = False for exclude_file_re in self.exclude_file_regexps: if exclude_file_re.match(relative_filename): excluded = True break if excluded: logger.debug("Excluded {}".format(relative_filename)) continue file_list.append(relative_filename) return file_list
1,914
371
<reponame>PlexPt/mybatis-plus-generator-ui package com.github.davidfantasy.mybatisplus.generatorui.strategy; import com.baomidou.mybatisplus.generator.config.ConstVal; import lombok.Data; @Data public class ServiceImplStrategy { /** * Fully qualified name of the custom ServiceImpl superclass to extend, including the package name */ private String superServiceImplClass = ConstVal.SUPER_SERVICE_IMPL_CLASS; }
162
678
/** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/GeoServices.framework/GeoServices */ #import <GeoServices/XXUnknownSuperclass.h> @interface GEOReverseGeocoderCache : XXUnknownSuperclass { } + (id)sharedCache; // 0x56fd - (void)clear; // 0x5789 - (void)setResponse:(id)response forRequest:(id)request; // 0x5779 - (id)responseForRequest:(id)request; // 0x5769 - (id)init; // 0x568d @end
166
2,113
<reponame>vbillet/Torque3D //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// // Arcane-FX for MIT Licensed Open Source version of Torque 3D from GarageGames // Copyright (C) 2015 Faust Logic, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. // //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// #include <typeinfo> #include "afx/arcaneFX.h" #include "afx/afxEffectDefs.h" #include "afx/afxEffectWrapper.h" #include "afx/afxChoreographer.h" #include "afx/ce/afxMooring.h" //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// // afxEA_Mooring class afxEA_Mooring : public afxEffectWrapper { typedef afxEffectWrapper Parent; afxMooringData* mMooring_data; afxMooring* mObj; void do_runtime_substitutions(); public: /*C*/ afxEA_Mooring(); /*D*/ ~afxEA_Mooring(); virtual void ea_set_datablock(SimDataBlock*); virtual bool ea_start(); virtual bool ea_update(F32 dt); virtual void ea_finish(bool was_stopped); virtual void onDeleteNotify(SimObject*); }; //~~~~~~~~~~~~~~~~~~~~// afxEA_Mooring::afxEA_Mooring() { mMooring_data = 0; mObj = 0; } afxEA_Mooring::~afxEA_Mooring() { if (mObj) mObj->deleteObject(); if (mMooring_data && mMooring_data->isTempClone()) delete mMooring_data; mMooring_data = 0; } void afxEA_Mooring::ea_set_datablock(SimDataBlock* db) { mMooring_data = dynamic_cast<afxMooringData*>(db); } bool afxEA_Mooring::ea_start() { if (!mMooring_data) { Con::errorf("afxEA_Mooring::ea_start() -- missing or incompatible datablock."); return false; } do_runtime_substitutions(); return true; } bool afxEA_Mooring::ea_update(F32 dt) { if (!mObj) { if (mDatablock->use_ghost_as_cons_obj && mDatablock->effect_name != ST_NULLSTRING) { mObj = new afxMooring(mMooring_data->networking, mChoreographer->getChoreographerId(), mDatablock->effect_name); } else { mObj = new afxMooring(mMooring_data->networking, 0, ST_NULLSTRING); } mObj->onNewDataBlock(mMooring_data, false); if (!mObj->registerObject()) { delete mObj; mObj = 0; Con::errorf("afxEA_Mooring::ea_update() -- effect failed to register."); return false; } deleteNotify(mObj); } if (mObj) { mObj->setTransform(mUpdated_xfm); } return true; } void afxEA_Mooring::ea_finish(bool was_stopped) { } void afxEA_Mooring::onDeleteNotify(SimObject* obj) { if (mObj == obj) obj = 0; Parent::onDeleteNotify(obj); } void afxEA_Mooring::do_runtime_substitutions() { // only clone the datablock if there are substitutions if 
(mMooring_data->getSubstitutionCount() > 0) { // clone the datablock and perform substitutions afxMooringData* orig_db = mMooring_data; mMooring_data = new afxMooringData(*orig_db, true); orig_db->performSubstitutions(mMooring_data, mChoreographer, mGroup_index); } } //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// class afxEA_MooringDesc : public afxEffectAdapterDesc, public afxEffectDefs { static afxEA_MooringDesc desc; public: virtual bool testEffectType(const SimDataBlock*) const; virtual bool requiresStop(const afxEffectWrapperData*, const afxEffectTimingData&) const; virtual bool runsOnServer(const afxEffectWrapperData*) const; virtual bool runsOnClient(const afxEffectWrapperData*) const; virtual afxEffectWrapper* create() const { return new afxEA_Mooring; } }; afxEA_MooringDesc afxEA_MooringDesc::desc; bool afxEA_MooringDesc::testEffectType(const SimDataBlock* db) const { return (typeid(afxMooringData) == typeid(*db)); } bool afxEA_MooringDesc::requiresStop(const afxEffectWrapperData* ew, const afxEffectTimingData& timing) const { return (timing.lifetime < 0); } bool afxEA_MooringDesc::runsOnServer(const afxEffectWrapperData* ew) const { U8 networking = ((const afxMooringData*)ew->effect_data)->networking; return ((networking & CLIENT_ONLY) == 0); } bool afxEA_MooringDesc::runsOnClient(const afxEffectWrapperData* ew) const { U8 networking = ((const afxMooringData*)ew->effect_data)->networking; return ((networking & CLIENT_ONLY) != 0); } //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~//
2,027
2,116
<reponame>Charliemaple/qmc-decoder<gh_stars>1000+ /* * Author: mayusheng - <EMAIL> * Last modified: 2020-06-29 10:56 * Filename: decoder.cpp * * Description: qmc file auto decode * * */ #include <algorithm> #include <iostream> #include <memory> #include <regex> #include <vector> #include "seed.hpp" #if defined(__cplusplus) && __cplusplus >= 201703L && defined(__has_include) #if __has_include(<filesystem>) #define GHC_USE_STD_FS #include <filesystem> namespace fs = std::filesystem; #endif #endif #ifndef GHC_USE_STD_FS #include <ghc/filesystem.hpp> namespace fs = ghc::filesystem; #endif namespace { void close_file(std::FILE* fp) { std::fclose(fp); } using smartFilePtr = std::unique_ptr<std::FILE, decltype(&close_file)>; enum class openMode { read, write }; /** * @brief open a file * * @param aDir * @param aOpenMode * @return smartFilePtr */ smartFilePtr openFile(const std::string& aPath, openMode aOpenMode) { #ifndef _WIN32 std::FILE* fp = fopen(aPath.c_str(), aOpenMode == openMode::read ? "rb" : "wb"); #else std::wstring aPath_w; aPath_w.resize(aPath.size()); int newSize = MultiByteToWideChar(CP_UTF8, 0, aPath.c_str(), static_cast<int>(aPath.length()), const_cast<wchar_t*>(aPath_w.c_str()), static_cast<int>(aPath_w.size())); aPath_w.resize(newSize); std::FILE* fp = NULL; _wfopen_s(&fp, aPath_w.c_str(), aOpenMode == openMode::read ? L"rb" : L"wb"); #endif return smartFilePtr(fp, &close_file); } static const std::regex mp3_regex{"\\.(qmc3|qmc0)$"}; static const std::regex ogg_regex{"\\.qmcogg$"}; static const std::regex flac_regex{"\\.qmcflac$"}; void sub_process(std::string dir) { std::cout << "decode: " + dir << std::endl; std::string outloc(dir); auto mp3_outloc = regex_replace(outloc, mp3_regex, ".mp3"); auto flac_outloc = regex_replace(outloc, flac_regex, ".flac"); auto ogg_outloc = regex_replace(outloc, ogg_regex, ".ogg"); if (mp3_outloc != outloc) outloc = mp3_outloc; else if (flac_outloc != outloc) outloc = flac_outloc; else outloc = ogg_outloc; auto infile = openFile(dir, openMode::read); if (infile == nullptr) { std::cerr << "failed read file: " << outloc << std::endl; return; } int res = fseek(infile.get(), 0, SEEK_END); if (res != 0) { std::cerr << "seek file failed" << std::endl; return; } auto len = ftell(infile.get()); res = fseek(infile.get(), 0, SEEK_SET); std::unique_ptr<char[]> buffer(new (std::nothrow) char[len]); if (buffer == nullptr) { std::cerr << "create buffer error" << std::endl; return; } auto fres = fread(buffer.get(), 1, len, infile.get()); if (fres != len) { std::cerr << "read file error" << std::endl; } qmc_decoder::seed seed_; for (int i = 0; i < len; ++i) { buffer[i] = seed_.next_mask() ^ buffer[i]; } auto outfile = openFile(outloc, openMode::write); if (outfile == nullptr) { std::cerr << "failed write file: " << outloc << std::endl; return; } fres = fwrite(buffer.get(), 1, len, outfile.get()); if (fres != len) { std::cerr << "write file error" << std::endl; } } static const std::regex qmc_regex{"^.+\\.(qmc3|qmc0|qmcflac|qmcogg)$"}; } // namespace int main(int argc, char** argv) { if (argc > 1) { std::cerr << "put decoder binary file in your qmc file directory, then run it." << std::endl; return -1; } if ((fs::status(fs::path(".")).permissions() & fs::perms::owner_write) == fs::perms::none) { std::cerr << "please check if you have the write permissions on this dir." 
<< std::endl; return -1; } std::vector<std::string> qmc_paths; for (auto& p : fs::recursive_directory_iterator(fs::path("."))) { auto file_path = p.path().string(); if ((fs::status(p).permissions() & fs::perms::owner_read) != fs::perms::none && fs::is_regular_file(p) && regex_match(file_path, qmc_regex)) { qmc_paths.emplace_back(std::move(file_path)); } }; std::for_each(qmc_paths.begin(), qmc_paths.end(), sub_process); return 0; }
1,774
1,436
// // NIMContactSelectTabView.h // NIMKit // // Created by chris on 15/9/15. // Copyright (c) 2015 NetEase. All rights reserved. // #import <UIKit/UIKit.h> @class NIMContactPickedView; @interface NIMContactSelectTabView : UIView @property (nonatomic,strong) NIMContactPickedView *pickedView; @property (nonatomic,strong) UIButton *doneButton; @end
132
473
#!/usr/bin/env python from generator.actions import Actions, Variable import random import string import struct def random_string(length): s = '' for x in xrange(length): s += random.choice(string.ascii_uppercase) return s def r_uint32(): return random.randint(0, 0xffffffff) def r_vendor(): vid = r_uint32() vtype = random.randint(0, 6) loc = random_string(200) return vid, vtype, loc class MultiPass(Actions): INIT = 0 AUTH = 1 OPS = 2 FIN = 3 PURCHASE = 0 RECHARGE = 1 BALANCE = 2 HISTORY = 3 ISSUE = 4 REFUND = 5 OK = 0 def start(self): self.state['counter'] = 0 self.state['cards'] = {} self.state['purchases'] = [] self.state['transactions'] = [] def packet(self, card_id, auth_code, pkt_type, op_code, status, transaction_id): if isinstance(card_id, Variable): self.write(card_id) else: self.write(struct.pack('<I', card_id)) if isinstance(auth_code, Variable): self.write(auth_code) else: self.write(struct.pack('<I', auth_code)) self.write(struct.pack('<BBB', pkt_type, op_code, status)) if isinstance(transaction_id, Variable): self.write(transaction_id) else: self.write(struct.pack('<I', transaction_id)) def rpacket(self, pkt_type, op_code, status): self.read(length=8) # we can't easily check these tid = Variable('trans' + str(self.state['counter'])) tid.set_slice(0) self.state['counter'] += 1 self.read(length=3, expect=struct.pack('<BBB', pkt_type, op_code, status)) self.read(length=4, assign=tid) return tid def loop(self): pass def exit(self): self.packet(0, 0, self.AUTH, self.ISSUE, 0, 0) self.rpacket(self.AUTH, self.ISSUE, 13) def issue(self): value = r_uint32() self.packet(0, 0, self.INIT, self.ISSUE, 0, 0) self.write(struct.pack('<I', value)) card_id = Variable('card' + str(self.state['counter'])) card_id.set_slice(0) self.state['counter'] += 1 auth_code = Variable('auth' + str(self.state['counter'])) auth_code.set_slice(0) self.state['counter'] += 1 tid = Variable('trans' + str(self.state['counter'])) tid.set_slice(0) self.state['counter'] += 1 self.read(length=4, assign=card_id) self.read(length=4, assign=auth_code) self.read(length=3, expect=struct.pack('<BBB', self.INIT, self.ISSUE, self.OK)) self.read(length=4, assign=tid) self.packet(card_id, auth_code, self.FIN, self.ISSUE, self.OK, tid) self.rpacket(self.FIN, self.ISSUE, self.OK) self.state['cards'][(card_id, auth_code)] = value def purchase(self): (card_id, auth_code), value = random.choice(self.state['cards'].items()) cost = random.randint(0, value) pid = r_uint32() vendor = r_vendor() self.packet(card_id, auth_code, self.AUTH, self.PURCHASE, 0, 0) tid = self.rpacket(self.AUTH, self.PURCHASE, self.OK) self.packet(card_id, auth_code, self.OPS, self.PURCHASE, 0, tid) self.write(struct.pack('<IIIBp', pid, cost, vendor[0], vendor[1], vendor[2])) self.rpacket(self.OPS, self.PURCHASE, self.OK) self.packet(card_id, auth_code, self.FIN, self.PURCHASE, 0, tid) self.rpacket(self.FIN, self.PURCHASE, self.OK) self.state['cards'][(card_id, auth_code)] -= cost self.state['purchases'].append((card_id, auth_code, tid, pid, cost)) def balance(self): (card_id, auth_code), value = random.choice(self.state['cards'].items()) self.packet(card_id, auth_code, self.AUTH, self.BALANCE, 0, 0) tid = self.rpacket(self.AUTH, self.BALANCE, self.OK) self.packet(card_id, auth_code, self.OPS, self.BALANCE, 0, tid) self.rpacket(self.OPS, self.BALANCE, self.OK) self.read(expect=struct.pack('<I', value), length=4) self.packet(card_id, auth_code, self.FIN, self.BALANCE, 0, tid) self.rpacket(self.FIN, self.BALANCE, self.OK) def history(self): pass def refund(self): 
if len(self.state['purchases']) == 0: return p = random.choice(self.state['purchases']) (card_id, auth_code, ptid, pid, pvalue) = p value = self.state['cards'][(card_id, auth_code)] if pvalue >= 0xffffffff - value: return self.state['purchases'].remove(p) self.state['cards'][(card_id, auth_code)] += pvalue self.packet(card_id, auth_code, self.AUTH, self.REFUND, 0, 0) tid = self.rpacket(self.AUTH, self.REFUND, self.OK) self.packet(card_id, auth_code, self.OPS, self.REFUND, 0, tid) self.write(struct.pack('<I', pid)) self.write(ptid) self.rpacket(self.OPS, self.REFUND, self.OK) self.packet(card_id, auth_code, self.FIN, self.REFUND, 0, tid) self.rpacket(self.FIN, self.REFUND, self.OK) def recharge(self): (card_id, auth_code), value = random.choice(self.state['cards'].items()) amount = random.randint(0, 0xffffffff - value) vendor = r_vendor() self.packet(card_id, auth_code, self.AUTH, self.RECHARGE, 0, 0) tid = self.rpacket(self.AUTH, self.RECHARGE, self.OK) self.packet(card_id, auth_code, self.OPS, self.RECHARGE, 0, tid) self.write(struct.pack('<IIBp', amount, vendor[0], vendor[1], vendor[2])) self.rpacket(self.OPS, self.RECHARGE, self.OK) self.packet(card_id, auth_code, self.FIN, self.RECHARGE, 0, tid) self.rpacket(self.FIN, self.RECHARGE, self.OK) self.state['cards'][(card_id, auth_code)] += amount
2,793
852
#ifndef FWCore_MessageService_test_ProblemTestClient_t1_h #define FWCore_MessageService_test_ProblemTestClient_t1_h #include "FWCore/Framework/interface/Frameworkfwd.h" #include "FWCore/Framework/interface/global/EDAnalyzer.h" namespace edm { class ParameterSet; } namespace edmtest { class ProblemTestClient_t1 : public edm::global::EDAnalyzer<> { public: explicit ProblemTestClient_t1(edm::ParameterSet const&) {} void analyze(edm::StreamID, edm::Event const& e, edm::EventSetup const& c) const final; private: }; } // namespace edmtest #endif // FWCore_MessageService_test_ProblemTestClient_t1_h
222
305
<filename>llvm-project/clang/test/CoverageMapping/break.c // RUN: %strip_comments > %t.stripped.c // RUN: %clang_cc1 -fprofile-instrument=clang -fcoverage-mapping -dump-coverage-mapping -emit-llvm-only -main-file-name break.c %t.stripped.c | FileCheck %s int main() { // CHECK: File 0, [[@LINE]]:12 -> {{[0-9]+}}:2 = #0 int cnt = 0; // CHECK-NEXT: File 0, [[@LINE+1]]:9 -> [[@LINE+1]]:18 = #0 while(cnt < 100) { // CHECK: File 0, [[@LINE]]:20 -> [[@LINE+3]]:4 = #1 break; ++cnt; // CHECK-NEXT: File 0, [[@LINE]]:5 -> [[@LINE+1]]:4 = 0 } // CHECK-NEXT: File 0, [[@LINE+1]]:9 -> [[@LINE+1]]:18 = #0 while(cnt < 100) { // CHECK: File 0, [[@LINE]]:20 -> [[@LINE+6]]:4 = #2 { break; ++cnt; // CHECK-NEXT: File 0, [[@LINE]]:7 -> [[@LINE+3]]:4 = 0 } ++cnt; } // CHECK-NEXT: File 0, [[@LINE+1]]:9 -> [[@LINE+1]]:18 = ((#0 + #3) - #4) while(cnt < 100) { // CHECK: File 0, [[@LINE]]:20 -> [[@LINE+7]]:4 = #3 // CHECK-NEXT: File 0, [[@LINE+1]]:8 -> [[@LINE+1]]:16 = #3 if(cnt == 0) { // CHECK: File 0, [[@LINE]]:18 -> [[@LINE+3]]:6 = #4 break; ++cnt; // CHECK-NEXT: File 0, [[@LINE]]:7 -> [[@LINE+1]]:6 = 0 } ++cnt; // CHECK-NEXT: File 0, [[@LINE]]:5 -> [[@LINE+1]]:4 = (#3 - #4) } // CHECK-NEXT: File 0, [[@LINE+1]]:9 -> [[@LINE+1]]:18 = (#0 + #6) while(cnt < 100) { // CHECK: File 0, [[@LINE]]:20 -> [[@LINE+8]]:4 = #5 // CHECK-NEXT: File 0, [[@LINE+1]]:8 -> [[@LINE+1]]:16 = #5 if(cnt == 0) { // CHECK: File 0, [[@LINE]]:18 -> [[@LINE+2]]:6 = #6 ++cnt; // CHECK-NEXT: Gap,File 0, [[@LINE+1]]:6 -> [[@LINE+1]]:12 = (#5 - #6) } else { // CHECK-NEXT: File 0, [[@LINE]]:12 -> [[@LINE+2]]:6 = (#5 - #6) break; } ++cnt; } } // CHECK-LABEL: break_continue_in_increment: // CHECK: [[@LINE+6]]:11 -> [[@LINE+6]]:45 = #1 // CHECK: [[@LINE+5]]:18 -> [[@LINE+5]]:19 = #1 // CHECK: [[@LINE+4]]:21 -> [[@LINE+4]]:26 = #2 // CHECK: [[@LINE+3]]:33 -> [[@LINE+3]]:41 = (#1 - #2) // CHECK: [[@LINE+3]]:5 -> [[@LINE+3]]:6 = #1 void break_continue_in_increment(int x) { for (;; ({ if (x) break; else continue; })) ; }
1,216
1,403
<reponame>beldenfox/LLGL<filename>sources/Renderer/ShaderFlags.cpp /* * ShaderFlags.cpp * * This file is part of the "LLGL" project (Copyright (c) 2015-2019 by <NAME>) * See "LICENSE.txt" for license information. */ #include <LLGL/ShaderFlags.h> namespace LLGL { LLGL_EXPORT bool IsShaderSourceCode(const ShaderSourceType type) { return (type == ShaderSourceType::CodeString || type == ShaderSourceType::CodeFile); } LLGL_EXPORT bool IsShaderSourceBinary(const ShaderSourceType type) { return (type == ShaderSourceType::BinaryBuffer || type == ShaderSourceType::BinaryFile); } } // /namespace LLGL // ================================================================================
224
440
/*========================== begin_copyright_notice ============================ Copyright (C) 2020-2021 Intel Corporation SPDX-License-Identifier: MIT ============================= end_copyright_notice ===========================*/ #ifndef IGCLLVM_SUPPORT_MEMORYBUFFER_H #define IGCLLVM_SUPPORT_MEMORYBUFFER_H #include "llvm/Config/llvm-config.h" #include "llvm/Support/MemoryBuffer.h" namespace IGCLLVM { #if LLVM_VERSION_MAJOR < 9 // Prior to LLVM-9 the only constructor is MemoryBufferRef::MemoryBufferRef(MemoryBuffer&). // The non-const reference is not actually needed, so build the ref from the buffer's // contents and identifier instead. static inline llvm::MemoryBufferRef makeMemoryBufferRef(const llvm::MemoryBuffer &Buffer) { return llvm::MemoryBufferRef{Buffer.getBuffer(), Buffer.getBufferIdentifier()}; } #else static inline llvm::MemoryBufferRef makeMemoryBufferRef(const llvm::MemoryBuffer &Buffer) { return llvm::MemoryBufferRef{Buffer}; } #endif } // namespace IGCLLVM #endif
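A minimal sketch of how the IGCLLVM::makeMemoryBufferRef() shim above could be called. The wrapper's include path and the buffer contents are illustrative assumptions; llvm::MemoryBuffer::getMemBuffer() is the standard LLVM factory function.

#include <memory>
#include "llvm/Support/MemoryBuffer.h"
#include "llvmWrapper/Support/MemoryBuffer.h" // assumed include path for the wrapper header above

void example()
{
    // Own a buffer, then hand out a non-owning MemoryBufferRef view of it in a
    // way that compiles the same on pre- and post-LLVM-9 toolchains.
    std::unique_ptr<llvm::MemoryBuffer> owned =
            llvm::MemoryBuffer::getMemBuffer("some text", "example-buffer-id");

    llvm::MemoryBufferRef ref = IGCLLVM::makeMemoryBufferRef(*owned);
    (void)ref; // pass to any API that takes a MemoryBufferRef
}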
312
14,668
<gh_stars>1000+ // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/modules/animationworklet/animator_definition.h" #include "third_party/blink/renderer/bindings/core/v8/v8_function.h" #include "third_party/blink/renderer/bindings/modules/v8/v8_animate_callback.h" #include "third_party/blink/renderer/bindings/modules/v8/v8_animator_constructor.h" #include "third_party/blink/renderer/bindings/modules/v8/v8_state_callback.h" namespace blink { AnimatorDefinition::AnimatorDefinition(V8AnimatorConstructor* constructor, V8AnimateCallback* animate, V8StateCallback* state) : constructor_(constructor), animate_(animate), state_(state) { DCHECK(constructor_); DCHECK(animate_); } void AnimatorDefinition::Trace(Visitor* visitor) const { visitor->Trace(constructor_); visitor->Trace(animate_); visitor->Trace(state_); } } // namespace blink
416
1,157
#include <maxtest/testconnections.hh> int main(int argc, char** argv) { TestConnections test(argc, argv); std::string query = "SELECT '"; // One byte command byte and one byte for the single quote query.append(0xfffffb - 1 - query.size() - 1, 'a'); query += "'"; auto c = test.maxscale->rwsplit(); c.connect(); test.expect(c.query(query), "First query should work: %s", c.error()); test.expect(c.query(query), "Second query should work: %s", c.error()); test.expect(c.query(query), "Third query should work: %s", c.error()); test.expect(c.query("SELECT 1"), "Small query should work: %s", c.error()); return test.global_result; }
258
598
/* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.traffic_control.traffic_router.core.status.model; import java.util.List; /** * Model for a Cache. */ public class CacheModel { private String cacheId; private String fqdn; private List<String> ipAddresses; private int port; private String adminStatus; private boolean lastUpdateHealthy; private long lastUpdateTime; private long connections; private long currentBW; private long availBW; boolean cacheOnline; /** * Gets adminStatus. * * @return the adminStatus */ public String getAdminStatus() { return adminStatus; } /** * Gets cacheId. * * @return the cacheId */ public String getCacheId() { return cacheId; } /** * Gets fqdn. * * @return the fqdn */ public String getFqdn() { return fqdn; } /** * Gets ipAddresses. * * @return the ipAddresses */ public List<String> getIpAddresses() { return ipAddresses; } /** * Gets lastUpdateTime. * * @return the lastUpdateTime */ public long getLastUpdateTime() { return lastUpdateTime; } /** * Gets port. * * @return the port */ public int getPort() { return port; } /** * Gets lastUpdateHealth. * * @return the lastUpdateHealth */ public boolean isLastUpdateHealthy() { return lastUpdateHealthy; } /** * Sets adminStatus. * * @param adminStatus * the adminStatus to set */ public void setAdminStatus(final String adminStatus) { this.adminStatus = adminStatus; } /** * Sets cacheId. * * @param cacheId * the cacheId to set */ public void setCacheId(final String cacheId) { this.cacheId = cacheId; } /** * Sets fqdn. * * @param fqdn * the fqdn to set */ public void setFqdn(final String fqdn) { this.fqdn = fqdn; } /** * Sets lastUpdateHealthy. * * @param lastUpdateHealthy * the lastUpdateHealthy to set */ public void setLastUpdateHealthy(final boolean lastUpdateHealthy) { this.lastUpdateHealthy = lastUpdateHealthy; } /** * Sets ipAddresses. * * @param ipAddresses * the ipAddresses to set */ public void setIpAddresses(final List<String> ipAddresses) { this.ipAddresses = ipAddresses; } /** * Sets lastUpdateTime. * * @param lastUpdateTime * the lastUpdateTime to set */ public void setLastUpdateTime(final long lastUpdateTime) { this.lastUpdateTime = lastUpdateTime; } /** * Sets port. * * @param port * the port to set */ public void setPort(final int port) { this.port = port; } public void setConnections(final long numConn) { this.connections = numConn; } public long getConnections() { return connections; } public long getCurrentBW() { return currentBW; } public long getAvailBW() { return availBW; } public void setCurrentBW(final long currBW) { this.currentBW = currBW; } public void setAvailBW(final long availBW) { this.availBW = availBW; } public void setCacheOnline(final boolean cacheOnline) { this.cacheOnline = cacheOnline; } public boolean isCacheOnline() { return cacheOnline; } }
1,686
11,356
<reponame>shreyasvj25/turicreate<gh_stars>1000+ #include <boost/parameter.hpp> BOOST_PARAMETER_NAME((pass_foo, keywords) foo) BOOST_PARAMETER_FUNCTION( (int), f, keywords, (required (foo, *))) { return foo + 1; } int x = f(pass_foo = 41); int main() {}
123
343
<reponame>lukegehron/rhino3dm<filename>src/bindings/bnd_dimensionstyle.cpp #include "bindings.h" BND_DimensionStyle::BND_DimensionStyle() { SetTrackedPointer(new ON_DimStyle(), nullptr); } BND_DimensionStyle::BND_DimensionStyle(ON_DimStyle* dimstyle, const ON_ModelComponentReference* compref) { SetTrackedPointer(dimstyle, compref); } void BND_DimensionStyle::SetTrackedPointer(ON_DimStyle* dimstyle, const ON_ModelComponentReference* compref) { m_dimstyle = dimstyle; BND_CommonObject::SetTrackedPointer(dimstyle, compref); } BND_Font* BND_DimensionStyle::GetFont() const { const ON_Font& font = m_dimstyle->Font(); return new BND_Font(font); } void BND_DimensionStyle::SetFont(const BND_Font* font) { if (font) m_dimstyle->SetFont(*font->m_managed_font); } #if defined(ON_PYTHON_COMPILE) namespace py = pybind11; void initDimensionStyleBindings(pybind11::module& m) { py::class_<BND_DimensionStyle, BND_CommonObject> pyDimStyle(m, "DimensionStyle"); pyDimStyle.def(py::init<>()) .def_property("Name", &BND_DimensionStyle::GetName, &BND_DimensionStyle::SetName) .def_property("Font", &BND_DimensionStyle::GetFont, &BND_DimensionStyle::SetFont) .def("ScaleLengthValues", &BND_DimensionStyle::ScaleLengthValues, py::arg("scale")) .def_property("ArrowBlockId1", &BND_DimensionStyle::GetArrowBlockId1, &BND_DimensionStyle::SetArrowBlockId1) .def_property("ArrowBlockId2", &BND_DimensionStyle::GetArrowBlockId2, &BND_DimensionStyle::SetArrowBlockId2) .def_property("LeaderArrowBlockId", &BND_DimensionStyle::GetLeaderArrowBlockId, &BND_DimensionStyle::SetLeaderArrowBlockId) .def_property("SuppressExtension1", &BND_DimensionStyle::GetSuppressExtension1, &BND_DimensionStyle::SetSuppressExtension1) .def_property("SuppressExtension2", &BND_DimensionStyle::GetSuppressExtension2, &BND_DimensionStyle::SetSuppressExtension2) .def_property("SuppressArrow1", &BND_DimensionStyle::GetSuppressArrow1, &BND_DimensionStyle::SetSuppressArrow1) .def_property("SuppressArrow2", &BND_DimensionStyle::GetSuppressArrow2, &BND_DimensionStyle::SetSuppressArrow2) .def_property("AlternateBelowLine", &BND_DimensionStyle::GetAlternateBelowLine, &BND_DimensionStyle::SetAlternateBelowLine) .def_property("DrawTextMask", &BND_DimensionStyle::GetDrawTextMask, &BND_DimensionStyle::SetDrawTextMask) .def_property("LeaderHasLanding", &BND_DimensionStyle::GetLeaderHasLanding, &BND_DimensionStyle::SetLeaderHasLanding) .def_property("DrawForward", &BND_DimensionStyle::GetDrawForward, &BND_DimensionStyle::SetDrawForward) .def_property("TextUnderlined", &BND_DimensionStyle::GetTextUnderlined, &BND_DimensionStyle::SetTextUnderlined) .def_property("ArrowLength", &BND_DimensionStyle::GetArrowSize, &BND_DimensionStyle::SetArrowSize) .def_property("LeaderArrowLength", &BND_DimensionStyle::GetLeaderArrowSize, &BND_DimensionStyle::SetLeaderArrowSize) .def_property("CentermarkSize", &BND_DimensionStyle::GetCenterMark, &BND_DimensionStyle::SetCenterMark) .def_property("TextGap", &BND_DimensionStyle::GetTextGap, &BND_DimensionStyle::SetTextGap) .def_property("TextHEight", &BND_DimensionStyle::GetTextHeight, &BND_DimensionStyle::SetTextHeight) .def_property("LengthFactor", &BND_DimensionStyle::GetLengthFactor, &BND_DimensionStyle::SetLengthFactor) .def_property("AlternateLengthFactor", &BND_DimensionStyle::GetAlternateLengthFactor, &BND_DimensionStyle::SetAlternateLengthFactor) .def_property("ToleranceUpperValue", &BND_DimensionStyle::GetToleranceUpperValue, &BND_DimensionStyle::SetToleranceUpperValue) .def_property("ToleranceLowerValue", 
&BND_DimensionStyle::GetToleranceLowerValue, &BND_DimensionStyle::SetToleranceLowerValue) .def_property("ToleranceHeightScale", &BND_DimensionStyle::GetToleranceHeightScale, &BND_DimensionStyle::SetToleranceHeightScale) .def_property("BaselineSpacing", &BND_DimensionStyle::GetBaselineSpacing, &BND_DimensionStyle::SetBaselineSpacing) .def_property("TextRotation", &BND_DimensionStyle::GetTextRotation, &BND_DimensionStyle::SetTextRotation) .def_property("StackHeightScale", &BND_DimensionStyle::GetStackHeightScale, &BND_DimensionStyle::SetStackHeightScale) .def_property("LeaderLandingLength", &BND_DimensionStyle::GetLeaderLandingLength, &BND_DimensionStyle::SetLeaderLandingLength) .def("IsFieldOverridden", &BND_DimensionStyle::IsFieldOverriden, py::arg("field")) .def("SetFieldOverride", &BND_DimensionStyle::SetFieldOverride, py::arg("field")) .def("ClearFieldOverride", &BND_DimensionStyle::ClearFieldOverride, py::arg("field")) .def("ClearAllFieldOverrides", &BND_DimensionStyle::ClearAllFieldOverrides) .def_property_readonly("HasFieldOverrides", &BND_DimensionStyle::HasFieldOverrides) .def_property_readonly("IsChild", &BND_DimensionStyle::IsChild) .def("IsChildOf", &BND_DimensionStyle::IsChildOf, py::arg("id")) .def_property("ParentId", &BND_DimensionStyle::GetParentId, &BND_DimensionStyle::SetParentId) ; py::enum_<ON_DimStyle::field>(pyDimStyle, "Field") .value("Unset", ON_DimStyle::field::Unset) .value("Name", ON_DimStyle::field::Name) .value("Index", ON_DimStyle::field::Index) .value("ExtensionLineExtension", ON_DimStyle::field::ExtensionLineExtension) .value("ExtensionLineOffset", ON_DimStyle::field::ExtensionLineOffset) .value("ArrowSize", ON_DimStyle::field::Arrowsize) .value("LeaderArrowSize", ON_DimStyle::field::LeaderArrowsize) .value("Centermark", ON_DimStyle::field::Centermark) .value("TextGap", ON_DimStyle::field::TextGap) .value("TextHeight", ON_DimStyle::field::TextHeight) .value("DimTextLocation", ON_DimStyle::field::DimTextLocation) .value("LengthResolution", ON_DimStyle::field::LengthResolution) .value("AngleFormat", ON_DimStyle::field::AngleFormat) .value("AngleResolution", ON_DimStyle::field::AngleResolution) .value("Font", ON_DimStyle::field::Font) .value("LengthFactor", ON_DimStyle::field::LengthFactor) .value("Alternate", ON_DimStyle::field::Alternate) .value("AlternateLengthFactor", ON_DimStyle::field::AlternateLengthFactor) .value("AlternateLengthResolution", ON_DimStyle::field::AlternateLengthResolution) .value("Prefix", ON_DimStyle::field::Prefix) .value("Suffix", ON_DimStyle::field::Suffix) .value("AlternatePrefix", ON_DimStyle::field::AlternatePrefix) .value("AlternateSuffix", ON_DimStyle::field::AlternateSuffix) .value("DimensionLineExtension", ON_DimStyle::field::DimensionLineExtension) .value("SuppressExtension1", ON_DimStyle::field::SuppressExtension1) .value("SuppressExtension2", ON_DimStyle::field::SuppressExtension2) .value("ExtLineColorSource", ON_DimStyle::field::ExtLineColorSource) .value("DimLineColorSource", ON_DimStyle::field::DimLineColorSource) .value("ArrowColorSource", ON_DimStyle::field::ArrowColorSource) .value("TextColorSource", ON_DimStyle::field::TextColorSource) .value("ExtLineColor", ON_DimStyle::field::ExtLineColor) .value("DimLineColor", ON_DimStyle::field::DimLineColor) .value("ArrowColor", ON_DimStyle::field::ArrowColor) .value("TextColor", ON_DimStyle::field::TextColor) .value("ExtLinePlotColorSource", ON_DimStyle::field::ExtLinePlotColorSource) .value("DimLinePlotColorSource", ON_DimStyle::field::DimLinePlotColorSource) 
.value("ArrowPlotColorSource", ON_DimStyle::field::ArrowPlotColorSource) .value("TextPlotColorSource", ON_DimStyle::field::TextPlotColorSource) .value("ExtLinePlotColor", ON_DimStyle::field::ExtLinePlotColor) .value("DimLinePlotColor", ON_DimStyle::field::DimLinePlotColor) .value("ArrowPlotColor", ON_DimStyle::field::ArrowPlotColor) .value("TextPlotColor", ON_DimStyle::field::TextPlotColor) .value("ExtLinePlotWeightSource", ON_DimStyle::field::ExtLinePlotWeightSource) .value("DimLinePlotWeightSource", ON_DimStyle::field::DimLinePlotWeightSource) .value("ExtLinePlotWeight_mm", ON_DimStyle::field::ExtLinePlotWeight_mm) .value("DimLinePlotWeight_mm", ON_DimStyle::field::DimLinePlotWeight_mm) .value("ToleranceFormat", ON_DimStyle::field::ToleranceFormat) .value("ToleranceResolution", ON_DimStyle::field::ToleranceResolution) .value("ToleranceUpperValue", ON_DimStyle::field::ToleranceUpperValue) .value("ToleranceLowerValue", ON_DimStyle::field::ToleranceLowerValue) .value("AltToleranceResolution", ON_DimStyle::field::AltToleranceResolution) .value("ToleranceHeightScale", ON_DimStyle::field::ToleranceHeightScale) .value("BaselineSpacing", ON_DimStyle::field::BaselineSpacing) .value("DrawMask", ON_DimStyle::field::DrawMask) .value("MaskColorSource", ON_DimStyle::field::MaskColorSource) .value("MaskColor", ON_DimStyle::field::MaskColor) .value("MaskBorder", ON_DimStyle::field::MaskBorder) .value("DimensionScale", ON_DimStyle::field::DimensionScale) .value("DimscaleSource", ON_DimStyle::field::DimscaleSource) .value("FixedExtensionLength", ON_DimStyle::field::FixedExtensionLength) .value("FixedExtensionOn", ON_DimStyle::field::FixedExtensionOn) .value("TextRotation", ON_DimStyle::field::TextRotation) .value("SuppressArrow1", ON_DimStyle::field::SuppressArrow1) .value("SuppressArrow2", ON_DimStyle::field::SuppressArrow2) .value("TextmoveLeader", ON_DimStyle::field::TextmoveLeader) .value("ArclengthSymbol", ON_DimStyle::field::ArclengthSymbol) .value("StackTextheightScale", ON_DimStyle::field::StackTextheightScale) .value("StackFormat", ON_DimStyle::field::StackFormat) .value("AltRound", ON_DimStyle::field::AltRound) .value("Round", ON_DimStyle::field::Round) .value("AngularRound", ON_DimStyle::field::AngularRound) .value("AltZeroSuppress", ON_DimStyle::field::AltZeroSuppress) .value("AngleZeroSuppress", ON_DimStyle::field::AngleZeroSuppress) .value("AltBelow", ON_DimStyle::field::AltBelow) .value("ArrowType1", ON_DimStyle::field::ArrowType1) .value("ArrowType2", ON_DimStyle::field::ArrowType2) .value("LeaderArrowType", ON_DimStyle::field::LeaderArrowType) .value("ArrowBlockId1", ON_DimStyle::field::ArrowBlockId1) .value("ArrowBlockId2", ON_DimStyle::field::ArrowBlockId2) .value("LeaderArrowBlock", ON_DimStyle::field::LeaderArrowBlock) .value("DimRadialTextLocation", ON_DimStyle::field::DimRadialTextLocation) .value("TextVerticalAlignment", ON_DimStyle::field::TextVerticalAlignment) .value("LeaderTextVerticalAlignment", ON_DimStyle::field::LeaderTextVerticalAlignment) .value("LeaderContentAngleStyle", ON_DimStyle::field::LeaderContentAngleStyle) .value("LeaderCurveType", ON_DimStyle::field::LeaderCurveType) .value("LeaderContentAngle", ON_DimStyle::field::LeaderContentAngle) .value("LeaderHasLanding", ON_DimStyle::field::LeaderHasLanding) .value("LeaderLandingLength", ON_DimStyle::field::LeaderLandingLength) .value("MaskFlags", ON_DimStyle::field::MaskFlags) .value("CentermarkStyle", ON_DimStyle::field::CentermarkStyle) .value("TextHorizontalAlignment", ON_DimStyle::field::TextHorizontalAlignment) 
.value("LeaderTextHorizontalAlignment", ON_DimStyle::field::LeaderTextHorizontalAlignment) .value("DrawForward", ON_DimStyle::field::DrawForward) .value("SignedOrdinate", ON_DimStyle::field::SignedOrdinate) .value("UnitSystem", ON_DimStyle::field::UnitSystem) .value("TextMask", ON_DimStyle::field::TextMask) .value("TextOrientation", ON_DimStyle::field::TextOrientation) .value("LeaderTextOrientation", ON_DimStyle::field::LeaderTextOrientation) .value("DimTextOrientation", ON_DimStyle::field::DimTextOrientation) .value("DimRadialTextOrientation", ON_DimStyle::field::DimRadialTextOrientation) .value("DimTextAngleStyle", ON_DimStyle::field::DimTextAngleStyle) .value("DimRadialTextAngleStyle", ON_DimStyle::field::DimRadialTextAngleStyle) .value("TextUnderlined", ON_DimStyle::field::TextUnderlined) .value("DimensionLengthDisplay", ON_DimStyle::field::DimensionLengthDisplay) .value("AlternateDimensionLengthDisplay", ON_DimStyle::field::AlternateDimensionLengthDisplay) ; } #endif #if defined(ON_WASM_COMPILE) using namespace emscripten; void initDimensionStyleBindings(void*) { class_<BND_DimensionStyle, base<BND_CommonObject>>("DimensionStyle") .constructor<>() .property("name", &BND_DimensionStyle::GetName, &BND_DimensionStyle::SetName) .function("getFont", &BND_DimensionStyle::GetFont, allow_raw_pointers()) .function("setFont", &BND_DimensionStyle::SetFont, allow_raw_pointers()) .function("scaleLengthValues", &BND_DimensionStyle::ScaleLengthValues) .property("arrowBlockId1", &BND_DimensionStyle::GetArrowBlockId1, &BND_DimensionStyle::SetArrowBlockId1) .property("arrowBlockId2", &BND_DimensionStyle::GetArrowBlockId2, &BND_DimensionStyle::SetArrowBlockId2) .property("leaderArrowBlockId", &BND_DimensionStyle::GetLeaderArrowBlockId, &BND_DimensionStyle::SetLeaderArrowBlockId) .property("suppressExtension1", &BND_DimensionStyle::GetSuppressExtension1, &BND_DimensionStyle::SetSuppressExtension1) .property("suppressExtension2", &BND_DimensionStyle::GetSuppressExtension2, &BND_DimensionStyle::SetSuppressExtension2) .property("suppressArrow1", &BND_DimensionStyle::GetSuppressArrow1, &BND_DimensionStyle::SetSuppressArrow1) .property("suppressArrow2", &BND_DimensionStyle::GetSuppressArrow2, &BND_DimensionStyle::SetSuppressArrow2) .property("alternateBelowLine", &BND_DimensionStyle::GetAlternateBelowLine, &BND_DimensionStyle::SetAlternateBelowLine) .property("drawTextMask", &BND_DimensionStyle::GetDrawTextMask, &BND_DimensionStyle::SetDrawTextMask) .property("leaderHasLanding", &BND_DimensionStyle::GetLeaderHasLanding, &BND_DimensionStyle::SetLeaderHasLanding) .property("drawForward", &BND_DimensionStyle::GetDrawForward, &BND_DimensionStyle::SetDrawForward) .property("textUnderlined", &BND_DimensionStyle::GetTextUnderlined, &BND_DimensionStyle::SetTextUnderlined) .property("arrowLength", &BND_DimensionStyle::GetArrowSize, &BND_DimensionStyle::SetArrowSize) .property("leaderArrowLength", &BND_DimensionStyle::GetLeaderArrowSize, &BND_DimensionStyle::SetLeaderArrowSize) .property("centermarkSize", &BND_DimensionStyle::GetCenterMark, &BND_DimensionStyle::SetCenterMark) .property("textGap", &BND_DimensionStyle::GetTextGap, &BND_DimensionStyle::SetTextGap) .property("textHEight", &BND_DimensionStyle::GetTextHeight, &BND_DimensionStyle::SetTextHeight) .property("lengthFactor", &BND_DimensionStyle::GetLengthFactor, &BND_DimensionStyle::SetLengthFactor) .property("alternateLengthFactor", &BND_DimensionStyle::GetAlternateLengthFactor, &BND_DimensionStyle::SetAlternateLengthFactor) .property("toleranceUpperValue", 
&BND_DimensionStyle::GetToleranceUpperValue, &BND_DimensionStyle::SetToleranceUpperValue) .property("toleranceLowerValue", &BND_DimensionStyle::GetToleranceLowerValue, &BND_DimensionStyle::SetToleranceLowerValue) .property("toleranceHeightScale", &BND_DimensionStyle::GetToleranceHeightScale, &BND_DimensionStyle::SetToleranceHeightScale) .property("baselineSpacing", &BND_DimensionStyle::GetBaselineSpacing, &BND_DimensionStyle::SetBaselineSpacing) .property("textRotation", &BND_DimensionStyle::GetTextRotation, &BND_DimensionStyle::SetTextRotation) .property("stackHeightScale", &BND_DimensionStyle::GetStackHeightScale, &BND_DimensionStyle::SetStackHeightScale) .property("leaderLandingLength", &BND_DimensionStyle::GetLeaderLandingLength, &BND_DimensionStyle::SetLeaderLandingLength) .function("clearAllFieldOverrides", &BND_DimensionStyle::ClearAllFieldOverrides) .property("hasFieldOverrides", &BND_DimensionStyle::HasFieldOverrides) .property("isChild", &BND_DimensionStyle::IsChild) .function("isChildOf", &BND_DimensionStyle::IsChildOf) .property("parentId", &BND_DimensionStyle::GetParentId, &BND_DimensionStyle::SetParentId) ; } #endif
5,649
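The pybind11 definitions above expose ON_DimStyle settings as plain Python properties plus a Field enum used for per-field overrides. A minimal sketch of how the resulting Python API might be driven, assuming the bindings ship as the rhino3dm module and that a default constructor is registered (both assumptions sit outside this excerpt):

# Hedged sketch of the bound DimensionStyle class; module name and default
# constructor are assumptions not confirmed by the excerpt above.
import rhino3dm

ds = rhino3dm.DimensionStyle()

# Plain properties map straight onto the def_property entries above.
ds.TextRotation = 0.25
ds.LeaderLandingLength = 2.0

# Field-level overrides use the Field enum registered on the class.
field = rhino3dm.DimensionStyle.Field.TextRotation
ds.SetFieldOverride(field)
print(ds.IsFieldOverridden(field))   # expected: True
print(ds.HasFieldOverrides)          # expected: True

ds.ClearAllFieldOverrides()
print(ds.HasFieldOverrides)          # expected: False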
764
package thread_safe; import java.io.IOException; import org.jruby.Ruby; import org.jruby.ext.thread_safe.JRubyCacheBackendLibrary; import org.jruby.runtime.load.BasicLibraryService; // can't name this JRubyCacheBackendService or else JRuby doesn't pick this up public class JrubyCacheBackendService implements BasicLibraryService { public boolean basicLoad(final Ruby runtime) throws IOException { new JRubyCacheBackendLibrary().load(runtime, false); return true; } }
155
482
"""resizer module handle image resizing""" import albumentations as A import cv2 import numpy as np from enum import Enum import imghdr _INTER_STR_TO_CV2 = dict( nearest=cv2.INTER_NEAREST, linear=cv2.INTER_LINEAR, bilinear=cv2.INTER_LINEAR, cubic=cv2.INTER_CUBIC, bicubic=cv2.INTER_CUBIC, area=cv2.INTER_AREA, lanczos=cv2.INTER_LANCZOS4, lanczos4=cv2.INTER_LANCZOS4, ) class ResizeMode(Enum): no = 0 keep_ratio = 1 center_crop = 2 border = 3 def inter_str_to_cv2(inter_str): inter_str = inter_str.lower() if inter_str not in _INTER_STR_TO_CV2: raise Exception(f"Invalid option for interpolation: {inter_str}") return _INTER_STR_TO_CV2[inter_str] class Resizer: """ Resize images Expose a __call__ method to be used as a callable object Should be used to resize one image at a time Options: resize_mode: "no", "keep_ratio", "center_crop", "border" resize_only_if_bigger: if True, resize only if image is bigger than image_size image_size: size of the output image to resize """ def __init__( self, image_size, resize_mode, resize_only_if_bigger, upscale_interpolation="lanczos", downscale_interpolation="area", encode_quality=95, skip_reencode=False, ): self.image_size = image_size if isinstance(resize_mode, str): if resize_mode not in ResizeMode.__members__: # pylint: disable=unsupported-membership-test raise Exception(f"Invalid option for resize_mode: {resize_mode}") resize_mode = ResizeMode[resize_mode] self.resize_mode = resize_mode self.resize_only_if_bigger = resize_only_if_bigger self.upscale_interpolation = inter_str_to_cv2(upscale_interpolation) self.downscale_interpolation = inter_str_to_cv2(downscale_interpolation) self.encode_params = [int(cv2.IMWRITE_JPEG_QUALITY), encode_quality] self.skip_reencode = skip_reencode def __call__(self, img_stream): """ input: an image stream output: img_str, width, height, original_width, original_height, err """ try: encode_needed = imghdr.what(img_stream) != "jpeg" if self.skip_reencode else True img_buf = np.frombuffer(img_stream.read(), np.uint8) img = cv2.imdecode(img_buf, cv2.IMREAD_UNCHANGED) if img is None: raise Exception("Image decoding error") if len(img.shape) == 3 and img.shape[-1] == 4: # alpha matting with white background alpha = img[:, :, 3, np.newaxis] img = alpha / 255 * img[..., :3] + 255 - alpha img = np.rint(img.clip(min=0, max=255)).astype(np.uint8) encode_needed = True original_height, original_width = img.shape[:2] # resizing in following conditions if self.resize_mode in (ResizeMode.keep_ratio, ResizeMode.center_crop): downscale = min(original_width, original_height) > self.image_size if not self.resize_only_if_bigger or downscale: interpolation = self.downscale_interpolation if downscale else self.upscale_interpolation img = A.smallest_max_size(img, self.image_size, interpolation=interpolation) if self.resize_mode == ResizeMode.center_crop: img = A.center_crop(img, self.image_size, self.image_size) encode_needed = True elif self.resize_mode == ResizeMode.border: downscale = max(original_width, original_height) > self.image_size if not self.resize_only_if_bigger or downscale: interpolation = self.downscale_interpolation if downscale else self.upscale_interpolation img = A.longest_max_size(img, self.image_size, interpolation=interpolation) img = A.pad( img, self.image_size, self.image_size, border_mode=cv2.BORDER_CONSTANT, value=[255, 255, 255] ) encode_needed = True height, width = img.shape[:2] if encode_needed: img_str = cv2.imencode(".jpg", img, params=self.encode_params)[1].tobytes() else: img_str = img_buf.tobytes() return img_str, 
width, height, original_width, original_height, None except Exception as err: # pylint: disable=broad-except return None, None, None, None, None, str(err)
2,185
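The Resizer docstring above describes a callable that takes an image stream and returns the re-encoded bytes together with the new and original dimensions. A minimal usage sketch, where the input path is hypothetical and used only for illustration:

# Minimal usage sketch for the Resizer above; "input.jpg" is a hypothetical
# local file.
import io

resizer = Resizer(
    image_size=256,
    resize_mode="border",          # one of: no, keep_ratio, center_crop, border
    resize_only_if_bigger=False,
)

with open("input.jpg", "rb") as f:
    img_stream = io.BytesIO(f.read())

img_str, width, height, orig_w, orig_h, err = resizer(img_stream)
if err is None:
    with open("resized.jpg", "wb") as out:
        out.write(img_str)
    print(f"{orig_w}x{orig_h} -> {width}x{height}")
else:
    print("resize failed:", err)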
3,200
/** * Copyright 2019-2020 Huawei Technologies Co., Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "utils/any.h" #ifndef _MSC_VER #include <cxxabi.h> #endif #include <memory> namespace mindspore { // only support (int, float, bool) as Literal bool AnyIsLiteral(const Any &any) { static const std::type_index typeid_int = std::type_index(typeid(int)); static const std::type_index typeid_float = std::type_index(typeid(float)); static const std::type_index typeid_bool = std::type_index(typeid(bool)); auto typeid_any = std::type_index(any.type()); return typeid_int == typeid_any || typeid_float == typeid_any || typeid_bool == typeid_any; } Any &Any::operator=(const Any &other) { if (m_ptr == other.m_ptr || &other == this) { return *this; } m_ptr = other.clone(); m_tpIndex = other.m_tpIndex; return *this; } bool Any::operator<(const Any &other) const { return this < &other; } Any &Any::operator=(Any &&other) { if (this != &other) { if (m_ptr == other.m_ptr || &other == this) { return *this; } m_ptr = std::move(other.m_ptr); m_tpIndex = std::move(other.m_tpIndex); other.m_ptr = nullptr; } return *this; } } // namespace mindspore
586
355
<gh_stars>100-1000 package net.tomp2p; import net.tomp2p.connection.ChannelCreator; import net.tomp2p.message.Buffer; import net.tomp2p.p2p.Peer; import net.tomp2p.p2p.builder.SendDirectBuilder; import net.tomp2p.peers.PeerAddress; import net.tomp2p.rpc.RawDataReply; import net.tomp2p.storage.AlternativeCompositeByteBuf; public abstract class SendDirectProfiler extends Profiler { private static final int BUFFER_SIZE_BYTES = 1000; protected final boolean isForceUdp; protected Peer sender; protected Peer receiver; protected ChannelCreator cc; protected SendDirectBuilder sendDirectBuilder; public SendDirectProfiler(boolean isForceUdp) { this.isForceUdp = isForceUdp; } @Override protected void shutdown() throws Exception { if (sender != null) { sender.shutdown().awaitUninterruptibly(); } if (receiver != null) { receiver.shutdown().awaitUninterruptibly(); } if (cc != null) { cc.shutdown().awaitUninterruptibly(); } } protected static Buffer createSampleBuffer() { AlternativeCompositeByteBuf acbb = AlternativeCompositeByteBuf.compBuffer(); for (int i = 0; i < BUFFER_SIZE_BYTES; i++) { acbb.writeByte(i % 256); } return new Buffer(acbb); } protected class SampleRawDataReply implements RawDataReply { @Override public Buffer reply(PeerAddress sender, Buffer requestBuffer, boolean complete) throws Exception { // server returns just OK if same buffer is returned return requestBuffer; } } }
515
411
<filename>library/src/main/java/me/zhanghai/android/customtabshelper/CustomTabsHelperFragment.java<gh_stars>100-1000 /* * Copyright (c) 2015 <NAME> <<EMAIL>> * All Rights Reserved. */ package me.zhanghai.android.customtabshelper; import android.app.Activity; import android.content.ActivityNotFoundException; import android.net.Uri; import android.os.Bundle; import android.support.customtabs.CustomTabsIntent; import android.support.customtabs.CustomTabsSession; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import org.chromium.customtabsclient.CustomTabsActivityHelper; import java.util.List; /** * A Fragment that manages a {@link CustomTabsActivityHelper}. */ public class CustomTabsHelperFragment extends Fragment { private static final String FRAGMENT_TAG = CustomTabsHelperFragment.class.getName(); private CustomTabsActivityHelper mCustomTabsActivityHelper = new CustomTabsActivityHelper(); /** * Ensure that an instance of this fragment is attached to an activity. * * @param activity The target activity. * @return An instance of this fragment. */ public static CustomTabsHelperFragment attachTo(FragmentActivity activity) { FragmentManager fragmentManager = activity.getSupportFragmentManager(); CustomTabsHelperFragment fragment = (CustomTabsHelperFragment) fragmentManager .findFragmentByTag(FRAGMENT_TAG); if (fragment == null) { fragment = new CustomTabsHelperFragment(); fragmentManager.beginTransaction() .add(fragment, FRAGMENT_TAG) .commit(); } return fragment; } /** * Ensure that an instance of this fragment is attached to the host activity of a fragment. * * @param fragment The target fragment, which will be used to retrieve the host activity. * @return An instance of this fragment. */ public static CustomTabsHelperFragment attachTo(Fragment fragment) { return attachTo(fragment.getActivity()); } // Cannot get javadoc to compile, saying "reference not found". /* * @see CustomTabsActivityHelper#openCustomTab(Activity, CustomTabsIntent, Uri, CustomTabsActivityHelper.CustomTabsFallback) */ public static void open(Activity activity, CustomTabsIntent intent, Uri uri, CustomTabsActivityHelper.CustomTabsFallback fallback) { try { CustomTabsActivityHelper.openCustomTab(activity, intent, uri, fallback); } catch (ActivityNotFoundException e) { fallback.openUri(activity, uri); } } /** * Get the {@link CustomTabsActivityHelper} this fragment manages. * * @return The {@link CustomTabsActivityHelper}. 
*/ public CustomTabsActivityHelper getHelper() { return mCustomTabsActivityHelper; } /** * @see CustomTabsActivityHelper#getSession() */ public CustomTabsSession getSession() { return mCustomTabsActivityHelper.getSession(); } /** * @see CustomTabsActivityHelper#setConnectionCallback(CustomTabsActivityHelper.ConnectionCallback) */ public void setConnectionCallback( CustomTabsActivityHelper.ConnectionCallback connectionCallback) { mCustomTabsActivityHelper.setConnectionCallback(connectionCallback); } /** * @see CustomTabsActivityHelper#mayLaunchUrl(Uri, Bundle, List) */ public boolean mayLaunchUrl(Uri uri, Bundle extras, List<Bundle> otherLikelyBundles) { return mCustomTabsActivityHelper.mayLaunchUrl(uri, extras, otherLikelyBundles); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setRetainInstance(true); setUserVisibleHint(false); } @Override public void onStart() { super.onStart(); mCustomTabsActivityHelper.bindCustomTabsService(getActivity()); } @Override public void onStop() { super.onStop(); mCustomTabsActivityHelper.unbindCustomTabsService(getActivity()); } }
1,518
327
/* * Copyright 2014 CITIUS <http://citius.usc.es>, University of Santiago de Compostela. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package es.usc.citius.hipster.algorithm; import es.usc.citius.hipster.model.HeuristicNode; import es.usc.citius.hipster.model.function.NodeExpander; /** * <p> * Implementation of the IDA* algorithm. Similar to Iterative DFS but using heuristics to limit * the space search and keeping a very low memory usage. * </p> * * <a href="http://www.sciencedirect.com/science/article/pii/0004370285900840">Original paper</a>: * <NAME> <i><b>"Depth-first Iterative-Deepening: An Optimal Admissible Tree Search."</b></i>, * Artificial Intelligence, vol. 27, pp. 97-109, 1985. * * @param <A> action type. * @param <S> state type. * @param <C> comparable cost used to compare states. * @param <N> type of the heuristic search node. * * @author <NAME> <<a href="mailto:<EMAIL>"><EMAIL></a>> * @author Jennnnyz * */ public class IDAStar<A,S,C extends Comparable<C>,N extends HeuristicNode<A,S,C,N>> extends DepthFirstSearch<A,S,N> { /** * * @param initialNode * @param expander */ public IDAStar(N initialNode, NodeExpander<A,S,N> expander) { super(initialNode, expander); } /** * IDA iterator. It expands the next state to be explored. Backtracking * is automatically performed so if the state reaches a dead-end the next * call to {@code iterator.next()} returns the next state after performing * backtracking. */ public class Iterator extends DepthFirstSearch.Iterator { protected C fLimit; protected C minfLimit; protected int reinitialization = 0; protected Iterator(){ // Set initial bound super(); fLimit = initialNode.getEstimation(); minfLimit = null; } protected void updateMinFLimit(C currentFLimit){ if (minfLimit == null){ minfLimit = currentFLimit; } else { if (minfLimit.compareTo(currentFLimit)>0){ minfLimit = currentFLimit; } } } @Override protected StackFrameNode nextUnvisited(){ StackFrameNode nextNode; do { nextNode = processNextNode(); // No more neighbors to visit with the current fLimit. Update the new fLimit if (nextNode == null){ // Reinitialize if (minfLimit != null && minfLimit.compareTo(fLimit)>0){ fLimit = minfLimit; reinitialization++; minfLimit = null; super.getStack().addLast(new StackFrameNode(initialNode)); nextNode = processNextNode(); } } } while(nextNode != null && (nextNode.processed || nextNode.visited)); if (nextNode != null){ nextNode.visited = true; } return nextNode; } @Override protected StackFrameNode processNextNode(){ // Get and process the current node. Cases: // 1 - empty stack, return null // 2 - node exceeds the bound: update minfLimit, pop and skip. // 3 - node has neighbors: expand and return current. // 4 - node has no neighbors: // 4.1 - Node visited before: processed node, pop and skip to the next node. // 4.2 - Not visited: we've reached a leaf node. // mark as visited, pop and return. 
// 1- If the stack is empty, change fLimit and reinitialize the search if (super.getStack().isEmpty()) return null; // Take current node in the stack but do not remove StackFrameNode current = (StackFrameNode) super.stack.peekLast(); // 2 - Check if the current node exceeds the limit bound C fCurrent = current.getNode().getScore(); if (fCurrent.compareTo(fLimit)>0){ // Current node exceeds the limit bound, update minfLimit, pop and skip. updateMinFLimit(fCurrent); // Remove from stack current.processed = true; return (StackFrameNode) super.getStack().removeLast(); } // Find a successor if (current.getSuccessors().hasNext()){ // 3 - Node has at least one neighbor N successor = current.getSuccessors().next(); // push the node super.getStack().addLast(new StackFrameNode(successor)); return current; } else { // 4 - Visited? if (current.visited){ current.processed = true; } return (StackFrameNode) super.getStack().removeLast(); } } public C getfLimit() { return fLimit; } public void setfLimit(C fLimit) { this.fLimit = fLimit; } public C getMinfLimit() { return minfLimit; } public void setMinfLimit(C minfLimit) { this.minfLimit = minfLimit; } public int getReinitialization() { return reinitialization; } public void setReinitialization(int reinitialization) { this.reinitialization = reinitialization; } } @Override public Iterator iterator() { return new Iterator(); } }
2,760
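The IDAStar javadoc above describes the algorithm as repeated depth-first searches bounded by an f-limit that is raised to the smallest f-value seen to exceed the previous bound. A standalone sketch of that control loop (not a port of the Hipster iterator, just the core idea, assuming an admissible heuristic):

# Standalone sketch of the IDA* bound-raising loop described above.
# successors(state) -> iterable of (action, next_state, step_cost)
# h(state) -> admissible heuristic estimate of remaining cost
import math

def ida_star(start, is_goal, successors, h):
    bound = h(start)

    def search(path, g, bound):
        state = path[-1]
        f = g + h(state)
        if f > bound:
            return f, None                 # report the smallest exceeded f
        if is_goal(state):
            return f, list(path)
        minimum = math.inf
        for _action, nxt, cost in successors(state):
            if nxt in path:                # avoid trivial cycles
                continue
            path.append(nxt)
            t, found = search(path, g + cost, bound)
            path.pop()
            if found is not None:
                return t, found
            minimum = min(minimum, t)
        return minimum, None

    while True:
        t, found = search([start], 0, bound)
        if found is not None:
            return found                   # states from start to goal
        if t == math.inf:
            return None                    # exhausted search space, no solution
        bound = t                          # reinitialize with the raised bound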
302
<reponame>Atul9/tectonic /*------------------------------------------------------------------------ Copyright (C) 2002-2014 SIL International. All rights reserved. Distributable under the terms of either the Common Public License or the GNU Lesser General Public License, as specified in the LICENSING.txt file. File: TECkit_Engine.h Responsibility: <NAME> Last reviewed: Not yet. Description: Public API to the TECkit conversion engine. -------------------------------------------------------------------------*/ /* TECkit_Engine.h Public API to the TECkit encoding conversion library. 18-Jan-2008 jk added EXPORTED to declarations, for mingw32 cross-build 18-Mar-2005 jk moved version number to TECkit_Common.h as it is shared with the compiler 19-Mar-2004 jk updated minor version for 2.2 engine (improved matching functionality) 23-Sep-2003 jk updated for version 2.1 - new "...Opt" APIs 5-Jul-2002 jk corrected placement of WINAPI to keep MS compiler happy 14-May-2002 jk added WINAPI to function declarations 22-Dec-2001 jk initial version */ #ifndef __TECkit_Engine_H__ #define __TECkit_Engine_H__ #include "teckit-Common.h" /* formFlags bits for normalization; if none are set, then this side of the mapping is normalization-form-agnostic on input, and may generate an unspecified mixture */ #define kFlags_ExpectsNFC 0x00000001 /* expects fully composed text (NC) */ #define kFlags_ExpectsNFD 0x00000002 /* expects fully decomposed text (NCD) */ #define kFlags_GeneratesNFC 0x00000004 /* generates fully composed text (NC) */ #define kFlags_GeneratesNFD 0x00000008 /* generates fully decomposed text (NCD) */ /* if VisualOrder is set, this side of the mapping deals with visual-order rather than logical-order text (only relevant for bidi scripts) */ #define kFlags_VisualOrder 0x00008000 /* visual rather than logical order */ /* if Unicode is set, the encoding is Unicode on this side of the mapping */ #define kFlags_Unicode 0x00010000 /* this is Unicode rather than a byte encoding */ /* required names */ #define kNameID_LHS_Name 0 /* "source" or LHS encoding name, e.g. "SIL-EEG_URDU-2001" */ #define kNameID_RHS_Name 1 /* "destination" or RHS encoding name, e.g. "UNICODE-3-1" */ #define kNameID_LHS_Description 2 /* source encoding description, e.g. "SIL East Eurasia Group Extended Urdu (Mac OS)" */ #define kNameID_RHS_Description 3 /* destination description, e.g. 
"Unicode 3.1" */ /* additional recommended names (parallel to UTR-22) */ #define kNameID_Version 4 /* "1.0b1" */ #define kNameID_Contact 5 /* "mailto:<EMAIL>" */ #define kNameID_RegAuthority 6 /* "SIL International" */ #define kNameID_RegName 7 /* "Greek (Galatia)" */ #define kNameID_Copyright 8 /* "(c)2002 SIL International" */ /* additional name IDs may be defined in the future */ /* encoding form options for TECkit_CreateConverter */ #define kForm_NormalizationMask 0x0F00 #define kForm_NFC 0x0100 #define kForm_NFD 0x0200 /* end of text value for TECkit_DataSource functions to return */ #define kEndOfText 0xffffffffUL /* A converter object is an opaque pointer */ typedef struct Opaque_TECkit_Converter* TECkit_Converter; #if defined(__cplusplus) extern "C" { #endif #ifdef _WIN32 /* MS compiler has predefined _WIN32, so assume Windows target */ #ifdef INPUT #undef INPUT #endif #include <windows.h> #undef WINAPI #define WINAPI #define EXPORTED #else /* not the MS compiler, so try Metrowerks' platform macros */ #ifndef __APPLE__ #if defined __dest_os && (__dest_os == __win32_os) #include <windef.h> #endif #endif #endif #ifndef WINAPI #define WINAPI #define CALLBACK #endif /* this seems to be needed for a gcc-mingw32 build to work... */ #ifndef EXPORTED #ifdef _WIN32 #define EXPORTED __declspec(dllexport) #else #define EXPORTED #endif #endif /* Create a converter object from a compiled mapping */ TECkit_Status WINAPI EXPORTED TECkit_CreateConverter( Byte* mapping, UInt32 mappingSize, Byte mapForward, UInt16 sourceForm, UInt16 targetForm, TECkit_Converter* converter); /* Dispose of a converter object */ TECkit_Status WINAPI EXPORTED TECkit_DisposeConverter( TECkit_Converter converter); /* Read a name record or the flags from a converter object */ TECkit_Status WINAPI EXPORTED TECkit_GetConverterName( TECkit_Converter converter, UInt16 nameID, Byte* nameBuffer, UInt32 bufferSize, UInt32* nameLength); TECkit_Status WINAPI EXPORTED TECkit_GetConverterFlags( TECkit_Converter converter, UInt32* sourceFlags, UInt32* targetFlags); /* Reset a converter object, forgetting any buffered context/state */ TECkit_Status WINAPI EXPORTED TECkit_ResetConverter( TECkit_Converter converter); /* Convert text from a buffer in memory */ TECkit_Status WINAPI EXPORTED TECkit_ConvertBuffer( TECkit_Converter converter, const Byte* inBuffer, UInt32 inLength, UInt32* inUsed, Byte* outBuffer, UInt32 outLength, UInt32* outUsed, Byte inputIsComplete); /* Flush any buffered text from a converter object (at end of input, if inputIsComplete flag not set for ConvertBuffer) */ TECkit_Status WINAPI EXPORTED TECkit_Flush( TECkit_Converter converter, Byte* outBuffer, UInt32 outLength, UInt32* outUsed); /* Read name and flags directly from a compiled mapping, before making a converter object */ TECkit_Status WINAPI EXPORTED TECkit_GetMappingName( Byte* mapping, UInt32 mappingSize, UInt16 nameID, Byte* nameBuffer, UInt32 bufferSize, UInt32* nameLength); TECkit_Status WINAPI EXPORTED TECkit_GetMappingFlags( Byte* mapping, UInt32 mappingSize, UInt32* lhsFlags, UInt32* rhsFlags); /* Return the version number of the TECkit library */ UInt32 WINAPI EXPORTED TECkit_GetVersion(void); /* ***** New APIs for version 2.1 of the engine ***** A converter object now has options to control behavior when "unmappable" characters occur in the input text. 
Choices are: UseReplacementCharSilently - original behavior, just uses "replacement character" in the mapping UseReplacementCharWithWarning - do the same mapping, but return a warning in the status value DontUseReplacementChar - stop conversion, returning immediately on encountering an unmapped character */ #define kOptionsMask_UnmappedBehavior 0x000F #define kOptionsUnmapped_UseReplacementCharSilently 0x00 #define kOptionsUnmapped_UseReplacementCharWithWarning 0x01 #define kOptionsUnmapped_DontUseReplacementChar 0x02 #define kOptionsMask_InputComplete 0x0100 #define kOptionsComplete_InputIsComplete 0x0100 /* Convert text from a buffer in memory, with options (note that former inputIsComplete flag is now a bit in the options parameter) */ TECkit_Status WINAPI EXPORTED TECkit_ConvertBufferOpt( TECkit_Converter converter, const Byte* inBuffer, UInt32 inLength, UInt32* inUsed, Byte* outBuffer, UInt32 outLength, UInt32* outUsed, UInt32 inOptions, UInt32* lookaheadCount); /* Flush any buffered text from a converter object, with options (at end of input, if inputIsComplete flag not set for ConvertBuffer) */ TECkit_Status WINAPI EXPORTED TECkit_FlushOpt( TECkit_Converter converter, Byte* outBuffer, UInt32 outLength, UInt32* outUsed, UInt32 inOptions, UInt32* lookaheadCount); #if defined(__cplusplus) } /* extern "C" */ #endif #endif /* __TECkit_Engine_H__ */
2,737
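The header above documents the conversion workflow and also exposes query functions that operate directly on a compiled mapping. A hedged ctypes sketch of calling two of those declarations from Python; the shared-library name, the .tec mapping path and the zero "no error" status are assumptions, while the signatures and kNameID_LHS_Name come from the header:

# Hedged ctypes sketch of the query APIs declared above: read the library
# version and the LHS encoding name out of a compiled mapping.
import ctypes

tec = ctypes.CDLL("libTECkit.so")           # library name is platform dependent

tec.TECkit_GetVersion.restype = ctypes.c_uint32
print(hex(tec.TECkit_GetVersion()))

with open("SILGreek.tec", "rb") as f:       # hypothetical compiled mapping
    mapping = f.read()

buf = ctypes.create_string_buffer(256)
name_len = ctypes.c_uint32(0)
kNameID_LHS_Name = 0                        # from the header above

status = tec.TECkit_GetMappingName(
    mapping, ctypes.c_uint32(len(mapping)),
    ctypes.c_uint16(kNameID_LHS_Name),
    buf, ctypes.c_uint32(ctypes.sizeof(buf)),
    ctypes.byref(name_len),
)
if status == 0:                             # 0 assumed to mean "no error"
    print(buf.raw[: name_len.value].decode("ascii", "replace"))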
496
<gh_stars>100-1000 #ifndef SIMIT_FIR_REWRITER_H #define SIMIT_FIR_REWRITER_H #include <memory> #include "fir.h" #include "fir_visitor.h" namespace simit { namespace fir { struct FIRRewriter : public FIRVisitor { virtual void visit(Program::Ptr); virtual void visit(StmtBlock::Ptr); virtual void visit(RangeIndexSet::Ptr op) { node = op; } virtual void visit(SetIndexSet::Ptr op) { node = op; } virtual void visit(DynamicIndexSet::Ptr op) { node = op; } virtual void visit(ElementType::Ptr op) { node = op; } virtual void visit(Endpoint::Ptr); virtual void visit(HomogeneousEdgeSetType::Ptr); virtual void visit(HeterogeneousEdgeSetType::Ptr); virtual void visit(GridSetType::Ptr); virtual void visit(TupleElement::Ptr); virtual void visit(NamedTupleType::Ptr); virtual void visit(TupleLength::Ptr op) { node = op; } virtual void visit(UnnamedTupleType::Ptr); virtual void visit(ScalarType::Ptr op) { node = op; } virtual void visit(NDTensorType::Ptr); virtual void visit(OpaqueType::Ptr op) { node = op; } virtual void visit(IVarType::Ptr op) { node = op; } virtual void visit(Identifier::Ptr op) { node = op; } virtual void visit(IdentDecl::Ptr); virtual void visit(ElementTypeDecl::Ptr); virtual void visit(GenericParam::Ptr op) { node = op; } virtual void visit(FuncDecl::Ptr); virtual void visit(VarDecl::Ptr); virtual void visit(IVarDecl::Ptr); virtual void visit(WhileStmt::Ptr); virtual void visit(IfStmt::Ptr); virtual void visit(IndexSetDomain::Ptr); virtual void visit(RangeDomain::Ptr); virtual void visit(ForStmt::Ptr); virtual void visit(PrintStmt::Ptr); virtual void visit(ExprStmt::Ptr); virtual void visit(AssignStmt::Ptr); virtual void visit(Slice::Ptr op) { node = op; } virtual void visit(ExprParam::Ptr); virtual void visit(MapExpr::Ptr); virtual void visit(OrExpr::Ptr); virtual void visit(AndExpr::Ptr); virtual void visit(XorExpr::Ptr); virtual void visit(EqExpr::Ptr); virtual void visit(NotExpr::Ptr); virtual void visit(AddExpr::Ptr); virtual void visit(SubExpr::Ptr); virtual void visit(MulExpr::Ptr); virtual void visit(DivExpr::Ptr); virtual void visit(LeftDivExpr::Ptr); virtual void visit(ElwiseMulExpr::Ptr); virtual void visit(ElwiseDivExpr::Ptr); virtual void visit(NegExpr::Ptr); virtual void visit(ExpExpr::Ptr); virtual void visit(TransposeExpr::Ptr); virtual void visit(CallExpr::Ptr); virtual void visit(TensorReadExpr::Ptr); virtual void visit(SetReadExpr::Ptr); virtual void visit(NamedTupleReadExpr::Ptr); virtual void visit(UnnamedTupleReadExpr::Ptr); virtual void visit(FieldReadExpr::Ptr); virtual void visit(ParenExpr::Ptr); virtual void visit(VarExpr::Ptr op) { node = op; } virtual void visit(IntLiteral::Ptr op) { node = op; } virtual void visit(FloatLiteral::Ptr op) { node = op; } virtual void visit(BoolLiteral::Ptr op) { node = op; } virtual void visit(ComplexLiteral::Ptr op) { node = op; } virtual void visit(StringLiteral::Ptr op) { node = op; } virtual void visit(IntVectorLiteral::Ptr op) { node = op; } virtual void visit(FloatVectorLiteral::Ptr op) { node = op; } virtual void visit(ComplexVectorLiteral::Ptr op) { node = op; } virtual void visit(NDTensorLiteral::Ptr); virtual void visit(ApplyStmt::Ptr); virtual void visit(Test::Ptr); template <typename T = Program> std::shared_ptr<T> rewrite(std::shared_ptr<T> ptr) { node.reset(); ptr->accept(this); auto ret = std::static_pointer_cast<T>(node); node.reset(); return ret; } private: virtual void visitUnaryExpr(UnaryExpr::Ptr); virtual void visitBinaryExpr(BinaryExpr::Ptr); virtual void visitNaryExpr(NaryExpr::Ptr); protected: 
FIRNode::Ptr node; }; } } #endif
1,283
12,718
<reponame>aruniiird/zig<filename>lib/libc/musl/src/thread/pthread_setname_np.c<gh_stars>1000+ #define _GNU_SOURCE #include <fcntl.h> #include <string.h> #include <unistd.h> #include <sys/prctl.h> #include "pthread_impl.h" int pthread_setname_np(pthread_t thread, const char *name) { int fd, cs, status = 0; char f[sizeof "/proc/self/task//comm" + 3*sizeof(int)]; size_t len; if ((len = strnlen(name, 16)) > 15) return ERANGE; if (thread == pthread_self()) return prctl(PR_SET_NAME, (unsigned long)name, 0UL, 0UL, 0UL) ? errno : 0; snprintf(f, sizeof f, "/proc/self/task/%d/comm", thread->tid); pthread_setcancelstate(PTHREAD_CANCEL_DISABLE, &cs); if ((fd = open(f, O_WRONLY)) < 0 || write(fd, name, len) < 0) status = errno; if (fd >= 0) close(fd); pthread_setcancelstate(cs, 0); return status; }
355
12,718
//===----------------------------------------------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// #ifndef _LIBCPP___ALGORITHM_EQUAL_RANGE_H #define _LIBCPP___ALGORITHM_EQUAL_RANGE_H #include <__config> #include <__algorithm/comp.h> #include <__algorithm/comp_ref_type.h> #include <__algorithm/half_positive.h> #include <__algorithm/lower_bound.h> #include <__algorithm/upper_bound.h> #include <iterator> #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER) #pragma GCC system_header #endif _LIBCPP_PUSH_MACROS #include <__undef_macros> _LIBCPP_BEGIN_NAMESPACE_STD template <class _Compare, class _ForwardIterator, class _Tp> _LIBCPP_CONSTEXPR_AFTER_CXX17 pair<_ForwardIterator, _ForwardIterator> __equal_range(_ForwardIterator __first, _ForwardIterator __last, const _Tp& __value_, _Compare __comp) { typedef typename iterator_traits<_ForwardIterator>::difference_type difference_type; difference_type __len = _VSTD::distance(__first, __last); while (__len != 0) { difference_type __l2 = _VSTD::__half_positive(__len); _ForwardIterator __m = __first; _VSTD::advance(__m, __l2); if (__comp(*__m, __value_)) { __first = ++__m; __len -= __l2 + 1; } else if (__comp(__value_, *__m)) { __last = __m; __len = __l2; } else { _ForwardIterator __mp1 = __m; return pair<_ForwardIterator, _ForwardIterator> ( _VSTD::__lower_bound<_Compare>(__first, __m, __value_, __comp), _VSTD::__upper_bound<_Compare>(++__mp1, __last, __value_, __comp) ); } } return pair<_ForwardIterator, _ForwardIterator>(__first, __first); } template <class _ForwardIterator, class _Tp, class _Compare> _LIBCPP_NODISCARD_EXT inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17 pair<_ForwardIterator, _ForwardIterator> equal_range(_ForwardIterator __first, _ForwardIterator __last, const _Tp& __value_, _Compare __comp) { typedef typename __comp_ref_type<_Compare>::type _Comp_ref; return _VSTD::__equal_range<_Comp_ref>(__first, __last, __value_, __comp); } template <class _ForwardIterator, class _Tp> _LIBCPP_NODISCARD_EXT inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17 pair<_ForwardIterator, _ForwardIterator> equal_range(_ForwardIterator __first, _ForwardIterator __last, const _Tp& __value_) { return _VSTD::equal_range(__first, __last, __value_, __less<typename iterator_traits<_ForwardIterator>::value_type, _Tp>()); } _LIBCPP_END_NAMESPACE_STD _LIBCPP_POP_MACROS #endif // _LIBCPP___ALGORITHM_EQUAL_RANGE_H
1,274
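The __equal_range helper above narrows a binary search until the probe compares equivalent to the value, then returns the lower_bound/upper_bound pair. The semantics (not the libc++ implementation) can be illustrated with Python's bisect module:

# Sketch of equal_range semantics on a sorted sequence: the result is the
# half-open interval [lower_bound, upper_bound) of elements equivalent to x.
from bisect import bisect_left, bisect_right

def equal_range(a, x):
    return bisect_left(a, x), bisect_right(a, x)

a = [1, 2, 2, 2, 3, 5, 8]
lo, hi = equal_range(a, 2)
print(lo, hi)        # 1 4
print(a[lo:hi])      # [2, 2, 2]
lo, hi = equal_range(a, 4)
print(lo, hi)        # 5 5  (empty range at the insertion point for 4)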
1,056
<reponame>Antholoj/netbeans /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.glassfish.common; import java.awt.event.ActionEvent; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.util.concurrent.atomic.AtomicReference; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.AbstractAction; import javax.swing.Action; import org.openide.DialogDisplayer; import org.openide.NotifyDescriptor; import org.openide.util.ImageUtilities; import org.openide.util.Mutex; import org.openide.util.NbBundle; import org.openide.util.RequestProcessor; import org.openide.windows.IOProvider; import org.openide.windows.InputOutput; import org.openide.windows.OutputWriter; /** * @author <NAME> */ public class SimpleIO { /** * Time in milliseconds to wait between checks of the input stream. */ private static final int DELAY = 1000; private final String name; private final InputOutput io; private final CancelAction cancelAction; private final AtomicReference<Process> process; public SimpleIO(String displayName, Process task) { name = displayName; process = new AtomicReference<Process>(task); cancelAction = new CancelAction(); io = IOProvider.getDefault().getIO(displayName, new Action [] { cancelAction }); io.select(); } /** * Reads newly included InputStreams * * @param inputStreams InputStreams to read */ public void readInputStreams(InputStream... inputStreams) { RequestProcessor rp = RequestProcessor.getDefault(); for(InputStream inputStream : inputStreams){ rp.post(new IOReader(inputStream)); } } /** * Writes a string to the output window * * @param s string to be written */ public synchronized void write(String s) { OutputWriter writer = io.getOut(); writer.print(s); writer.flush(); } /** * Selects output panel */ public synchronized void selectIO() { io.select(); } /** * Closes the output panel */ public synchronized void closeIO() { // Don't close the window when finished -- in case of install or launching // failures, it makes problems easier for the user to diagnose. process.set(null); cancelAction.updateEnabled(); } /** * Thread to read an I/O stream and write it to the output window managed */ private class IOReader implements Runnable { private InputStream inputStream; public IOReader(InputStream inputStream) { this.inputStream = inputStream; } /** * Implementation of the Runnable interface.

Here all tailing is * performed */ public void run() { final String originalName = Thread.currentThread().getName(); try { Thread.currentThread().setName(this.getClass().getName() + " - " + inputStream); // NOI18N // create a reader from the input stream Reader reader = new BufferedReader(new InputStreamReader(inputStream)); // read from the input stream and put all the changes to the I/O window char [] chars = new char[1024]; int len = 0; while(len != -1) { while((len = reader.read(chars)) != -1) { write(new String(chars, 0, len)); selectIO(); if(!reader.ready()) { break; } } // sleep for a while when the stream is empty try { Thread.sleep(DELAY); } catch (InterruptedException e) { // ignore } } } catch (IOException ex) { Logger.getLogger("glassfish").log(Level.INFO, ex.getLocalizedMessage(), ex); // NOI18N } finally { try { inputStream.close(); } catch (IOException ex) { Logger.getLogger("glassfish").log(Level.INFO, ex.getLocalizedMessage(), ex); // NOI18N } Thread.currentThread().setName(originalName); } } } /** This action will be displayed in the server output window */ public class CancelAction extends AbstractAction { private static final String PROP_ENABLED = "enabled"; // NOI18N private static final String ICON = "org/netbeans/modules/glassfish/common/resources/stop.png"; // NOI18N public CancelAction() { super(NbBundle.getMessage(SimpleIO.class, "CTL_Cancel"),ImageUtilities.loadImageIcon(ICON, false)); // NOI18N putValue(SHORT_DESCRIPTION, NbBundle.getMessage(SimpleIO.class, "LBL_CancelDesc")); // NOI18N } public void actionPerformed(ActionEvent e) { if(process.get() != null) { String message = NbBundle.getMessage(SimpleIO.class, "MSG_QueryCancel", name); // NOI18N NotifyDescriptor nd = new NotifyDescriptor.Confirmation(message, NotifyDescriptor.YES_NO_OPTION, NotifyDescriptor.QUESTION_MESSAGE); if(DialogDisplayer.getDefault().notify(nd) == NotifyDescriptor.YES_OPTION) { Process p = process.getAndSet(null); if(p != null) { p.destroy(); } else { Logger.getLogger("glassfish").log(Level.FINEST, "Process handle unexpectedly null, cancel aborted."); // NOI18N } } } } @Override public boolean isEnabled() { return process.get() != null; } public void updateEnabled() { Mutex.EVENT.readAccess(new Runnable() { public void run() { firePropertyChange(PROP_ENABLED, null, isEnabled() ? Boolean.TRUE : Boolean.FALSE); } }); } } }
3,246
365
import pytest import torch from summarize_from_feedback import tasks from summarize_from_feedback.query_response_model import PADDING_TOKEN from summarize_from_feedback.utils.assertions import assert_eq class TestEncoder: def encode(self, text): return [ord(x) for x in text] def decode(self, tokens): return "".join([chr(x) for x in tokens]) def test_process_query(): h = tasks.TaskQueryHParams(length=10, format_str="<{query}>") encoder = TestEncoder() query = "123456789abc" assert_eq( tasks.process_query(dict(query=query), encoder=encoder, hparams=h, pad_sequence=[0, 0, 0]), dict(tokens=encoder.encode("<" + query[:8] + ">")), ) with pytest.raises(KeyError): tasks.process_query(dict(x=query), encoder=encoder, hparams=h, pad_sequence=[0, 0, 0]) query = "12345a7" with pytest.raises(AssertionError): tasks.process_query(query, encoder=encoder, hparams=h, pad_sequence=[0, 0, 0]) h.pad_side = "left" assert_eq( tasks.process_query(query, encoder=encoder, hparams=h, pad_sequence=[0, 0, 0]), dict(tokens=[0] + encoder.encode("<" + query + ">")), ) def test_process_response(): encoder = TestEncoder() def test_response(h, response, expected_processed, expected_decoded): response_encoder = tasks.ResponseEncoder(h, encoder) processed_tensor = response_encoder.process_responses(torch.LongTensor([response])) processed = processed_tensor.numpy()[0] assert_eq(processed, expected_processed) decoded = response_encoder.decode_response(processed) assert_eq(decoded, expected_decoded) assert_eq(processed, response_encoder.encode_response(expected_decoded)) decoded = response_encoder.decode_responses(processed_tensor) assert_eq(decoded, [expected_decoded]) test_response( tasks.TaskResponseHParams(length=10, truncate_token=None), encoder.encode("123456789a"), encoder.encode("123456789a"), "123456789a", ) test_response( tasks.TaskResponseHParams(length=10, truncate_token=ord("5")), encoder.encode("123456789a"), encoder.encode("12345") + [PADDING_TOKEN] * 5, "1234", ) def test_encode_response(): encoder = TestEncoder() def test_encode_response(h, response, expected_processed=None, allow_truncate=False): response_encoder = tasks.ResponseEncoder(h, encoder) processed = response_encoder.encode_response(response, allow_truncate=allow_truncate) if expected_processed is not None: assert_eq(expected_processed, processed) test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=None), "123456789a", encoder.encode("123456789a"), ) test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=None), "123456789", encoder.encode("123456789") + [PADDING_TOKEN], ) with pytest.raises(AssertionError): test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=None), "123456789ab", encoder.encode("123456789a"), ) test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=None), "123456789ab", encoder.encode("123456789a"), allow_truncate=True, ) test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=ord("5")), "1234", encoder.encode("12345") + [PADDING_TOKEN] * 5, ) with pytest.raises(AssertionError): test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=ord("b")), "123456789a", encoder.encode("12345") + [PADDING_TOKEN] * 5, ) test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=ord("b")), "123456789a", encoder.encode("123456789b"), allow_truncate=True, ) test_encode_response( tasks.TaskResponseHParams(length=10, truncate_token=ord("b")), "123456789aaaab", encoder.encode("123456789b"), allow_truncate=True, )
1,882
1,150
/** * Copyright 2018 Confluent Inc. * * Licensed under the GNU AFFERO GENERAL PUBLIC LICENSE, Version 3.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/AGPL-3.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package io.confluent.kpay.utils; import io.confluent.common.utils.TestUtils; import io.confluent.kpay.util.KafkaTopicClient; import io.confluent.kpay.util.KafkaTopicClientImpl; import org.apache.kafka.clients.admin.AdminClient; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.serialization.Deserializer; import org.apache.kafka.common.serialization.Serializer; import org.apache.kafka.common.serialization.StringSerializer; import org.apache.kafka.streams.StreamsConfig; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class IntegrationTestHarness { private static final long TEST_RECORD_FUTURE_TIMEOUT_MS = 5000; public EmbeddedSingleNodeKafkaCluster embeddedKafkaCluster; private AdminClient adminClient; private KafkaTopicClient topicClient; Map<String, Object> configMap = new HashMap<>(); String CONSUMER_GROUP_ID_PREFIX = "KWQ-test"; public void start() throws Exception { embeddedKafkaCluster = new EmbeddedSingleNodeKafkaCluster(); embeddedKafkaCluster.start(); configMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafkaCluster.bootstrapServers()); configMap.put("application.id", "KWQ"); configMap.put("commit.interval.ms", 0); configMap.put("cache.max.bytes.buffering", 0); configMap.put("auto.offset.reset", "earliest"); configMap.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath()); this.adminClient = AdminClient.create(configMap); this.topicClient = new KafkaTopicClientImpl(adminClient); } public void stop() { this.topicClient.close(); this.adminClient.close(); this.embeddedKafkaCluster.stop(); } public <V> void produceData(String topicName, Map<String, V> recordsToPublish, Serializer<V> valueSerializer, Long timestamp) throws InterruptedException, TimeoutException, ExecutionException { createTopic(topicName, 1, 1); Properties producerConfig = properties(); KafkaProducer<String, V> producer = new KafkaProducer<>(producerConfig, new StringSerializer(), valueSerializer); Map<String, RecordMetadata> result = new HashMap<>(); for (Map.Entry<String, V> recordEntry : recordsToPublish.entrySet()) { Future<RecordMetadata> recordMetadataFuture = producer.send(buildRecord(topicName, timestamp, recordEntry)); result.put(recordEntry.getKey(), recordMetadataFuture.get(TEST_RECORD_FUTURE_TIMEOUT_MS, TimeUnit.MILLISECONDS)); } producer.close(); } public void createTopic(String topicName, int 
numPartitions, int replicationFactor) { topicClient.createTopic(topicName, numPartitions, (short) replicationFactor); } private <V> ProducerRecord<String, V> buildRecord(String topicName, Long timestamp, Map.Entry<String, V> recordEntry) { return new ProducerRecord<>(topicName, null, timestamp, recordEntry.getKey(), recordEntry.getValue()); } private Properties properties() { Properties producerConfig = new Properties(); producerConfig.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, configMap.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)); producerConfig.put(ProducerConfig.ACKS_CONFIG, "all"); producerConfig.put(ProducerConfig.RETRIES_CONFIG, 0); return producerConfig; } private long groupId = System.currentTimeMillis(); public <K, V> Map<K, V> consumeData(String topic, int expectedNumMessages, Deserializer<K> keyDeserializer, Deserializer<V> valueDeserializer, long resultsPollMaxTimeMs) { Properties consumerConfig = new Properties(); consumerConfig.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, configMap.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)); consumerConfig.put(ConsumerConfig.GROUP_ID_CONFIG, CONSUMER_GROUP_ID_PREFIX + groupId++); consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); System.out.println("IntTestHarness - ConsumerConfig: Topic:" + topic + " properties:" + consumerConfig); try (KafkaConsumer<K, V> consumer = new KafkaConsumer(consumerConfig, keyDeserializer, valueDeserializer)) { consumer.subscribe(Collections.singleton(topic)); Map<K, V> result = new HashMap<>(); int waitCount = 0; while (result.size() < expectedNumMessages && waitCount++ < 5) { for (ConsumerRecord<K, V> record : consumer.poll(resultsPollMaxTimeMs)) { if (record.value() != null) { result.put(record.key(), record.value()); } } } return result; } } public KafkaTopicClient getTopicClient() { return topicClient; } }
2,299
784
<filename>corev_apu/tb/riscv-isa-sim/riscv/insns/fcvt_q_wu.h require_extension('Q'); require_fp; softfloat_roundingMode = RM; WRITE_FRD(ui32_to_f128((uint32_t)RS1)); set_fp_exceptions;
90
348
{"nom":"Morsains","circ":"3ème circonscription","dpt":"Marne","inscrits":127,"abs":56,"votants":71,"blancs":11,"nuls":0,"exp":60,"res":[{"nuance":"REM","nom":"<NAME>","voix":42},{"nuance":"FN","nom":"<NAME>","voix":18}]}
88
4,901
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package libcore.java.time.zone; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import android.icu.util.BasicTimeZone; import android.icu.util.TimeZone; import android.icu.util.TimeZoneRule; import android.icu.util.TimeZoneTransition; import java.time.Duration; import java.time.Instant; import java.time.LocalDateTime; import java.time.Month; import java.time.ZoneOffset; import java.time.zone.ZoneOffsetTransition; import java.time.zone.ZoneRules; import java.time.zone.ZoneRulesProvider; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Set; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; /** * Test the {@link java.time.zone.IcuZoneRulesProvider}. * * It is indirectly tested via static methods in {@link ZoneRulesProvider} as all the relevant * methods are protected. This test verifies that the rules returned by that provider behave * equivalently to the ICU rules from which they are created. */ @RunWith(Parameterized.class) public class IcuZoneRulesProviderTest { @Parameterized.Parameters(name = "{0}") public static Collection getZoneIds() { Set<String> availableZoneIds = ZoneRulesProvider.getAvailableZoneIds(); assertFalse("no zones returned", availableZoneIds.isEmpty()); List<Object[]> availableZoneIdsAsArrays = new ArrayList<>(); for (String zoneId : availableZoneIds){ availableZoneIdsAsArrays.add(new Object[] {zoneId}); } return availableZoneIdsAsArrays; } private final String zoneId; public IcuZoneRulesProviderTest(final String zoneId) { this.zoneId = zoneId; } /** * Verifies that ICU and java.time return the same transitions before and after a pre-selected * set of instants in time. */ @Test public void testTransitionsNearInstants() { // An arbitrary set of instants at which to test the offsets in both implementations. Instant[] instants = new Instant[] { LocalDateTime.of(1900, Month.DECEMBER, 24, 12, 0).toInstant(ZoneOffset.UTC), LocalDateTime.of(1970, Month.JANUARY, 1, 2, 3).toInstant(ZoneOffset.UTC), LocalDateTime.of(1980, Month.FEBRUARY, 4, 5, 6).toInstant(ZoneOffset.UTC), LocalDateTime.of(1990, Month.MARCH, 7, 8, 9).toInstant(ZoneOffset.UTC), LocalDateTime.of(2000, Month.APRIL, 10, 11, 12).toInstant(ZoneOffset.UTC), LocalDateTime.of(2016, Month.MAY, 13, 14, 15).toInstant(ZoneOffset.UTC), LocalDateTime.of(2020, Month.JUNE, 16, 17, 18).toInstant(ZoneOffset.UTC), LocalDateTime.of(2100, Month.JULY, 19, 20, 21).toInstant(ZoneOffset.UTC), // yes, adding "now" makes the test time-dependent, but it also ensures that future // updates don't break on the then-current date. Instant.now() }; // Coincidentally this test verifies that all zones can be converted to ZoneRules and // don't violate any of the assumptions of IcuZoneRulesProvider. 
ZoneRules rules = ZoneRulesProvider.getRules(zoneId, false); BasicTimeZone timeZone = (BasicTimeZone) TimeZone.getTimeZone(zoneId); int[] icuOffsets = new int[2]; for (Instant instant : instants) { ZoneOffset offset = rules.getOffset(instant); Duration daylightSavings = rules.getDaylightSavings(instant); timeZone.getOffset(instant.toEpochMilli(), false, icuOffsets); assertEquals("total offset for " + zoneId + " at " + instant, icuOffsets[1] + icuOffsets[0], offset.getTotalSeconds() * 1000); assertEquals("dst offset for " + zoneId + " at " + instant, icuOffsets[1], daylightSavings.toMillis()); ZoneOffsetTransition jtTrans; TimeZoneTransition icuTrans; jtTrans = rules.nextTransition(instant); icuTrans = timeZone.getNextTransition(instant.toEpochMilli(), false); while (isIcuOnlyTransition(icuTrans)) { icuTrans = timeZone.getNextTransition(icuTrans.getTime(), false); } assertEquivalent(icuTrans, jtTrans); jtTrans = rules.previousTransition(instant); icuTrans = timeZone.getPreviousTransition(instant.toEpochMilli(), false); // Find previous "real" transition. while (isIcuOnlyTransition(icuTrans)) { icuTrans = timeZone.getPreviousTransition(icuTrans.getTime(), false); } assertEquivalent(icuTrans, jtTrans); } } /** * Verifies that ICU and java.time rules return the same transitions between 1900 and 2100. */ @Test public void testAllTransitions() { final Instant start = LocalDateTime.of(1900, Month.JANUARY, 1, 12, 0) .toInstant(ZoneOffset.UTC); // Many timezones have ongoing DST changes, so they would generate transitions endlessly. // Pick a far-future end date to stop comparing in that case. final Instant end = LocalDateTime.of(2100, Month.DECEMBER, 31, 12, 0) .toInstant(ZoneOffset.UTC); ZoneRules rules = ZoneRulesProvider.getRules(zoneId, false); BasicTimeZone timeZone = (BasicTimeZone) TimeZone.getTimeZone(zoneId); Instant instant = start; while (instant.isBefore(end)) { ZoneOffsetTransition jtTrans; TimeZoneTransition icuTrans; jtTrans = rules.nextTransition(instant); icuTrans = timeZone.getNextTransition(instant.toEpochMilli(), false); while (isIcuOnlyTransition(icuTrans)) { icuTrans = timeZone.getNextTransition(icuTrans.getTime(), false); } assertEquivalent(icuTrans, jtTrans); if (jtTrans == null) { break; } instant = jtTrans.getInstant(); } } /** * Returns {@code true} iff this transition will only be returned by ICU code. * ICU reports "no-op" transitions where the raw offset and the dst savings * change by the same absolute value in opposite directions, java.time doesn't * return them, so find the next "real" transition. */ private static boolean isIcuOnlyTransition(TimeZoneTransition transition) { if (transition == null) { return false; } return transition.getFrom().getRawOffset() + transition.getFrom().getDSTSavings() == transition.getTo().getRawOffset() + transition.getTo().getDSTSavings(); } /** * Asserts that the ICU {@link TimeZoneTransition} is equivalent to the java.time {@link * ZoneOffsetTransition}. 
*/ private static void assertEquivalent( TimeZoneTransition icuTransition, ZoneOffsetTransition jtTransition) { if (icuTransition == null) { assertNull(jtTransition); return; } assertEquals("time of transition", Instant.ofEpochMilli(icuTransition.getTime()), jtTransition.getInstant()); TimeZoneRule from = icuTransition.getFrom(); TimeZoneRule to = icuTransition.getTo(); assertEquals("offset before", (from.getDSTSavings() + from.getRawOffset()) / 1000, jtTransition.getOffsetBefore().getTotalSeconds()); assertEquals("offset after", (to.getDSTSavings() + to.getRawOffset()) / 1000, jtTransition.getOffsetAfter().getTotalSeconds()); } }
3,307
1,144
// Copyright 2013 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef NINJA_LINE_PRINTER_H_ #define NINJA_LINE_PRINTER_H_ #include <stddef.h> #include <string> /// Prints lines of text, possibly overprinting previously printed lines /// if the terminal supports it. struct LinePrinter { LinePrinter(); bool is_smart_terminal() const { return smart_terminal_; } void set_smart_terminal(bool smart) { smart_terminal_ = smart; } enum LineType { INFO, WARNING, ERROR }; /// Outputs the given line. INFO output will be overwritten. /// WARNING and ERROR appear on a line to themselves. void Print(std::string to_print, LineType type); /// If there's an INFO line, keep it. If not, do nothing. void KeepInfoLine(); private: /// Whether we can do fancy terminal control codes. bool smart_terminal_; /// Whether the caret is at the beginning of a blank line. bool have_blank_line_; #ifdef _WIN32 void* console_; #endif }; #endif // NINJA_LINE_PRINTER_H_
450
784
<filename>sklego/mixture/bayesian_gmm_classifier.py import numpy as np from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.mixture import BayesianGaussianMixture from sklearn.utils import check_X_y from sklearn.utils.multiclass import unique_labels from sklearn.utils.validation import check_is_fitted, check_array, FLOAT_DTYPES class BayesianGMMClassifier(BaseEstimator, ClassifierMixin): def __init__( self, n_components=1, covariance_type="full", tol=0.001, reg_covar=1e-06, max_iter=100, n_init=1, init_params="kmeans", weight_concentration_prior_type="dirichlet_process", weight_concentration_prior=None, mean_precision_prior=None, mean_prior=None, degrees_of_freedom_prior=None, covariance_prior=None, random_state=None, warm_start=False, verbose=0, verbose_interval=10, ): """ The BayesianGMMClassifier trains a Gaussian Mixture Model for each class in y on a dataset X. Once a density is trained for each class we can evaluate the likelihood scores to see which class is more likely. All parameters of the model are an exact copy of the parameters in scikit-learn. """ self.n_components = n_components self.covariance_type = covariance_type self.tol = tol self.reg_covar = reg_covar self.max_iter = max_iter self.n_init = n_init self.init_params = init_params self.weight_concentration_prior_type = weight_concentration_prior_type self.weight_concentration_prior = weight_concentration_prior self.mean_precision_prior = mean_precision_prior self.mean_prior = mean_prior self.degrees_of_freedom_prior = degrees_of_freedom_prior self.covariance_prior = covariance_prior self.random_state = random_state self.warm_start = warm_start self.verbose = verbose self.verbose_interval = verbose_interval def fit(self, X: np.array, y: np.array) -> "BayesianGMMClassifier": """ Fit the model using X, y as training data. :param X: array-like, shape=(n_columns, n_samples, ) training data. :param y: array-like, shape=(n_samples, ) training data. :return: Returns an instance of self. """ X, y = check_X_y(X, y, estimator=self, dtype=FLOAT_DTYPES) if X.ndim == 1: X = np.expand_dims(X, 1) self.gmms_ = {} self.classes_ = unique_labels(y) for c in self.classes_: subset_x, subset_y = X[y == c], y[y == c] mixture = BayesianGaussianMixture( n_components=self.n_components, covariance_type=self.covariance_type, tol=self.tol, reg_covar=self.reg_covar, max_iter=self.max_iter, n_init=self.n_init, init_params=self.init_params, weight_concentration_prior_type=self.weight_concentration_prior_type, weight_concentration_prior=self.weight_concentration_prior, mean_precision_prior=self.mean_precision_prior, mean_prior=self.mean_prior, degrees_of_freedom_prior=self.degrees_of_freedom_prior, covariance_prior=self.covariance_prior, random_state=self.random_state, warm_start=self.warm_start, verbose=self.verbose, verbose_interval=self.verbose_interval, ) self.gmms_[c] = mixture.fit(subset_x, subset_y) return self def predict(self, X): check_is_fitted(self, ["gmms_", "classes_"]) X = check_array(X, estimator=self, dtype=FLOAT_DTYPES) return self.classes_[self.predict_proba(X).argmax(axis=1)] def predict_proba(self, X): X = check_array(X, estimator=self, dtype=FLOAT_DTYPES) check_is_fitted(self, ["gmms_", "classes_"]) res = np.zeros((X.shape[0], self.classes_.shape[0])) for idx, c in enumerate(self.classes_): res[:, idx] = self.gmms_[c].score_samples(X) return np.exp(res) / np.exp(res).sum(axis=1)[:, np.newaxis]
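The docstring above describes the approach: fit one Bayesian Gaussian mixture per class, then classify by comparing per-class likelihood scores. A minimal usage sketch follows, assuming the class is importable from sklego.mixture as in released scikit-lego versions; the dataset and parameters are arbitrary.

import numpy as np
from sklearn.datasets import make_blobs
from sklearn.model_selection import train_test_split
from sklego.mixture import BayesianGMMClassifier  # assumed import path

# Toy data: three well-separated clusters, one label per cluster.
X, y = make_blobs(n_samples=300, centers=3, random_state=42)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

clf = BayesianGMMClassifier(n_components=2, random_state=42)
clf.fit(X_train, y_train)

print("accuracy:", (clf.predict(X_test) == y_test).mean())
print("class probabilities of first row:", clf.predict_proba(X_test[:1]))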
2,072
10,002
<reponame>fangshun2004/calculator // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. #pragma once namespace Numbers { namespace DesignData { #ifdef _DEBUG // These class are to be consumed exclusively by Blend and the VS designer // with these classes Blend will be able to populate the controls // with the hardcoded strings so whoever is working on the UI can actually see how the app would look like // with semi-realistic data. // This data is to only be compiled in the debug build and it will not affect app functionality at all // so it does not need to be tested. It will have to be kept in sync with UnitConverterViewModel though // to ensure that the design experience is correct. // This class's code is run in the designer process so the less code it has the better. public ref class CategoryViewModel sealed : public Windows::UI::Xaml::Data::INotifyPropertyChanged { public: CategoryViewModel(Platform::String ^ name) : m_Name(name) , m_NegateVisibility(Windows::UI::Xaml::Visibility::Collapsed) { } CategoryViewModel(Platform::String ^ name, Windows::UI::Xaml::Visibility negateVisibility) : m_Name(name) , m_NegateVisibility(negateVisibility) { } OBSERVABLE_OBJECT(); OBSERVABLE_PROPERTY_RW(Platform::String ^, Name); OBSERVABLE_PROPERTY_RW(Windows::UI::Xaml::Visibility, NegateVisibility); }; public ref class UnitViewModel sealed : public Windows::UI::Xaml::Data::INotifyPropertyChanged { public: UnitViewModel(Platform::String ^ unit, Platform::String ^ abbr) : m_Name(unit) , m_Abbreviation(abbr) { } OBSERVABLE_OBJECT(); OBSERVABLE_PROPERTY_RW(Platform::String ^, Name); OBSERVABLE_PROPERTY_RW(Platform::String ^, Abbreviation); }; public ref class UnitConverterSupplementaryResultViewModel sealed : public Windows::UI::Xaml::Data::INotifyPropertyChanged { public: UnitConverterSupplementaryResultViewModel(Platform::String ^ value, Platform::String ^ unit, Platform::String ^ abbr) : m_Value(value) { m_Unit = ref new UnitViewModel(unit, abbr); } OBSERVABLE_OBJECT(); OBSERVABLE_PROPERTY_RW(Platform::String ^, Value); OBSERVABLE_PROPERTY_RW(UnitViewModel ^, Unit); }; public ref class UnitConverterViewModel sealed : public Windows::UI::Xaml::Data::INotifyPropertyChanged { public: UnitConverterViewModel() : m_Value1("Åy24") , m_Value2("Åy183") , m_Value1Active(true) , m_Value2Active(false) { m_SupplementaryResults = ref new Platform::Collections::Vector<UnitConverterSupplementaryResultViewModel ^>(); m_SupplementaryResults->Append(ref new UnitConverterSupplementaryResultViewModel("128", "Kilograms", "Kgs")); m_SupplementaryResults->Append(ref new UnitConverterSupplementaryResultViewModel("42.55", "Liters", "ÅyL")); m_SupplementaryResults->Append(ref new UnitConverterSupplementaryResultViewModel("1.5e3", "Gallons", "G")); m_SupplementaryResults->Append(ref new UnitConverterSupplementaryResultViewModel("1929", "Gigabyte", "GB")); m_Categories = ref new Platform::Collections::Vector<CategoryViewModel ^>(); m_Categories->Append(ref new CategoryViewModel("Volume")); m_Categories->Append(ref new CategoryViewModel("Temperature", Windows::UI::Xaml::Visibility::Visible)); m_CurrentCategory = ref new CategoryViewModel("ÅyTime"); m_Categories->Append(m_CurrentCategory); m_Categories->Append(ref new CategoryViewModel("Speed")); m_Units = ref new Platform::Collections::Vector<UnitViewModel ^>(); m_Unit1 = ref new UnitViewModel("ÅySeconds", "S"); m_Unit2 = ref new UnitViewModel("ÅyMinutes", "M"); m_Units->Append(ref new UnitViewModel("Miliseconds", "MS")); 
m_Units->Append(m_Unit1); m_Units->Append(m_Unit2); m_Units->Append(ref new UnitViewModel("Hours", "HRs")); } OBSERVABLE_OBJECT(); OBSERVABLE_PROPERTY_RW(Platform::String ^, Value1); OBSERVABLE_PROPERTY_RW(Platform::String ^, Value2); OBSERVABLE_PROPERTY_R(Windows::UI::Xaml::Interop::IBindableObservableVector ^, Categories); OBSERVABLE_PROPERTY_RW(CategoryViewModel ^, CurrentCategory); OBSERVABLE_PROPERTY_R(Windows::UI::Xaml::Interop::IBindableObservableVector ^, Units); OBSERVABLE_PROPERTY_RW(UnitViewModel ^, Unit1); OBSERVABLE_PROPERTY_RW(UnitViewModel ^, Unit2); OBSERVABLE_PROPERTY_RW(bool, Value1Active); OBSERVABLE_PROPERTY_RW(bool, Value2Active); OBSERVABLE_PROPERTY_R(Windows::UI::Xaml::Interop::IBindableObservableVector ^, SupplementaryResults); }; #endif } }
2,465
462
## # Copyright (c) 2013-2017 Apple Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## from twistedcaldav.ical import Component, diff_iCalStrs, normalize_iCalStr from twext.enterprise.jobs.jobitem import JobItem from twext.enterprise.jobs.workitem import WorkItem from txdav.common.datastore.sql_tables import scheduleActionFromSQL from twisted.internet import reactor """ Tests for txdav.caldav.datastore.utils """ from twisted.internet.defer import inlineCallbacks, returnValue from twisted.trial import unittest from txdav.caldav.datastore.scheduling.work import ScheduleOrganizerWork, \ ScheduleWorkMixin, ScheduleWork, ScheduleOrganizerSendWork from txdav.common.datastore.test.util import populateCalendarsFrom, CommonCommonTests class BaseWorkTests(CommonCommonTests, unittest.TestCase): """ Tests for scheduling work. """ @inlineCallbacks def setUp(self): yield super(BaseWorkTests, self).setUp() yield self.buildStoreAndDirectory() yield self.populate() @inlineCallbacks def populate(self): yield populateCalendarsFrom(self.requirements, self.storeUnderTest()) self.notifierFactory.reset() requirements = { "user01": { "calendar": { }, "inbox": { }, }, "user02": { "calendar": { }, "inbox": { }, }, "user03": { "calendar": { }, "inbox": { }, }, } def storeUnderTest(self): """ Create and return a L{CalendarStore} for testing. """ return self._sqlCalendarStore @inlineCallbacks def _runAllJobs(self): """ Run all outstanding jobs. """ # Run jobs jobs = yield JobItem.all(self.transactionUnderTest()) while jobs: yield jobs[0].run() yield self.commit() jobs = yield JobItem.all(self.transactionUnderTest()) yield self.commit() @inlineCallbacks def _runOneJob(self, work_type=None): """ Run the first outstanding jobs. """ # Run jobs jobs = yield JobItem.all(self.transactionUnderTest()) for job in jobs: if work_type is None or job.workType == work_type: yield job.run() break yield self.commit() @inlineCallbacks def createOrganizerEvent(self, organizer, ical, run_jobs=True): """ Create an organizer event and wait for the jobs to complete. """ cal = yield self.calendarUnderTest(name="calendar", home=organizer) yield cal.createCalendarObjectWithName("invite.ics", ical) yield self.commit() if run_jobs: yield self._runAllJobs() @inlineCallbacks def getOrganizerResource(self, organizer): """ Get the attendee's event. """ calobj = yield self.calendarObjectUnderTest(name="invite.ics", calendar_name="calendar", home=organizer) returnValue(calobj) @inlineCallbacks def setOrganizerEvent(self, organizer, ical, run_jobs=True): """ Set the organizer's event. """ calobj = yield self.getOrganizerResource(organizer) yield calobj.setComponent(ical) yield self.commit() if run_jobs: yield self._runAllJobs() @inlineCallbacks def getOrganizerEvent(self, organizer): """ Get the organizer's event. """ calobj = yield self.getOrganizerResource(organizer) comp = yield calobj.componentForUser() yield self.commit() returnValue(comp) @inlineCallbacks def getAttendeeResource(self, attendee): """ Get the attendee's event. 
""" cal = yield self.calendarUnderTest(name="calendar", home=attendee) calobjs = yield cal.calendarObjects() self.assertEqual(len(calobjs), 1) returnValue(calobjs[0]) @inlineCallbacks def setAttendeeEvent(self, attendee, ical, run_jobs=True): """ Set the attendee's event. """ calobj = yield self.getAttendeeResource(attendee) yield calobj.setComponent(ical) yield self.commit() if run_jobs: yield self._runAllJobs() @inlineCallbacks def getAttendeeEvent(self, attendee): """ Get the attendee's event. """ calobj = yield self.getAttendeeResource(attendee) comp = yield calobj.componentForUser() yield self.commit() returnValue(comp) class TestScheduleOrganizerWork(BaseWorkTests): """ Test creation of L{ScheduleOrganizerWork} items. """ calendar_old = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTART:20080601T120000Z DURATION:PT1H ORGANIZER:urn:uuid:user01 ATTENDEE:urn:uuid:user01 ATTENDEE:urn:uuid:user02 END:VEVENT END:VCALENDAR """) calendar_new = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER:urn:uuid:user01 ATTENDEE:urn:uuid:user01 ATTENDEE:urn:uuid:user02 END:VEVENT END:VCALENDAR """) @inlineCallbacks def test_create(self): """ Test that jobs associated with L{txdav.caldav.datastore.scheduling.work.ScheduleOrganizerWork} can be created and correctly removed. """ ScheduleWorkMixin._queued = 0 txn = self.transactionUnderTest() home = yield self.homeUnderTest(name="user01") yield ScheduleOrganizerWork.schedule( txn, "12345-67890", "create", home, None, None, self.calendar_new, "urn:uuid:user01", 2, True, ) yield self.commit() self.assertEqual(ScheduleWorkMixin._queued, 1) jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 1) work = yield jobs[0].workItem() self.assertTrue(isinstance(work, ScheduleOrganizerWork)) self.assertEqual(work.icalendarUID, "12345-67890") self.assertEqual(scheduleActionFromSQL[work.scheduleAction], "create") yield work.delete() yield jobs[0].delete() yield self.commit() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) work = yield ScheduleOrganizerWork.all(self.transactionUnderTest()) self.assertEqual(len(work), 0) baseWork = yield ScheduleWork.all(self.transactionUnderTest()) self.assertEqual(len(baseWork), 0) @inlineCallbacks def test_cascade_delete_cleanup(self): """ Test that when work associated with L{txdav.caldav.datastore.scheduling.work.ScheduleWork} is removed with the L{ScheduleWork} item being removed, the associated L{JobItem} runs and removes itself and the L{ScheduleWork}. 
""" ScheduleWorkMixin._queued = 0 txn = self.transactionUnderTest() home = yield self.homeUnderTest(name="user01") yield ScheduleOrganizerWork.schedule( txn, "12345-67890", "create", home, None, None, self.calendar_new, "urn:uuid:user01", 2, True, ) yield self.commit() self.assertEqual(ScheduleWorkMixin._queued, 1) jobs = yield JobItem.all(self.transactionUnderTest()) work = yield jobs[0].workItem() yield WorkItem.delete(work) yield self.commit() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 1) baseWork = yield ScheduleWork.all(self.transactionUnderTest()) self.assertEqual(len(baseWork), 1) self.assertEqual(baseWork[0].jobID, jobs[0].jobID) work = yield jobs[0].workItem() self.assertTrue(work is None) yield self.commit() yield JobItem.waitEmpty(self.storeUnderTest().newTransaction, reactor, 60) jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) work = yield ScheduleOrganizerWork.all(self.transactionUnderTest()) self.assertEqual(len(work), 0) baseWork = yield ScheduleWork.all(self.transactionUnderTest()) self.assertEqual(len(baseWork), 0) class TestScheduleOrganizerSendWork(BaseWorkTests): """ Test creation of L{ScheduleOrganizerSendWork} items. """ itip_new = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 METHOD:REQUEST PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER:urn:x-uid:user01 {attendees} END:VEVENT END:VCALENDAR """.format(attendees="\n".join(["ATTENDEE:urn:x-uid:user%02d" % i for i in range(1, 100)]))) @inlineCallbacks def test_create(self): """ Test that jobs associated with L{txdav.caldav.datastore.scheduling.work.ScheduleOrganizerSendWork} can be created and correctly removed. """ txn = self.transactionUnderTest() home = yield self.homeUnderTest(name="user01") yield ScheduleOrganizerSendWork.schedule( txn, "create", home, None, "urn:x-uid:user01", "urn:x-uid:user02", self.itip_new, True, 1000, ) jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 1) work = yield jobs[0].workItem() yield work.doWork() home2 = yield self.calendarUnderTest(home="user02", name="calendar") cobjs = yield home2.calendarObjects() self.assertEqual(len(cobjs), 1) # cal2 = yield cobjs[0].component() yield work.delete() yield jobs[0].delete() yield self.commit() class TestScheduleWork(BaseWorkTests): """ Test various scheduling work scenarios that are potential race conditions and could give rise to partstat mismatches between organizer and attendee, or cause work items to fail. """ def configure(self): super(TestScheduleWork, self).configure() # Enable the queue and make it slow self.patch(self.config.Scheduling.Options.WorkQueues, "Enabled", True) self.patch(self.config.Scheduling.Options.WorkQueues, "RequestDelaySeconds", 1000) self.patch(self.config.Scheduling.Options.WorkQueues, "ReplyDelaySeconds", 1000) self.patch(self.config.Scheduling.Options.WorkQueues, "AutoReplyDelaySeconds", 1000) self.patch(self.config.Scheduling.Options.WorkQueues, "AttendeeRefreshBatchDelaySeconds", 1000) self.patch(self.config.Scheduling.Options.WorkQueues, "AttendeeRefreshBatchIntervalSeconds", 1000) self.patch(JobItem, "failureRescheduleInterval", 1000) self.patch(JobItem, "lockRescheduleInterval", 1000) @inlineCallbacks def test_replyBeforeResourceDelete(self): """ Test that a reply is sent if an attendee changes an event, then immediately deletes it. 
""" organizer1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) attendee1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) organizer2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=DECLINED;SCHEDULE-STATUS=2.0:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) attendee2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=DECLINED:urn:x-uid:user02 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) yield self.createOrganizerEvent("user01", organizer1) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(attendee, attendee1, msg=diff_iCalStrs(attendee, attendee1)) yield self.setAttendeeEvent("user02", attendee2, run_jobs=False) calobj = yield self.getAttendeeResource("user02") yield calobj.remove() yield self.commit() yield self._runAllJobs() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) yield self.commit() organizer = yield self.getOrganizerEvent("user01") self.assertEqual(organizer, organizer2, msg=diff_iCalStrs(organizer, organizer2)) @inlineCallbacks def test_replyBeforeOrganizerEXDATE(self): """ Test that a reply is sent if an attendee changes an event, but the organizer exdate's the instance before the reply work is processed. 
""" organizer1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 RRULE:FREQ=DAILY END:VEVENT END:VCALENDAR """) attendee1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 RRULE:FREQ=DAILY TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) organizer2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 EXDATE:20080602T130000Z RRULE:FREQ=DAILY SUMMARY:Test END:VEVENT END:VCALENDAR """) attendee2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 RRULE:FREQ=DAILY TRANSP:TRANSPARENT END:VEVENT BEGIN:VEVENT UID:12345-67890 RECURRENCE-ID:20080602T130000Z DTSTAMP:20080601T130000Z DTSTART:20080602T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=DECLINED:urn:x-uid:user02 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) attendee3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 EXDATE:20080602T130000Z RRULE:FREQ=DAILY TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) yield self.createOrganizerEvent("user01", organizer1) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(attendee, attendee1, msg=diff_iCalStrs(attendee, attendee1)) yield self.setOrganizerEvent("user01", organizer2, run_jobs=False) yield self._runOneJob() yield self.setAttendeeEvent("user02", attendee2, run_jobs=False) yield self.setAttendeeEvent("user02", attendee3, run_jobs=False) yield self._runAllJobs() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) yield self.commit() @inlineCallbacks def test_replyBeforeOrganizerInconsequentialChange(self): """ Test that the organizer and attendee see the attendee's partstat change when the organizer makes an inconsequential change whilst the attendee reply is in progress. 
""" organizer1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) organizer2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 SUMMARY:Test END:VEVENT END:VCALENDAR """) organizer3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED;SCHEDULE-STATUS=2.0:urn:x-uid:user02 SEQUENCE:1 SUMMARY:Test END:VEVENT END:VCALENDAR """) attendee1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) attendee2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) attendee3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>;SCHEDULE-STATUS=1.2:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user02 SEQUENCE:1 SUMMARY:Test END:VEVENT END:VCALENDAR """) yield self.createOrganizerEvent("user01", organizer1) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(attendee, attendee1, msg=diff_iCalStrs(attendee, attendee1)) yield self.setOrganizerEvent("user01", organizer2, run_jobs=False) yield self._runOneJob() yield self.setAttendeeEvent("user02", attendee2, run_jobs=False) yield self._runAllJobs() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) yield self.commit() organizer = yield self.getOrganizerEvent("user01") self.assertEqual(normalize_iCalStr(organizer), normalize_iCalStr(organizer3), msg=diff_iCalStrs(organizer3, organizer)) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(normalize_iCalStr(attendee), normalize_iCalStr(attendee3), msg=diff_iCalStrs(attendee3, attendee)) @inlineCallbacks def 
test_replyBeforeOrganizerConsequentialChange(self): """ Test that the organizer and attendee see the attendee's partstat change when the organizer makes a consequential change whilst the attendee reply is in progress. """ organizer1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) organizer2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080602T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 SUMMARY:Test END:VEVENT END:VCALENDAR """) organizer3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080602T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;SCHEDULE-STATUS=1.2;X-CALENDARSERVER-RESET-PARTSTAT=1:urn:x-uid:user02 SEQUENCE:1 SUMMARY:Test END:VEVENT END:VCALENDAR """) attendee1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) attendee2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) attendee3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080602T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>;SCHEDULE-STATUS=1.2:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 SEQUENCE:1 SUMMARY:Test TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) yield self.createOrganizerEvent("user01", organizer1) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(attendee, attendee1, msg=diff_iCalStrs(attendee, attendee1)) yield self.setOrganizerEvent("user01", organizer2, run_jobs=False) yield self._runOneJob() yield self.setAttendeeEvent("user02", attendee2, run_jobs=False) yield self._runAllJobs() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) yield self.commit() organizer = yield self.getOrganizerEvent("user01") 
self.assertEqual(normalize_iCalStr(organizer), normalize_iCalStr(organizer3), msg=diff_iCalStrs(organizer3, organizer)) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(normalize_iCalStr(attendee), normalize_iCalStr(attendee3), msg=diff_iCalStrs(attendee3, attendee)) @inlineCallbacks def test_needsActionOrganizerChange(self): """ Test that if the organizer makes an inconsequential change and also changes the attendee partstat, then the new partstat is sent to the attendee. """ organizer1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) organizer2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED;SCHEDULE-STATUS=2.0:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) organizer3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 SUMMARY:Test END:VEVENT END:VCALENDAR """) attendee1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) attendee2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user02 END:VEVENT END:VCALENDAR """) attendee3 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>;SCHEDULE-STATUS=1.2:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user02 SEQUENCE:1 SUMMARY:Test TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) yield self.createOrganizerEvent("user01", organizer1) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(attendee, attendee1, msg=diff_iCalStrs(attendee, attendee1)) yield self.setAttendeeEvent("user02", attendee2) organizer = yield self.getOrganizerEvent("user01") self.assertEqual(normalize_iCalStr(organizer), 
normalize_iCalStr(organizer2), msg=diff_iCalStrs(organizer2, organizer)) yield self.setOrganizerEvent("user01", organizer3) attendee = yield self.getAttendeeEvent("user02") self.assertEqual(normalize_iCalStr(attendee), normalize_iCalStr(attendee3), msg=diff_iCalStrs(attendee3, attendee)) @inlineCallbacks def test_refreshBeforeOrganizerRemoved(self): """ Test that a refresh work runs even though the original event has had the Organizer removed. """ organizer1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user02 ATTENDEE;CN=User 03;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION:urn:x-uid:user03 END:VEVENT END:VCALENDAR """) attendee1 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H ORGANIZER;CN=User 01;EMAIL=<EMAIL>:urn:x-uid:user01 ATTENDEE;CN=User 01;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED:urn:x-uid:user01 ATTENDEE;CN=User 02;EMAIL=<EMAIL>;PARTSTAT=ACCEPTED;RSVP=TRUE:urn:x-uid:user02 ATTENDEE;CN=User 03;EMAIL=<EMAIL>;PARTSTAT=NEEDS-ACTION;RSVP=TRUE:urn:x-uid:user03 TRANSP:TRANSPARENT END:VEVENT END:VCALENDAR """) organizer2 = Component.fromString("""BEGIN:VCALENDAR VERSION:2.0 PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN BEGIN:VEVENT UID:12345-67890 DTSTAMP:20080601T130000Z DTSTART:20080601T130000Z DURATION:PT1H END:VEVENT END:VCALENDAR """) yield self.createOrganizerEvent("user01", organizer1) yield self.setAttendeeEvent("user02", attendee1, run_jobs=False) yield self._runOneJob() yield self.setOrganizerEvent("user01", organizer2, run_jobs=False) yield self._runOneJob(work_type="SCHEDULE_ORGANIZER_WORK") yield self._runAllJobs() jobs = yield JobItem.all(self.transactionUnderTest()) self.assertEqual(len(jobs), 0) yield self.commit()
14,289
3,269
# Time:  O(1)
# Space: O(1)

class Solution(object):
    def numberOfDays(self, Y, M):
        """
        :type Y: int
        :type M: int
        :rtype: int
        """
        # Gregorian leap year: divisible by 4 and not by 100, or divisible by 400.
        leap = 1 if ((Y % 4 == 0) and (Y % 100 != 0)) or (Y % 400 == 0) else 0
        # February is 28 + leap. For every other month, the 31-day months are
        # 1,3,5,7 and then 8,10,12; the % 7 restarts the alternation at August,
        # so 31 - (M-1) % 7 % 2 yields 31 or 30 in exactly that pattern.
        return (28+leap if (M == 2) else 31-(M-1)%7%2)
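The return expression packs the month lengths into arithmetic rather than a lookup table. A quick brute-force cross-check against the standard library confirms the formula for a handful of years, including the 1900 century case:

import calendar

def number_of_days(Y, M):
    # Same logic as the Solution above, written as a free function for testing.
    leap = 1 if ((Y % 4 == 0) and (Y % 100 != 0)) or (Y % 400 == 0) else 0
    return 28 + leap if M == 2 else 31 - (M - 1) % 7 % 2

for Y in (1900, 2000, 2019, 2020):
    for M in range(1, 13):
        assert number_of_days(Y, M) == calendar.monthrange(Y, M)[1], (Y, M)
print("formula matches calendar.monthrange for all tested years")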
174
1,615
// // MLNUIMyImageHandler.h // LuaNative // // Created by <NAME> on 2020/5/27. // Copyright © 2020 MoMo. All rights reserved. // #import <Foundation/Foundation.h> #import "MLNUIKit.h" NS_ASSUME_NONNULL_BEGIN @interface MLNUIMyImageHandler : NSObject <MLNUIImageLoaderProtocol> @end NS_ASSUME_NONNULL_END
125
509
#pragma once #include "GraphicsDevice/ITexture2D.h" #include "WebRTCMacros.h" namespace unity { namespace webrtc { class MTLTexture; struct MetalTexture2D : ITexture2D { public: id<MTLTexture> m_texture; MetalTexture2D(uint32_t w, uint32_t h, id<MTLTexture> tex); virtual ~MetalTexture2D(); inline virtual void* GetNativeTexturePtrV(); inline virtual const void* GetNativeTexturePtrV() const; inline virtual void* GetEncodeTexturePtrV(); inline virtual const void* GetEncodeTexturePtrV() const; }; //--------------------------------------------------------------------------------------------------------------------- void* MetalTexture2D::GetNativeTexturePtrV() { return m_texture; } const void* MetalTexture2D::GetNativeTexturePtrV() const { return m_texture; }; void* MetalTexture2D::GetEncodeTexturePtrV() { return m_texture; } const void* MetalTexture2D::GetEncodeTexturePtrV() const { return m_texture; } } // end namespace webrtc } // end namespace unity
355
2,963
// // Created by <NAME> on 17.03.18. // #include "../runtime/Runtime.h" #include <random> namespace s { class PRNG : public runtime::Object<PRNG> { public: std::mt19937_64 prng = std::mt19937_64(std::random_device()()); }; extern "C" PRNG* sPrngNew() { return PRNG::init(); } extern "C" runtime::Integer sPrngGetInteger(PRNG *prng, runtime::Integer from, runtime::Integer to) { return std::uniform_int_distribution<runtime::Integer>(from, to)(prng->prng); } extern "C" runtime::Real sPrngGetReal(PRNG *prng) { return std::uniform_real_distribution<runtime::Real>()(prng->prng); } extern "C" void sPrngDestruct(PRNG *prng) { prng->~PRNG(); } } // namespace s SET_INFO_FOR(s::PRNG, s, 1f3b0)
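The bindings above expose mt19937_64 through two operations: a uniform integer in an inclusive range and a uniform real in [0, 1). For comparison, the closest standard-library calls in Python are shown below; this is purely illustrative, uses a different engine (32-bit Mersenne Twister), and will not reproduce the C++ bit stream.

import random

prng = random.Random(12345)   # any seed; the C++ side seeds from std::random_device
print(prng.randint(1, 6))     # inclusive on both ends, like uniform_int_distribution(from, to)
print(prng.random())          # uniform in [0.0, 1.0), like uniform_real_distribution<>()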
297
480
/* * Copyright [2013-2021], Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.polardbx.optimizer.partition.pruning; import org.apache.calcite.rex.RexNode; import java.util.concurrent.atomic.AtomicInteger; /** * @author chenghui.lch */ public class PredConstExprReferenceInfo { protected Integer constExprId; protected String constExprDigest; protected RexNode constExpr; protected AtomicInteger referencedCount; protected PredConstExprReferenceInfo(Integer constExprId, RexNode constExpr) { this.constExprId = constExprId; this.constExpr = constExpr; this.constExprDigest = constExpr.toString(); this.referencedCount = new AtomicInteger(1); } public RexNode getConstExpr() { return constExpr; } public AtomicInteger getReferencedCount() { return referencedCount; } public Integer getConstExprId() { return constExprId; } public String getConstExprDigest() { return constExprDigest; } }
524
30,023
"""The IntelliFire integration.""" from __future__ import annotations from datetime import timedelta from aiohttp import ClientConnectionError from async_timeout import timeout from intellifire4py import ( IntellifireAsync, IntellifireControlAsync, IntellifirePollData, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData]): """Class to manage the polling of the fireplace API.""" def __init__( self, hass: HomeAssistant, read_api: IntellifireAsync, control_api: IntellifireControlAsync, ) -> None: """Initialize the Coordinator.""" super().__init__( hass, LOGGER, name=DOMAIN, update_interval=timedelta(seconds=15), ) self._read_api = read_api self._control_api = control_api async def _async_update_data(self) -> IntellifirePollData: LOGGER.debug("Calling update loop on IntelliFire") async with timeout(100): try: await self._read_api.poll() except (ConnectionError, ClientConnectionError) as exception: raise UpdateFailed from exception return self._read_api.data @property def read_api(self) -> IntellifireAsync: """Return the Status API pointer.""" return self._read_api @property def control_api(self) -> IntellifireControlAsync: """Return the control API.""" return self._control_api @property def device_info(self) -> DeviceInfo: """Return the device info.""" return DeviceInfo( manufacturer="Hearth and Home", model="IFT-WFM", name="IntelliFire Fireplace", identifiers={("IntelliFire", f"{self.read_api.data.serial}]")}, sw_version=self.read_api.data.fw_ver_str, configuration_url=f"http://{self.read_api.ip}/poll", )
888
627
<reponame>closer2/chrome-ec /* Copyright 2018 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #ifndef __HID_DEVICE_H #define __HID_DEVICE_H #include <stdint.h> #include <stddef.h> #include "hooks.h" #define HID_SUBSYS_MAX_PAYLOAD_SIZE 4954 enum HID_SUBSYS_ERR { HID_SUBSYS_ERR_NOT_READY = EC_ERROR_INTERNAL_FIRST + 0, HID_SUBSYS_ERR_TOO_MANY_HID_DEVICES = EC_ERROR_INTERNAL_FIRST + 1, }; typedef void * hid_handle_t; #define HID_INVALID_HANDLE NULL struct hid_callbacks { /* * function called during registration. * if returns non-zero, the registration will fail. */ int (*initialize)(const hid_handle_t handle); /* return size of data copied to buf. if returns <= 0, error */ int (*get_hid_descriptor)(const hid_handle_t handle, uint8_t *buf, const size_t buf_size); /* return size of data copied to buf. if return <= 0, error */ int (*get_report_descriptor)(const hid_handle_t handle, uint8_t *buf, const size_t buf_size); /* return size of data copied to buf. if return <= 0, error */ int (*get_feature_report)(const hid_handle_t handle, const uint8_t report_id, uint8_t *buf, const size_t buf_size); /* return tranferred data size. if returns <= 0, error */ int (*set_feature_report)(const hid_handle_t handle, const uint8_t report_id, const uint8_t *data, const size_t data_size); /* return size of data copied to buf. if returns <= 0, error */ int (*get_input_report)(const hid_handle_t handle, const uint8_t report_id, uint8_t *buf, const size_t buf_size); /* suspend/resume, if returns non-zero, error */ int (*resume)(const hid_handle_t handle); int (*suspend)(const hid_handle_t handle); }; struct hid_device { uint8_t dev_class; uint16_t pid; uint16_t vid; const struct hid_callbacks *cbs; }; /* * Do not call this function directly. * The function should be called only by HID_DEVICE_ENTRY() */ hid_handle_t hid_subsys_register_device(const struct hid_device *dev_info); /* send HID input report */ int hid_subsys_send_input_report(const hid_handle_t handle, uint8_t *buf, const size_t buf_size); /* store HID device specific data */ int hid_subsys_set_device_data(const hid_handle_t handle, void *data); /* retrieve HID device specific data */ void *hid_subsys_get_device_data(const hid_handle_t handle); #define HID_DEVICE_ENTRY(hid_dev) \ void _hid_dev_entry_##hid_dev(void) \ { \ hid_subsys_register_device(&(hid_dev)); \ } \ DECLARE_HOOK(HOOK_INIT, _hid_dev_entry_##hid_dev, HOOK_PRIO_LAST - 2) #endif /* __HID_DEVICE_H */
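HID_DEVICE_ENTRY registers a device with the subsystem from a HOOK_INIT hook, and the subsystem then drives it through the hid_callbacks table by handle. The registration-and-dispatch idea is language-agnostic; the Python sketch below shows the same pattern with hypothetical names and a trivial registry, not the EC code, and the report-descriptor bytes are just a truncated example.

_devices = []   # the subsystem's registry; a handle is simply an index here

def hid_register_device(callbacks):
    """Rough analogue of HID_DEVICE_ENTRY + hid_subsys_register_device."""
    handle = len(_devices)
    _devices.append(callbacks)
    if "initialize" in callbacks and callbacks["initialize"](handle) != 0:
        _devices.pop()   # non-zero return from initialize fails the registration
        return None
    return handle

def hid_get_report_descriptor(handle, buf_size=64):
    """Dispatch through the registered callback table, as the subsystem would."""
    return _devices[handle]["get_report_descriptor"](handle, buf_size)

# A "device driver" registering itself at init time.
keyboard = {
    "initialize": lambda handle: 0,
    "get_report_descriptor": lambda handle, buf_size: b"\x05\x01\x09\x06"[:buf_size],
}
kb_handle = hid_register_device(keyboard)
print(hid_get_report_descriptor(kb_handle))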
1,023
435
<reponame>amaajemyfren/data
{
    "description": "As a penniless academic I wanted to do \"big data\" for science. Open\nsource, Python, and simple patterns were the way forward. Staying on top\nof today's growing datasets is an arms race. Data analytics machinery\n\u2014clusters, NOSQL, visualization, Hadoop, machine learning, ...\u2014 can\nspread a team's resources thin. Focusing on simple patterns, lightweight\ntechnologies, and a good understanding of the applications gets us most\nof the way for a fraction of the cost. These patterns underlie\nthe design of Mayavi, for interactive 3D visualization, scikit-learn, for\neasy machine learning, and joblib for out-of-core and parallel computing.\n\nI will present a personal perspective on ten years of scientific data\nprocessing with Python. What are the emerging patterns in data\nprocessing? How can modern data-mining ideas be used without a big\nengineering team? What constraints and design trade-offs govern software\nprojects like scikit-learn, Mayavi, or joblib? How can we make the most\nout of distributed hardware with simple framework-less code?\n",
    "duration": 770,
    "recorded": "2015-09-18",
    "speakers": [
        "<NAME>"
    ],
    "thumbnail_url": "https://i.ytimg.com/vi/HjHuf2gRl-Y/hqdefault.jpg",
    "title": "Building a cutting-edge data processing environment on a budget",
    "videos": [
        {
            "type": "youtube",
            "url": "https://www.youtube.com/watch?v=HjHuf2gRl-Y"
        }
    ]
}
441
2,151
<reponame>zipated/src // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_BROWSING_DATA_COUNTERS_SITE_DATA_COUNTING_HELPER_H_ #define CHROME_BROWSER_BROWSING_DATA_COUNTERS_SITE_DATA_COUNTING_HELPER_H_ #include <set> #include "components/content_settings/core/common/content_settings_types.h" #include "net/cookies/canonical_cookie.h" #include "net/ssl/channel_id_store.h" #include "third_party/blink/public/mojom/quota/quota_types.mojom.h" class Profile; class BrowsingDataFlashLSOHelper; class HostContentSettingsMap; namespace net { class URLRequestContextGetter; } namespace content { struct LocalStorageUsageInfo; struct SessionStorageUsageInfo; } namespace storage { class SpecialStoragePolicy; } // Helper class that counts the number of unique origins, that are affected by // deleting "cookies and site data" in the CBD dialog. class SiteDataCountingHelper { public: explicit SiteDataCountingHelper( Profile* profile, base::Time begin, base::Callback<void(int)> completion_callback); ~SiteDataCountingHelper(); void CountAndDestroySelfWhenFinished(); private: void GetOriginsFromHostContentSettignsMap(HostContentSettingsMap* hcsm, ContentSettingsType type); void GetCookiesCallback(const net::CookieList& cookies); void GetSessionStorageUsageInfoCallback( const scoped_refptr<storage::SpecialStoragePolicy>& special_storage_policy, const std::vector<content::SessionStorageUsageInfo>& infos); void GetLocalStorageUsageInfoCallback( const scoped_refptr<storage::SpecialStoragePolicy>& special_storage_policy, const std::vector<content::LocalStorageUsageInfo>& infos); void GetQuotaOriginsCallback(const std::set<GURL>& origin_set, blink::mojom::StorageType type); void SitesWithFlashDataCallback(const std::vector<std::string>& sites); void GetChannelIDsOnIOThread( const scoped_refptr<net::URLRequestContextGetter>& rq_context); void GetChannelIDsCallback( const net::ChannelIDStore::ChannelIDList& channel_ids); void Done(const std::vector<GURL>& origins); Profile* profile_; base::Time begin_; base::Callback<void(int)> completion_callback_; int tasks_; std::set<std::string> unique_hosts_; scoped_refptr<BrowsingDataFlashLSOHelper> flash_lso_helper_; }; #endif // CHROME_BROWSER_BROWSING_DATA_COUNTERS_SITE_DATA_COUNTING_HELPER_H_
922
20,995
// Copyright 2020 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef V8_TOOLS_V8WINDBG_TEST_DEBUG_CALLBACKS_H_ #define V8_TOOLS_V8WINDBG_TEST_DEBUG_CALLBACKS_H_ #if !defined(UNICODE) || !defined(_UNICODE) #error Unicode not defined #endif #include <DbgEng.h> #include <DbgModel.h> #include <Windows.h> #include <crtdbg.h> #include <pathcch.h> #include <wrl/client.h> #include <string> namespace WRL = Microsoft::WRL; namespace v8 { namespace internal { namespace v8windbg_test { class MyOutput : public IDebugOutputCallbacks { public: MyOutput(WRL::ComPtr<IDebugClient5> p_client); ~MyOutput(); MyOutput(const MyOutput&) = delete; MyOutput& operator=(const MyOutput&) = delete; // Inherited via IDebugOutputCallbacks HRESULT __stdcall QueryInterface(REFIID InterfaceId, PVOID* Interface) override; ULONG __stdcall AddRef(void) override; ULONG __stdcall Release(void) override; HRESULT __stdcall Output(ULONG Mask, PCSTR Text) override; const std::string& GetLog() const { return log_; } void ClearLog() { log_.clear(); } private: WRL::ComPtr<IDebugClient5> p_client_; std::string log_; }; // For return values, see: // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/debug-status-xxx class MyCallback : public IDebugEventCallbacks { public: // Inherited via IDebugEventCallbacks HRESULT __stdcall QueryInterface(REFIID InterfaceId, PVOID* Interface) override; ULONG __stdcall AddRef(void) override; ULONG __stdcall Release(void) override; HRESULT __stdcall GetInterestMask(PULONG Mask) override; HRESULT __stdcall Breakpoint(PDEBUG_BREAKPOINT Bp) override; HRESULT __stdcall Exception(PEXCEPTION_RECORD64 Exception, ULONG FirstChance) override; HRESULT __stdcall CreateThread(ULONG64 Handle, ULONG64 DataOffset, ULONG64 StartOffset) override; HRESULT __stdcall ExitThread(ULONG ExitCode) override; HRESULT __stdcall ExitProcess(ULONG ExitCode) override; HRESULT __stdcall LoadModule(ULONG64 ImageFileHandle, ULONG64 BaseOffset, ULONG ModuleSize, PCSTR ModuleName, PCSTR ImageName, ULONG CheckSum, ULONG TimeDateStamp) override; HRESULT __stdcall UnloadModule(PCSTR ImageBaseName, ULONG64 BaseOffset) override; HRESULT __stdcall SystemError(ULONG Error, ULONG Level) override; HRESULT __stdcall SessionStatus(ULONG Status) override; HRESULT __stdcall ChangeDebuggeeState(ULONG Flags, ULONG64 Argument) override; HRESULT __stdcall ChangeEngineState(ULONG Flags, ULONG64 Argument) override; HRESULT __stdcall ChangeSymbolState(ULONG Flags, ULONG64 Argument) override; HRESULT __stdcall CreateProcessW(ULONG64 ImageFileHandle, ULONG64 Handle, ULONG64 BaseOffset, ULONG ModuleSize, PCSTR ModuleName, PCSTR ImageName, ULONG CheckSum, ULONG TimeDateStamp, ULONG64 InitialThreadHandle, ULONG64 ThreadDataOffset, ULONG64 StartOffset) override; }; } // namespace v8windbg_test } // namespace internal } // namespace v8 #endif // V8_TOOLS_V8WINDBG_TEST_DEBUG_CALLBACKS_H_
1,495
2,443
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alipay.sofa.rpc.common.utils;

import com.alipay.sofa.rpc.common.cache.ReflectCache;

/**
 * <p>Utility class for type conversion.</p>
 * <p>The calling side converts class descriptions to strings for transmission; the server side converts the strings back to concrete classes.</p>
 * <pre>
 *     Values are transmitted in a readable format rather than the JVM format ([Lxxx;):
 *     Plain classes:     java.lang.String, java.lang.String[]
 *     Primitive types:   int, int[]
 *     Inner classes:     com.alipay.example.Inner, com.alipay.example.Inner[]
 *     Anonymous classes: com.alipay.example.Xxx$1, com.alipay.example.Xxx$1[]
 *     Local classes:     com.alipay.example.Xxx$1Local, com.alipay.example.Xxx$1Local[]
 *     Member classes:    com.alipay.example.Xxx$Member, com.alipay.example.Xxx$Member[]
 *     At the same time, Class.forName can still resolve these names back into Class objects.
 * </pre>
 * <p>
 *
 * @author <a href=mailto:<EMAIL>>GengZhang</a>
 */
public class ClassTypeUtils {

    /**
     * Convert String[] to Class[]
     *
     * @param typeStrs array of type descriptions
     * @return Class[]
     */
    public static Class[] getClasses(String[] typeStrs) throws RuntimeException {
        if (CommonUtils.isEmpty(typeStrs)) {
            return new Class[0];
        } else {
            Class[] classes = new Class[typeStrs.length];
            for (int i = 0; i < typeStrs.length; i++) {
                classes[i] = getClass(typeStrs[i]);
            }
            return classes;
        }
    }

    /**
     * Convert String to Class
     *
     * @param typeStr type description
     * @return Class
     */
    public static Class getClass(String typeStr) {
        Class clazz = ReflectCache.getClassCache(typeStr);
        if (clazz == null) {
            if ("void".equals(typeStr)) {
                clazz = void.class;
            } else if ("boolean".equals(typeStr)) {
                clazz = boolean.class;
            } else if ("byte".equals(typeStr)) {
                clazz = byte.class;
            } else if ("char".equals(typeStr)) {
                clazz = char.class;
            } else if ("double".equals(typeStr)) {
                clazz = double.class;
            } else if ("float".equals(typeStr)) {
                clazz = float.class;
            } else if ("int".equals(typeStr)) {
                clazz = int.class;
            } else if ("long".equals(typeStr)) {
                clazz = long.class;
            } else if ("short".equals(typeStr)) {
                clazz = short.class;
            } else {
                String jvmName = canonicalNameToJvmName(typeStr);
                clazz = ClassUtils.forName(jvmName);
            }
            ReflectCache.putClassCache(typeStr, clazz);
        }
        return clazz;
    }

    /**
     * Convert a canonical description to a JVM description
     *
     * @param canonicalName e.g. int[]
     * @return JVM description, e.g. [I
     */
    public static String canonicalNameToJvmName(String canonicalName) {
        boolean isArray = canonicalName.endsWith("[]");
        if (isArray) {
            String t = "";
            // count how many array dimensions there are
            while (isArray) {
                canonicalName = canonicalName.substring(0, canonicalName.length() - 2);
                t += "[";
                isArray = canonicalName.endsWith("[]");
            }
            if ("boolean".equals(canonicalName)) {
                canonicalName = t + "Z";
            } else if ("byte".equals(canonicalName)) {
                canonicalName = t + "B";
            } else if ("char".equals(canonicalName)) {
                canonicalName = t + "C";
            } else if ("double".equals(canonicalName)) {
                canonicalName = t + "D";
            } else if ("float".equals(canonicalName)) {
                canonicalName = t + "F";
            } else if ("int".equals(canonicalName)) {
                canonicalName = t + "I";
            } else if ("long".equals(canonicalName)) {
                canonicalName = t + "J";
            } else if ("short".equals(canonicalName)) {
                canonicalName = t + "S";
            } else {
                canonicalName = t + "L" + canonicalName + ";";
            }
        }
        return canonicalName;
    }

    /**
     * Convert Class[] to String[] <br>
     * Note: the resulting strings may not be usable directly with Class.forName; use getClasses(String[]) to convert them back
     *
     * @param types Class[]
     * @return type descriptions
     * @see #getClasses(String[])
     */
    public static String[] getTypeStrs(Class[] types) {
        return getTypeStrs(types, false);
    }

    /**
     * Convert Class[] to String[] <br>
     * Note: the resulting strings may not be usable directly with Class.forName; use getClasses(String[]) to convert them back
     *
     * @param types     Class[]
     * @param javaStyle whether to use the JDK's own format: for int[], true returns [I while false returns int[]
     * @return type descriptions
     * @see #getClasses(String[])
     */
    public static String[] getTypeStrs(Class[] types, boolean javaStyle) {
        if (CommonUtils.isEmpty(types)) {
            return StringUtils.EMPTY_STRING_ARRAY;
        } else {
            String[] strings = new String[types.length];
            for (int i = 0; i < types.length; i++) {
                strings[i] = javaStyle ? types[i].getName() : getTypeStr(types[i]);
            }
            return strings;
        }
    }

    /**
     * Convert Class to String<br>
     * Note: the resulting string may not be usable directly with Class.forName; use getClass(String) to convert it back
     *
     * @param clazz Class
     * @return type description
     * @see #getClass(String)
     */
    public static String getTypeStr(Class clazz) {
        String typeStr = ReflectCache.getTypeStrCache(clazz);
        if (typeStr == null) {
            if (clazz.isArray()) {
                String name = clazz.getName(); // raw name, e.g. [Ljava.lang.String;
                typeStr = jvmNameToCanonicalName(name); // java.lang.String[]
            } else {
                typeStr = clazz.getName();
            }
            ReflectCache.putTypeStrCache(clazz, typeStr);
        }
        return typeStr;
    }

    /**
     * Convert a JVM description to a canonical description
     *
     * @param jvmName e.g. [I
     * @return canonical description, e.g. int[]
     */
    public static String jvmNameToCanonicalName(String jvmName) {
        boolean isArray = jvmName.charAt(0) == '[';
        if (isArray) {
            String cnName = StringUtils.EMPTY;
            // count how many array dimensions there are
            int i = 0;
            for (; i < jvmName.length(); i++) {
                if (jvmName.charAt(i) != '[') {
                    break;
                }
                cnName += "[]";
            }
            String componentType = jvmName.substring(i, jvmName.length());
            if ("Z".equals(componentType)) {
                cnName = "boolean" + cnName;
            } else if ("B".equals(componentType)) {
                cnName = "byte" + cnName;
            } else if ("C".equals(componentType)) {
                cnName = "char" + cnName;
            } else if ("D".equals(componentType)) {
                cnName = "double" + cnName;
            } else if ("F".equals(componentType)) {
                cnName = "float" + cnName;
            } else if ("I".equals(componentType)) {
                cnName = "int" + cnName;
            } else if ("J".equals(componentType)) {
                cnName = "long" + cnName;
            } else if ("S".equals(componentType)) {
                cnName = "short" + cnName;
            } else {
                cnName = componentType.substring(1, componentType.length() - 1) + cnName; // object type: strip the leading L and trailing ;
            }
            return cnName;
        }
        return jvmName;
    }
}
4,349
1,232
/*- * << * DBus * == * Copyright (C) 2016 - 2019 Bridata * == * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * >> */ package com.creditease.dbus.stream.db2.appender.bolt.processor.wrapper; import com.creditease.dbus.commons.*; import com.creditease.dbus.msgencoder.*; import com.creditease.dbus.stream.common.Constants; import com.creditease.dbus.stream.common.appender.bean.DataTable; import com.creditease.dbus.stream.common.appender.bean.EmitData; import com.creditease.dbus.stream.common.appender.bean.MetaVersion; import com.creditease.dbus.stream.common.appender.bolt.processor.BoltCommandHandler; import com.creditease.dbus.stream.common.appender.bolt.processor.BoltCommandHandlerHelper; import com.creditease.dbus.stream.common.appender.bolt.processor.CachedEncodeColumnProvider; import com.creditease.dbus.stream.common.appender.bolt.processor.listener.CommandHandlerListener; import com.creditease.dbus.stream.common.appender.enums.Command; import com.creditease.dbus.stream.common.appender.utils.Utils; import com.creditease.dbus.stream.common.tools.DateUtil; import com.creditease.dbus.stream.db2.appender.bolt.processor.vo.AppenderDataResults; import com.creditease.dbus.stream.db2.appender.enums.DbusDb2DmlType; import org.apache.avro.Schema; import org.apache.avro.generic.GenericRecord; import org.apache.storm.tuple.Tuple; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; import java.util.Properties; import java.util.Set; public class Db2WrapperDefaultHandler implements BoltCommandHandler { private Logger logger = LoggerFactory.getLogger(getClass()); private Set<String> noorderKeys = Constants.MessageBodyKey.noorderKeys; private CommandHandlerListener listener; private Tuple tuple; private MetaVersion version; private long offset; private String groupId; private DataTable table; public Db2WrapperDefaultHandler(CommandHandlerListener listener) { this.listener = listener; } @Override public void handle(Tuple tuple) { EmitData emitData = (EmitData) tuple.getValueByField(Constants.EmitFields.DATA); this.tuple = tuple; this.offset = emitData.get(EmitData.OFFSET); this.version = emitData.get(EmitData.VERSION); this.groupId = groupField(version.getSchema(), version.getTable()); this.table = emitData.get(EmitData.DATA_TABLE); MetaWrapper meta = version.getMeta(); List<AppenderDataResults> dataList = emitData.get(EmitData.MESSAGE); logger.debug("[BEGIN] receive data,offset:{}, dataList.size: {}", offset, dataList.size()); //********************************************************************* if (logger.isDebugEnabled()) { StringBuilder buf = new StringBuilder(version.getTable()).append(":"); for (MetaWrapper.MetaCell cell : meta.getColumns()) { buf.append(cell.getColumnName()).append(","); } logger.debug("[fields-order] {} before build message: {}", offset, buf.toString()); String fieldsOrderFromAppender = emitData.get("fields-order"); if (fieldsOrderFromAppender != null && !fieldsOrderFromAppender.equals(buf.toString())) { logger.error("[fields-order] fields 
order not matched.\nappenderBolt:{}\nkafkaBolt:{}", fieldsOrderFromAppender, buf.toString());
            }
        }
        //***********************************************************************

        int payloadCount = 0;
        int payloadSize = 0;
        int payloadMaxCount = getMaxCount();
        int payloadMaxSize = getMaxSize();

        DbusMessageBuilder builder = createBuilderWithSchema(version, meta);

        int decode = PropertiesHolder.getIntegerValue(Constants.Properties.CONFIGURE, Constants.ConfigureKey.BASE64_DECODE);

        // data masking
        EncodeColumnProvider provider = new CachedEncodeColumnProvider(version.getTableId());
        List<EncodeColumn> encodeColumns = provider.getColumns();
        final MetaVersion v = version;
        UmsEncoder encoder = new PluggableMessageEncoder(PluginManagerProvider.getManager(), (e, column, message) -> {
            BoltCommandHandlerHelper.onEncodeError(message, v, column, e);
        });

        for (AppenderDataResults appenderDataResults : dataList) {
            GenericRecord data = appenderDataResults.getGenericRecord();
            // PairWrapper<String, Object> wrapper = BoltCommandHandlerHelper.convertAvroRecord(data, noorderKeys);
            try {
                // For update messages, the before-image ("b") message would need to be handled first, then the normal flow continues
                // if (wrapper.getProperties(Constants.MessageBodyKey.OP_TYPE).toString().toLowerCase().equals("u") && this.table.getOutputBeforeUpdateFlg() != 0) {
                //     PairWrapper<String, Object> beforeWrapper = BoltCommandHandlerHelper.convertAvroRecordUseBeforeMap(data, noorderKeys);
                //     // build meta information
                //     List<Object> beforePayloads = new ArrayList<>();
                //     beforePayloads.add(beforeWrapper.getProperties(Constants.MessageBodyKey.POS));
                //     beforePayloads.add(beforeWrapper.getProperties(Constants.MessageBodyKey.OP_TS));
                //     beforePayloads.add(Constants.UmsMessage.BEFORE_UPDATE_OPERATION);
                //     beforePayloads.add(generateUmsUid());
                //     payloadSize += addPayloadColumns(meta, beforeWrapper, decode, beforePayloads, data);
                //     builder.appendPayload(beforePayloads.toArray());
                //     payloadCount++;
                // }

                // build meta information
                List<Object> payloads = new ArrayList<>();
                String format = Constants.DB2MessageBodyKey.DB2_CURTMSTP_FORMAT;
                Long time = DateUtil.convertStrToLong4Date(data.get(Constants.DB2MessageBodyKey.DB2_CURTMSTP).toString(), format);
                String offset = appenderDataResults.getOffset();
                // Long umsId = getOffsetNumCompensation() + time + Long.parseLong(offset);
                Long umsId = getOffsetNumCompensation() + Long.parseLong(offset);
                payloads.add(umsId);
                payloads.add(DateUtil.convertLongToStr4Date(time));

                // DB2 has three DML types: 1) PT: insert 2) DL: delete 3) UP: update
                String dmlType = DbusDb2DmlType.getDmlType(data.get(Constants.DB2MessageBodyKey.DB2_ENTTYP).toString());
                if (dmlType == null) {
                    logger.error("DB2_ENTTYP is error: [" + data.get(Constants.DB2MessageBodyKey.DB2_ENTTYP).toString() + "] ");
                }
                payloads.add(dmlType);
                payloads.add(generateUmsUid());

                payloadSize += addPayloadColumns(meta, payloads, data);
                builder.appendPayload(payloads.toArray());
                payloadCount++;

                // check whether the message payload count or size exceeds the configured limits
                if (payloadCount >= payloadMaxCount) {
                    logger.debug("Payload count out of limitation[{}]!", payloadMaxCount);
                    encoder.encode(builder.getMessage(), encodeColumns);
                    emitMessage(builder.getMessage());

                    builder = createBuilderWithSchema(version, meta);
                    payloadCount = 0;
                    payloadSize = 0;
                } else if (payloadSize >= payloadMaxSize) {
                    logger.debug("Payload size out of limitation[{}]!", payloadMaxSize);
                    encoder.encode(builder.getMessage(), encodeColumns);
                    emitMessage(builder.getMessage());

                    builder = createBuilderWithSchema(version, meta);
                    payloadCount = 0;
                    payloadSize = 0;
                }
            } catch (Exception e) {
                long errId = System.currentTimeMillis();
                logger.error("[{}]Build dbus message of table[{}.{}] error, abort this message.\noriginal value:\n{}\nmessage: {}",
                        errId, version.getSchema(), version.getTable(), data.toString(), e.getMessage(), e);
                BoltCommandHandlerHelper.onBuildMessageError(errId + "", version, e);
            }
        }

        // only write to kafka if the message payload list is not empty
        DbusMessage message = builder.getMessage();
        encoder.encode(message, encodeColumns);
        if (!message.getPayload().isEmpty()) {
            emitMessage(message);
        }
    }

    private String generateUmsUid() throws Exception {
        // generate the ums_uid
        return String.valueOf(listener.getZkService().nextValue(Utils.join(".", Utils.getDatasource().getDsName(),
                Constants.UmsMessage.NAMESPACE_INDISTINCTIVE_SCHEMA, Constants.UmsMessage.NAMESPACE_INDISTINCTIVE_TABLE,
                Constants.UmsMessage.NAMESPACE_INDISTINCTIVE_VERSION)));
    }

    private long getOffsetNumCompensation() {
        try {
            Properties properties = PropertiesHolder.getProperties(Constants.Properties.CONFIGURE);
            return Long.parseLong(properties.getOrDefault(Constants.ConfigureKey.DB2_OFFSET_COMPENSATION, 0).toString());
        } catch (Exception e) {
            throw new RuntimeException("Failed to load the configuration file", e);
        }
    }

    private int addPayloadColumns(MetaWrapper meta, List<Object> payloads, GenericRecord data) throws Exception {
        int payloadSize = 0;
        for (MetaWrapper.MetaCell cell : meta.getColumns()) {
            if (cell.isSupportedOnDb2()) {
                Schema schema = data.getSchema();
                Schema.Field field = schema.getField(cell.getColumnName());
                Object value = null;
                try {
                    value = data.get(field.name());
                } catch (Exception e) {
                    logger.error("{}", e);
                    logger.info("schema:{}", schema.toString());
                    logger.info("data: {}", data.toString());
                }
                payloads.add(value);
                if (value != null) {
                    payloadSize += value.toString().getBytes("utf-8").length;
                }
            }
        }
        return payloadSize;
    }

    private void emitMessage(DbusMessage message) {
        EmitData data = new EmitData();
        data.add(EmitData.MESSAGE, message);
        data.add(EmitData.VERSION, version);
        data.add(EmitData.GROUP_KEY, groupId);
        data.add(EmitData.OFFSET, offset);

        this.emit(listener.getOutputCollector(), tuple, groupId, data, Command.UNKNOWN_CMD);
    }

    /**
     * Creates a DbusMessageBuilder and generates the ums schema at the same time.
     */
    private DbusMessageBuilder createBuilderWithSchema(MetaVersion version, MetaWrapper meta) {
        DbusMessageBuilder builder = new DbusMessageBuilder();
        String namespace = builder.buildNameSpace(Utils.getDataSourceNamespace(), version.getSchema(), version.getTable(), version.getVersion());
        builder.build(DbusMessage.ProtocolType.DATA_INCREMENT_DATA, namespace, table.getBatchId());
        for (MetaWrapper.MetaCell cell : meta.getColumns()) {
            if (cell.isSupportedOnDb2()) {
                // use the original DB2 column name as the field name in the ums schema
                builder.appendSchema(cell.getOriginalColumnName(), DataType.convertDb2DataType(cell.getDataType()), cell.isNullable());
            }
        }
        // process the captured business data
        return builder;
    }

    private int getMaxCount() {
        return PropertiesHolder.getIntegerValue(Constants.Properties.CONFIGURE, Constants.ConfigureKey.UMS_PAYLOAD_MAX_COUNT);
    }

    private int getMaxSize() {
        return PropertiesHolder.getIntegerValue(Constants.Properties.CONFIGURE, Constants.ConfigureKey.UMS_PAYLOAD_MAX_SIZE);
    }
}
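The count/size checks inside handle() above implement a generic bounded-batching pattern: keep appending payloads to the current message and flush it as soon as either the payload count or the accumulated byte size reaches its configured limit, then start a fresh builder. In the handler itself the flush also runs the UMS encoder and emits to the downstream bolt. A standalone sketch of the pattern, with hypothetical names and no dependency on the DBus classes:

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class BoundedBatcher {
    // Flush the current batch whenever either limit is crossed, mirroring the
    // payloadMaxCount / payloadMaxSize checks in the handler above.
    public static List<List<String>> batch(List<String> records, int maxCount, int maxBytes) {
        List<List<String>> batches = new ArrayList<>();
        List<String> current = new ArrayList<>();
        int count = 0;
        int bytes = 0;
        for (String record : records) {
            current.add(record);
            count++;
            bytes += record.getBytes(StandardCharsets.UTF_8).length;
            if (count >= maxCount || bytes >= maxBytes) {
                batches.add(current);
                current = new ArrayList<>();
                count = 0;
                bytes = 0; // reset both counters once a fresh batch is started
            }
        }
        if (!current.isEmpty()) {
            batches.add(current); // trailing partial batch, like the final emit in handle()
        }
        return batches;
    }
}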
5,273
14,668
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_ANDROID_TAB_WEB_CONTENTS_DELEGATE_ANDROID_H_ #define CHROME_BROWSER_ANDROID_TAB_WEB_CONTENTS_DELEGATE_ANDROID_H_ #include <memory> #include "base/scoped_multi_source_observation.h" #include "components/embedder_support/android/delegate/web_contents_delegate_android.h" #include "components/find_in_page/find_result_observer.h" #include "components/find_in_page/find_tab_helper.h" #include "components/paint_preview/buildflags/buildflags.h" #include "printing/buildflags/buildflags.h" #include "third_party/blink/public/mojom/frame/blocked_navigation_types.mojom.h" namespace content { struct FileChooserParams; class WebContents; } namespace gfx { class Rect; class RectF; } namespace url { class Origin; } namespace android { // Chromium Android specific WebContentsDelegate. // Should contain any WebContentsDelegate implementations required by // the Chromium Android port but not to be shared with WebView. class TabWebContentsDelegateAndroid : public web_contents_delegate_android::WebContentsDelegateAndroid, public find_in_page::FindResultObserver { public: TabWebContentsDelegateAndroid(JNIEnv* env, jobject obj); TabWebContentsDelegateAndroid(const TabWebContentsDelegateAndroid&) = delete; TabWebContentsDelegateAndroid& operator=( const TabWebContentsDelegateAndroid&) = delete; ~TabWebContentsDelegateAndroid() override; void PortalWebContentsCreated(content::WebContents* portal_contents) override; void RunFileChooser(content::RenderFrameHost* render_frame_host, scoped_refptr<content::FileSelectListener> listener, const blink::mojom::FileChooserParams& params) override; void CreateSmsPrompt(content::RenderFrameHost*, const std::vector<url::Origin>&, const std::string& one_time_code, base::OnceClosure on_confirm, base::OnceClosure on_cancel) override; bool ShouldFocusLocationBarByDefault(content::WebContents* source) override; void FindReply(content::WebContents* web_contents, int request_id, int number_of_matches, const gfx::Rect& selection_rect, int active_match_ordinal, bool final_update) override; void FindMatchRectsReply(content::WebContents* web_contents, int version, const std::vector<gfx::RectF>& rects, const gfx::RectF& active_rect) override; content::JavaScriptDialogManager* GetJavaScriptDialogManager( content::WebContents* source) override; void AdjustPreviewsStateForNavigation( content::WebContents* web_contents, blink::PreviewsState* previews_state) override; void RequestMediaAccessPermission( content::WebContents* web_contents, const content::MediaStreamRequest& request, content::MediaResponseCallback callback) override; bool CheckMediaAccessPermission(content::RenderFrameHost* render_frame_host, const GURL& security_origin, blink::mojom::MediaStreamType type) override; void SetOverlayMode(bool use_overlay_mode) override; content::WebContents* OpenURLFromTab( content::WebContents* source, const content::OpenURLParams& params) override; bool ShouldResumeRequestsForCreatedWindow() override; void AddNewContents(content::WebContents* source, std::unique_ptr<content::WebContents> new_contents, const GURL& target_url, WindowOpenDisposition disposition, const gfx::Rect& initial_rect, bool user_gesture, bool* was_blocked) override; void OnDidBlockNavigation( content::WebContents* web_contents, const GURL& blocked_url, const GURL& initiator_url, blink::mojom::NavigationBlockedReason reason) override; void 
UpdateUserGestureCarryoverInfo( content::WebContents* web_contents) override; content::PictureInPictureResult EnterPictureInPicture( content::WebContents* web_contents, const viz::SurfaceId&, const gfx::Size&) override; void ExitPictureInPicture() override; bool IsBackForwardCacheSupported() override; bool IsPrerender2Supported() override; std::unique_ptr<content::WebContents> ActivatePortalWebContents( content::WebContents* predecessor_contents, std::unique_ptr<content::WebContents> portal_contents) override; device::mojom::GeolocationContext* GetInstalledWebappGeolocationContext() override; #if BUILDFLAG(ENABLE_PRINTING) void PrintCrossProcessSubframe( content::WebContents* web_contents, const gfx::Rect& rect, int document_cookie, content::RenderFrameHost* subframe_host) const override; #endif #if BUILDFLAG(ENABLE_PAINT_PREVIEW) void CapturePaintPreviewOfSubframe( content::WebContents* web_contents, const gfx::Rect& rect, const base::UnguessableToken& guid, content::RenderFrameHost* render_frame_host) override; #endif // find_in_page::FindResultObserver: void OnFindResultAvailable(content::WebContents* web_contents) override; void OnFindTabHelperDestroyed(find_in_page::FindTabHelper* helper) override; bool ShouldEnableEmbeddedMediaExperience() const; bool IsPictureInPictureEnabled() const; bool IsNightModeEnabled() const; bool IsForceDarkWebContentEnabled() const; bool CanShowAppBanners() const; bool IsTabLargeEnoughForDesktopSite() const; // Returns true if this tab is currently presented in the context of custom // tabs. Tabs can be moved between different activities so the returned value // might change over the lifetime of the tab. bool IsCustomTab() const; const GURL GetManifestScope() const; bool IsInstalledWebappDelegateGeolocation() const; private: std::unique_ptr<device::mojom::GeolocationContext> installed_webapp_geolocation_context_; base::ScopedMultiSourceObservation<find_in_page::FindTabHelper, find_in_page::FindResultObserver> find_result_observations_{this}; }; } // namespace android #endif // CHROME_BROWSER_ANDROID_TAB_WEB_CONTENTS_DELEGATE_ANDROID_H_
2,436
2,875
# -*- coding: utf-8 -*-
# @Time    : 2017/7/13 5:13 PM
# @Author  : play4fun
# @File    : findHomography.py
# @Software: PyCharm

"""
findHomography.py: use feature extraction together with findHomography from the calib3d module
to locate a known object inside a cluttered scene image.
"""

import numpy as np
import cv2
from matplotlib import pyplot as plt

MIN_MATCH_COUNT = 10

img1 = cv2.imread('../data/box.png', 0)  # queryImage
img2 = cv2.imread('../data/box_in_scene.png', 0)  # trainImage

# Initiate SIFT detector
sift = cv2.xfeatures2d.SIFT_create()

# find the keypoints and descriptors with SIFT
kp1, des1 = sift.detectAndCompute(img1, None)
kp2, des2 = sift.detectAndCompute(img2, None)

FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
search_params = dict(checks=50)

flann = cv2.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1, des2, k=2)

# store all the good matches as per Lowe's ratio test.
good = []
for m, n in matches:
    if m.distance < 0.7 * n.distance:
        good.append(m)

'''
We require at least MIN_MATCH_COUNT (10) good matches before trying to find the object;
otherwise a message is shown saying that not enough matches are present.
If enough matches are found, we extract the coordinates of the matched keypoints in both
images and pass them to findHomography to compute the perspective transformation. Once we
have the 3x3 transformation matrix, we use it to transform the four corners of the query
image into the corresponding points in the train image, and then draw them.
'''
if len(good) > MIN_MATCH_COUNT:
    # extract the coordinates of the matched keypoints
    src_pts = np.float32([kp1[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
    dst_pts = np.float32([kp2[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)

    # Third parameter: method used to compute the homography matrix. The following methods are possible:
    #   0         - a regular method using all the points
    #   CV_RANSAC - RANSAC-based robust method
    #   CV_LMEDS  - Least-Median robust method
    # Fourth parameter: reprojection-error threshold (typically 1 to 10) used to reject a point pair;
    # if the distance between a transformed source point and its corresponding point in the train
    # image exceeds this threshold, the pair is treated as an outlier.
    # In the return values, M is the transformation matrix.
    M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
    matchesMask = mask.ravel().tolist()

    # get the height and width of the query image
    h, w = img1.shape
    # transform the four corners of the query image with the obtained matrix to get the
    # corresponding coordinates in the train image
    pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)
    dst = cv2.perspectiveTransform(pts, M)
    # the query image is grayscale
    img2 = cv2.polylines(img2, [np.int32(dst)], True, 255, 3, cv2.LINE_AA)
else:
    print("Not enough matches are found - %d/%d" % (len(good), MIN_MATCH_COUNT))
    matchesMask = None

# Finally, draw the inliers (if the object was found) or the matching keypoints (if it failed).
draw_params = dict(matchColor=(0, 255, 0),  # draw matches in green color
                   singlePointColor=None,
                   matchesMask=matchesMask,  # draw only inliers
                   flags=2)
img3 = cv2.drawMatches(img1, kp1, img2, kp2, good, None, **draw_params)
plt.imshow(img3, 'gray'), plt.show()
# the located object is outlined in white in the scene image
1,681
852
#include "SiStripRawToDigiModule.h" #include "SiStripRawToDigiUnpacker.h" #include "CondFormats/SiStripObjects/interface/SiStripFedCabling.h" #include "DataFormats/Common/interface/DetSetVector.h" #include "DataFormats/Common/interface/Handle.h" #include "DataFormats/DetId/interface/DetIdCollection.h" #include "DataFormats/SiStripCommon/interface/SiStripConstants.h" #include "DataFormats/SiStripCommon/interface/SiStripEventSummary.h" #include "DataFormats/SiStripDigi/interface/SiStripDigi.h" #include "DataFormats/SiStripDigi/interface/SiStripRawDigi.h" #include "FWCore/MessageLogger/interface/MessageLogger.h" #include <cstdlib> namespace sistrip { RawToDigiModule::RawToDigiModule(const edm::ParameterSet& pset) : rawToDigi_(nullptr), cabling_(nullptr), extractCm_(false), doFullCorruptBufferChecks_(false), doAPVEmulatorCheck_(true), tTopoToken_(esConsumes()), fedCablingToken_(esConsumes()) { if (edm::isDebugEnabled()) { LogTrace("SiStripRawToDigi") << "[sistrip::RawToDigiModule::" << __func__ << "]" << " Constructing object..."; } token_ = consumes<FEDRawDataCollection>(pset.getParameter<edm::InputTag>("ProductLabel")); int16_t appended_bytes = pset.getParameter<int>("AppendedBytes"); int16_t trigger_fed_id = pset.getParameter<int>("TriggerFedId"); bool legacy_unpacker = pset.getParameter<bool>("LegacyUnpacker"); bool use_daq_register = pset.getParameter<bool>("UseDaqRegister"); bool using_fed_key = pset.getParameter<bool>("UseFedKey"); bool unpack_bad_channels = pset.getParameter<bool>("UnpackBadChannels"); bool mark_missing_feds = pset.getParameter<bool>("MarkModulesOnMissingFeds"); int16_t fed_buffer_dump_freq = pset.getUntrackedParameter<int>("FedBufferDumpFreq", 0); int16_t fed_event_dump_freq = pset.getUntrackedParameter<int>("FedEventDumpFreq", 0); bool quiet = pset.getUntrackedParameter<bool>("Quiet", true); extractCm_ = pset.getParameter<bool>("UnpackCommonModeValues"); doFullCorruptBufferChecks_ = pset.getParameter<bool>("DoAllCorruptBufferChecks"); doAPVEmulatorCheck_ = pset.getParameter<bool>("DoAPVEmulatorCheck"); uint32_t errorThreshold = pset.getParameter<unsigned int>("ErrorThreshold"); rawToDigi_ = new sistrip::RawToDigiUnpacker(appended_bytes, fed_buffer_dump_freq, fed_event_dump_freq, trigger_fed_id, using_fed_key, unpack_bad_channels, mark_missing_feds, errorThreshold); rawToDigi_->legacy(legacy_unpacker); rawToDigi_->quiet(quiet); rawToDigi_->useDaqRegister(use_daq_register); rawToDigi_->extractCm(extractCm_); rawToDigi_->doFullCorruptBufferChecks(doFullCorruptBufferChecks_); rawToDigi_->doAPVEmulatorCheck(doAPVEmulatorCheck_); produces<SiStripEventSummary>(); produces<edm::DetSetVector<SiStripRawDigi> >("ScopeMode"); produces<edm::DetSetVector<SiStripRawDigi> >("VirginRaw"); produces<edm::DetSetVector<SiStripRawDigi> >("ProcessedRaw"); produces<edm::DetSetVector<SiStripDigi> >("ZeroSuppressed"); produces<DetIdCollection>(); if (extractCm_) produces<edm::DetSetVector<SiStripRawDigi> >("CommonMode"); } RawToDigiModule::~RawToDigiModule() { if (rawToDigi_) { delete rawToDigi_; } if (cabling_) { cabling_ = nullptr; } if (edm::isDebugEnabled()) { LogTrace("SiStripRawToDigi") << "[sistrip::RawToDigiModule::" << __func__ << "]" << " Destructing object..."; } } /** Retrieves cabling map from EventSetup and FEDRawDataCollection from Event, creates a DetSetVector of SiStrip(Raw)Digis, uses the SiStripRawToDigiUnpacker class to fill the DetSetVector, and attaches the container to the Event. 
*/
  void RawToDigiModule::produce(edm::Event& event, const edm::EventSetup& setup) {
    updateCabling(setup);

    // Retrieve FED raw data (by label, which is "source" by default)
    edm::Handle<FEDRawDataCollection> buffers;
    event.getByToken(token_, buffers);

    // Populate SiStripEventSummary object with "trigger FED" info
    auto summary = std::make_unique<SiStripEventSummary>();
    rawToDigi_->triggerFed(*buffers, *summary, event.id().event());

    // Create containers for digis
    edm::DetSetVector<SiStripRawDigi>* sm = new edm::DetSetVector<SiStripRawDigi>();
    edm::DetSetVector<SiStripRawDigi>* vr = new edm::DetSetVector<SiStripRawDigi>();
    edm::DetSetVector<SiStripRawDigi>* pr = new edm::DetSetVector<SiStripRawDigi>();
    edm::DetSetVector<SiStripDigi>* zs = new edm::DetSetVector<SiStripDigi>();
    DetIdCollection* ids = new DetIdCollection();
    edm::DetSetVector<SiStripRawDigi>* cm = new edm::DetSetVector<SiStripRawDigi>();

    // Create digis
    if (rawToDigi_) {
      rawToDigi_->createDigis(*cabling_, *buffers, *summary, *sm, *vr, *pr, *zs, *ids, *cm);
    }

    // Create unique_ptr's of digi products
    std::unique_ptr<edm::DetSetVector<SiStripRawDigi> > sm_dsv(sm);
    std::unique_ptr<edm::DetSetVector<SiStripRawDigi> > vr_dsv(vr);
    std::unique_ptr<edm::DetSetVector<SiStripRawDigi> > pr_dsv(pr);
    std::unique_ptr<edm::DetSetVector<SiStripDigi> > zs_dsv(zs);
    std::unique_ptr<DetIdCollection> det_ids(ids);
    std::unique_ptr<edm::DetSetVector<SiStripRawDigi> > cm_dsv(cm);

    // Add to event
    event.put(std::move(summary));
    event.put(std::move(sm_dsv), "ScopeMode");
    event.put(std::move(vr_dsv), "VirginRaw");
    event.put(std::move(pr_dsv), "ProcessedRaw");
    event.put(std::move(zs_dsv), "ZeroSuppressed");
    event.put(std::move(det_ids));
    if (extractCm_)
      event.put(std::move(cm_dsv), "CommonMode");
  }

  void RawToDigiModule::updateCabling(const edm::EventSetup& setup) {
    if (fedCablingWatcher_.check(setup)) {
      // "First" update means no cabling has been cached yet
      const bool isFirst = cabling_ == nullptr;
      cabling_ = &setup.getData(fedCablingToken_);

      if (edm::isDebugEnabled()) {
        if (isFirst) {
          std::stringstream ss;
          ss << "[sistrip::RawToDigiModule::" << __func__ << "]"
             << " Updating cabling for first time..." << std::endl
             << " Terse print out of FED cabling:" << std::endl;
          cabling_->terse(ss);
          LogTrace("SiStripRawToDigi") << ss.str();
        }
      }

      if (edm::isDebugEnabled()) {
        std::stringstream sss;
        sss << "[sistrip::RawToDigiModule::" << __func__ << "]"
            << " Summary of FED cabling:" << std::endl;
        cabling_->summary(sss, &setup.getData(tTopoToken_));
        LogTrace("SiStripRawToDigi") << sss.str();
      }
    }
  }

  void RawToDigiModule::endStream() { rawToDigi_->printWarningSummary(); }

}  // namespace sistrip
3,152
17,703
// NOLINT(namespace-envoy) constexpr char TEST_SELFSIGNED_ECDSA_P384_CERT_256_HASH[] = "686326b5427f43d04394606c75223180779478004b2112820b5affb6d84763b9"; constexpr char TEST_SELFSIGNED_ECDSA_P384_CERT_1_HASH[] = "58b63003620b27457a8445520c73b3ee7530a6d4"; constexpr char TEST_SELFSIGNED_ECDSA_P384_CERT_SPKI[] = "sCNwIyDKE4zM8AEswNRUdSQ7xLCOINkhzAT/XiJ08Mk="; constexpr char TEST_SELFSIGNED_ECDSA_P384_CERT_SERIAL[] = "2b7bc53f979f6e3aec493fb41ceb846d2b66a6a7"; constexpr char TEST_SELFSIGNED_ECDSA_P384_CERT_NOT_BEFORE[] = "Aug 20 16:57:52 2020 GMT"; constexpr char TEST_SELFSIGNED_ECDSA_P384_CERT_NOT_AFTER[] = "Aug 20 16:57:52 2022 GMT";
338