max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
312
<reponame>yanaspaula/rdf4j /******************************************************************************* * Copyright (c) 2019 Eclipse RDF4J contributors. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/org/documents/edl-v10.php. *******************************************************************************/ package org.eclipse.rdf4j.federated.util; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.eclipse.rdf4j.federated.FedXConfig; import org.eclipse.rdf4j.federated.FederationContext; import org.eclipse.rdf4j.federated.repository.FedXRepositoryConnection; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Literal; import org.eclipse.rdf4j.model.Resource; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.ValueFactory; import org.eclipse.rdf4j.model.impl.SimpleValueFactory; import org.eclipse.rdf4j.query.Dataset; import org.eclipse.rdf4j.query.Operation; import org.eclipse.rdf4j.query.algebra.StatementPattern; import org.eclipse.rdf4j.query.algebra.StatementPattern.Scope; import org.eclipse.rdf4j.query.impl.SimpleDataset; import org.eclipse.rdf4j.repository.sail.SailQuery; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.Sets; /** * General utility functions * * @author <NAME> * @since 5.0 */ public class FedXUtil { private static final Logger log = LoggerFactory.getLogger(FedXUtil.class); private static final AtomicLong count = new AtomicLong(0L); /** * @param iri * @return the IRI for the full URI string */ public static IRI iri(String iri) { return valueFactory().createIRI(iri); } /** * * @param literal * @return the string literal */ public static Literal literal(String literal) { return valueFactory().createLiteral(literal); } /** * * @return a {@link SimpleValueFactory} 
instance */ public static ValueFactory valueFactory() { return SimpleValueFactory.getInstance(); } /** * Apply query bindings to transfer information from the query into the evaluation routine, e.g. the query execution * time. * * @param query */ public static void applyQueryBindings(SailQuery query) { query.setBinding(FedXRepositoryConnection.BINDING_ORIGINAL_MAX_EXECUTION_TIME, FedXUtil.valueFactory().createLiteral(query.getMaxExecutionTime())); } /** * Hexadecimal representation of an incremental integer. * * @return an incremental hex UUID */ public static String getIncrementalUUID() { long id = count.incrementAndGet(); return Long.toHexString(id); } /** * Set a maximum execution time corresponding to {@link FedXConfig#getEnforceMaxQueryTime()} to this operation. * * Note that this is an upper bound only as FedX applies other means for evaluation the maximum query execution * time. * * @param operation the {@link Operation} * @param federationContext the {@link FederationContext} */ public static void applyMaxQueryExecutionTime(Operation operation, FederationContext federationContext) { int maxExecutionTime = federationContext.getConfig().getEnforceMaxQueryTime(); if (maxExecutionTime <= 0) { return; } operation.setMaxExecutionTime(maxExecutionTime); } /** * Convert the given contexts to a {@link Dataset} representation. * * @param contexts * @return */ public static Dataset toDataset(Resource[] contexts) { SimpleDataset dataset = new SimpleDataset(); for (Resource context : contexts) { if (!(context instanceof IRI)) { log.warn("FedX does not support to use non-IRIs as context identifier. 
Ignoring {}", context); continue; } dataset.addDefaultGraph((IRI) context); } return dataset; } /** * Convert the given {@link Dataset} to an array of contexts * * @param ds * @return */ public static Resource[] toContexts(Dataset ds) { if (ds == null) { return new Resource[0]; } return ds.getDefaultGraphs().toArray(new Resource[0]); } /** * Retrieve the contexts from the {@link StatementPattern} and {@link Dataset}. * * @param stmt * @param dataset * @return */ public static Resource[] toContexts(StatementPattern stmt, Dataset dataset) { if (dataset == null && (stmt.getContextVar() == null || !stmt.getContextVar().hasValue())) { return new Resource[0]; } Set<Resource> contexts = Sets.newHashSet(); if (dataset != null) { contexts.addAll(dataset.getDefaultGraphs()); } if (stmt.getScope().equals(Scope.NAMED_CONTEXTS)) { if (stmt.getContextVar().hasValue()) { contexts.add((Resource) stmt.getContextVar().getValue()); } } return contexts.toArray(new Resource[contexts.size()]); } /** * Returns a {@link Dataset} representation of the given {@link StatementPattern} and {@link Dataset}. * <p> * If the {@link StatementPattern} does not have a context value, the {@link Dataset} is returned as-is, which may * also be <code>null</code>. * </p> * * <p> * Otherwise the newly constructed {@link Dataset} contains all information from the original one plus the context * from the statement. 
* </p> * * @param stmt * @param dataset * @return */ public static Dataset toDataset(StatementPattern stmt, Dataset dataset) { if (stmt.getContextVar() == null || !stmt.getContextVar().hasValue()) { return dataset; } SimpleDataset res = new SimpleDataset(); if (dataset != null) { dataset.getDefaultGraphs().forEach(iri -> res.addDefaultGraph(iri)); dataset.getNamedGraphs().forEach(iri -> res.addNamedGraph(iri)); dataset.getDefaultRemoveGraphs().forEach(iri -> res.addDefaultRemoveGraph(iri)); res.setDefaultInsertGraph(dataset.getDefaultInsertGraph()); } Value stmtContext = stmt.getContextVar().getValue(); if (stmtContext instanceof IRI) { res.addDefaultGraph((IRI) stmtContext); } else { log.warn("FedX named graph handling does not support non-IRIs: " + stmtContext); } return res; } }
2,075
60,067
<filename>caffe2/operators/space_batch_op.cc #include "caffe2/operators/space_batch_op.h" namespace caffe2 { REGISTER_CPU_OPERATOR(SpaceToBatch, SpaceToBatchOp<CPUContext>); OPERATOR_SCHEMA(SpaceToBatch).NumInputs(1).NumOutputs(1).SetDoc(R"DOC( Zero-pads and then rearranges (permutes) blocks of spatial data into batch. More specifically, this op outputs a copy of the input tensor where values from the height and width dimensions are moved to the batch dimension. After the zero-padding is according to the `pad` argument, both height and width of the input must be divisible by the `block_size`. Only "NCHW" order is currently supported. Github Links: - https://github.com/pytorch/pytorch/blob/master/caffe2/operators/space_batch_op.cc <details> <summary> <b>Example</b> </summary> **Code** ``` workspace.ResetWorkspace() op = core.CreateOperator( "SpaceToBatch", ["X"], ["Y"], pad=2, block_size=3 ) workspace.FeedBlob("X", np.random.rand(1,3,5,5).astype(np.float32)) print("X.shape:", workspace.FetchBlob("X").shape) workspace.RunOperatorOnce(op) print("Y.shape:", workspace.FetchBlob("Y").shape) ``` **Result** ``` X.shape: (1, 3, 5, 5) Y.shape: (9, 3, 3, 3) ``` </details> )DOC") .Arg("pad","(*int*): exclusive axis that divides the first and second dimension of matrix `A` (default=0)") .Arg("block_size","(*int*): height/width of spatial blocks to be moved (default=2)") // NOLINTNEXTLINE(modernize-raw-string-literal) .Arg("order","(*string*): order of dimensions of input and output blobs; only \"NCHW\" order is currently supported (default=\"NCHW\")") .Input(0,"X","(*Tensor`<float>`*): input tensor (NCHW order)") .Output(0,"Y","(*Tensor`<float>`*): output tensor (NCHW order)"); REGISTER_CPU_OPERATOR(BatchToSpace, BatchToSpaceOp<CPUContext>); OPERATOR_SCHEMA(BatchToSpace).NumInputs(1).NumOutputs(1).SetDoc(R"DOC( Rearranges (permutes) data from batch into blocks of spatial data, followed by cropping. This is the reverse transformation of `SpaceToBatch`. 
More specifically, this op outputs a copy of the input tensor where values from the batch dimension are moved in spatial blocks to the height and width dimensions, followed by cropping along the height and width dimensions. Only "NCHW" order is currently supported. Github Links: - https://github.com/pytorch/pytorch/blob/master/caffe2/operators/space_batch_op.cc <details> <summary> <b>Example</b> </summary> **Code** ``` workspace.ResetWorkspace() op = core.CreateOperator( "BatchToSpace", ["X"], ["Y"], pad=3 ) workspace.FeedBlob("X", np.random.rand(10,3,32,32).astype(np.float32)) print("X.shape:", workspace.FetchBlob("X").shape) workspace.RunOperatorOnce(op) print("Y.shape:", workspace.FetchBlob("Y").shape) ``` **Result** ``` X.shape: (10, 3, 32, 32) Y.shape: (2, 3, 58, 58) ``` </details> )DOC") .Arg("pad","(*int*): exclusive axis that divides the first and second dimension of matrix `A` (default=0)") .Arg("block_size","(*int*): height/width of spatial blocks to be moved (default=2)") // NOLINTNEXTLINE(modernize-raw-string-literal) .Arg("order","(*string*): order of dimensions of input and output blobs; only \"NCHW\" order is currently supported (default=\"NCHW\")") .Input(0,"X","(*Tensor`<float>`*): input tensor (NCHW order)") .Output(0,"Y","(*Tensor`<float>`*): output tensor (NCHW order)"); class GetSpaceToBatchGradient : public GradientMakerBase { using GradientMakerBase::GradientMakerBase; vector<OperatorDef> GetGradientDefs() override { return SingleGradientDef( "BatchToSpace", "", vector<string>{GO(0)}, vector<string>{GI(0)}); } }; class GetBatchToSpaceGradient : public GradientMakerBase { using GradientMakerBase::GradientMakerBase; vector<OperatorDef> GetGradientDefs() override { return SingleGradientDef( "SpaceToBatch", "", vector<string>{GO(0)}, vector<string>{GI(0)}); } }; REGISTER_GRADIENT(SpaceToBatch, GetSpaceToBatchGradient); REGISTER_GRADIENT(BatchToSpace, GetBatchToSpaceGradient); }
1,485
1,830
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under * one or more contributor license agreements. See the NOTICE file distributed * with this work for additional information regarding copyright ownership. * Licensed under the Zeebe Community License 1.1. You may not use this file * except in compliance with the Zeebe Community License 1.1. */ package io.camunda.zeebe.engine.processing.message; import io.camunda.zeebe.engine.processing.message.command.SubscriptionCommandSender; import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecord; import io.camunda.zeebe.engine.processing.streamprocessor.TypedRecordProcessor; import io.camunda.zeebe.engine.processing.streamprocessor.sideeffect.SideEffectProducer; import io.camunda.zeebe.engine.processing.streamprocessor.writers.StateWriter; import io.camunda.zeebe.engine.processing.streamprocessor.writers.TypedRejectionWriter; import io.camunda.zeebe.engine.processing.streamprocessor.writers.TypedResponseWriter; import io.camunda.zeebe.engine.processing.streamprocessor.writers.TypedStreamWriter; import io.camunda.zeebe.engine.processing.streamprocessor.writers.Writers; import io.camunda.zeebe.engine.state.immutable.MessageSubscriptionState; import io.camunda.zeebe.protocol.impl.record.value.message.MessageSubscriptionRecord; import io.camunda.zeebe.protocol.record.RejectionType; import io.camunda.zeebe.protocol.record.intent.MessageSubscriptionIntent; import io.camunda.zeebe.util.buffer.BufferUtil; import java.util.function.Consumer; public final class MessageSubscriptionDeleteProcessor implements TypedRecordProcessor<MessageSubscriptionRecord> { private static final String NO_SUBSCRIPTION_FOUND_MESSAGE = "Expected to close message subscription for element with key '%d' and message name '%s', " + "but no such message subscription exists"; private final MessageSubscriptionState subscriptionState; private final SubscriptionCommandSender commandSender; private final StateWriter 
stateWriter; private final TypedRejectionWriter rejectionWriter; private MessageSubscriptionRecord subscriptionRecord; public MessageSubscriptionDeleteProcessor( final MessageSubscriptionState subscriptionState, final SubscriptionCommandSender commandSender, final Writers writers) { this.subscriptionState = subscriptionState; this.commandSender = commandSender; stateWriter = writers.state(); rejectionWriter = writers.rejection(); } @Override public void processRecord( final TypedRecord<MessageSubscriptionRecord> record, final TypedResponseWriter responseWriter, final TypedStreamWriter streamWriter, final Consumer<SideEffectProducer> sideEffect) { subscriptionRecord = record.getValue(); final var messageSubscription = subscriptionState.get( subscriptionRecord.getElementInstanceKey(), subscriptionRecord.getMessageNameBuffer()); if (messageSubscription != null) { stateWriter.appendFollowUpEvent( messageSubscription.getKey(), MessageSubscriptionIntent.DELETED, messageSubscription.getRecord()); } else { rejectCommand(record); } sideEffect.accept(this::sendAcknowledgeCommand); } private void rejectCommand(final TypedRecord<MessageSubscriptionRecord> record) { final var subscription = record.getValue(); final var reason = String.format( NO_SUBSCRIPTION_FOUND_MESSAGE, subscription.getElementInstanceKey(), BufferUtil.bufferAsString(subscription.getMessageNameBuffer())); rejectionWriter.appendRejection(record, RejectionType.NOT_FOUND, reason); } private boolean sendAcknowledgeCommand() { return commandSender.closeProcessMessageSubscription( subscriptionRecord.getProcessInstanceKey(), subscriptionRecord.getElementInstanceKey(), subscriptionRecord.getMessageNameBuffer()); } }
1,206
4,224
<reponame>lgarciaos/Firmware<gh_stars>1000+ #! /usr/bin/env python3 import serial, time import subprocess from subprocess import call, Popen from argparse import ArgumentParser import re import sys import datetime import serial.tools.list_ports as list_ports import tempfile COLOR_RED = "\x1b[31m" COLOR_GREEN = "\x1b[32m" COLOR_YELLOW = "\x1b[33m" COLOR_WHITE = "\x1b[37m" COLOR_RESET = "\x1b[0m" def print_line(line): if "WARNING" in line: line = line.replace("WARNING", f"{COLOR_YELLOW}WARNING{COLOR_RESET}", 1) elif "WARN" in line: line = line.replace("WARN", f"{COLOR_YELLOW}WARN{COLOR_RESET}", 1) elif "ERROR" in line: line = line.replace("ERROR", f"{COLOR_RED}ERROR{COLOR_RESET}", 1) elif "INFO" in line: line = line.replace("INFO", f"{COLOR_WHITE}INFO{COLOR_RESET}", 1) if "PASSED" in line: line = line.replace("PASSED", f"{COLOR_GREEN}PASSED{COLOR_RESET}", 1) if "FAILED" in line: line = line.replace("FAILED", f"{COLOR_RED}FAILED{COLOR_RESET}", 1) if "\n" in line: current_time = datetime.datetime.now() print('[{0}] {1}'.format(current_time.isoformat(timespec='milliseconds'), line), end='') else: print('{0}'.format(line), end='') def reboot(port_url, baudrate): ser = serial.serial_for_url(url=port_url, baudrate=baudrate, bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, timeout=3, xonxoff=False, rtscts=False, dsrdtr=False, inter_byte_timeout=1) time_start = time.monotonic() ser.write("\n\n\n".encode("ascii")) ser.write("reboot\n".encode("ascii")) time_reboot_cmd = time_start timeout_reboot_cmd = 90 timeout = 300 # 5 minutes return_code = 0 while True: serial_line = ser.readline().decode("ascii", errors='ignore') if len(serial_line) > 0: print_line(serial_line) if "ERROR" in serial_line: return_code = -1 if "NuttShell (NSH)" in serial_line: sys.exit(return_code) else: if time.monotonic() > time_start + timeout: print("Error, timeout") sys.exit(-1) if time.monotonic() > time_reboot_cmd + timeout_reboot_cmd: time_reboot_cmd = 
time.monotonic() print("sending reboot cmd again") ser.write("reboot\n".encode("ascii")) def main(): default_device = None device_required = True # select USB UART as default if there's only 1 ports = list(serial.tools.list_ports.grep('USB UART')) if (len(ports) == 1): default_device = ports[0].device device_required = False print("Default USB UART port: {0}".format(ports[0].name)) print(" device: {0}".format(ports[0].device)) print(" description: \"{0}\" ".format(ports[0].description)) print(" hwid: {0}".format(ports[0].hwid)) #print(" vid: {0}, pid: {1}".format(ports[0].vid, ports[0].pid)) #print(" serial_number: {0}".format(ports[0].serial_number)) #print(" location: {0}".format(ports[0].location)) print(" manufacturer: {0}".format(ports[0].manufacturer)) #print(" product: {0}".format(ports[0].product)) #print(" interface: {0}".format(ports[0].interface)) parser = ArgumentParser(description=__doc__) parser.add_argument('--device', "-d", nargs='?', default=default_device, help='', required=device_required) parser.add_argument("--baudrate", "-b", dest="baudrate", type=int, help="Mavlink port baud rate (default=57600)", default=57600) args = parser.parse_args() reboot(args.device, args.baudrate) if __name__ == "__main__": main()
1,644
1,144
package de.metas.order.payment_reservation; import de.metas.common.util.time.SystemTime; import org.adempiere.exceptions.AdempiereException; import org.adempiere.service.ClientId; import org.compiere.model.I_C_Order; import org.springframework.stereotype.Service; import de.metas.bpartner.BPartnerContactId; import de.metas.bpartner.service.IBPartnerDAO; import de.metas.money.CurrencyId; import de.metas.money.Money; import de.metas.order.IOrderBL; import de.metas.order.OrderId; import de.metas.organization.OrgId; import de.metas.payment.PaymentRule; import de.metas.payment.reservation.PaymentReservation; import de.metas.payment.reservation.PaymentReservationCreateRequest; import de.metas.payment.reservation.PaymentReservationService; import de.metas.payment.reservation.PaymentReservationStatus; import de.metas.util.Check; import de.metas.util.Services; import lombok.NonNull; /* * #%L * de.metas.business * %% * Copyright (C) 2019 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. 
* #L% */ @Service public class OrderPaymentReservationService { private final IOrderBL ordersService = Services.get(IOrderBL.class); private final IBPartnerDAO bpartnersRepo = Services.get(IBPartnerDAO.class); private final PaymentReservationService paymentReservationService; public OrderPaymentReservationService( @NonNull final PaymentReservationService paymentReservationService) { this.paymentReservationService = paymentReservationService; } public OrderPaymentReservationCreateResult createPaymentReservationIfNeeded(@NonNull final I_C_Order salesOrder) { Check.assume(salesOrder.isSOTrx(), "expected sales order but got purchase order: {}", salesOrder); final PaymentRule paymentRule = PaymentRule.ofCode(salesOrder.getPaymentRule()); if (!paymentReservationService.isPaymentReservationRequired(paymentRule)) { return OrderPaymentReservationCreateResult.NOT_NEEDED; } // // Get existing reservation / create a new one final OrderId salesOrderId = OrderId.ofRepoId(salesOrder.getC_Order_ID()); final PaymentReservation paymentReservation = paymentReservationService .getBySalesOrderIdNotVoided(salesOrderId) .orElseGet(() -> createPaymentReservation(salesOrder)); // // Result based on payment reservation's status final PaymentReservationStatus paymentReservationStatus = paymentReservation.getStatus(); if (PaymentReservationStatus.COMPLETED.equals(paymentReservationStatus)) { return OrderPaymentReservationCreateResult.ALREADY_COMPLETED; } else if (paymentReservationStatus.isWaitingToComplete()) { return OrderPaymentReservationCreateResult.WAITING_TO_COMPLETE; } else { throw new AdempiereException("Invalid payment reservation status: " + paymentReservationStatus) .setParameter("paymentReservation", paymentReservation) .setParameter("salesOrderId", salesOrderId); } } private PaymentReservation createPaymentReservation(final I_C_Order salesOrder) { final BPartnerContactId payerContactId = ordersService.getBillToContactId(salesOrder); return 
paymentReservationService.createReservation(PaymentReservationCreateRequest.builder() .clientId(ClientId.ofRepoId(salesOrder.getAD_Client_ID())) .orgId(OrgId.ofRepoId(salesOrder.getAD_Org_ID())) .amount(extractGrandTotal(salesOrder)) .payerContactId(payerContactId) .payerEmail(bpartnersRepo.getContactEMail(payerContactId)) .salesOrderId(OrderId.ofRepoId(salesOrder.getC_Order_ID())) .dateTrx(SystemTime.asLocalDate()) .paymentRule(PaymentRule.ofCode(salesOrder.getPaymentRule())) .build()); } private static Money extractGrandTotal(final I_C_Order salesOrder) { return Money.of(salesOrder.getGrandTotal(), CurrencyId.ofRepoId(salesOrder.getC_Currency_ID())); } }
1,418
849
package milkman.ui.plugin; import javafx.scene.control.Tab; import milkman.domain.RequestContainer; import milkman.domain.ResponseContainer; import java.util.Optional; /** * editor UI for a specific request aspect */ public interface RequestAspectEditor { /** * returns the tab with UI elements to edit the aspect data */ Tab getRoot(RequestContainer request); default Tab getRoot(RequestContainer request, Optional<ResponseContainer> existingResponse) { return getRoot(request); } /* * checks, if a requestContainer can be handled by this * aspect-editor, e.g. if a specific RequestAspect exists. */ boolean canHandleAspect(RequestContainer request); }
214
1,359
package com.kalessil.phpStorm.phpInspectionsEA.classes; import com.kalessil.phpStorm.phpInspectionsEA.PhpCodeInsightFixtureTestCase; import com.kalessil.phpStorm.phpInspectionsEA.inspectors.semanticalAnalysis.classes.CallableMethodValidityInspector; final public class CallableMethodValidityInspectorTest extends PhpCodeInsightFixtureTestCase { public void testIfFindsPatterns() { myFixture.enableInspections(new CallableMethodValidityInspector()); myFixture.configureByFile("testData/fixtures/classes/callable-methods-validity.php"); myFixture.testHighlighting(true, false, true); } }
218
678
<gh_stars>100-1000 /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/TextInput.framework/TextInput */ #import <TextInput/TIWordSearchOperation.h> @interface TIWordSearchOperationCancelLastAcceptedCandidate : TIWordSearchOperation { } - (id)initWithTIWordSearchObj:(id)tiwordSearchObj; // 0x30f69 - (void)dealloc; // 0x30fbd - (void)cancel; // 0x30efd - (void)perform; // 0x30f95 @end
163
2,023
# HtmlMail python class # Compose HTML mails from URLs or local files with all images included # # Author: <NAME> <<EMAIL>> import sys, os, urllib2, urlparse from email.MIMEText import MIMEText from email.MIMEImage import MIMEImage from email.MIMEMultipart import MIMEMultipart import email, re class HtmlMail: def __init__(self, location, encoding="iso-8859-1"): self.location=location if location.find("http://")==0: self.is_http=True else: self.is_http=False self.encoding=encoding self.p1=re.compile("(<img.*?src=\")(.*?)(\".*?>)", re.IGNORECASE|re.DOTALL) self.p2=re.compile("(<.*?background=\")(.*?)(\".*?>)", re.IGNORECASE|re.DOTALL) self.p3=re.compile("(<input.*?src=\")(.*?)(\".*?>)", re.IGNORECASE|re.DOTALL) self.img_c=0 def set_log(self,log): self.log=log def _handle_image(self, matchobj): img=matchobj.group(2) if not self.images.has_key(img): self.img_c+=1 self.images[img]="dazoot-img%d" % self.img_c return "%scid:%s%s" % (matchobj.group(1), self.images[img], matchobj.group(3)) def _parse_images(self): self.images={} self.content=self.p1.sub(self._handle_image, self.content) self.content=self.p2.sub(self._handle_image, self.content) self.content=self.p3.sub(self._handle_image, self.content) return self.images def _read_image(self, imglocation): if self.is_http: img_url=urlparse.urljoin(self.location, imglocation) content=urllib2.urlopen(img_url).read() return content else: return file(imglocation, "rb").read() def get_msg(self): if self.is_http: content=urllib2.urlopen(self.location).read() else: content=file(self.location, "r").read() self.content=content msg=MIMEMultipart("related") images=self._parse_images() tmsg=MIMEText(self.content, "html", self.encoding) msg.attach(tmsg) for img in images.keys(): img_content=self._read_image(img) img_msg=MIMEImage(img_content) img_type, img_ext=img_msg["Content-Type"].split("/") del img_msg["MIME-Version"] del img_msg["Content-Type"] del img_msg["Content-Transfer-Encoding"] img_msg.add_header("Content-Type", "%s/%s; 
name=\"%s.%s\"" % (img_type, img_ext, images[img], img_ext)) img_msg.add_header("Content-Transfer-Encoding", "base64") img_msg.add_header("Content-ID", "<%s>" % images[img]) img_msg.add_header("Content-Disposition", "inline; filename=\"%s.%s\"" % (images[img], img_ext)) msg.attach(img_msg) return msg if __name__=="__main__": # test the class here import smtplib hm=HtmlMail("http://www.egirl.ro/newsletter/december2005_2/") msg=hm.get_msg() msg["Subject"]="Egirl Newsletter" msg["From"]="<NAME> <<EMAIL>>" msg["To"]="<EMAIL>" s=smtplib.SMTP("localhost") s.sendmail("<EMAIL>", msg["To"], msg.as_string()) s.quit()
1,200
8,092
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ This is a basic example DAG for using `SalesforceToS3Operator` to retrieve Salesforce account data and upload it to an Amazon S3 bucket. """ from datetime import datetime from os import getenv from airflow import DAG from airflow.providers.amazon.aws.transfers.salesforce_to_s3 import SalesforceToS3Operator S3_BUCKET_NAME = getenv("S3_BUCKET_NAME", "s3_bucket_name") S3_KEY = getenv("S3_KEY", "s3_filename") with DAG( dag_id="example_salesforce_to_s3", schedule_interval=None, start_date=datetime(2021, 7, 8), catchup=False, tags=["example"], ) as dag: # [START howto_transfer_salesforce_to_s3] upload_salesforce_data_to_s3 = SalesforceToS3Operator( task_id="upload_salesforce_to_s3", salesforce_query="SELECT AccountNumber, Name FROM Account", s3_bucket_name=S3_BUCKET_NAME, s3_key=S3_KEY, salesforce_conn_id="salesforce", replace=True, ) # [END howto_transfer_salesforce_to_s3]
605
403
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.common.keyprovider; import java.io.InputStream; import java.io.InputStreamReader; import java.security.KeyPair; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import org.apache.sshd.common.util.IoUtils; import org.apache.sshd.common.util.SecurityUtils; import org.bouncycastle.openssl.PEMDecryptorProvider; import org.bouncycastle.openssl.PEMEncryptedKeyPair; import org.bouncycastle.openssl.PEMKeyPair; import org.bouncycastle.openssl.PEMParser; import org.bouncycastle.openssl.PasswordFinder; import org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter; import org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * <p>This host key provider loads private keys from the specified resources.</p> * * <p>Note that this class has a direct dependency on BouncyCastle and won't work * unless it has been correctly registered as a security provider.</p> * * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a> */ public class ResourceKeyPairProvider extends AbstractKeyPairProvider { // --- Shared --- /** * Logger */ 
private final Logger log = LoggerFactory.getLogger(getClass()); // --- Properties --- /** * Class loader */ private final ClassLoader cloader; /** * Key resources */ private String[] resources; /** * Password finder */ private PasswordFinder passwordFinder; // --- /** * No-arg constructor. */ public ResourceKeyPairProvider() { this.cloader = this.getClass().getClassLoader(); } // end of <init> /** * Bulk constructor 1. */ public ResourceKeyPairProvider(String[] resources) { this.cloader = this.getClass().getClassLoader(); this.resources = resources; } // end of <init> /** * Bulk constructor 2. */ public ResourceKeyPairProvider(String[] resources, PasswordFinder passwordFinder) { this.cloader = this.getClass().getClassLoader(); this.resources = resources; this.passwordFinder = passwordFinder; } // end of <init> /** * Bulk constructor 3. */ public ResourceKeyPairProvider(String[] resources, PasswordFinder passwordFinder, ClassLoader cloader) { this.cloader = cloader; this.resources = resources; this.passwordFinder = passwordFinder; } // end of <init> // --- Properties accessors --- /** * {@inheritDoc} */ public String[] getResources() { return this.resources; } // end of getResources /** * {@inheritDoc} */ public void setResources(String[] resources) { this.resources = resources; } // end of setResources /** * {@inheritDoc} */ public PasswordFinder getPasswordFinder() { return this.passwordFinder; } // end of getPasswordFinder /** * {@inheritDoc} */ public void setPasswordFinder(PasswordFinder passwordFinder) { this.passwordFinder = passwordFinder; } // end of setPasswordFinder /** * {@inheritDoc} */ public Iterable<KeyPair> loadKeys() { if (!SecurityUtils.isBouncyCastleRegistered()) { throw new IllegalStateException("BouncyCastle must be registered as a JCE provider"); } // end of if return new Iterable<KeyPair>() { public Iterator<KeyPair> iterator() { return new Iterator<KeyPair>() { private final Iterator<String> iterator = Arrays.asList(resources).iterator(); 
private KeyPair nextKeyPair; private boolean nextKeyPairSet = false; public boolean hasNext() { return nextKeyPairSet || setNextObject(); } public KeyPair next() { if (!nextKeyPairSet) { if (!setNextObject()) { throw new NoSuchElementException(); } } nextKeyPairSet = false; return nextKeyPair; } public void remove() { throw new UnsupportedOperationException(); } private boolean setNextObject() { while (iterator.hasNext()) { String file = iterator.next(); nextKeyPair = doLoadKey(file); if (nextKeyPair != null) { nextKeyPairSet = true; return true; } } return false; } }; } }; } protected KeyPair doLoadKey(String resource) { PEMParser r = null; InputStreamReader isr = null; InputStream is = null; try { is = this.cloader.getResourceAsStream(resource); isr = new InputStreamReader(is); r = new PEMParser(isr); Object o = r.readObject(); JcaPEMKeyConverter pemConverter = new JcaPEMKeyConverter(); pemConverter.setProvider("BC"); if (passwordFinder != null && o instanceof PEMEncryptedKeyPair) { JcePEMDecryptorProviderBuilder decryptorBuilder = new JcePEMDecryptorProviderBuilder(); PEMDecryptorProvider pemDecryptor = decryptorBuilder.build(passwordFinder.getPassword()); o = pemConverter.getKeyPair(((PEMEncryptedKeyPair) o).decryptKeyPair(pemDecryptor)); } if (o instanceof PEMKeyPair) { o = pemConverter.getKeyPair((PEMKeyPair)o); return (KeyPair) o; } else if (o instanceof KeyPair) { return (KeyPair) o; } // end of if } catch (Exception e) { log.warn("Unable to read key " + resource, e); } finally { IoUtils.closeQuietly(r, is, isr); } // end of finally return null; } // end of doLoadKey } // end of class ResourceKeyPairProvider
3,268
359
{
  "name": "docserver",
  "version": "1.0.0",
  "description": "socket server for realtime doc sharing system",
  "main": "src/app.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "start": "node src/app.js"
  },
  "author": "<NAME>",
  "license": "ISC",
  "dependencies": {
    "express": "^4.16.4",
    "socket.io": "^2.2.0"
  },
  "devDependencies": {
    "@types/socket.io": "^2.1.2"
  }
}
173
2,035
<reponame>irvingzhang0512/tinyflow // Copyright (c) 2016 by Contributors // implementation of common tensor operators #include <tinyflow/base.h> #include <dmlc/parameter.h> #include <nnvm/op_attr_types.h> #include <cmath> #include <utility> #include "./op_util.h" namespace tinyflow { // shape given the ZeroParam using namespace nnvm; // shape parameter for zeros, ones struct ZeroParam : public dmlc::Parameter<ZeroParam> { TShape shape; int dtype; DMLC_DECLARE_PARAMETER(ZeroParam) { DMLC_DECLARE_FIELD(shape).set_default(TShape()); DMLC_DECLARE_FIELD(dtype).set_default(kFloat32); } }; DMLC_REGISTER_PARAMETER(ZeroParam); inline bool ZeroShape(const NodeAttrs& attrs, std::vector<TShape> *ishape, std::vector<TShape> *oshape) { const TShape& ts = dmlc::get<ZeroParam>(attrs.parsed).shape; if (ts.ndim() != 0) { SHAPE_ASSIGN(oshape->at(0), ts); return true; } else { return false; } } inline bool ZeroType(const NodeAttrs& attrs, std::vector<int> *iattr, std::vector<int> *oattr) { int dtype = dmlc::get<ZeroParam>(attrs.parsed).dtype; DTYPE_ASSIGN(oattr->at(0), dtype); return true; } NNVM_REGISTER_OP_GROUP(ElementwiseOpAttr) .set_attr<bool>("IsElementWise", true) .set_attr<FInferShape>("FInferShape", SameShape); NNVM_REGISTER_OP(zeros) .describe("zeros") .set_num_inputs(0) .set_attr_parser(ParamParser<ZeroParam>) .set_attr<FInferShape>("FInferShape", ZeroShape) .set_attr<FInferType>("FInferType", ZeroType); NNVM_REGISTER_OP(zeros_like) .describe("zeros_like") .set_num_inputs(1) .set_attr<FInferShape>("FInferShape", SameShape); NNVM_REGISTER_OP(ones) .describe("ones") .set_num_inputs(0) .set_attr_parser(ParamParser<ZeroParam>) .set_attr<FInferShape>("FInferShape", ZeroShape) .set_attr<FInferType>("FInferType", ZeroType); NNVM_REGISTER_OP(ones_like) .describe("ones_like") .set_num_inputs(1) .set_attr<FInferShape>("FInferShape", SameShape); NNVM_REGISTER_OP(normal) .describe("normal distribution") .set_num_inputs(0) .set_attr_parser(ParamParser<ZeroParam>) 
.set_attr<FInferShape>("FInferShape", ZeroShape) .set_attr<FInferType>("FInferType", ZeroType); NNVM_REGISTER_OP(equal) .describe("Equal comparitor") .set_num_inputs(2) .set_attr<FInferShape>("FInferShape", SameShape); NNVM_REGISTER_OP(__ewise_sum__) .describe("ewise sum") .set_num_inputs(nnvm::kVarg) .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FInferShape>("FInferShape", SameShape) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { return std::vector<NodeEntry>(n->num_inputs(), ograds[0]); }); NNVM_REGISTER_OP(__add_symbol__) .describe("add two data together") .set_num_inputs(2) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ograds[0], ograds[0]}; }); NNVM_REGISTER_OP(__add_scalar__) .describe("add symbol with scalar") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ograds[0]}; }); NNVM_REGISTER_OP(__sub_symbol__) .describe("do subtract") .set_num_inputs(2) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ MakeNode("__mul_scalar__", n->attrs.name + "_grad_0", {ograds[0]}, {{"scalar", "1"}}), MakeNode("__mul_scalar__", n->attrs.name + "_grad_1", {ograds[0]}, {{"scalar", "-1"}}), }; }); NNVM_REGISTER_OP(__sub_scalar__) .describe("subtract symbol with scalar") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return 
std::vector<NodeEntry>{ograds[0]}; }); NNVM_REGISTER_OP(__rsub_scalar__) .describe("subtract scalar with symbol") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ MakeNode("__mul_scalar__", n->attrs.name + "_grad_1", {ograds[0]}, {{"scalar", "-1"}}), }; }); NNVM_REGISTER_OP(mul) .add_alias("__mul_symbol__") .describe("add two data together") .set_num_inputs(2) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ MakeNode("mul", n->attrs.name + "_grad_0", {ograds[0], n->inputs[1]}), MakeNode("mul", n->attrs.name + "_grad_1", {ograds[0], n->inputs[0]}) }; }); NNVM_REGISTER_OP(__mul_scalar__) .describe("Multiply symbol with scalar") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ MakeNode("__mul_scalar__", n->attrs.name + "_grad_0", {ograds[0]}, {{"scalar", n->attrs.dict["scalar"]}}), }; }); NNVM_REGISTER_OP(__div_symbol__) .add_alias("div") .describe("do division") .set_num_inputs(2) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ NodeEntry n1 = MakeNode("mul", n->attrs.name + "_grad_sub_0", {ograds[0], n->inputs[0]}); NodeEntry n2 = MakeNode("__mul_scalar__", n->attrs.name + "_grad_sub_1", {n1}, {{"scalar", "-1"}}); NodeEntry n3 = MakeNode("mul", n->attrs.name + "_grad_sub_2", {n->inputs[1], n->inputs[1]}); return std::vector<NodeEntry>{ MakeNode("__div_symbol__", n->attrs.name + "_grad_0", {ograds[0], n->inputs[1]}), MakeNode("__div_symbol__", n->attrs.name + 
"_grad_1", {n2, n3}) }; }); NNVM_REGISTER_OP(__div_scalar__) .describe("division symbol with scalar") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds){ return std::vector<NodeEntry>{ MakeNode("__div_scalar__", n->attrs.name + "_grad_0", {ograds[0]}, {{"scalar", n->attrs.dict["scalar"]}}), }; }); NNVM_REGISTER_OP(exp) .describe("take elemtnwise exponation") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { return std::vector<NodeEntry>{ MakeNode("__mul_symbol__", n->attrs.name + "_grad_0", {ograds[0], NodeEntry{n, 0, 0}}) }; }); NNVM_REGISTER_OP(log) .describe("take elemtnwise logarithm") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { return std::vector<NodeEntry>{ MakeNode("__div_symbol__", n->attrs.name + "_grad_0", {ograds[0], n->inputs[0]}) }; }); NNVM_REGISTER_OP(sqrt) .describe("return square root of input") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( // 1 / (2 * sqrt(x)) == 1 / (2 * y) "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { NodeEntry n1 = MakeNode("__mul_scalar__", n->attrs.name + "_grad_sub_1", {NodeEntry{n, 0, 0}}, {{"scalar", "2"}}); return std::vector<NodeEntry>{ MakeNode("__div_symbol__", n->attrs.name + "_grad_0", {ograds[0], n1}) }; }); NNVM_REGISTER_OP(__pow_symbol__) .add_alias("pow") .describe("take elmtnwise power between two tensor") .set_num_inputs(2) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( 
"FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { // lhs: b*pow(a, b-1), rhs: pow(a, b)*ln(a) NodeEntry n0 = MakeNode("__add_scalar__", n->attrs.name + "_grad_sub_0", {n->inputs[1]}, {{"scalar", "-1"}}); NodeEntry n1 = MakeNode("pow", n->attrs.name + "_grad_sub_1", {n->inputs[0], n0}); NodeEntry d_lhs = MakeNode("mul", n->attrs.name + "_grad_sub_2", {n1, n->inputs[1]}); NodeEntry n2 = MakeNode("log", n->attrs.name + "_grad_sub_3", {n->inputs[0]}); NodeEntry d_rhs = MakeNode("mul", n->attrs.name + "_grad_sub_4", {NodeEntry{n, 0, 0}, n2}); return std::vector<NodeEntry>{ MakeNode("__mul_symbol__", n->attrs.name + "_grad_0", {ograds[0], d_lhs}), MakeNode("__mul_symbol__", n->attrs.name + "_grad_1", {ograds[0], d_rhs}) }; }); NNVM_REGISTER_OP(__rpow_scalar__) .describe("take elmtnwise power between a number and a tensor") .set_num_inputs(1) .include("ElementwiseOpAttr") .set_attr<FInplaceOption>("FInplaceOption", InplaceIn0Out0) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { // pow(m, x) * ln(m) double num = std::stod(n->attrs.dict["scalar"]); NodeEntry n0 = MakeNode("__mul_scalar__", n->attrs.name + "_grad_sub_4", {NodeEntry{n, 0, 0}}, {{"scalar", std::to_string(std::log(num))}}); return std::vector<NodeEntry>{ MakeNode("__mul_symbol__", n->attrs.name + "_grad_0", {ograds[0], n0}) }; }); NNVM_REGISTER_OP(matmul) .describe("Matrix multiplication") .set_num_inputs(2) .set_attr<FInferShape>( "FInferShape", [](const NodeAttrs& attrs, std::vector<TShape> *ishape, std::vector<TShape> *oshape) { if (ishape->at(0).ndim() == 0) return false; if (ishape->at(1).ndim() == 0) return false; CHECK_EQ(ishape->at(0).ndim(), 2); CHECK_EQ(ishape->at(1).ndim(), 2); CHECK_EQ(ishape->at(0)[1], ishape->at(1)[0]); TShape target{ishape->at(0)[0], ishape->at(1)[1]}; SHAPE_ASSIGN(oshape->at(0), target); return true; }) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { return 
MakeBackwardGrads("_matmul_backward", n, {ograds[0], n->inputs[0], n->inputs[1]}); }); // simply register a bulk op for backward NNVM_REGISTER_OP(_matmul_backward) .set_num_inputs(3) .set_num_outputs(2) .set_attr<nnvm::TIsBackward>("TIsBackward", true); struct ReduceParam : public dmlc::Parameter<ReduceParam> { Tuple<int> reduction_indices; DMLC_DECLARE_PARAMETER(ReduceParam) { DMLC_DECLARE_FIELD(reduction_indices).set_default(Tuple<int>()); } }; DMLC_REGISTER_PARAMETER(ReduceParam); inline bool ReduceShape(const NodeAttrs& attrs, std::vector<TShape> *ishape, std::vector<TShape> *oshape) { const auto& axis = dmlc::get<ReduceParam>(attrs.parsed).reduction_indices; if (ishape->at(0).ndim() == 0) return false; if (axis.ndim() == 0) { SHAPE_ASSIGN(oshape->at(0), TShape{1}); } else { TShape tmp = ishape->at(0); for (uint32_t idx : axis) { tmp[idx] = 0; } std::vector<uint32_t> ret; for (uint32_t x : tmp) { if (x != 0) ret.push_back(x); } if (ret.size() == 0) ret.push_back(1); SHAPE_ASSIGN(oshape->at(0), TShape(ret.begin(), ret.end())); } return true; } NNVM_REGISTER_OP(reduce_sum) .describe("reduce sum") .set_attr_parser(ParamParser<ReduceParam>) .set_num_inputs(1) .set_attr<FInferShape>("FInferShape", ReduceShape) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { return MakeBackwardGrads("_reduce_sum_backward", n, {ograds[0]}, n->attrs.dict); }); NNVM_REGISTER_OP(reduce_mean) .describe("reduce mean") .set_attr_parser(ParamParser<ReduceParam>) .set_num_inputs(1) .set_attr<FInferShape>("FInferShape", ReduceShape) .set_attr<FGradient>( "FGradient", [](const NodePtr& n, const std::vector<NodeEntry>& ograds) { return MakeBackwardGrads("_reduce_mean_backward", n, {ograds[0]}, n->attrs.dict); }); NNVM_REGISTER_OP_GROUP(ReduceBackwardIndeAttr) .set_attr<nnvm::TIsBackward>("TIsBackward", true); NNVM_REGISTER_OP(_reduce_sum_backward) .set_num_inputs(1) .set_num_outputs(1) .include("ReduceBackwardIndeAttr"); 
NNVM_REGISTER_OP(_reduce_mean_backward) .set_num_inputs(1) .set_num_outputs(1) .include("ReduceBackwardIndeAttr"); NNVM_REGISTER_OP(_argmax) .set_attr_parser(ParamParser<ReduceParam>) .set_num_inputs(1) .set_attr<FInferShape>("FInferShape", ReduceShape); } // namespace tinyflow
6,879
1,163
// Copyright 2021 The IREE Authors // // Licensed under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception #ifndef IREE_RUNTIME_SESSION_H_ #define IREE_RUNTIME_SESSION_H_ #include <stdint.h> #include "iree/base/api.h" #include "iree/hal/api.h" #include "iree/vm/api.h" #ifdef __cplusplus extern "C" { #endif // __cplusplus typedef struct iree_runtime_instance_t iree_runtime_instance_t; // A session containing a set of loaded VM modules and their runtime state. // Each session has its own isolated module state and though multiple sessions // may share the same device they will all see their own individual timelines. // Think of a session like a process in an operating system: able to communicate // and share syscalls but with a strict separation. // // Only sessions that share an instance may directly share resources as // different instances may have different HAL devices and have incompatible // memory. Import and export APIs must be used to transfer the resources across // instances or incompatible devices within the same instance. // // As with all of iree/runtime/ this API is a higher-level wrapper for the // low-level IREE HAL and VM. Using this may pull in additional dependencies and // perform additional allocations compared to what you can get by directly going // to the lower levels. // // Thread-compatible; only a single thread may use the session at any time and // the caller must use external synchronization if they will be using it or any // resource derived from it concurrently. Any two sessions may be executed // concurrently without interference. 
typedef struct iree_runtime_session_t iree_runtime_session_t; //===----------------------------------------------------------------------===// // iree_runtime_session_options_t //===----------------------------------------------------------------------===// // Builtin modules that are provided by the runtime. enum iree_runtime_session_builtins_bits_t { // All built-in modules that are compiled into the runtime will be available. IREE_RUNTIME_SESSION_BUILTIN_ALL = UINT64_MAX, }; typedef uint64_t iree_runtime_session_builtins_t; // Options used to configure session creation. typedef struct iree_runtime_session_options_t { // Flags controlling the execution environment. iree_vm_context_flags_t context_flags; // A bitmask identifying which IREE builtin modules should be enabled. // Session creation will fail if a requested module is not built into the // runtime binary. iree_runtime_session_builtins_t builtin_modules; } iree_runtime_session_options_t; // Initializes |out_options| to its default values. IREE_API_EXPORT void iree_runtime_session_options_initialize( iree_runtime_session_options_t* out_options); //===----------------------------------------------------------------------===// // iree_runtime_session_t //===----------------------------------------------------------------------===// // Creates a new session forced to use the given |device|. // This bypasses any device enumeration performed by the loaded modules but // the loaded modules will still verify that the device matches their // requirements. // // A base set of modules may be added by the runtime during creation based on // |options| and users may load additional modules - such as the one containing // their user code - by using the iree_vm_context_t provided by // iree_runtime_session_context. // // |host_allocator| will be used to allocate the session and any associated // resources. |out_session| must be released by the caller. 
IREE_API_EXPORT iree_status_t iree_runtime_session_create_with_device( iree_runtime_instance_t* instance, const iree_runtime_session_options_t* options, iree_hal_device_t* device, iree_allocator_t host_allocator, iree_runtime_session_t** out_session); // Retains the given |session| for the caller. IREE_API_EXPORT void iree_runtime_session_retain( iree_runtime_session_t* session); // Releases the given |session| from the caller. IREE_API_EXPORT void iree_runtime_session_release( iree_runtime_session_t* session); // Returns the host allocator used to allocate the session and its resources. // Callers should use this to allocate resources so that any memory tracking // being performed correctly attributes the allocations to the session. IREE_API_EXPORT iree_allocator_t iree_runtime_session_host_allocator(const iree_runtime_session_t* session); // Returns the instance the session uses for shared resources. IREE_API_EXPORT iree_runtime_instance_t* iree_runtime_session_instance( const iree_runtime_session_t* session); // Returns the VM context used to load and link modules. // The context can be used to perform additional reflection over the loaded // modules or load additional modules (if supported). IREE_API_EXPORT iree_vm_context_t* iree_runtime_session_context( const iree_runtime_session_t* session); // Returns the HAL device being used for execution. // // NOTE: this device will not be available until initialized by a user module // and will return NULL if queried prior. IREE_API_EXPORT iree_hal_device_t* iree_runtime_session_device( const iree_runtime_session_t* session); // Returns the device allocator used to allocate compatible buffers. // Buffers from other allocators may not be compatible and require importing // prior to being usable by the session. // // NOTE: this device allocator will not be available until initialized by a // user module and will return NULL if queried prior. 
IREE_API_EXPORT iree_hal_allocator_t* iree_runtime_session_device_allocator(
    const iree_runtime_session_t* session);

// Appends the given |module| to the context.
// The module will be retained by the context.
//
// NOTE: only valid if the context is not yet frozen; see
// iree_vm_context_freeze for more information.
IREE_API_EXPORT iree_status_t iree_runtime_session_append_module(
    iree_runtime_session_t* session, iree_vm_module_t* module);

// Appends a bytecode module to the context loaded from the given memory blob.
// If a |flatbuffer_allocator| is provided then it will be used to free the
// |flatbuffer_data| when the module is destroyed and otherwise the ownership of
// the |flatbuffer_data| remains with the caller. The data must remain valid for
// the lifetime of the session.
//
// If the module exists as a file prefer instead to use
// iree_runtime_session_append_bytecode_module_from_file to use memory mapped
// I/O and reduce total memory consumption.
//
// NOTE: only valid if the context is not yet frozen; see
// iree_vm_context_freeze for more information.
IREE_API_EXPORT iree_status_t
iree_runtime_session_append_bytecode_module_from_memory(
    iree_runtime_session_t* session, iree_const_byte_span_t flatbuffer_data,
    iree_allocator_t flatbuffer_allocator);

// Appends a bytecode module to the context loaded from the given |file_path|.
//
// NOTE: only valid if the context is not yet frozen; see
// iree_vm_context_freeze for more information.
IREE_API_EXPORT iree_status_t
iree_runtime_session_append_bytecode_module_from_file(
    iree_runtime_session_t* session, const char* file_path);

// Sets |out_function| to an exported function with the fully-qualified name
// of |full_name| or returns IREE_STATUS_NOT_FOUND. The function reference is
// valid for the lifetime of |session|.
//
// The function name matches the original MLIR module and function symbols.
// Example:
//   module @foo {
//     func @bar()
//   }
// The full name of '@bar' is 'foo.bar'.
// By default modules have the name 'module'. IREE_API_EXPORT iree_status_t iree_runtime_session_lookup_function( const iree_runtime_session_t* session, iree_string_view_t full_name, iree_vm_function_t* out_function); // Synchronously issues a generic function call. // // |input_list| is used to pass values and objects into the target function and // must match the signature defined by the compiled function. List ownership // remains with the caller. // // |output_list| is populated after the function completes execution with the // output values and objects of the function. List ownership remains with the // caller. // // Functions with either no inputs or outputs may provide NULL for the // respective list. IREE_API_EXPORT iree_status_t iree_runtime_session_call( iree_runtime_session_t* session, const iree_vm_function_t* function, iree_vm_list_t* input_list, iree_vm_list_t* output_list); // Synchronously issues a generic function call by fully-qualified name. // This is equivalent to performing a iree_runtime_session_lookup_function // followed by a iree_runtime_session_call. When calling the same function // repeatedly callers should perform the lookup and cache the resulting function // handle to avoid repeated lookups. IREE_API_EXPORT iree_status_t iree_runtime_session_call_by_name( iree_runtime_session_t* session, iree_string_view_t full_name, iree_vm_list_t* input_list, iree_vm_list_t* output_list); // Synchronously issues a direct function call. // This bypasses signature verification and directly calls through the VM ABI. // Though still safe(ish) the errors reported on a signature mismatch will be // much less useful than a call performed via the more generic methods. Treat // this as a low-level technique only to be used when the calling host code and // callee modules are known to be compatible. // // See iree_vm_function_call_t for more information. 
IREE_API_EXPORT iree_status_t iree_runtime_session_call_direct( iree_runtime_session_t* session, const iree_vm_function_call_t* call); #ifdef __cplusplus } // extern "C" #endif // __cplusplus #endif // IREE_RUNTIME_SESSION_H_
2,781
811
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc

import peers_pb2 as peers__pb2


class PeersV1Stub(object):
    """NOTE: For use by gubernator peers only
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Each attribute is a callable bound to one unary-unary RPC, wired to
        # the protobuf request serializer and response deserializer.
        self.GetPeerRateLimits = channel.unary_unary(
                '/pb.gubernator.PeersV1/GetPeerRateLimits',
                request_serializer=peers__pb2.GetPeerRateLimitsReq.SerializeToString,
                response_deserializer=peers__pb2.GetPeerRateLimitsResp.FromString,
                )
        self.UpdatePeerGlobals = channel.unary_unary(
                '/pb.gubernator.PeersV1/UpdatePeerGlobals',
                request_serializer=peers__pb2.UpdatePeerGlobalsReq.SerializeToString,
                response_deserializer=peers__pb2.UpdatePeerGlobalsResp.FromString,
                )


class PeersV1Servicer(object):
    """NOTE: For use by gubernator peers only
    """

    # Base servicer: subclass and override these handlers; the generated
    # defaults reply UNIMPLEMENTED.

    def GetPeerRateLimits(self, request, context):
        """Used by peers to relay batches of requests to an authoritative peer
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdatePeerGlobals(self, request, context):
        """Used by peers send global rate limit updates to other peers
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_PeersV1Servicer_to_server(servicer, server):
    # Registers the servicer's handlers with a grpc.Server under the
    # fully-qualified service name 'pb.gubernator.PeersV1'.
    rpc_method_handlers = {
            'GetPeerRateLimits': grpc.unary_unary_rpc_method_handler(
                    servicer.GetPeerRateLimits,
                    request_deserializer=peers__pb2.GetPeerRateLimitsReq.FromString,
                    response_serializer=peers__pb2.GetPeerRateLimitsResp.SerializeToString,
            ),
            'UpdatePeerGlobals': grpc.unary_unary_rpc_method_handler(
                    servicer.UpdatePeerGlobals,
                    request_deserializer=peers__pb2.UpdatePeerGlobalsReq.FromString,
                    response_serializer=peers__pb2.UpdatePeerGlobalsResp.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'pb.gubernator.PeersV1', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


 # This class is part of an EXPERIMENTAL API.
class PeersV1(object):
    """NOTE: For use by gubernator peers only
    """

    # Stateless convenience wrappers that open a channel per call via
    # grpc.experimental; prefer PeersV1Stub for repeated calls.

    @staticmethod
    def GetPeerRateLimits(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/pb.gubernator.PeersV1/GetPeerRateLimits',
            peers__pb2.GetPeerRateLimitsReq.SerializeToString,
            peers__pb2.GetPeerRateLimitsResp.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def UpdatePeerGlobals(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(request, target, '/pb.gubernator.PeersV1/UpdatePeerGlobals',
            peers__pb2.UpdatePeerGlobalsReq.SerializeToString,
            peers__pb2.UpdatePeerGlobalsResp.FromString,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
1,889
1,350
<gh_stars>1000+ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.authorization.implementation; import com.azure.core.annotation.BodyParam; import com.azure.core.annotation.Delete; import com.azure.core.annotation.ExpectedResponses; import com.azure.core.annotation.Get; import com.azure.core.annotation.HeaderParam; import com.azure.core.annotation.Headers; import com.azure.core.annotation.Host; import com.azure.core.annotation.HostParam; import com.azure.core.annotation.Patch; import com.azure.core.annotation.PathParam; import com.azure.core.annotation.Post; import com.azure.core.annotation.QueryParam; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceInterface; import com.azure.core.annotation.ServiceMethod; import com.azure.core.annotation.UnexpectedResponseExceptionType; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.RestProxy; import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; import com.azure.core.util.serializer.CollectionFormat; import com.azure.core.util.serializer.JacksonAdapter; import com.azure.resourcemanager.authorization.fluent.UsersTodoListsTasksClient; import com.azure.resourcemanager.authorization.fluent.models.CollectionOfExtension1; import com.azure.resourcemanager.authorization.fluent.models.CollectionOfLinkedResource; import com.azure.resourcemanager.authorization.fluent.models.Get5ItemsItem; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphExtensionInner; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphLinkedResourceInner; import 
com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException; import com.azure.resourcemanager.authorization.fluent.models.UsersTodoListsTasksOrderby; import com.azure.resourcemanager.authorization.fluent.models.UsersTodoListsTasksSelect; import java.util.List; import reactor.core.publisher.Mono; /** An instance of this class provides access to all the operations defined in UsersTodoListsTasksClient. */ public final class UsersTodoListsTasksClientImpl implements UsersTodoListsTasksClient { private final ClientLogger logger = new ClientLogger(UsersTodoListsTasksClientImpl.class); /** The proxy service used to perform REST calls. */ private final UsersTodoListsTasksService service; /** The service client containing this operation class. */ private final MicrosoftGraphClientImpl client; /** * Initializes an instance of UsersTodoListsTasksClientImpl. * * @param client the instance of the service client containing this operation class. */ UsersTodoListsTasksClientImpl(MicrosoftGraphClientImpl client) { this.service = RestProxy.create(UsersTodoListsTasksService.class, client.getHttpPipeline(), client.getSerializerAdapter()); this.client = client; } /** * The interface defining all the services for MicrosoftGraphClientUsersTodoListsTasks to be used by the proxy * service to perform REST calls. 
    */
    // RestProxy service interface: each method maps 1:1 onto a Microsoft Graph REST
    // operation under /users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}.
    // The proxy implementation is generated at runtime from these annotations, so the
    // annotation values and parameter order are part of the wire contract — do not edit by hand.
    @Host("{$host}")
    @ServiceInterface(name = "MicrosoftGraphClient")
    private interface UsersTodoListsTasksService {
        // --- todoTask extensions collection ---

        // GET the extensions collection; OData query options are passed through as raw strings
        // (callers serialize List-typed options to CSV before invoking).
        @Headers({"Content-Type: application/json"})
        @Get("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/extensions")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<CollectionOfExtension1>> listExtensions(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @QueryParam("$top") Integer top,
            @QueryParam("$skip") Integer skip,
            @QueryParam("$search") String search,
            @QueryParam("$filter") String filter,
            @QueryParam("$count") Boolean count,
            @QueryParam("$orderby") String orderby,
            @QueryParam("$select") String select,
            @QueryParam("$expand") String expand,
            @HeaderParam("Accept") String accept,
            Context context);

        // POST a new extension; server replies 201 with the created entity.
        @Headers({"Content-Type: application/json"})
        @Post("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/extensions")
        @ExpectedResponses({201})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<MicrosoftGraphExtensionInner>> createExtensions(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @BodyParam("application/json") MicrosoftGraphExtensionInner body,
            @HeaderParam("Accept") String accept,
            Context context);

        // GET a single extension by id, with optional $select/$expand projections.
        @Headers({"Content-Type: application/json"})
        @Get("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/extensions/{extension-id}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<MicrosoftGraphExtensionInner>> getExtensions(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @PathParam("extension-id") String extensionId,
            @QueryParam("$select") String select,
            @QueryParam("$expand") String expand,
            @HeaderParam("Accept") String accept,
            Context context);

        // PATCH an existing extension; 204 No Content on success.
        @Headers({"Content-Type: application/json"})
        @Patch("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/extensions/{extension-id}")
        @ExpectedResponses({204})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<Void>> updateExtensions(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @PathParam("extension-id") String extensionId,
            @BodyParam("application/json") MicrosoftGraphExtensionInner body,
            @HeaderParam("Accept") String accept,
            Context context);

        // DELETE an extension; optional If-Match ETag enables optimistic concurrency.
        @Headers({"Content-Type: application/json"})
        @Delete("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/extensions/{extension-id}")
        @ExpectedResponses({204})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<Void>> deleteExtensions(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @PathParam("extension-id") String extensionId,
            @HeaderParam("If-Match") String ifMatch,
            @HeaderParam("Accept") String accept,
            Context context);

        // --- todoTask linkedResources collection (same CRUD shape as extensions) ---

        @Headers({"Content-Type: application/json"})
        @Get("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/linkedResources")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<CollectionOfLinkedResource>> listLinkedResources(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @QueryParam("$top") Integer top,
            @QueryParam("$skip") Integer skip,
            @QueryParam("$search") String search,
            @QueryParam("$filter") String filter,
            @QueryParam("$count") Boolean count,
            @QueryParam("$orderby") String orderby,
            @QueryParam("$select") String select,
            @QueryParam("$expand") String expand,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Post("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/linkedResources")
        @ExpectedResponses({201})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<MicrosoftGraphLinkedResourceInner>> createLinkedResources(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @BodyParam("application/json") MicrosoftGraphLinkedResourceInner body,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Get("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/linkedResources/{linkedResource-id}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<MicrosoftGraphLinkedResourceInner>> getLinkedResources(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @PathParam("linkedResource-id") String linkedResourceId,
            @QueryParam("$select") String select,
            @QueryParam("$expand") String expand,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Patch("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/linkedResources/{linkedResource-id}")
        @ExpectedResponses({204})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<Void>> updateLinkedResources(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @PathParam("linkedResource-id") String linkedResourceId,
            @BodyParam("application/json") MicrosoftGraphLinkedResourceInner body,
            @HeaderParam("Accept") String accept,
            Context context);

        @Headers({"Content-Type: application/json"})
        @Delete("/users/{user-id}/todo/lists/{todoTaskList-id}/tasks/{todoTask-id}/linkedResources/{linkedResource-id}")
        @ExpectedResponses({204})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<Void>> deleteLinkedResources(
            @HostParam("$host") String endpoint,
            @PathParam("user-id") String userId,
            @PathParam("todoTaskList-id") String todoTaskListId,
            @PathParam("todoTask-id") String todoTaskId,
            @PathParam("linkedResource-id") String linkedResourceId,
            @HeaderParam("If-Match") String ifMatch,
            @HeaderParam("Accept") String accept,
            Context context);

        // --- pagination: follow the server-provided opaque @odata.nextLink URLs ---

        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<CollectionOfExtension1>> listMore(
            @PathParam(value = "nextLink", encoded = true) String nextLink, Context context);

        @Headers({"Accept: application/json", "Content-Type: application/json"})
        @Get("{nextLink}")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(OdataErrorMainException.class)
        Mono<Response<CollectionOfLinkedResource>> listLinkedResourcesNext(
            @PathParam(value = "nextLink", encoded = true) String nextLink, Context context);
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<MicrosoftGraphExtensionInner>> listExtensionsSinglePageAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<String> select,
        List<String> expand) {
        // Validate required client configuration and path parameters up front;
        // errors are surfaced as failed Monos rather than thrown synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        final String accept = "application/json";
        // OData list-valued query options are serialized to CSV strings for the wire.
        String orderbyConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(orderby, CollectionFormat.CSV);
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        // withContext captures the Reactor subscriber context; the trailing subscriberContext
        // call merges the client-level context so per-client settings propagate into the call.
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listExtensions(
                            this.client.getEndpoint(),
                            userId,
                            todoTaskListId,
                            todoTaskId,
                            top,
                            skip,
                            search,
                            filter,
                            count,
                            orderbyConverted,
                            selectConverted,
                            expandConverted,
                            accept,
                            context))
            // Adapt the raw collection response into a PagedResponse, carrying the
            // @odata.nextLink forward so PagedFlux can fetch subsequent pages.
            .<PagedResponse<MicrosoftGraphExtensionInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<MicrosoftGraphExtensionInner>> listExtensionsSinglePageAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<String> select,
        List<String> expand,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        final String accept = "application/json";
        String orderbyConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(orderby, CollectionFormat.CSV);
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        // Context overload: merge the caller's explicit Context with the client default
        // instead of pulling it from the Reactor subscriber context.
        context = this.client.mergeContext(context);
        return service
            .listExtensions(
                this.client.getEndpoint(),
                userId,
                todoTaskListId,
                todoTaskId,
                top,
                skip,
                search,
                filter,
                count,
                orderbyConverted,
                selectConverted,
                expandConverted,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<MicrosoftGraphExtensionInner> listExtensionsAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<String> select,
        List<String> expand) {
        // First page via listExtensionsSinglePageAsync; subsequent pages follow the
        // nextLink through listMoreSinglePageAsync.
        return new PagedFlux<>(
            () ->
                listExtensionsSinglePageAsync(
                    userId, todoTaskListId, todoTaskId, top, skip, search, filter, count, orderby, select, expand),
            nextLink -> listMoreSinglePageAsync(nextLink));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<MicrosoftGraphExtensionInner> listExtensionsAsync(
        String userId, String todoTaskListId, String todoTaskId) {
        // Convenience overload: all OData query options left unset.
        final Integer top = null;
        final Integer skip = null;
        final String search = null;
        final String filter = null;
        final Boolean count = null;
        final List<UsersTodoListsTasksOrderby> orderby = null;
        final List<String> select = null;
        final List<String> expand = null;
        return new PagedFlux<>(
            () ->
                listExtensionsSinglePageAsync(
                    userId, todoTaskListId, todoTaskId, top, skip, search, filter, count, orderby, select, expand),
            nextLink -> listMoreSinglePageAsync(nextLink));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<MicrosoftGraphExtensionInner> listExtensionsAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<String> select,
        List<String> expand,
        Context context) {
        // Context-aware variant: the explicit Context is threaded through both the
        // first-page call and every nextLink call.
        return new PagedFlux<>(
            () ->
                listExtensionsSinglePageAsync(
                    userId,
                    todoTaskListId,
                    todoTaskId,
                    top,
                    skip,
                    search,
                    filter,
                    count,
                    orderby,
                    select,
                    expand,
                    context),
            nextLink -> listMoreSinglePageAsync(nextLink, context));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<MicrosoftGraphExtensionInner> listExtensions(
        String userId, String todoTaskListId, String todoTaskId) {
        // Synchronous facade over the async paged stream; iteration blocks per page.
        final Integer top = null;
        final Integer skip = null;
        final String search = null;
        final String filter = null;
        final Boolean count = null;
        final List<UsersTodoListsTasksOrderby> orderby = null;
        final List<String> select = null;
        final List<String> expand = null;
        return new PagedIterable<>(
            listExtensionsAsync(
                userId, todoTaskListId, todoTaskId, top, skip, search, filter, count, orderby, select, expand));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<MicrosoftGraphExtensionInner> listExtensions(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<String> select,
        List<String> expand,
        Context context) {
        // Synchronous, fully-parameterized listing with an explicit operation Context.
        return new PagedIterable<>(
            listExtensionsAsync(
                userId,
                todoTaskListId,
                todoTaskId,
                top,
                skip,
                search,
                filter,
                count,
                orderby,
                select,
                expand,
                context));
    }

    /**
     * Create new navigation property to extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extension.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<MicrosoftGraphExtensionInner>> createExtensionsWithResponseAsync(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphExtensionInner body) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (body == null) {
            return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
        } else {
            // Client-side model validation before the request is sent.
            body.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .createExtensions(
                            this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, body, accept, context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Create new navigation property to extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extension.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<MicrosoftGraphExtensionInner>> createExtensionsWithResponseAsync(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphExtensionInner body, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (body == null) {
            return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
        } else {
            body.validate();
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .createExtensions(this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, body, accept, context);
    }

    /**
     * Create new navigation property to extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extension.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<MicrosoftGraphExtensionInner> createExtensionsAsync(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphExtensionInner body) {
        // Unwrap the Response: empty Mono when the server returned no entity body.
        return createExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, body)
            .flatMap(
                (Response<MicrosoftGraphExtensionInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Create new navigation property to extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extension.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public MicrosoftGraphExtensionInner createExtensions(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphExtensionInner body) {
        // Blocking synchronous facade over the async call.
        return createExtensionsAsync(userId, todoTaskListId, todoTaskId, body).block();
    }

    /**
     * Create new navigation property to extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extension.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<MicrosoftGraphExtensionInner> createExtensionsWithResponse(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphExtensionInner body, Context context) {
        return createExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, body, context).block();
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<MicrosoftGraphExtensionInner>> getExtensionsWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        List<String> select,
        List<Get5ItemsItem> expand) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (extensionId == null) {
            return Mono.error(new IllegalArgumentException("Parameter extensionId is required and cannot be null."));
        }
        final String accept = "application/json";
        // Serialize $select/$expand list options to CSV strings for the query string.
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        return FluxUtil
            .withContext(
                context ->
                    service
                        .getExtensions(
                            this.client.getEndpoint(),
                            userId,
                            todoTaskListId,
                            todoTaskId,
                            extensionId,
                            selectConverted,
                            expandConverted,
                            accept,
                            context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<MicrosoftGraphExtensionInner>> getExtensionsWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        List<String> select,
        List<Get5ItemsItem> expand,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (extensionId == null) {
            return Mono.error(new IllegalArgumentException("Parameter extensionId is required and cannot be null."));
        }
        final String accept = "application/json";
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        // Merge the caller's Context with the client default before invoking the proxy.
        context = this.client.mergeContext(context);
        return service
            .getExtensions(
                this.client.getEndpoint(),
                userId,
                todoTaskListId,
                todoTaskId,
                extensionId,
                selectConverted,
                expandConverted,
                accept,
                context);
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<MicrosoftGraphExtensionInner> getExtensionsAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        List<String> select,
        List<Get5ItemsItem> expand) {
        // Unwrap the Response; empty Mono when no entity body was returned.
        return getExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, select, expand)
            .flatMap(
                (Response<MicrosoftGraphExtensionInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<MicrosoftGraphExtensionInner> getExtensionsAsync(
        String userId, String todoTaskListId, String todoTaskId, String extensionId) {
        // Convenience overload: no $select/$expand projection.
        final List<String> select = null;
        final List<Get5ItemsItem> expand = null;
        return getExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, select, expand)
            .flatMap(
                (Response<MicrosoftGraphExtensionInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public MicrosoftGraphExtensionInner getExtensions(
        String userId, String todoTaskListId, String todoTaskId, String extensionId) {
        // Blocking synchronous facade over the async call.
        final List<String> select = null;
        final List<Get5ItemsItem> expand = null;
        return getExtensionsAsync(userId, todoTaskListId, todoTaskId, extensionId, select, expand).block();
    }

    /**
     * Get extensions from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return extensions from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<MicrosoftGraphExtensionInner> getExtensionsWithResponse(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        List<String> select,
        List<Get5ItemsItem> expand,
        Context context) {
        return getExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, select, expand, context)
            .block();
    }

    /**
     * Update the navigation property extensions in users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param body New navigation property values.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<Void>> updateExtensionsWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        MicrosoftGraphExtensionInner body) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (extensionId == null) {
            return Mono.error(new IllegalArgumentException("Parameter extensionId is required and cannot be null."));
        }
        if (body == null) {
            return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
        } else {
            // Client-side model validation before the PATCH is sent.
            body.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .updateExtensions(
                            this.client.getEndpoint(),
                            userId,
                            todoTaskListId,
                            todoTaskId,
                            extensionId,
                            body,
                            accept,
                            context))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Update the navigation property extensions in users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param body New navigation property values.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> updateExtensionsWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        MicrosoftGraphExtensionInner body,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (extensionId == null) {
            return Mono.error(new IllegalArgumentException("Parameter extensionId is required and cannot be null."));
        }
        if (body == null) {
            return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
        } else {
            body.validate();
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .updateExtensions(
                this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, extensionId, body, accept, context);
    }

    /**
     * Update the navigation property extensions in users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param body New navigation property values.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Void> updateExtensionsAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        MicrosoftGraphExtensionInner body) {
        // Discard the (empty) 204 response body and complete.
        return updateExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, body)
            .flatMap((Response<Void> res) -> Mono.empty());
    }

    /**
     * Update the navigation property extensions in users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param body New navigation property values.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void updateExtensions(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        MicrosoftGraphExtensionInner body) {
        // Blocking synchronous facade over the async call.
        updateExtensionsAsync(userId, todoTaskListId, todoTaskId, extensionId, body).block();
    }

    /**
     * Update the navigation property extensions in users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param body New navigation property values.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> updateExtensionsWithResponse(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String extensionId,
        MicrosoftGraphExtensionInner body,
        Context context) {
        // Synchronous variant: blocks on the context-aware async overload.
        return updateExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, body, context)
            .block();
    }

    /**
     * Delete navigation property extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param ifMatch ETag.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<Void>> deleteExtensionsWithResponseAsync(
        String userId, String todoTaskListId, String todoTaskId, String extensionId, String ifMatch) {
        // Fail the returned Mono (rather than throw synchronously) when a required parameter is missing.
        // Note: ifMatch is optional and therefore not validated here.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (extensionId == null) {
            return Mono.error(new IllegalArgumentException("Parameter extensionId is required and cannot be null."));
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .deleteExtensions(
                            this.client.getEndpoint(),
                            userId,
                            todoTaskListId,
                            todoTaskId,
                            extensionId,
                            ifMatch,
                            accept,
                            context))
            // Propagate the client's shared context into the subscriber's Reactor context for this call.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Delete navigation property extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param ifMatch ETag.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<Void>> deleteExtensionsWithResponseAsync(
        String userId, String todoTaskListId, String todoTaskId, String extensionId, String ifMatch, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (extensionId == null) {
            return Mono.error(new IllegalArgumentException("Parameter extensionId is required and cannot be null."));
        }
        final String accept = "application/json";
        // Merge the per-call context with the client's context before invoking the service.
        context = this.client.mergeContext(context);
        return service
            .deleteExtensions(
                this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, extensionId, ifMatch, accept, context);
    }

    /**
     * Delete navigation property extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param ifMatch ETag.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Void> deleteExtensionsAsync(
        String userId, String todoTaskListId, String todoTaskId, String extensionId, String ifMatch) {
        // Discard the Response wrapper; only completion (or error) is surfaced to the caller.
        return deleteExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, ifMatch)
            .flatMap((Response<Void> res) -> Mono.empty());
    }

    /**
     * Delete navigation property extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the completion.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Void> deleteExtensionsAsync(
        String userId, String todoTaskListId, String todoTaskId, String extensionId) {
        // Convenience overload: no ETag precondition is sent.
        final String ifMatch = null;
        return deleteExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, ifMatch)
            .flatMap((Response<Void> res) -> Mono.empty());
    }

    /**
     * Delete navigation property extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public void deleteExtensions(String userId, String todoTaskListId, String todoTaskId, String extensionId) {
        final String ifMatch = null;
        // Synchronous variant: blocks on the async pipeline.
        deleteExtensionsAsync(userId, todoTaskListId, todoTaskId, extensionId, ifMatch).block();
    }

    /**
     * Delete navigation property extensions for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param extensionId key: id of extension.
     * @param ifMatch ETag.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the response.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<Void> deleteExtensionsWithResponse(
        String userId, String todoTaskListId, String todoTaskId, String extensionId, String ifMatch, Context context) {
        return deleteExtensionsWithResponseAsync(userId, todoTaskListId, todoTaskId, extensionId, ifMatch, context)
            .block();
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<MicrosoftGraphLinkedResourceInner>> listLinkedResourcesSinglePageAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand) {
        // Fail the returned Mono (rather than throw synchronously) when a required parameter is missing.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        final String accept = "application/json";
        // OData collection query parameters are serialized as comma-separated lists.
        String orderbyConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(orderby, CollectionFormat.CSV);
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        return FluxUtil
            .withContext(
                context ->
                    service
                        .listLinkedResources(
                            this.client.getEndpoint(),
                            userId,
                            todoTaskListId,
                            todoTaskId,
                            top,
                            skip,
                            search,
                            filter,
                            count,
                            orderbyConverted,
                            selectConverted,
                            expandConverted,
                            accept,
                            context))
            // Adapt the raw service response into a page: items plus the odata.nextLink continuation token.
            .<PagedResponse<MicrosoftGraphLinkedResourceInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null))
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<MicrosoftGraphLinkedResourceInner>> listLinkedResourcesSinglePageAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        final String accept = "application/json";
        String orderbyConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(orderby, CollectionFormat.CSV);
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        // Merge the per-call context with the client's context before invoking the service.
        context = this.client.mergeContext(context);
        return service
            .listLinkedResources(
                this.client.getEndpoint(),
                userId,
                todoTaskListId,
                todoTaskId,
                top,
                skip,
                search,
                filter,
                count,
                orderbyConverted,
                selectConverted,
                expandConverted,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().odataNextLink(),
                        null));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<MicrosoftGraphLinkedResourceInner> listLinkedResourcesAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand) {
        // First page comes from the filtered list call; subsequent pages follow the nextLink.
        return new PagedFlux<>(
            () ->
                listLinkedResourcesSinglePageAsync(
                    userId, todoTaskListId, todoTaskId, top, skip, search, filter, count, orderby, select, expand),
            nextLink -> listLinkedResourcesNextSinglePageAsync(nextLink));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<MicrosoftGraphLinkedResourceInner> listLinkedResourcesAsync(
        String userId, String todoTaskListId, String todoTaskId) {
        // Convenience overload: all OData query options default to null (no filtering/paging options sent).
        final Integer top = null;
        final Integer skip = null;
        final String search = null;
        final String filter = null;
        final Boolean count = null;
        final List<UsersTodoListsTasksOrderby> orderby = null;
        final List<UsersTodoListsTasksSelect> select = null;
        final List<String> expand = null;
        return new PagedFlux<>(
            () ->
                listLinkedResourcesSinglePageAsync(
                    userId, todoTaskListId, todoTaskId, top, skip, search, filter, count, orderby, select, expand),
            nextLink -> listLinkedResourcesNextSinglePageAsync(nextLink));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<MicrosoftGraphLinkedResourceInner> listLinkedResourcesAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand,
        Context context) {
        // Context-aware variant: the same context is threaded through first-page and next-page calls.
        return new PagedFlux<>(
            () ->
                listLinkedResourcesSinglePageAsync(
                    userId,
                    todoTaskListId,
                    todoTaskId,
                    top,
                    skip,
                    search,
                    filter,
                    count,
                    orderby,
                    select,
                    expand,
                    context),
            nextLink -> listLinkedResourcesNextSinglePageAsync(nextLink, context));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<MicrosoftGraphLinkedResourceInner> listLinkedResources(
        String userId, String todoTaskListId, String todoTaskId) {
        // Synchronous variant: wraps the async PagedFlux in a blocking PagedIterable.
        final Integer top = null;
        final Integer skip = null;
        final String search = null;
        final String filter = null;
        final Boolean count = null;
        final List<UsersTodoListsTasksOrderby> orderby = null;
        final List<UsersTodoListsTasksSelect> select = null;
        final List<String> expand = null;
        return new PagedIterable<>(
            listLinkedResourcesAsync(
                userId, todoTaskListId, todoTaskId, top, skip, search, filter, count, orderby, select, expand));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param top Show only the first n items.
     * @param skip Skip the first n items.
     * @param search Search items by search phrases.
     * @param filter Filter items by property values.
     * @param count Include count of items.
     * @param orderby Order items by property values.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<MicrosoftGraphLinkedResourceInner> listLinkedResources(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        Integer top,
        Integer skip,
        String search,
        String filter,
        Boolean count,
        List<UsersTodoListsTasksOrderby> orderby,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand,
        Context context) {
        return new PagedIterable<>(
            listLinkedResourcesAsync(
                userId,
                todoTaskListId,
                todoTaskId,
                top,
                skip,
                search,
                filter,
                count,
                orderby,
                select,
                expand,
                context));
    }

    /**
     * Create new navigation property to linkedResources for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<MicrosoftGraphLinkedResourceInner>> createLinkedResourcesWithResponseAsync(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphLinkedResourceInner body) {
        // Fail the returned Mono (rather than throw synchronously) when a required parameter is missing.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (body == null) {
            return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
        } else {
            // Delegate request-payload validation to the model itself.
            body.validate();
        }
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .createLinkedResources(
                            this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, body, accept, context))
            // Propagate the client's shared context into the subscriber's Reactor context for this call.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Create new navigation property to linkedResources for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<MicrosoftGraphLinkedResourceInner>> createLinkedResourcesWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        MicrosoftGraphLinkedResourceInner body,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (body == null) {
            return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
        } else {
            body.validate();
        }
        final String accept = "application/json";
        // Merge the per-call context with the client's context before invoking the service.
        context = this.client.mergeContext(context);
        return service
            .createLinkedResources(this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, body, accept, context);
    }

    /**
     * Create new navigation property to linkedResources for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<MicrosoftGraphLinkedResourceInner> createLinkedResourcesAsync(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphLinkedResourceInner body) {
        // Unwrap the Response: emit the created resource, or complete empty when the body is absent.
        return createLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, body)
            .flatMap(
                (Response<MicrosoftGraphLinkedResourceInner> res) -> {
                    if (res.getValue() != null) {
                        return Mono.just(res.getValue());
                    } else {
                        return Mono.empty();
                    }
                });
    }

    /**
     * Create new navigation property to linkedResources for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public MicrosoftGraphLinkedResourceInner createLinkedResources(
        String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphLinkedResourceInner body) {
        // Synchronous variant: blocks on the async pipeline.
        return createLinkedResourcesAsync(userId, todoTaskListId, todoTaskId, body).block();
    }

    /**
     * Create new navigation property to linkedResources for users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param body New navigation property.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResource.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Response<MicrosoftGraphLinkedResourceInner> createLinkedResourcesWithResponse(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        MicrosoftGraphLinkedResourceInner body,
        Context context) {
        // Synchronous variant: blocks on the context-aware async overload.
        return createLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, body, context).block();
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param linkedResourceId key: id of linkedResource.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<MicrosoftGraphLinkedResourceInner>> getLinkedResourcesWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String linkedResourceId,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand) {
        // Fail the returned Mono (rather than throw synchronously) when a required parameter is missing.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (linkedResourceId == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter linkedResourceId is required and cannot be null."));
        }
        final String accept = "application/json";
        // OData collection query parameters are serialized as comma-separated lists.
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        return FluxUtil
            .withContext(
                context ->
                    service
                        .getLinkedResources(
                            this.client.getEndpoint(),
                            userId,
                            todoTaskListId,
                            todoTaskId,
                            linkedResourceId,
                            selectConverted,
                            expandConverted,
                            accept,
                            context))
            // Propagate the client's shared context into the subscriber's Reactor context for this call.
            .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext())));
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param linkedResourceId key: id of linkedResource.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return linkedResources from users.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<Response<MicrosoftGraphLinkedResourceInner>> getLinkedResourcesWithResponseAsync(
        String userId,
        String todoTaskListId,
        String todoTaskId,
        String linkedResourceId,
        List<UsersTodoListsTasksSelect> select,
        List<String> expand,
        Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (userId == null) {
            return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null."));
        }
        if (todoTaskListId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null."));
        }
        if (todoTaskId == null) {
            return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null."));
        }
        if (linkedResourceId == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter linkedResourceId is required and cannot be null."));
        }
        final String accept = "application/json";
        String selectConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(select, CollectionFormat.CSV);
        String expandConverted =
            JacksonAdapter.createDefaultSerializerAdapter().serializeList(expand, CollectionFormat.CSV);
        // Merge the per-call context with the client's context before invoking the service.
        context = this.client.mergeContext(context);
        return service
            .getLinkedResources(
                this.client.getEndpoint(),
                userId,
                todoTaskListId,
                todoTaskId,
                linkedResourceId,
                selectConverted,
                expandConverted,
                accept,
                context);
    }

    /**
     * Get linkedResources from users.
     *
     * @param userId key: id of user.
     * @param todoTaskListId key: id of todoTaskList.
     * @param todoTaskId key: id of todoTask.
     * @param linkedResourceId key: id of linkedResource.
     * @param select Select properties to be returned.
     * @param expand Expand related entities.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws OdataErrorMainException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return linkedResources from users. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<MicrosoftGraphLinkedResourceInner> getLinkedResourcesAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, List<UsersTodoListsTasksSelect> select, List<String> expand) { return getLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, select, expand) .flatMap( (Response<MicrosoftGraphLinkedResourceInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Get linkedResources from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return linkedResources from users. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<MicrosoftGraphLinkedResourceInner> getLinkedResourcesAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId) { final List<UsersTodoListsTasksSelect> select = null; final List<String> expand = null; return getLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, select, expand) .flatMap( (Response<MicrosoftGraphLinkedResourceInner> res) -> { if (res.getValue() != null) { return Mono.just(res.getValue()); } else { return Mono.empty(); } }); } /** * Get linkedResources from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. 
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return linkedResources from users. */ @ServiceMethod(returns = ReturnType.SINGLE) public MicrosoftGraphLinkedResourceInner getLinkedResources( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId) { final List<UsersTodoListsTasksSelect> select = null; final List<String> expand = null; return getLinkedResourcesAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, select, expand).block(); } /** * Get linkedResources from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param select Select properties to be returned. * @param expand Expand related entities. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return linkedResources from users. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<MicrosoftGraphLinkedResourceInner> getLinkedResourcesWithResponse( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, List<UsersTodoListsTasksSelect> select, List<String> expand, Context context) { return getLinkedResourcesWithResponseAsync( userId, todoTaskListId, todoTaskId, linkedResourceId, select, expand, context) .block(); } /** * Update the navigation property linkedResources in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. 
* @param linkedResourceId key: id of linkedResource. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> updateLinkedResourcesWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, MicrosoftGraphLinkedResourceInner body) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (userId == null) { return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null.")); } if (todoTaskListId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null.")); } if (todoTaskId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null.")); } if (linkedResourceId == null) { return Mono .error(new IllegalArgumentException("Parameter linkedResourceId is required and cannot be null.")); } if (body == null) { return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null.")); } else { body.validate(); } final String accept = "application/json"; return FluxUtil .withContext( context -> service .updateLinkedResources( this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, linkedResourceId, body, accept, context)) .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()))); } /** * Update the navigation property linkedResources in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. 
* @param linkedResourceId key: id of linkedResource. * @param body New navigation property values. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Void>> updateLinkedResourcesWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, MicrosoftGraphLinkedResourceInner body, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (userId == null) { return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null.")); } if (todoTaskListId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null.")); } if (todoTaskId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null.")); } if (linkedResourceId == null) { return Mono .error(new IllegalArgumentException("Parameter linkedResourceId is required and cannot be null.")); } if (body == null) { return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null.")); } else { body.validate(); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .updateLinkedResources( this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, linkedResourceId, body, accept, context); } /** * Update the navigation property linkedResources in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. 
* @param linkedResourceId key: id of linkedResource. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> updateLinkedResourcesAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, MicrosoftGraphLinkedResourceInner body) { return updateLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, body) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Update the navigation property linkedResources in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) public void updateLinkedResources( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, MicrosoftGraphLinkedResourceInner body) { updateLinkedResourcesAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, body).block(); } /** * Update the navigation property linkedResources in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param body New navigation property values. * @param context The context to associate with this operation. 
* @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> updateLinkedResourcesWithResponse( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, MicrosoftGraphLinkedResourceInner body, Context context) { return updateLinkedResourcesWithResponseAsync( userId, todoTaskListId, todoTaskId, linkedResourceId, body, context) .block(); } /** * Delete navigation property linkedResources for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param ifMatch ETag. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> deleteLinkedResourcesWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, String ifMatch) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (userId == null) { return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null.")); } if (todoTaskListId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null.")); } if (todoTaskId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null.")); } if (linkedResourceId == null) { return Mono .error(new IllegalArgumentException("Parameter linkedResourceId is required and cannot be null.")); } final String accept = "application/json"; return FluxUtil .withContext( context -> service .deleteLinkedResources( this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, linkedResourceId, ifMatch, accept, context)) .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()))); } /** * Delete navigation property linkedResources for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param ifMatch ETag. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<Response<Void>> deleteLinkedResourcesWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, String ifMatch, Context context) { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (userId == null) { return Mono.error(new IllegalArgumentException("Parameter userId is required and cannot be null.")); } if (todoTaskListId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskListId is required and cannot be null.")); } if (todoTaskId == null) { return Mono.error(new IllegalArgumentException("Parameter todoTaskId is required and cannot be null.")); } if (linkedResourceId == null) { return Mono .error(new IllegalArgumentException("Parameter linkedResourceId is required and cannot be null.")); } final String accept = "application/json"; context = this.client.mergeContext(context); return service .deleteLinkedResources( this.client.getEndpoint(), userId, todoTaskListId, todoTaskId, linkedResourceId, ifMatch, accept, context); } /** * Delete navigation property linkedResources for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param ifMatch ETag. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> deleteLinkedResourcesAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, String ifMatch) { return deleteLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, ifMatch) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Delete navigation property linkedResources for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> deleteLinkedResourcesAsync( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId) { final String ifMatch = null; return deleteLinkedResourcesWithResponseAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, ifMatch) .flatMap((Response<Void> res) -> Mono.empty()); } /** * Delete navigation property linkedResources for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public void deleteLinkedResources( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId) { final String ifMatch = null; deleteLinkedResourcesAsync(userId, todoTaskListId, todoTaskId, linkedResourceId, ifMatch).block(); } /** * Delete navigation property linkedResources for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param linkedResourceId key: id of linkedResource. * @param ifMatch ETag. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) public Response<Void> deleteLinkedResourcesWithResponse( String userId, String todoTaskListId, String todoTaskId, String linkedResourceId, String ifMatch, Context context) { return deleteLinkedResourcesWithResponseAsync( userId, todoTaskListId, todoTaskId, linkedResourceId, ifMatch, context) .block(); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return collection of extension. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<MicrosoftGraphExtensionInner>> listMoreSinglePageAsync(String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } return FluxUtil .withContext(context -> service.listMore(nextLink, context)) .<PagedResponse<MicrosoftGraphExtensionInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().odataNextLink(), null)) .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()))); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return collection of extension. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<MicrosoftGraphExtensionInner>> listMoreSinglePageAsync( String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } context = this.client.mergeContext(context); return service .listMore(nextLink, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().odataNextLink(), null)); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. 
* @return collection of linkedResource. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<MicrosoftGraphLinkedResourceInner>> listLinkedResourcesNextSinglePageAsync( String nextLink) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } return FluxUtil .withContext(context -> service.listLinkedResourcesNext(nextLink, context)) .<PagedResponse<MicrosoftGraphLinkedResourceInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().odataNextLink(), null)) .subscriberContext(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()))); } /** * Get the next page of items. * * @param nextLink The nextLink parameter. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws OdataErrorMainException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return collection of linkedResource. */ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<MicrosoftGraphLinkedResourceInner>> listLinkedResourcesNextSinglePageAsync( String nextLink, Context context) { if (nextLink == null) { return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null.")); } context = this.client.mergeContext(context); return service .listLinkedResourcesNext(nextLink, context) .map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().odataNextLink(), null)); } }
47,368
1,778
/* * Copyright (c) 2014-2018 Cesanta Software Limited * All rights reserved * * Licensed under the Apache License, Version 2.0 (the ""License""); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an ""AS IS"" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #if 0 //CS_PLATFORM == CS_P_CC3200 #include "common/mg_mem.h" #include <stdio.h> #include <stdlib.h> #include <string.h> #ifndef __TI_COMPILER_VERSION__ #include <reent.h> #include <sys/stat.h> #include <sys/time.h> #include <unistd.h> #endif #include <inc/hw_types.h> #include <inc/hw_memmap.h> #include <driverlib/prcm.h> #include <driverlib/rom.h> #include <driverlib/rom_map.h> #include <driverlib/uart.h> #include <driverlib/utils.h> #define CONSOLE_UART UARTA0_BASE #ifdef __TI_COMPILER_VERSION__ int asprintf(char **strp, const char *fmt, ...) 
{ va_list ap; int len; *strp = MG_MALLOC(BUFSIZ); if (*strp == NULL) return -1; va_start(ap, fmt); len = vsnprintf(*strp, BUFSIZ, fmt, ap); va_end(ap); if (len > 0) { *strp = MG_REALLOC(*strp, len + 1); if (*strp == NULL) return -1; } if (len >= BUFSIZ) { va_start(ap, fmt); len = vsnprintf(*strp, len + 1, fmt, ap); va_end(ap); } return len; } #if MG_TI_NO_HOST_INTERFACE time_t HOSTtime() { struct timeval tp; gettimeofday(&tp, NULL); return tp.tv_sec; } #endif #endif /* __TI_COMPILER_VERSION__ */ void fprint_str(FILE *fp, const char *str) { while (*str != '\0') { if (*str == '\n') MAP_UARTCharPut(CONSOLE_UART, '\r'); MAP_UARTCharPut(CONSOLE_UART, *str++); } } void _exit(int status) { fprint_str(stderr, "_exit\n"); /* cause an unaligned access exception, that will drop you into gdb */ *(int *) 1 = status; while (1) ; /* avoid gcc warning because stdlib abort() has noreturn attribute */ } void _not_implemented(const char *what) { fprint_str(stderr, what); fprint_str(stderr, " is not implemented\n"); _exit(42); } int _kill(int pid, int sig) { (void) pid; (void) sig; _not_implemented("_kill"); return -1; } int _getpid() { fprint_str(stderr, "_getpid is not implemented\n"); return 42; } int _isatty(int fd) { /* 0, 1 and 2 are TTYs. */ return fd < 2; } #endif /* CS_PLATFORM == CS_P_CC3200 */
1,042
690
<gh_stars>100-1000 package demo; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.stream.annotation.EnableBinding; import org.springframework.cloud.stream.annotation.Output; import org.springframework.messaging.Message; import org.springframework.messaging.MessageChannel; import org.springframework.messaging.support.MessageBuilder; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; @EnableBinding(MessageChannels.class) @RestController @SpringBootApplication public class ProducerApplication { private final MessageChannels channels; @Autowired public ProducerApplication(MessageChannels channel) { this.channels = channel; } @RequestMapping(method = RequestMethod.GET, value = "/greet/{name}") void greet(@PathVariable String name) { Message<String> msg = MessageBuilder.withPayload(name).build(); this.channels.output().send(msg); } public static void main(String[] args) { SpringApplication.run(ProducerApplication.class, args); } } interface MessageChannels { @Output MessageChannel output(); }
440
778
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * This file is derived from BookKeeperClusterTestCase from Apache BookKeeper * http://bookkeeper.apache.org */ package org.apache.bookkeeper.test; import static org.apache.bookkeeper.bookie.BookKeeperServerStats.BOOKIE_SCOPE; import static org.apache.bookkeeper.bookie.BookKeeperServerStats.LD_INDEX_SCOPE; import static org.apache.bookkeeper.bookie.BookKeeperServerStats.LD_LEDGER_SCOPE; import org.apache.bookkeeper.bookie.Bookie; import org.apache.bookkeeper.bookie.BookieImpl; import org.apache.bookkeeper.bookie.BookieResources; import org.apache.bookkeeper.bookie.LedgerDirsManager; import org.apache.bookkeeper.bookie.LedgerStorage; import org.apache.bookkeeper.bookie.LegacyCookieValidation; import org.apache.bookkeeper.bookie.ReadOnlyBookie; import org.apache.bookkeeper.bookie.UncleanShutdownDetection; import org.apache.bookkeeper.bookie.UncleanShutdownDetectionImpl; import org.apache.bookkeeper.client.TestStatsProvider; import org.apache.bookkeeper.common.allocator.ByteBufAllocatorWithOomHandler; import org.apache.bookkeeper.common.allocator.PoolingPolicy; import org.apache.bookkeeper.conf.ServerConfiguration; import org.apache.bookkeeper.discover.BookieServiceInfo; import 
org.apache.bookkeeper.discover.RegistrationManager; import org.apache.bookkeeper.meta.LedgerManager; import org.apache.bookkeeper.meta.LedgerManagerFactory; import org.apache.bookkeeper.meta.MetadataBookieDriver; import org.apache.bookkeeper.net.BookieSocketAddress; import org.apache.bookkeeper.proto.BookieServer; import org.apache.bookkeeper.replication.Auditor; import org.apache.bookkeeper.replication.AutoRecoveryMain; import org.apache.bookkeeper.replication.ReplicationWorker; import org.apache.bookkeeper.server.Main; import org.apache.bookkeeper.stats.StatsLogger; import org.apache.bookkeeper.util.DiskChecker; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Class to encapsulate all the test objects. */ public class ServerTester { static final Logger LOG = LoggerFactory.getLogger(ServerTester.class); /** * Mock implementation of UncleanShutdownDetection. */ public static class MockUncleanShutdownDetection implements UncleanShutdownDetection { private boolean startRegistered; private boolean shutdownRegistered; @Override public void registerStartUp() { startRegistered = true; } @Override public void registerCleanShutdown() { shutdownRegistered = true; } @Override public boolean lastShutdownWasUnclean() { return startRegistered && !shutdownRegistered; } public boolean getStartRegistered() { return startRegistered; } public boolean getShutdownRegistered() { return shutdownRegistered; } } private final ServerConfiguration conf; private final TestStatsProvider provider; private final Bookie bookie; private final BookieServer server; private final BookieSocketAddress address; private final MetadataBookieDriver metadataDriver; private final RegistrationManager registrationManager; private final LedgerManagerFactory lmFactory; private final LedgerManager ledgerManager; private final LedgerStorage storage; public AutoRecoveryMain autoRecovery; private final ByteBufAllocatorWithOomHandler allocator = BookieResources .createAllocator((new 
ServerConfiguration()).setAllocatorPoolingPolicy(PoolingPolicy.UnpooledHeap)); public ServerTester(ServerConfiguration conf) throws Exception { this.conf = conf; provider = new TestStatsProvider(); StatsLogger rootStatsLogger = provider.getStatsLogger(""); StatsLogger bookieStats = rootStatsLogger.scope(BOOKIE_SCOPE); metadataDriver = BookieResources.createMetadataDriver(conf, bookieStats); registrationManager = metadataDriver.createRegistrationManager(); lmFactory = metadataDriver.getLedgerManagerFactory(); ledgerManager = lmFactory.newLedgerManager(); LegacyCookieValidation cookieValidation = new LegacyCookieValidation( conf, registrationManager); cookieValidation.checkCookies(Main.storageDirectoriesFromConf(conf)); DiskChecker diskChecker = BookieResources.createDiskChecker(conf); LedgerDirsManager ledgerDirsManager = BookieResources.createLedgerDirsManager( conf, diskChecker, bookieStats.scope(LD_LEDGER_SCOPE)); LedgerDirsManager indexDirsManager = BookieResources.createIndexDirsManager( conf, diskChecker, bookieStats.scope(LD_INDEX_SCOPE), ledgerDirsManager); UncleanShutdownDetection uncleanShutdownDetection = new UncleanShutdownDetectionImpl(ledgerDirsManager); storage = BookieResources.createLedgerStorage( conf, ledgerManager, ledgerDirsManager, indexDirsManager, bookieStats, allocator); if (conf.isForceReadOnlyBookie()) { bookie = new ReadOnlyBookie(conf, registrationManager, storage, diskChecker, ledgerDirsManager, indexDirsManager, bookieStats, allocator, BookieServiceInfo.NO_INFO); } else { bookie = new BookieImpl(conf, registrationManager, storage, diskChecker, ledgerDirsManager, indexDirsManager, bookieStats, allocator, BookieServiceInfo.NO_INFO); } server = new BookieServer(conf, bookie, rootStatsLogger, allocator, uncleanShutdownDetection); address = BookieImpl.getBookieAddress(conf); autoRecovery = null; } public ServerTester(ServerConfiguration conf, Bookie b) throws Exception { this.conf = conf; provider = new TestStatsProvider(); metadataDriver = 
null; registrationManager = null; ledgerManager = null; lmFactory = null; storage = null; bookie = b; server = new BookieServer(conf, b, provider.getStatsLogger(""), allocator, new MockUncleanShutdownDetection()); address = BookieImpl.getBookieAddress(conf); autoRecovery = null; } public void startAutoRecovery() throws Exception { LOG.debug("Starting Auditor Recovery for the bookie: {}", address); autoRecovery = new AutoRecoveryMain(conf); autoRecovery.start(); } public void stopAutoRecovery() { if (autoRecovery != null) { LOG.debug("Shutdown Auditor Recovery for the bookie: {}", address); autoRecovery.shutdown(); } } public Auditor getAuditor() { if (autoRecovery != null) { return autoRecovery.getAuditor(); } else { return null; } } public ReplicationWorker getReplicationWorker() { if (autoRecovery != null) { return autoRecovery.getReplicationWorker(); } else { return null; } } public ServerConfiguration getConfiguration() { return conf; } public BookieServer getServer() { return server; } public TestStatsProvider getStatsProvider() { return provider; } public BookieSocketAddress getAddress() { return address; } public void shutdown() throws Exception { server.shutdown(); if (ledgerManager != null) { ledgerManager.close(); } if (lmFactory != null) { lmFactory.close(); } if (registrationManager != null) { registrationManager.close(); } if (metadataDriver != null) { metadataDriver.close(); } if (autoRecovery != null) { LOG.debug("Shutdown auto recovery for bookieserver: {}", address); autoRecovery.shutdown(); } } }
3,152
14,668
<gh_stars>1000+ // Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "extensions/browser/computed_hashes.h" #include "base/base64.h" #include "base/files/file_path.h" #include "base/files/scoped_temp_dir.h" #include "base/strings/stringprintf.h" #include "build/build_config.h" #include "crypto/sha2.h" #include "extensions/browser/content_verifier/content_verifier_utils.h" #include "extensions/common/constants.h" #include "testing/gtest/include/gtest/gtest.h" namespace { constexpr bool kIsDotSpaceSuffixIgnored = extensions::content_verifier_utils::IsDotSpaceFilenameSuffixIgnored(); constexpr bool kIsFileAccessCaseInsensitive = !extensions::content_verifier_utils::IsFileAccessCaseSensitive(); // Helper to return base64 encode result by value. std::string Base64Encode(const std::string& data) { std::string result; base::Base64Encode(data, &result); return result; } struct HashInfo { base::FilePath path; int block_size; std::vector<std::string> hashes; }; testing::AssertionResult WriteThenReadComputedHashes( const std::vector<HashInfo>& hash_infos, extensions::ComputedHashes* result) { base::ScopedTempDir scoped_dir; if (!scoped_dir.CreateUniqueTempDir()) return testing::AssertionFailure() << "Failed to create temp dir."; base::FilePath computed_hashes_path = scoped_dir.GetPath().AppendASCII("computed_hashes.json"); extensions::ComputedHashes::Data computed_hashes_data; for (const auto& info : hash_infos) computed_hashes_data.Add(info.path, info.block_size, info.hashes); if (!extensions::ComputedHashes(std::move(computed_hashes_data)) .WriteToFile(computed_hashes_path)) { return testing::AssertionFailure() << "Failed to write computed_hashes.json"; } extensions::ComputedHashes::Status computed_hashes_status; absl::optional<extensions::ComputedHashes> computed_hashes = extensions::ComputedHashes::CreateFromFile(computed_hashes_path, 
&computed_hashes_status); if (!computed_hashes) return testing::AssertionFailure() << "Failed to read computed_hashes.json (status: " << static_cast<int>(computed_hashes_status) << ")"; *result = std::move(computed_hashes.value()); return testing::AssertionSuccess(); } } // namespace namespace extensions { TEST(ComputedHashesTest, ComputedHashes) { // We'll add hashes for 2 files, one of which uses a subdirectory // path. The first file will have a list of 1 block hash, and the // second file will have 2 block hashes. base::FilePath path1(FILE_PATH_LITERAL("foo.txt")); base::FilePath path2 = base::FilePath(FILE_PATH_LITERAL("foo")).AppendASCII("bar.txt"); std::vector<std::string> hashes1 = {crypto::SHA256HashString("first")}; std::vector<std::string> hashes2 = {crypto::SHA256HashString("second"), crypto::SHA256HashString("third")}; const int kBlockSize1 = 4096; const int kBlockSize2 = 2048; ComputedHashes computed_hashes{ComputedHashes::Data()}; ASSERT_TRUE(WriteThenReadComputedHashes( {{path1, kBlockSize1, hashes1}, {path2, kBlockSize2, hashes2}}, &computed_hashes)); // After reading hashes back assert that we got what we wrote. 
std::vector<std::string> read_hashes1; std::vector<std::string> read_hashes2; int block_size = 0; EXPECT_TRUE(computed_hashes.GetHashes(path1, &block_size, &read_hashes1)); EXPECT_EQ(block_size, 4096); block_size = 0; EXPECT_TRUE(computed_hashes.GetHashes(path2, &block_size, &read_hashes2)); EXPECT_EQ(block_size, 2048); EXPECT_EQ(hashes1, read_hashes1); EXPECT_EQ(hashes2, read_hashes2); // Make sure we can lookup hashes for a file using incorrect case base::FilePath path1_badcase(FILE_PATH_LITERAL("FoO.txt")); std::vector<std::string> read_hashes1_badcase; EXPECT_EQ(kIsFileAccessCaseInsensitive, computed_hashes.GetHashes(path1_badcase, &block_size, &read_hashes1_badcase)); if (kIsFileAccessCaseInsensitive) { EXPECT_EQ(4096, block_size); EXPECT_EQ(hashes1, read_hashes1_badcase); } // Finally make sure that we can retrieve the hashes for the subdir // path even when that path contains forward slashes (on windows). base::FilePath path2_fwd_slashes = base::FilePath::FromUTF8Unsafe("foo/bar.txt"); block_size = 0; EXPECT_TRUE( computed_hashes.GetHashes(path2_fwd_slashes, &block_size, &read_hashes2)); EXPECT_EQ(hashes2, read_hashes2); } // Note: the expected hashes used in this test were generated using linux // command line tools. E.g., from a bash prompt: // $ printf "hello world" | openssl dgst -sha256 -binary | base64 // // The file with multiple-blocks expectations were generated by doing: // $ for i in `seq 500 ; do printf "hello world" ; done > hello.txt // $ dd if=hello.txt bs=4096 count=1 | openssl dgst -sha256 -binary | base64 // $ dd if=hello.txt skip=1 bs=4096 count=1 | // openssl dgst -sha256 -binary | base64 TEST(ComputedHashesTest, GetHashesForContent) { const int block_size = 4096; // Simple short input. 
std::string content1 = "hello world"; std::string content1_expected_hash = "uU0nuZNNPgilLlLX2n2r+sSE7+N6U4DukIj3rOLvzek="; std::vector<std::string> hashes1 = ComputedHashes::GetHashesForContent(content1, block_size); ASSERT_EQ(1u, hashes1.size()); EXPECT_EQ(content1_expected_hash, Base64Encode(hashes1[0])); // Multiple blocks input. std::string content2; for (int i = 0; i < 500; i++) content2 += "hello world"; const char* content2_expected_hashes[] = { "bvtt5hXo8xvHrlzGAhhoqPL/r+4zJXHx+6wAvkv15V8=", "lTD45F7P6I/HOdi8u7FLRA4qzAYL+7xSNVeusG6MJI0="}; std::vector<std::string> hashes2 = ComputedHashes::GetHashesForContent(content2, block_size); ASSERT_EQ(2u, hashes2.size()); EXPECT_EQ(content2_expected_hashes[0], Base64Encode(hashes2[0])); EXPECT_EQ(content2_expected_hashes[1], Base64Encode(hashes2[1])); // Now an empty input. std::string content3; std::vector<std::string> hashes3 = ComputedHashes::GetHashesForContent(content3, block_size); ASSERT_EQ(1u, hashes3.size()); ASSERT_EQ(std::string("47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="), Base64Encode(hashes3[0])); } // Tests that dot/space path suffixes are treated correctly in // ComputedHashes::InitFromFile. // // Regression test for https://crbug.com/696208. TEST(ComputedHashesTest, DotSpaceSuffix) { const std::string hash_value = crypto::SHA256HashString("test"); ComputedHashes computed_hashes{ComputedHashes::Data()}; // Add hashes for "foo.html" to computed_hashes.json. ASSERT_TRUE(WriteThenReadComputedHashes( { {base::FilePath(FILE_PATH_LITERAL("foo.html")), extension_misc::kContentVerificationDefaultBlockSize, {hash_value}}, }, &computed_hashes)); struct TestCase { const char* path; bool expect_hash; std::string ToString() const { return base::StringPrintf("path = %s, expect_hash = %d", path, expect_hash); } } test_cases[] = { // Sanity check: existing file. {"foo.html", true}, // Sanity check: non existent file. {"notfound.html", false}, // Path with "." suffix, along with incorrect case for the same. 
{"foo.html.", kIsDotSpaceSuffixIgnored}, {"fOo.html.", kIsDotSpaceSuffixIgnored}, // Path with " " suffix, along with incorrect case for the same. {"foo.html ", kIsDotSpaceSuffixIgnored}, {"fOo.html ", kIsDotSpaceSuffixIgnored}, // Path with ". " suffix, along with incorrect case for the same. {"foo.html. ", kIsDotSpaceSuffixIgnored}, {"fOo.html. ", kIsDotSpaceSuffixIgnored}, // Path with " ." suffix, along with incorrect case for the same. {"foo.html .", kIsDotSpaceSuffixIgnored}, {"fOo.html .", kIsDotSpaceSuffixIgnored}, }; for (const auto& test_case : test_cases) { SCOPED_TRACE(test_case.ToString()); int block_size = 0; std::vector<std::string> read_hashes; EXPECT_EQ( test_case.expect_hash, computed_hashes.GetHashes(base::FilePath().AppendASCII(test_case.path), &block_size, &read_hashes)); if (test_case.expect_hash) { EXPECT_EQ(block_size, extension_misc::kContentVerificationDefaultBlockSize); ASSERT_EQ(1u, read_hashes.size()); EXPECT_EQ(hash_value, read_hashes[0]); } } } } // namespace extensions
3,476
519
package com.yz.common.web.springmvc.interceptor; import com.alibaba.fastjson.JSON; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.web.servlet.HandlerInterceptor; import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.PrintWriter; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; /** * @author yangzhao * @Description * @Date create by 17:24 18/2/4 */ public class BaseInterceptor implements HandlerInterceptor { public final Logger logger = LoggerFactory.getLogger(BaseInterceptor.class); @Override public boolean preHandle(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o) throws Exception { logger.info(httpServletRequest.getRequestURL().toString()); //设置header头数据返回类型 httpServletResponse.setContentType("content-type: application/json; charset=utf-8;"); logger.info("请求参数:"+ JSON.toJSONString(httpServletRequest.getParameterMap())); return true; } @Override public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception { } @Override public void afterCompletion(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o, Exception e) throws Exception { } }
516
346
#ifndef SOUND_CONTROL_H #define SOUND_CONTROL_H #include "JA2Types.h" #include "Random.h" #include <string_theory/string> #define FARLEFT 0 #define LEFTSIDE 48 #define MIDDLE 64 #define MIDDLEPAN 64 #define RIGHTSIDE 80 #define FARRIGHT 127 #define LOWVOLUME 25 #define BTNVOLUME 40 #define MIDVOLUME 65 #define HIGHVOLUME 127 #define LOOPING 0 // SOUNDS ENUMERATION enum SoundID { NO_SOUND = -1, MISS_1 = 0, // 0 MISS_2, // 1 MISS_3, // 2 MISS_4, // 3 MISS_5, // 4 MISS_6, // 5 MISS_7, // 6 MISS_8, // 7 MISS_G1, // 8 MISS_G2, // 9 MISS_KNIFE, // 10 FALL_1, // 11 FALL_2, // 12 FALL_TO_GROUND_1, // 13 FALL_TO_GROUND_2, // 14 FALL_TO_GROUND_3, // 15 HEAVY_FALL_1, // 16 BODY_SPLAT_1, // 17 GLASS_SHATTER1, // 18 GLASS_SHATTER2, // 19 DROPEN_1, // 20 DROPEN_2, // 21 DROPEN_3, // 22 DRCLOSE_1, // 23 DRCLOSE_2, // 24 UNLOCK_DOOR_1, // 25 KICKIN_DOOR, // 26 BREAK_LOCK, // 27 PICKING_LOCK, // 28 GARAGE_DOOR_OPEN, // 29 GARAGE_DOOR_CLOSE, // 30 S_UNUSED_31, // 31 S_UNUSED_32, // 32 S_UNUSED_33, // 33 S_UNUSED_34, // 34 CURTAINS_OPEN, // 35 CURTAINS_CLOSE, // 36 METAL_DOOR_OPEN, // 37 METAL_DOOR_CLOSE, // 38 WALK_LEFT_OUT, // 39 WALK_RIGHT_OUT, // 40 WALK_LEFT_OUT2, // 41 WALK_RIGHT_OUT2, // 42 WALK_LEFT_IN, // 43 WALK_RIGHT_IN, // 44 WALK_LEFT_IN2, // 45 WALK_RIGHT_IN2, // 46 WALK_LEFT_ROAD, // 47 WALK_RIGHT_ROAD, // 48 WALK_LEFT_ROAD2, // 49 WALK_RIGHT_ROAD2, // 50 CRAWL_1, // 51 CRAWL_2, // 52 CRAWL_3, // 53 CRAWL_4, // 54 TARG_REFINE_BEEP, // 55 ENDTURN_1, // 56 HEADCR_1, // 57 DOORCR_1, // 58 HEADSPLAT_1, // 59 BODY_EXPLODE_1, // 60 EXPLOSION_1, // 61 CROW_EXPLODE_1, // 62 SMALL_EXPLODE_1, // 63 HELI_1, // 64 BULLET_IMPACT_1, // 65 BULLET_IMPACT_2, // 66 BULLET_IMPACT_3, // 67 CREATURE_BATTLECRY_1, // 68 ENTER_WATER_1, // 69 ENTER_DEEP_WATER_1, // 70 COW_HIT_SND, // 71 COW_DIE_SND, // 72 RG_ID_IMPRINTED, // 73 RG_ID_INVALID, // 74 RG_TARGET_SELECTED, // 75 CAVE_COLLAPSE, // 76 S_RAID_WHISTLE, // 77 S_RAID_AMBIENT, // 78 S_RAID_DIVE, // 79 S_RAID_TB_DIVE, // 80 S_RAID_TB_BOMB, 
// 81 S_VECH1_MOVE, // 82 S_VECH1_ON, // 83 S_VECH1_OFF, // 84 S_VECH1_INTO, // 85 S_DRYFIRE1, // 86 S_WOOD_IMPACT1, // 87 S_WOOD_IMPACT2, // 88 S_WOOD_IMPACT3, // 89 S_PORCELAIN_IMPACT1, // 90 S_RUBBER_IMPACT1, // 91 S_STONE_IMPACT1, // 92 S_WATER_IMPACT1, // 93 S_VEG_IMPACT1, // 94 S_METAL_IMPACT1, // 95 S_METAL_IMPACT2, // 96 S_METAL_IMPACT3, // 97 S_SLAP_IMPACT, // 98 S_RELOAD_REVOLVER, // 99 S_RELOAD_PISTOL, // 100 S_RELOAD_SMG, // 101 S_RELOAD_RIFLE, // 102 S_RELOAD_SHOTGUN, // 103 S_RELOAD_LMG, // 104 S_LNL_REVOLVER, // 105 S_LNL_PISTOL, // 106 S_LNL_SMG, // 107 S_LNL_RIFLE, // 108 S_LNL_SHOTGUN, // 109 S_LNL_LMG, // 110 S_UNUSED_111, // 111 S_UNUSED_112, // 112 S_UNUSED_113, // 113 S_UNUSED_114, // 114 S_UNUSED_115, // 115 S_UNUSED_116, // 116 S_UNUSED_117, // 117 S_UNUSED_118, // 118 S_UNUSED_119, // 119 S_UNUSED_120, // 120 S_UNUSED_121, // 121 S_UNUSED_122, // 122 S_UNUSED_123, // 123 S_UNUSED_124, // 124 S_UNUSED_125, // 125 S_UNUSED_126, // 126 S_UNUSED_127, // 127 S_UNUSED_128, // 128 S_UNUSED_129, // 129 S_UNUSED_130, // 130 S_UNUSED_131, // 131 S_UNUSED_132, // 132 S_UNUSED_133, // 133 S_UNUSED_134, // 134 S_UNUSED_135, // 135 S_UNUSED_136, // 136 S_UNUSED_137, // 137 S_UNUSED_138, // 138 S_UNUSED_139, // 139 S_UNUSED_140, // 140 S_UNUSED_141, // 141 S_UNUSED_142, // 142 S_UNUSED_143, // 143 S_UNUSED_144, // 144 S_UNUSED_145, // 145 S_UNUSED_146, // 146 S_UNUSED_147, // 147 S_UNUSED_148, // 148 S_UNUSED_149, // 149 S_UNUSED_150, // 150 S_UNUSED_151, // 151 S_UNUSED_152, // 152 S_UNUSED_153, // 153 S_UNUSED_154, // 154 S_UNUSED_155, // 155 S_SILENCER_1, // 156 S_SILENCER_2, // 157 SWOOSH_1, // 158 SWOOSH_2, // 159 SWOOSH_3, // 160 SWOOSH_4, // 161 SWOOSH_5, // 162 SWOOSH_6, // 163 ACR_FALL_1, // 164 ACR_STEP_1, // 165 ACR_STEP_2, // 166 ACR_SWIPE, // 167 ACR_EATFLESH, // 168 ACR_CRIPPLED, // 169 ACR_DIE_PART1, // 170 ACR_DIE_PART2, // 171 ACR_LUNGE, // 172 ACR_SMELL_THREAT, // 173 ACR_SMELL_PREY, // 174 ACR_SPIT, // 175 BCR_DYING, // 176 
BCR_DRAGGING, // 177 BCR_SHRIEK, // 178 S_UNUSED_179, // 179 LCR_MOVEMENT, // 180 LCR_RUPTURE, // 181 LQ_SHRIEK, // 182 LQ_DYING, // 183 LQ_ENRAGED_ATTACK, // 184 LQ_RUPTURING, // 185 LQ_CRIPPLED, // 186 LQ_SMELLS_THREAT, // 187 LQ_WHIP_ATTACK, // 188 THROW_IMPACT_1, // 189 THROW_IMPACT_2, // 190 IDLE_SCRATCH, // 191 IDLE_ARMPIT, // 192 IDLE_BACKCRACK, // 193 AUTORESOLVE_FINISHFX, // 194 EMAIL_ALERT, // 195 ENTERING_TEXT, // 196 REMOVING_TEXT, // 197 COMPUTER_BEEP2_IN, // 198 COMPUTER_BEEP2_OUT, // 199 COMPUTER_SWITCH1_IN, // 200 COMPUTER_SWITCH1_OUT, // 201 VSM_SWITCH1_IN, // 202 VSM_SWITCH1_OUT, // 203 VSM_SWITCH2_IN, // 204 VSM_SWITCH2_OUT, // 205 SM_SWITCH1_IN, // 206 SM_SWITCH1_OUT, // 207 SM_SWITCH2_IN, // 208 SM_SWITCH2_OUT, // 209 SM_SWITCH3_IN, // 210 SM_SWITCH3_OUT, // 211 BIG_SWITCH3_IN, // 212 BIG_SWITCH3_OUT, // 213 KLAXON_ALARM, // 214 BOXING_BELL, // 215 S_UNUSED_216, // 216 ATTACH_TO_GUN, // 217 ATTACH_CERAMIC_PLATES, // 218 ATTACH_DETONATOR, // 219 GRAB_ROOF, // 220 LAND_ON_ROOF, // 221 S_UNUSED_222, // 222 S_UNUSED_223, // 223 S_UNUSED_224, // 224 OPEN_DEFAULT_OPENABLE, // 225 CLOSE_DEFAULT_OPENABLE, // 226 FIRE_ON_MERC, // 227 GLASS_CRACK, // 228 SPIT_RICOCHET, // 229 BLOODCAT_HIT_1, // 230 BLOODCAT_DIE_1, // 231 SLAP_1, // 232 ROBOT_BEEP, // 233 DOOR_ELECTRICITY, // 234 SWIM_1, // 235 SWIM_2, // 236 KEY_FAILURE, // 237 TARGET_OUT_OF_RANGE, // 238 OPEN_STATUE, // 239 USE_STATUE_REMOTE, // 240 USE_WIRE_CUTTERS, // 241 DRINK_CANTEEN_FEMALE, // 242 BLOODCAT_ATTACK, // 243 BLOODCAT_ROAR, // 244 ROBOT_GREETING, // 245 ROBOT_DEATH, // 246 GAS_EXPLODE_1, // 247 AIR_ESCAPING_1, // 248 OPEN_DRAWER, // 249 CLOSE_DRAWER, // 250 OPEN_LOCKER, // 251 CLOSE_LOCKER, // 252 OPEN_WOODEN_BOX, // 253 CLOSE_WOODEN_BOX, // 254 ROBOT_STOP, // 255 WATER_WALK1_IN, // 256 WATER_WALK1_OUT, // 257 WATER_WALK2_IN, // 258 WATER_WALK2_OUT, // 259 PRONE_UP_SOUND, // 260 PRONE_DOWN_SOUND, // 261 KNEEL_UP_SOUND, // 262 KNEEL_DOWN_SOUND, // 263 PICKING_SOMETHING_UP, // 264 
COW_FALL, // 265 BLOODCAT_GROWL_1, // 266 BLOODCAT_GROWL_2, // 267 BLOODCAT_GROWL_3, // 268 BLOODCAT_GROWL_4, // 269 CREATURE_GAS_NOISE, // 270 CREATURE_FALL_PART_2, // 271 CREATURE_DISSOLVE_1, // 272 QUEEN_AMBIENT_NOISE, // 273 CREATURE_FALL, // 274 CROW_PECKING_AT_FLESH, // 275 CROW_FLYING_AWAY, // 276 SLAP_2, // 277 MORTAR_START, // 278 MORTAR_WHISTLE, // 279 MORTAR_LOAD, // 280 TURRET_MOVE, // 281 TURRET_STOP, // 282 COW_FALL_2, // 283 KNIFE_IMPACT, // 284 EXPLOSION_ALT_BLAST_1, // 285 EXPLOSION_BLAST_2, // 286 DRINK_CANTEEN_MALE, // 287 USE_X_RAY_MACHINE, // 288 CATCH_OBJECT, // 289 FENCE_OPEN, // 290 NUM_SAMPLES }; const char * getSoundSample(SoundID soundId); template<SoundID, SoundID, bool> struct SoundRangeHelper; template<SoundID first, SoundID last> struct SoundRangeHelper<first, last, true> { operator SoundID() { return static_cast<SoundID>(first + Random(last - first + 1)); } }; template<SoundID first, SoundID last> static inline SoundID SoundRange() { return SoundRangeHelper<first, last, first < last>(); } enum AmbientSoundID { LIGHTNING_1 = 0, LIGHTNING_2, RAIN_1, BIRD_1, BIRD_2, CRICKETS_1, CRICKETS_2, CRICKET_1, CRICKET_2, OWL_1, OWL_2, OWL_3, NIGHT_BIRD_1, NIGHT_BIRD_2, NUM_AMBIENTS }; typedef void (*SOUND_STOP_CALLBACK)( void *pData ); void ShutdownJA2Sound(void); UINT32 PlayJA2Sample(const char *sample, UINT32 const ubVolume, UINT32 const ubLoops, UINT32 const uiPan); UINT32 PlayJA2Sample(SoundID, UINT32 ubVolume, UINT32 ubLoops, UINT32 uiPan); UINT32 PlayJA2StreamingSample(SoundID, UINT32 ubVolume, UINT32 ubLoops, UINT32 uiPan); UINT32 PlayJA2SampleFromFile(const char* szFileName, UINT32 ubVolume, UINT32 ubLoops, UINT32 uiPan); UINT32 PlayJA2StreamingSampleFromFile(const char* szFileName, UINT32 ubVolume, UINT32 ubLoops, UINT32 uiPan, SOUND_STOP_CALLBACK EndsCallback); UINT32 PlayJA2Ambient(AmbientSoundID, UINT32 ubVolume, UINT32 ubLoops); UINT32 PlayLocationJA2SampleFromFile(UINT16 grid_no, const char* filename, UINT32 base_vol, UINT32 loops); 
UINT32 PlayLocationJA2Sample(UINT16 grid_no, SoundID, UINT32 base_vol, UINT32 loops); UINT32 PlayLocationJA2Sample(UINT16 grid_no, const ST::string &sample, UINT32 base_vol, UINT32 loops); UINT32 PlayLocationJA2StreamingSample(UINT16 grid_no, SoundID, UINT32 base_vol, UINT32 loops); UINT32 PlaySoldierJA2Sample(SOLDIERTYPE const* s, SoundID, UINT32 base_vol, UINT32 ubLoops, BOOLEAN fCheck); UINT32 GetSoundEffectsVolume(void); void SetSoundEffectsVolume( UINT32 uiNewVolume ); UINT32 GetSpeechVolume(void); void SetSpeechVolume( UINT32 uiNewVolume ); //Calculates a volume based on the current Speech Volume level UINT32 CalculateSpeechVolume( UINT32 uiVolume ); //Calculates a volume based on the current Sound Effects Volume level UINT32 CalculateSoundEffectsVolume( UINT32 uiVolume ); INT8 SoundDir( INT16 sGridNo ); INT8 SoundVolume( INT8 bInitialVolume, INT16 sGridNo ); INT32 NewPositionSnd(INT16 sGridNo, SOLDIERTYPE const* SoundSource, SoundID); void DeletePositionSnd( INT32 iPositionSndIndex ); void SetPositionSndsActive(void); void SetPositionSndsInActive(void); void SetPositionSndsVolumeAndPanning(void); void SetPositionSndGridNo( INT32 iPositionSndIndex, INT16 sGridNo ); #endif
9,121
1,603
{ "error": { "code": 404, "message": "Discovery document not found for API service: container.googleapis.com format: rest version: v1alpha1", "status": "NOT_FOUND" } }
69
1,738
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // Original file Copyright Crytek GMBH or its affiliates, used under license. // Description : Splitting out some of the particle specific containers to here. // Will be moved to a proper home eventually. #ifndef CRYINCLUDE_CRY3DENGINE_PARTICLEUTILS_H #define CRYINCLUDE_CRY3DENGINE_PARTICLEUTILS_H #pragma once ////////////////////////////////////////////////////////////////////////// // Ref target that doesn't auto-delete. Handled manually template <typename Counter> class _plain_reference_target { public: _plain_reference_target() : m_nRefCounter (0) {} ~_plain_reference_target() { assert(m_nRefCounter == 0); #ifdef _DEBUG m_nRefCounter = -1; #endif } void AddRef() { assert(m_nRefCounter >= 0); ++m_nRefCounter; } void Release() { --m_nRefCounter; assert(m_nRefCounter >= 0); } Counter NumRefs() const { assert(m_nRefCounter >= 0); return m_nRefCounter; } protected: Counter m_nRefCounter; }; extern ITimer* g_pParticleTimer; ILINE ITimer* GetParticleTimer() { return g_pParticleTimer; } ////////////////////////////////////////////////////////////////////////// // 3D helper functions inline void RotateToUpVector(Quat& qRot, Vec3 const& vNorm) { qRot = Quat::CreateRotationV0V1(qRot.GetColumn2(), vNorm) * qRot; } inline void RotateToForwardVector(Quat& qRot, Vec3 const& vNorm) { qRot = Quat::CreateRotationV0V1(qRot.GetColumn1(), vNorm) * qRot; } inline bool CheckNormalize(Vec3& vDest, const Vec3& vSource) { float fLenSq = vSource.GetLengthSquared(); if 
(fLenSq > FLT_MIN) { vDest = vSource * isqrt_tpl(fLenSq); return true; } return false; } inline bool CheckNormalize(Vec3& v) { return CheckNormalize(v, v); } #endif // CRYINCLUDE_CRY3DENGINE_PARTICLEUTILS_H
901
820
<reponame>datumbox/datumbox-framework<gh_stars>100-1000 /** * Copyright (C) 2013-2020 <NAME> <<EMAIL>> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datumbox.framework.core.statistics.distributions; import com.datumbox.framework.tests.Constants; import com.datumbox.framework.tests.abstracts.AbstractTest; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * Test cases for DiscreteDistributions. * * @author <NAME> <<EMAIL>> */ public class DiscreteDistributionsTest extends AbstractTest { /** * Test of bernoulli method, of class DiscreteDistributions. */ @Test public void testBernoulli() { logger.info("Bernoulli"); boolean k = true; double p = 0.5; double expResult = 0.5; double result = DiscreteDistributions.bernoulli(k, p); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of bernoulliCdf method, of class DiscreteDistributions. */ @Test public void testBernoulliCdf() { logger.info("BernoulliCdf"); int k = 1; double p = 0.5; double expResult = 1.0; double result = DiscreteDistributions.bernoulliCdf(k, p); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of binomial method, of class DiscreteDistributions. 
*/ @Test public void testBinomial() { logger.info("Binomial"); int k = 3; double p = 0.5; int n = 10; double expResult = 0.11718750001462; double result = DiscreteDistributions.binomial(k, p, n); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of binomialCdf method, of class DiscreteDistributions. */ @Test public void testBinomialCdf() { logger.info("BinomialCdf"); int k = 3; double p = 0.5; int n = 10; double expResult = 0.17187500002003; double result = DiscreteDistributions.binomialCdf(k, p, n); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of geometric method, of class DiscreteDistributions. */ @Test public void testGeometric() { logger.info("Geometric"); int k = 3; double p = 0.5; double expResult = 0.125; double result = DiscreteDistributions.geometric(k, p); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of geometricCdf method, of class DiscreteDistributions. */ @Test public void testGeometricCdf() { logger.info("GeometricCdf"); int k = 3; double p = 0.5; double expResult = 0.875; double result = DiscreteDistributions.geometricCdf(k, p); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of negativeBinomial method, of class DiscreteDistributions. */ @Test public void testNegativeBinomial() { logger.info("NegativeBinomial"); int n = 10; int r = 4; double p = 0.5; double expResult = 0.08203125; double result = DiscreteDistributions.negativeBinomial(n, r, p); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of negativeBinomialCdf method, of class DiscreteDistributions. 
*/ @Test public void testNegativeBinomialCdf() { logger.info("NegativeBinomialCdf"); int n = 10; int r = 4; double p = 0.5; double expResult = 0.1279296875; double result = DiscreteDistributions.negativeBinomialCdf(n, r, p); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of uniform method, of class DiscreteDistributions. */ @Test public void testUniform() { logger.info("Uniform"); int n = 10; double expResult = 0.1; double result = DiscreteDistributions.uniform(n); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of uniformCdf method, of class DiscreteDistributions. */ @Test public void testUniformCdf() { logger.info("UniformCdf"); int k = 3; int n = 10; double expResult = 0.3; double result = DiscreteDistributions.uniformCdf(k, n); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of hypergeometric method, of class DiscreteDistributions. */ @Test public void testHypergeometric() { logger.info("Hypergeometric"); int k = 3; int n = 10; int Kp = 30; int Np = 100; double expResult = 0.28116339430254; double result = DiscreteDistributions.hypergeometric(k, n, Kp, Np); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of hypergeometricCdf method, of class DiscreteDistributions. */ @Test public void testHypergeometricCdf() { logger.info("HypergeometricCdf"); int k = 3; int n = 10; int Kp = 30; int Np = 100; double expResult = 0.65401998866081; double result = DiscreteDistributions.hypergeometricCdf(k, n, Kp, Np); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of poisson method, of class DiscreteDistributions. */ @Test public void testPoisson() { logger.info("Poisson"); int k = 3; double lamda = 5.0; double expResult = 0.14037389583692; double result = DiscreteDistributions.poisson(k, lamda); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } /** * Test of poissonCdf method, of class DiscreteDistributions. 
*/ @Test public void testPoissonCdf() { logger.info("PoissonCdf"); int k = 3; double lamda = 5.0; double expResult = 0.26502591533403; double result = DiscreteDistributions.poissonCdf(k, lamda); assertEquals(expResult, result, Constants.DOUBLE_ACCURACY_HIGH); } }
2,893
310
{ "name": "Barograph (iOS)", "description": "A barometer and altimeter app.", "url": "https://itunes.apple.com/us/app/barograph-barometer-altimeter/id926055907" }
62
348
{"nom":"Cevins","circ":"2ème circonscription","dpt":"Savoie","inscrits":574,"abs":394,"votants":180,"blancs":19,"nuls":4,"exp":157,"res":[{"nuance":"LR","nom":"<NAME>","voix":87},{"nuance":"DIV","nom":"<NAME>","voix":70}]}
89
1,531
<gh_stars>1000+ /** * Copyright 2012-2015 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.cqengine.attribute; import com.googlecode.cqengine.query.option.QueryOptions; /** * @author <NAME> */ public interface Attribute<O, A> { /** * Returns the type of the object which contains the attribute. * @return the type of the object which contains the attribute */ Class<O> getObjectType(); /** * Returns the type of the attribute. * @return the type of the attribute */ Class<A> getAttributeType(); /** * Returns the name of the attribute, as supplied to the constructor. * <p/> * @return the name of the attribute, as supplied to the constructor */ String getAttributeName(); /** * Returns the values belonging to the attribute in the given object. * <p/> * If the attribute is a {@link SimpleAttribute}, the list returned will contain a single value for the attribute. * If the attribute is a {@link MultiValueAttribute}, the list returned will contain any number of values for the * attribute. * <p/> * @param object The object from which the values of the attribute are required * @param queryOptions Optional parameters supplied by the application along with the operation which is causing * this attribute to be invoked (either a query, or an update to the collection) * @return The values for the attribute in the given object */ Iterable<A> getValues(O object, QueryOptions queryOptions); }
607
421
<reponame>majdal/tasking-manager """empty message Revision ID: 84c793a951b2 Revises: 7<PASSWORD> Create Date: 2019-11-12 20:04:46.065237 """ from alembic import op import sqlalchemy as sa from datetime import datetime # revision identifiers, used by Alembic. revision = "84c793a951b2" down_revision = "29097876c7e6" branch_labels = None depends_on = None def upgrade(): conn = op.get_bind() op.create_table( "notifications", sa.Column("id", sa.Integer(), nullable=False), sa.Column("user_id", sa.BigInteger(), nullable=False), sa.ForeignKeyConstraint(["user_id"], ["users.id"]), sa.Column("unread_count", sa.Integer(), nullable=False), sa.Column("date", sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint("id", name="notifications_pkey"), ) op.create_index( "idx_notifications_user_id", "notifications", ["user_id"], unique=False ) fetch_all_users = "select id from users;" all_users = conn.execute(fetch_all_users) for user in all_users: user_id = user[0] insert_user_info = ( "insert into notifications (user_id,unread_count,date) values (" + str(user_id) + "," + str(0) + ",'" + str(datetime.now()) + "');" ) op.execute(insert_user_info) fetch_all_unread_counts = "select to_user_id, count(*) from messages where read = false group by to_user_id;" unread_counts = conn.execute(fetch_all_unread_counts) for unread_count in unread_counts: user_id = unread_count[0] user_unread_count = unread_count[1] update_notification_info = ( "update notifications set user_id =" + str(user_id) + ",unread_count = " + str(user_unread_count) + ",date = '" + str(datetime.now()) + "' where user_id = " + str(user_id) + ";" ) op.execute(update_notification_info) def downgrade(): op.drop_index("idx_notifications_user_id", table_name="notifications") op.drop_table("notifications")
1,001
1,408
<gh_stars>1000+ def test_readme(): # Tests that the readme code snippet doesn't fail from presidio_anonymizer import AnonymizerEngine from presidio_anonymizer.entities import RecognizerResult, OperatorConfig # Initialize the engine with logger. engine = AnonymizerEngine() # Invoke the anonymize function with the text, # analyzer results (potentially coming from presidio-analyzer) and # Operators to get the anonymization output: result = engine.anonymize( text="My name is Bond, <NAME>", analyzer_results=[ RecognizerResult(entity_type="PERSON", start=11, end=15, score=0.8), RecognizerResult(entity_type="PERSON", start=17, end=27, score=0.8), ], operators={"PERSON": OperatorConfig("replace", {"new_value": "BIP"})}, ) print(result) def test_readme_decrypt(): from presidio_anonymizer import DeanonymizeEngine from presidio_anonymizer.entities import OperatorResult, OperatorConfig # Initialize the engine with logger. engine = DeanonymizeEngine() # Invoke the deanonymize function with the text, anonymizer results and # Operators to define the deanonymization type. result = engine.deanonymize( text="My name is S184CMt9Drj7QaKQ21JTrpYzghnboTF9pn/neN8JME0=", entities=[ OperatorResult(start=11, end=55, entity_type="PERSON"), ], operators={"DEFAULT": OperatorConfig("decrypt", {"key": "<KEY>"})}, ) print(result)
568
2,143
/* * Copyright 2014-2022 TNG Technology Consulting GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tngtech.archunit.lang; import com.tngtech.archunit.PublicAPI; import com.tngtech.archunit.base.DescribedIterable; import com.tngtech.archunit.base.DescribedPredicate; import com.tngtech.archunit.base.HasDescription; import com.tngtech.archunit.core.domain.JavaClasses; import static com.tngtech.archunit.PublicAPI.Usage.INHERITANCE; @PublicAPI(usage = INHERITANCE) public interface ClassesTransformer<T> extends HasDescription { /** * Defines how to transform imported {@link JavaClasses} to the respective objects to test. * * @param collection Imported {@link JavaClasses} * @return A {@link DescribedIterable} holding the transformed objects * @see com.tngtech.archunit.library.dependencies.Slices.Transformer */ DescribedIterable<T> transform(JavaClasses collection); /** * Can be used to further filter the transformation result. * * @param predicate Predicate to filter the collection of transformed objects * @return A transformer that additionally filters the transformed result */ ClassesTransformer<T> that(DescribedPredicate<? super T> predicate); /** * @param description A new description for this transformer * @return A transformer for the same transformation with an adjusted description */ ClassesTransformer<T> as(String description); }
586
1,561
/*
 * Special-function routines (incomplete gamma, log-gamma, erf/erfc, normal CDF)
 * adapted from the Cephes mathematical library for the NIST statistical test suite.
 * NOTE(review): statement order and the goto-based error paths in cephes_lgam are
 * intentional and numerically sensitive — do not "clean up" without reference results.
 */
#include <stdio.h>
#include <math.h>
#include "../include/cephes.h"

/* Relative error target for the erf/erfc series/continued-fraction loops. */
static const double rel_error = 1E-12;

double MACHEP = 1.11022302462515654042E-16;		// 2**-53
double MAXLOG = 7.09782712893383996732224E2;	// log(MAXNUM)
double MAXNUM = 1.7976931348623158E308;			// 2**1024*(1-MACHEP)
double PI     = 3.14159265358979323846;			// pi, duh!

/* Rescaling thresholds for the continued fraction in cephes_igamc. */
static double big = 4.503599627370496e15;
static double biginv = 2.22044604925031308085e-16;

/* Sign of the gamma function computed as a side effect of cephes_lgam. */
int sgngam = 0;

/*
 * Complemented incomplete gamma integral Q(a, x).
 * Falls back to 1 - igam(a, x) for small x; otherwise evaluates a
 * continued fraction, rescaling the partial numerators/denominators
 * whenever they grow past `big` to avoid overflow.
 */
double
cephes_igamc(double a, double x)
{
	double ans, ax, c, yc, r, t, y, z;
	double pk, pkm1, pkm2, qk, qkm1, qkm2;

	if ( (x <= 0) || ( a <= 0) )
		return( 1.0 );

	if ( (x < 1.0) || (x < a) )
		return( 1.e0 - cephes_igam(a,x) );

	ax = a * log(x) - x - cephes_lgam(a);

	if ( ax < -MAXLOG ) {
		printf("igamc: UNDERFLOW\n");
		return 0.0;
	}
	ax = exp(ax);

	/* continued fraction */
	y = 1.0 - a;
	z = x + y + 1.0;
	c = 0.0;
	pkm2 = 1.0;
	qkm2 = x;
	pkm1 = x + 1.0;
	qkm1 = z * x;
	ans = pkm1/qkm1;

	do {
		c += 1.0;
		y += 1.0;
		z += 2.0;
		yc = y * c;
		pk = pkm1 * z - pkm2 * yc;
		qk = qkm1 * z - qkm2 * yc;
		if ( qk != 0 ) {
			r = pk/qk;
			t = fabs( (ans - r)/r );	/* relative change of the convergent */
			ans = r;
		}
		else
			t = 1.0;
		pkm2 = pkm1;
		pkm1 = pk;
		qkm2 = qkm1;
		qkm1 = qk;
		/* renormalize to keep the recurrence terms in range */
		if ( fabs(pk) > big ) {
			pkm2 *= biginv;
			pkm1 *= biginv;
			qkm2 *= biginv;
			qkm1 *= biginv;
		}
	} while ( t > MACHEP );

	return ans*ax;
}

/*
 * Incomplete gamma integral P(a, x), evaluated by its power series.
 * Delegates to cephes_igamc when x is large relative to a (faster convergence).
 */
double
cephes_igam(double a, double x)
{
	double ans, ax, c, r;

	if ( (x <= 0) || ( a <= 0) )
		return 0.0;

	if ( (x > 1.0) && (x > a ) )
		return 1.e0 - cephes_igamc(a,x);

	/* Compute  x**a * exp(-x) / gamma(a)  */
	ax = a * log(x) - x - cephes_lgam(a);
	if ( ax < -MAXLOG ) {
		printf("igam: UNDERFLOW\n");
		return 0.0;
	}
	ax = exp(ax);

	/* power series */
	r = a;
	c = 1.0;
	ans = 1.0;

	do {
		r += 1.0;
		c *= x/r;
		ans += c;
	} while ( c/ans > MACHEP );

	return ans * ax/a;
}

/* A[]: Stirling's formula expansion of log gamma
 * B[], C[]: log gamma function between 2 and 3
 * (coefficients stored as raw IEEE-754 double halfwords, little-endian order)
 */
static unsigned short A[] = {
	0x6661,0x2733,0x9850,0x3f4a,
	0xe943,0xb580,0x7fbd,0xbf43,
	0x5ebb,0x20dc,0x019f,0x3f4a,
	0xa5a1,0x16b0,0xc16c,0xbf66,
	0x554b,0x5555,0x5555,0x3fb5
};

static unsigned short B[] = {
	0x6761,0x8ff3,0x8901,0xc095,
	0xb93e,0x355b,0xf234,0xc0e2,
	0x89e5,0xf890,0x3d73,0xc114,
	0xdb51,0xf994,0xbc82,0xc131,
	0xf20b,0x0219,0x4589,0xc13a,
	0x055e,0x5418,0x0c67,0xc12a
};

static unsigned short C[] = {
	/*0x0000,0x0000,0x0000,0x3ff0,*/
	0x12b2,0x1cf3,0xfd0d,0xc075,
	0xd757,0x7b89,0xaa0d,0xc0d0,
	0x4c9b,0xb974,0xeb84,0xc10a,
	0x0043,0x7195,0x6286,0xc131,
	0xf34c,0x892f,0x5255,0xc143,
	0xe14a,0x6a11,0xce4b,0xc13e
};

/* Largest argument for which lgam(x) does not overflow. */
#define MAXLGM 2.556348e305

/* Logarithm of gamma function.
 * Side effect: sets the global `sgngam` to the sign of gamma(x).
 * Negative arguments use the reflection formula; [2,3] uses rational
 * approximations B/C; large x uses Stirling's expansion A.
 */
double
cephes_lgam(double x)
{
	double p, q, u, w, z;
	int i;

	sgngam = 1;

	if ( x < -34.0 ) {
		q = -x;
		w = cephes_lgam(q); /* note this modifies sgngam! */
		p = floor(q);
		if ( p == q ) {
lgsing:
			goto loverf;		/* gamma has a pole at non-positive integers */
		}
		i = (int)p;
		if ( (i & 1) == 0 )
			sgngam = -1;
		else
			sgngam = 1;
		z = q - p;
		if ( z > 0.5 ) {
			p += 1.0;
			z = p - q;
		}
		z = q * sin( PI * z );
		if ( z == 0.0 )
			goto lgsing;
		/*      z = log(PI) - log( z ) - w;*/
		z = log(PI) - log( z ) - w;
		return z;
	}

	if ( x < 13.0 ) {
		/* shift argument into [2,3) via the recurrence gamma(x+1) = x*gamma(x) */
		z = 1.0;
		p = 0.0;
		u = x;
		while ( u >= 3.0 ) {
			p -= 1.0;
			u = x + p;
			z *= u;
		}
		while ( u < 2.0 ) {
			if ( u == 0.0 )
				goto lgsing;
			z /= u;
			p += 1.0;
			u = x + p;
		}
		if ( z < 0.0 ) {
			sgngam = -1;
			z = -z;
		}
		else
			sgngam = 1;
		if ( u == 2.0 )
			return( log(z) );
		p -= 2.0;
		x = x + p;
		p = x * cephes_polevl( x, (double *)B, 5 ) / cephes_p1evl( x, (double *)C, 6);

		return log(z) + p;
	}

	if ( x > MAXLGM ) {
loverf:
		printf("lgam: OVERFLOW\n");

		return sgngam * MAXNUM;
	}

	/* Stirling's approximation for large x */
	q = ( x - 0.5 ) * log(x) - x + log( sqrt( 2*PI ) );
	if ( x > 1.0e8 )
		return q;

	p = 1.0/(x*x);
	if ( x >= 1000.0 )
		q += (( 7.9365079365079365079365e-4 * p
			- 2.7777777777777777777778e-3) *p
			+ 0.0833333333333333333333) / x;
	else
		q += cephes_polevl( p, (double *)A, 4 ) / x;

	return q;
}

/* Evaluate a polynomial of degree N with coefficients coef[0..N]
 * (highest degree first) at x, using Horner's method. */
double
cephes_polevl(double x, double *coef, int N)
{
	double	ans;
	int		i;
	double	*p;

	p = coef;
	ans = *p++;
	i = N;

	do
		ans = ans * x + *p++;
	while ( --i );

	return ans;
}

/* Same as cephes_polevl but with an implicit leading coefficient of 1.0. */
double
cephes_p1evl(double x, double *coef, int N)
{
	double	ans;
	double	*p;
	int		i;

	p = coef;
	ans = x + *p++;
	i = N-1;

	do
		ans = ans * x + *p++;
	while ( --i );

	return ans;
}

/* Error function via its Maclaurin series; defers to erfc for |x| > 2.2
 * where the series converges slowly. */
double
cephes_erf(double x)
{
	static const double two_sqrtpi = 1.128379167095512574;	/* 2/sqrt(pi) */
	double sum = x, term = x, xsqr = x * x;
	int j = 1;

	if ( fabs(x) > 2.2 )
		return 1.0 - cephes_erfc(x);

	do {
		term *= xsqr/j;
		sum -= term/(2*j+1);
		j++;
		term *= xsqr/j;
		sum += term/(2*j+1);
		j++;
	} while ( fabs(term)/sum > rel_error );

	return two_sqrtpi*sum;
}

/* Complementary error function via a continued-fraction expansion;
 * defers to erf for |x| < 2.2 and uses symmetry for negative x. */
double
cephes_erfc(double x)
{
	static const double one_sqrtpi = 0.564189583547756287;	/* 1/sqrt(pi) */
	double a = 1, b = x, c = x, d = x*x + 0.5;
	double q1, q2 = b/d, n = 1.0, t;

	if ( fabs(x) < 2.2 )
		return 1.0 - cephes_erf(x);

	if ( x < 0 )
		return 2.0 - cephes_erfc(-x);

	do {
		t = a*n + b*x;
		a = b;
		b = t;
		t = c*n + d*x;
		c = d;
		d = t;
		n += 0.5;
		q1 = q2;
		q2 = b/d;
	} while ( fabs(q1-q2)/q2 > rel_error );

	return one_sqrtpi*exp(-x*x)*q2;
}

/* Standard normal CDF, expressed through erf. */
double
cephes_normal(double x)
{
	double arg, result, sqrt2=1.414213562373095048801688724209698078569672;

	if (x > 0) {
		arg = x/sqrt2;
		result = 0.5 * ( 1 + erf(arg) );
	}
	else {
		arg = -x/sqrt2;
		result = 0.5 * ( 1 - erf(arg) );
	}

	return( result);
}
3,525
607
/* * This file is a part of the open source stm32plus library. * Copyright (c) 2011,2012,2013 <NAME> <www.andybrown.me.uk> * Please see website for licensing terms. */ #pragma once // ensure the MCU series is correct #ifndef STM32PLUS_F0 #error This class can only be used with the STM32F0 series #endif namespace stm32plus { /** * Support for the Vrefint ADC channel */ template<uint8_t TSampleCycles> struct AdcInternalReferenceVoltageFeature : AdcFeatureBase { /** * Constructor, initialise upwards then enable the vrefint feature */ AdcInternalReferenceVoltageFeature(Adc& adc) : AdcFeatureBase(adc) { } /** * Post ADC_Init() initialisation */ void initialise() { ADC_ChannelConfig(_adc,ADC_Channel_17,TSampleCycles); ADC_VrefintCmd(ENABLE); } }; /* * Typedefs for the difference cycles on ADC1 */ typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_1_5Cycles> Adc1Cycle1InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_7_5Cycles> Adc1Cycle7InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_13_5Cycles> Adc1Cycle13InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_28_5Cycles> Adc1Cycle28InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_41_5Cycles> Adc1Cycle41InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_55_5Cycles> Adc1Cycle55InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_71_5Cycles> Adc1Cycle71InternalReferenceVoltageFeature; typedef AdcInternalReferenceVoltageFeature<ADC_SampleTime_239_5Cycles> Adc1Cycle239InternalReferenceVoltageFeature; }
712
1,414
#!/usr/bin/env python from .elmo import Embedder import logging logger = logging.getLogger('elmoformanylangs') # if the client application hasn't set the log level, we set it # ourselves to INFO if logger.level == 0: logger.setLevel(logging.INFO) log_handler = logging.StreamHandler() log_formatter = logging.Formatter(fmt="%(asctime)-15s %(levelname)s: %(message)s") log_handler.setFormatter(log_formatter) # also, if the client hasn't added any handlers for this logger # (or a default handler), we add a handler of our own # # client can later do # logger.removeHandler(stanza.log_handler) if not logger.hasHandlers(): logger.addHandler(log_handler)
220
7,272
package com.kunal.backtracking;

import java.util.Arrays;

/**
 * Backtracking demo: enumerates every path from the top-left to the
 * bottom-right cell of a boolean maze, moving in all four directions.
 * A cell is temporarily marked {@code false} while it is on the current
 * path so it cannot be revisited, and restored on backtrack.
 */
public class AllPaths {
    public static void main(String[] args) {
        boolean[][] board = {
                {true, true, true},
                {true, true, true},
                {true, true, true}
        };
        // path[r][c] records the 1-based step number at which cell (r,c)
        // was visited on the current path (0 = not on the path)
        int[][] path = new int[board.length][board[0].length];
        allPathPrint("", board, 0, 0, path, 1);
    }

    /**
     * Prints each complete path as a string of moves (D/R/U/L).
     *
     * @param p    moves taken so far
     * @param maze cells still available ({@code false} = blocked or already on the path)
     * @param r    current row
     * @param c    current column
     */
    static void allPath(String p, boolean[][] maze, int r, int c) {
        if (r == maze.length - 1 && c == maze[0].length - 1) {
            System.out.println(p);
            return;
        }

        if (!maze[r][c]) {
            return;
        }

        // i am considering this block in my path
        maze[r][c] = false;

        if (r < maze.length - 1) {
            allPath(p + 'D', maze, r+1, c);
        }

        if (c < maze[0].length - 1) {
            allPath(p + 'R', maze, r, c+1);
        }

        if (r > 0) {
            allPath(p + 'U', maze, r-1, c);
        }

        if (c > 0) {
            allPath(p + 'L', maze, r, c-1);
        }

        // this line is where the function will be over
        // so before the function gets removed, also remove the changes that were made by that function
        maze[r][c] = true;
    }

    /**
     * Same traversal as {@link #allPath}, but additionally prints the
     * step-number matrix for each complete path.
     *
     * @param path step numbers of cells on the current path (mutated and restored)
     * @param step 1-based index of the step about to be taken
     */
    static void allPathPrint(String p, boolean[][] maze, int r, int c, int[][] path, int step) {
        if (r == maze.length - 1 && c == maze[0].length - 1) {
            path[r][c] = step;
            for(int[] arr : path) {
                System.out.println(Arrays.toString(arr));
            }
            System.out.println(p);
            System.out.println();
            return;
        }

        if (!maze[r][c]) {
            return;
        }

        // i am considering this block in my path
        maze[r][c] = false;
        path[r][c] = step;

        if (r < maze.length - 1) {
            allPathPrint(p + 'D', maze, r+1, c, path, step+1);
        }

        if (c < maze[0].length - 1) {
            allPathPrint(p + 'R', maze, r, c+1, path, step+1);
        }

        if (r > 0) {
            allPathPrint(p + 'U', maze, r-1, c, path, step+1);
        }

        if (c > 0) {
            allPathPrint(p + 'L', maze, r, c-1, path, step+1);
        }

        // this line is where the function will be over
        // so before the function gets removed, also remove the changes that were made by that function
        maze[r][c] = true;
        path[r][c] = 0;
    }
}
1,280
2,206
/** * Comparators provided by the Runtime Field module. */ package com.speedment.runtime.field.comparator;
30
3,274
package com.ql.util.express.match;

/**
 * Match type: describes a named node type used during pattern matching.
 * (Original comment: 匹配类型)
 *
 * @author xuannan
 *
 */
public interface INodeType {
    /** @return the name of this node type */
    public String getName();

    /** @return the manager this node type is registered with */
    public INodeTypeManager getManager();

    /** @return the pattern node associated with this type */
    public QLPatternNode getPatternNode();
}
79
343
"""Benchmarks comparing sort performance of numpy, Numeric and numarray.

Each section times sorting of int arrays: a flat array, then a 2-D array
along the last axis and along the first axis.
"""
from benchmark import Benchmark

modules = ['numpy','Numeric','numarray']
# 3 runs of 100 repetitions each per module
b = Benchmark(modules,runs=3,reps=100)

N = 10000
b.title = 'Sorting %d elements' % N
# NOTE(review): each entry is a (statement, setup) pair and `np` is presumably
# the module under test bound by Benchmark — confirm against benchmark.py.
b['numarray'] = ('a=np.array(None,shape=%d,typecode="i");a.sort()'%N,'')
b['numpy'] = ('a=np.empty(shape=%d, dtype="i");a.sort()'%N,'')
# NOTE(review): Numeric section calls np.empty(...) — verify the Numeric
# module actually exposes `empty`; classic Numeric mainly documents zeros/ones.
b['Numeric'] = ('a=np.empty(shape=%d, typecode="i");np.sort(a)'%N,'')
b.run()

N1,N2 = 100,100
b.title = 'Sorting (%d,%d) elements, last axis' % (N1,N2)
b['numarray'] = ('a=np.array(None,shape=(%d,%d),typecode="i");a.sort()'%(N1,N2),'')
b['numpy'] = ('a=np.empty(shape=(%d,%d), dtype="i");a.sort()'%(N1,N2),'')
b['Numeric'] = ('a=np.empty(shape=(%d,%d),typecode="i");np.sort(a)'%(N1,N2),'')
b.run()

N1,N2 = 100,100
b.title = 'Sorting (%d,%d) elements, first axis' % (N1,N2)
b['numarray'] = ('a=np.array(None,shape=(%d,%d), typecode="i");a.sort(0)'%(N1,N2),'')
b['numpy'] = ('a=np.empty(shape=(%d,%d),dtype="i");np.sort(a,0)'%(N1,N2),'')
b['Numeric'] = ('a=np.empty(shape=(%d,%d),typecode="i");np.sort(a,0)'%(N1,N2),'')
b.run()
513
3,494
/*
 * This audio sample firstly makes use of the audio::DelayNode, but also demonstrates some more complex methods of control,
 * like feedback and controlling an audio::Param with other audio::Node's.
 *
 * author: <NAME> (2014)
 */

#include "cinder/app/App.h"
#include "cinder/app/RendererGl.h"
#include "cinder/Rand.h"
#include "cinder/Perlin.h"
#include "cinder/gl/GlslProg.h"
#include "cinder/Timeline.h"
#include "cinder/Log.h"
#include "cinder/TriMesh.h"
#include "cinder/gl/gl.h"

#include "cinder/audio/Context.h"
#include "cinder/audio/GenNode.h"
#include "cinder/audio/NodeEffects.h"
#include "cinder/audio/Utilities.h"

#include "Resources.h"

const float MAX_VOLUME = 0.6f;        // master gain ceiling applied in mouseDrag
const size_t MAX_SPLASHES = 200;      // oldest splashes beyond this are trimmed in update()
const float MAX_RADIUS = 300;
const float MAX_PITCH_MIDI = 80;      // pitch range mapped across the window width
const float MIN_PITCH_MIDI = 40;

using namespace ci;
using namespace ci::app;
using namespace std;

// One expanding, fading circle drawn per mouse-drag event.
struct Splash {
	vec2				mCenter;
	vec3				mColorHsv;
	Anim<float>			mRadius, mAlpha;
};

class DelayFeedback : public App {
  public:
	void setup() override;
	void mouseDrag( MouseEvent event ) override;
	void mouseUp( MouseEvent event ) override;
	void keyDown( KeyEvent event ) override;
	void update() override;
	void draw() override;

	void setVariableDelayMod();
	void addSplash( const vec2 &pos );
	float quantizePitch( const vec2 &pos );
	void loadBatch();

	audio::GenOscNodeRef	mOsc;
	audio::DelayNodeRef		mDelay;
	audio::GainNodeRef		mGain;

	std::list<Splash>	mSplashes;
	Perlin				mPerlin;
	gl::BatchRef		mBatch;
};

void DelayFeedback::setup()
{
	loadBatch();
	gl::enableAlphaBlending();

	// The basic audio::Node's used here are an oscillator with a triangle waveform, a gain, and a delay.
	// The complexity in the sound comes from how they are connected and controlled.
	auto ctx = audio::master();
	mOsc = ctx->makeNode( new audio::GenOscNode );
	mGain = ctx->makeNode( new audio::GainNode( 0 ) );
	mDelay = ctx->makeNode( new audio::DelayNode );

	mOsc->setWaveform( audio::WaveformType::TRIANGLE );

	// The Delay's length Param is itself controlled with Node's, which is configured next.
	setVariableDelayMod();

	// Now we connect up the Node's so that the signal immediately reaches the Context's OutputNode, but it also
	// feedback in a cycle to create an echo. To control the level of feedback and prevent ringing, a one-off GainNode
	// is used with a value of 0.5, which gives a fairly natural sounding decay.
	auto feedbackGain = audio::master()->makeNode( new audio::GainNode( 0.5f ) );
	feedbackGain->setName( "FeedbackGain" );

	mOsc >> mGain >> ctx->getOutput();
	mGain >> mDelay >> feedbackGain >> mDelay >> ctx->getOutput();

	mOsc->enable();
	ctx->enable();

	console() << "--------- context audio graph: --------------------" << endl;
	console() << ctx->printGraphToString();
	console() << "---------------------------------------------------" << endl;
}

// This method adds a low-frequency oscillator to the delay length, which makes a 'flanging' effect.
void DelayFeedback::setVariableDelayMod()
{
	mDelay->setMaxDelaySeconds( 2 );

	auto ctx = audio::master();
	// LFO (sine) -> scale by 0.3 -> offset by 0.343s drives the delay length
	auto osc = ctx->makeNode( new audio::GenSineNode( 0.00113f, audio::Node::Format().autoEnable() ) );
	auto mul = ctx->makeNode( new audio::GainNode( 0.3f ) );
	auto add = ctx->makeNode( new audio::AddNode( 0.343f ) );

	osc >> mul >> add;
	mDelay->getParamDelaySeconds()->setProcessor( add );
}

// Creates a splash at `pos`, animates its radius/alpha over 7 seconds on the
// app timeline, and picks a hue from Perlin noise of the position.
void DelayFeedback::addSplash( const vec2 &pos )
{
	mSplashes.push_back( Splash() );

	auto &splash = mSplashes.back();
	splash.mCenter = pos;
	splash.mAlpha = 1;

	// splashes start larger toward the top of the window
	float radiusMin = ( 1 - (float)pos.y / (float)getWindowHeight() ) * MAX_RADIUS / 2;
	splash.mRadius = randFloat( radiusMin, 30 );
	float endRadius = randFloat( MAX_RADIUS * 0.9f, MAX_RADIUS );

	timeline().apply( &splash.mRadius, endRadius, 7, EaseOutExpo() );
	timeline().apply( &splash.mAlpha, 0.0f, 7 );

	float h = math<float>::min( 1, mPerlin.fBm( normalize( pos ) ) * 7 );
	splash.mColorHsv = vec3( fabsf( h ), 1, 1 );
}

// returns a quantized pitch (in hertz) within the lydian dominant scale
float DelayFeedback::quantizePitch( const vec2 &pos )
{
	const size_t scaleLength = 7;
	float scale[scaleLength] = { 0, 2, 4, 6, 7, 9, 10 };

	// map x position across the window to a MIDI note number
	int pitchMidi = lroundf( lmap( pos.x, 0.0f, (float)getWindowWidth(), MIN_PITCH_MIDI, MAX_PITCH_MIDI ) );

	// walk downward until the note (mod 12) lands on a scale degree
	bool quantized = false;
	while( ! quantized ) {
		int note = pitchMidi % 12;
		for( size_t i = 0; i < scaleLength; i++ ) {
			if( note == scale[i] ) {
				quantized = true;
				break;
			}
		}
		if( ! quantized )
			pitchMidi--;
	}

	return audio::midiToFreq( pitchMidi );
}

// Drag controls pitch (x axis, quantized) and volume (y axis), and spawns a splash.
void DelayFeedback::mouseDrag( MouseEvent event )
{
	float freq = quantizePitch( event.getPos() );
	float gain = 1.0f - (float)event.getPos().y / (float)getWindowHeight();
	gain *= MAX_VOLUME;

	// short ramps avoid clicks when the params jump
	mOsc->getParamFreq()->applyRamp( freq, 0.04f );
	mGain->getParam()->applyRamp( gain, 0.1f );

	addSplash( event.getPos() );
}

// Fade the oscillator out over 1.5 seconds on release.
void DelayFeedback::mouseUp( MouseEvent event )
{
	mGain->getParam()->applyRamp( 0, 1.5, audio::Param::Options().rampFn( &audio::rampOutQuad ) );
}

void DelayFeedback::keyDown( KeyEvent event )
{
	if( event.getChar() == 'f' )
		setFullScreen( ! isFullScreen() );
}

void DelayFeedback::update()
{
	// trim splashes
	if( mSplashes.size() > MAX_SPLASHES ) {
		size_t trimCount = mSplashes.size() - MAX_SPLASHES;
		for( size_t i = 0; i < trimCount; i++ )
			mSplashes.pop_front();
	}
}

void DelayFeedback::draw()
{
	gl::clear();

	if( ! mBatch )
		return;

	gl::ScopedGlslProg glslScope( mBatch->getGlslProg() );

	for( const auto &splash : mSplashes ) {
		// the shader receives the radius normalized to the batch's bounding box
		float radiusNormalized = splash.mRadius / MAX_RADIUS;
		mBatch->getGlslProg()->uniform( "uRadius", radiusNormalized );

		gl::ScopedModelMatrix matrixScope;
		gl::translate( splash.mCenter );

		Color splashColor( CM_HSV, splash.mColorHsv );
		gl::color( splashColor.r, splashColor.g, splashColor.b, splash.mAlpha() );
		mBatch->draw();
	}
}

// Builds the unit-quad batch used to draw every splash; the smooth-circle
// shader shades it using the (-1,1) texture coordinates.
void DelayFeedback::loadBatch()
{
	gl::GlslProgRef glsl;
	try {
		glsl = gl::GlslProg::create( loadResource( SMOOTH_CIRCLE_GLSL_VERT ), loadResource( SMOOTH_CIRCLE_GLSL_FRAG ) );
	}
	catch( ci::Exception &exc ) {
		CI_LOG_E( "failed to load shader, what: " << exc.what() );
		return;
	}

	Rectf boundingBox( - MAX_RADIUS, - MAX_RADIUS, MAX_RADIUS, MAX_RADIUS );

	TriMesh mesh( TriMesh::Format().positions( 2 ).texCoords( 2 ) );
	mesh.appendPosition( boundingBox.getUpperLeft() );
	mesh.appendTexCoord( vec2( -1, -1 ) );
	mesh.appendPosition( boundingBox.getLowerLeft() );
	mesh.appendTexCoord( vec2( -1, 1 ) );
	mesh.appendPosition( boundingBox.getUpperRight() );
	mesh.appendTexCoord( vec2( 1, -1 ) );
	mesh.appendPosition( boundingBox.getLowerRight() );
	mesh.appendTexCoord( vec2( 1, 1 ) );
	mesh.appendTriangle( 0, 1, 2 );
	mesh.appendTriangle( 2, 1, 3 );

	mBatch = gl::Batch::create( mesh, glsl );
}

CINDER_APP( DelayFeedback, RendererGl, []( App::Settings *settings ) {
	settings->setWindowPos( 200, 200 );
	settings->setWindowSize( 1000, 800 );
} )
2,668
1,273
package org.broadinstitute.hellbender.utils;

import org.broadinstitute.hellbender.GATKBaseTest;
import org.testng.Assert;
import org.testng.annotations.Test;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;

/**
 * Unit tests for {@link IGVUtils}, capturing the printed output in a
 * byte stream and asserting on the exact formatted text.
 */
public class IGVUtilsUnitTest extends GATKBaseTest {

    @Test
    public void testPrintIGVFormatHeader() throws IOException {
        try ( final ByteArrayOutputStream baos = new ByteArrayOutputStream();
              final PrintStream out = new PrintStream(baos) ) {

            IGVUtils.printIGVFormatHeader(out, "line", "Track1", "Track2");

            // expect the #track directive followed by the tab-separated column header
            Assert.assertEquals(baos.toString(), "#track graphType=line\nChromosome\tStart\tEnd\tFeature\tTrack1\tTrack2\n");
        }
    }

    @Test
    public void testPrintIGVFormatRow() throws IOException {
        try ( final ByteArrayOutputStream baos = new ByteArrayOutputStream();
              final PrintStream out = new PrintStream(baos) ) {

            IGVUtils.printIGVFormatRow(out, new SimpleInterval("1", 5, 10), "myFeature", 4.0, 7.0);

            // Our interval should get converted from 1-based closed form to 0-based closed-open form,
            // and our values should get formatted to 5 digits after the decimal point.
            Assert.assertEquals(baos.toString(), "1\t4\t10\tmyFeature\t4.00000\t7.00000\n");
        }
    }
}
544
8,523
"""Tests for httpx.QueryParams: construction, typed values, immutability helpers."""
import pytest

import httpx


@pytest.mark.parametrize(
    "source",
    [
        "a=123&a=456&b=789",
        {"a": ["123", "456"], "b": 789},
        {"a": ("123", "456"), "b": 789},
        [("a", "123"), ("a", "456"), ("b", "789")],
        (("a", "123"), ("a", "456"), ("b", "789")),
    ],
)
def test_queryparams(source):
    # every source form above should normalize to the same multi-dict
    q = httpx.QueryParams(source)
    assert "a" in q
    assert "A" not in q
    assert "c" not in q
    assert q["a"] == "123"
    assert q.get("a") == "123"
    assert q.get("nope", default=None) is None
    assert q.get_list("a") == ["123", "456"]

    assert list(q.keys()) == ["a", "b"]
    assert list(q.values()) == ["123", "789"]
    assert list(q.items()) == [("a", "123"), ("b", "789")]
    assert len(q) == 2
    assert list(q) == ["a", "b"]
    assert dict(q) == {"a": "123", "b": "789"}
    assert str(q) == "a=123&a=456&b=789"
    assert repr(q) == "QueryParams('a=123&a=456&b=789')"
    # equality ignores construction form and key ordering
    assert httpx.QueryParams({"a": "123", "b": "456"}) == httpx.QueryParams(
        [("a", "123"), ("b", "456")]
    )
    assert httpx.QueryParams({"a": "123", "b": "456"}) == httpx.QueryParams(
        "a=123&b=456"
    )
    assert httpx.QueryParams({"a": "123", "b": "456"}) == httpx.QueryParams(
        {"b": "456", "a": "123"}
    )
    assert httpx.QueryParams() == httpx.QueryParams({})
    assert httpx.QueryParams([("a", "123"), ("a", "456")]) == httpx.QueryParams(
        "a=123&a=456"
    )
    assert httpx.QueryParams({"a": "123", "b": "456"}) != "invalid"

    q = httpx.QueryParams([("a", "123"), ("a", "456")])
    assert httpx.QueryParams(q) == q


def test_queryparam_types():
    # non-string values are serialized: bools lowercase, None/"" empty, numbers via str()
    q = httpx.QueryParams(None)
    assert str(q) == ""

    q = httpx.QueryParams({"a": True})
    assert str(q) == "a=true"

    q = httpx.QueryParams({"a": False})
    assert str(q) == "a=false"

    q = httpx.QueryParams({"a": ""})
    assert str(q) == "a="

    q = httpx.QueryParams({"a": None})
    assert str(q) == "a="

    q = httpx.QueryParams({"a": 1.23})
    assert str(q) == "a=1.23"

    q = httpx.QueryParams({"a": 123})
    assert str(q) == "a=123"

    q = httpx.QueryParams({"a": [1, 2]})
    assert str(q) == "a=1&a=2"


def test_queryparam_update_is_hard_deprecated():
    # in-place mutation raises; callers must use the copy-on-write API below
    q = httpx.QueryParams("a=123")
    with pytest.raises(RuntimeError):
        q.update({"a": "456"})


def test_queryparam_setter_is_hard_deprecated():
    q = httpx.QueryParams("a=123")
    with pytest.raises(RuntimeError):
        q["a"] = "456"


def test_queryparam_set():
    # set/add/remove/merge each return a new QueryParams instance
    q = httpx.QueryParams("a=123")
    q = q.set("a", "456")
    assert q == httpx.QueryParams("a=456")


def test_queryparam_add():
    q = httpx.QueryParams("a=123")
    q = q.add("a", "456")
    assert q == httpx.QueryParams("a=123&a=456")


def test_queryparam_remove():
    q = httpx.QueryParams("a=123")
    q = q.remove("a")
    assert q == httpx.QueryParams("")


def test_queryparam_merge():
    q = httpx.QueryParams("a=123")
    q = q.merge({"b": "456"})
    assert q == httpx.QueryParams("a=123&b=456")
    q = q.merge({"a": "000", "c": "789"})
    assert q == httpx.QueryParams("a=000&b=456&c=789")


def test_queryparams_are_hashable():
    # equal params hash equal, so the four instances collapse to two set entries
    params = (
        httpx.QueryParams("a=123"),
        httpx.QueryParams({"a": 123}),
        httpx.QueryParams("b=456"),
        httpx.QueryParams({"b": 456}),
    )

    assert len(set(params)) == 2
1,605
841
/* * Copyright 2019 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.test.persistence.scripts; import org.hibernate.dialect.Dialect; import org.hibernate.engine.jdbc.dialect.internal.StandardDialectResolver; import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; import org.hibernate.engine.jdbc.dialect.spi.DialectResolver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class JbpmDialectResolver implements DialectResolver { private static final long serialVersionUID = 1L; private static final Logger logger = LoggerFactory.getLogger(JbpmDialectResolver.class); @Override public Dialect resolveDialect(DialectResolutionInfo info){ Dialect d; if ("Adaptive Server Enterprise".equals(info.getDatabaseName())) { return new SybaseJbpmDialect(); } else { d = StandardDialectResolver.INSTANCE.resolveDialect(info); } logger.info("resolveDialect: {}", d); return d; } }
532
348
{"nom":"Cambron","circ":"1ère circonscription","dpt":"Somme","inscrits":612,"abs":253,"votants":359,"blancs":19,"nuls":2,"exp":338,"res":[{"nuance":"REM","nom":"<NAME>","voix":102},{"nuance":"LR","nom":"M. <NAME>","voix":77},{"nuance":"FN","nom":"<NAME>","voix":64},{"nuance":"FI","nom":"M. <NAME>","voix":52},{"nuance":"SOC","nom":"Mme <NAME>","voix":21},{"nuance":"DLF","nom":"<NAME>","voix":9},{"nuance":"EXD","nom":"<NAME>","voix":7},{"nuance":"DIV","nom":"Mme <NAME>","voix":2},{"nuance":"EXG","nom":"Mme <NAME>","voix":2},{"nuance":"EXG","nom":"Mme <NAME>","voix":2},{"nuance":"DIV","nom":"M. <NAME>","voix":0}]}
251
2,542
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation.  All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------

#include "stdafx.h"
#include <stdio.h>

using namespace Common;
using namespace std;
using namespace Naming;
using namespace ServiceModel;
using namespace Transport;
using namespace Management::ImageModel;
using namespace Management::ClusterManager;

StringLiteral const DockerComposeIBComponent("DockerComposeImageBuilderProxy");

// Test double for the compose-deployment image builder: skips real compose-file
// processing and instead redirects builds to pre-registered mock app build paths.
TestDockerComposeImageBuilderProxy::TestDockerComposeImageBuilderProxy(
    wstring const & imageBuilderExeDirectory,
    wstring const & workingDirectory,
    wstring const & nodeName,
    Federation::NodeInstance const & nodeInstance)
    : ImageBuilderProxy(
        imageBuilderExeDirectory,
        workingDirectory,
        nodeName,
        nodeInstance)
{
}

// Factory: constructs the proxy and runs base-class Initialize() before handing it out.
shared_ptr<TestDockerComposeImageBuilderProxy> TestDockerComposeImageBuilderProxy::Create(
    wstring const & imageBuilderExeDirectory,
    wstring const & workingDirectory,
    wstring const & nodeName,
    Federation::NodeInstance const & nodeInstance)
{
    auto proxy = shared_ptr<TestDockerComposeImageBuilderProxy>(new TestDockerComposeImageBuilderProxy(
        imageBuilderExeDirectory,
        workingDirectory,
        nodeName,
        nodeInstance));

    proxy->Initialize();

    return proxy;
}

// Validation is a no-op in the test proxy: every compose file is accepted.
ErrorCode TestDockerComposeImageBuilderProxy::ValidateComposeFile(
    ByteBuffer const &composeFile,
    NamingUri const &appName,
    ServiceModelTypeName const &applicationTypeName,
    ServiceModelVersion const &applicationTypeVersion,
    TimeSpan const &timeout)
{
    UNREFERENCED_PARAMETER(composeFile);
    UNREFERENCED_PARAMETER(appName);
    UNREFERENCED_PARAMETER(applicationTypeName);
    UNREFERENCED_PARAMETER(applicationTypeVersion);
    UNREFERENCED_PARAMETER(timeout);

    return ErrorCodeValue::Success;
}

// Ignores the compose file/credentials and builds from the mock build path
// registered for appName in mockAppBuildPaths_ (asserts if none was registered).
AsyncOperationSPtr TestDockerComposeImageBuilderProxy::BeginBuildComposeDeploymentType(
    ActivityId const & activityId,
    Common::ByteBufferSPtr const &composeFile,
    Common::ByteBufferSPtr const &overridesFile,
    std::wstring const &registryUserName,
    std::wstring const &registryPassword,
    bool isPasswordEncrypted,
    NamingUri const &appName,
    ServiceModelTypeName const &typeName,
    ServiceModelVersion const &typeVersion,
    TimeSpan const timeout,
    AsyncCallback const & callback,
    AsyncOperationSPtr const & parent)
{
    UNREFERENCED_PARAMETER(composeFile);
    UNREFERENCED_PARAMETER(overridesFile);
    UNREFERENCED_PARAMETER(registryUserName);
    UNREFERENCED_PARAMETER(registryPassword);
    UNREFERENCED_PARAMETER(isPasswordEncrypted);
    UNREFERENCED_PARAMETER(appName);

    auto buildPathIter = mockAppBuildPaths_.find(appName.ToString());
    ASSERT_IF(buildPathIter == mockAppBuildPaths_.end(), "Mock AppBuildPaths does not contain entry {0}", appName);

    //
    // we can add behaviors here to return various error codes
    //
    wstring mockAppBuildPath = buildPathIter->second;
    return this->BeginBuildApplicationType(
        activityId,
        mockAppBuildPath,
        wstring(),
        typeName,
        typeVersion,
        [this](wstring const & details) { UNREFERENCED_PARAMETER(details); },  // progress callback: discarded in tests
        timeout,
        callback,
        parent);
}

// Completes the mock build; returns a dummy merged compose file since no real
// compose processing happened.
ErrorCode TestDockerComposeImageBuilderProxy::EndBuildComposeDeploymentType(
    AsyncOperationSPtr const & operation,
    __out vector<ServiceModelServiceManifestDescription> & serviceManifests,
    __out wstring &,
    __out wstring & applicationManifestContent,
    __out ServiceModel::ApplicationHealthPolicy & healthPolicy,
    __out map<wstring, wstring> & defaultParamList,
    __out wstring &mergedComposeFile)
{
    mergedComposeFile = L"dummy file";
    wstring dummyApplicationManifestId;
    return this->EndBuildApplicationType(operation, serviceManifests, dummyApplicationManifestId, applicationManifestContent, healthPolicy, defaultParamList);
}

// Upgrade build delegates to the regular mock build, ignoring the current version.
AsyncOperationSPtr TestDockerComposeImageBuilderProxy::BeginBuildComposeApplicationTypeForUpgrade(
    ActivityId const & activityId,
    ByteBufferSPtr const & composeFile,
    ByteBufferSPtr const & overridesFile,
    wstring const & registryUserName,
    wstring const & registryPassword,
    bool isPasswordEncrypted,
    Common::NamingUri const & appName,
    ServiceModelTypeName const & typeName,
    ServiceModelVersion const & currentTypeVersion,
    ServiceModelVersion const & targetTypeVersion,
    Common::TimeSpan const timeout,
    Common::AsyncCallback const & callback,
    Common::AsyncOperationSPtr const & parent)
{
    UNREFERENCED_PARAMETER(currentTypeVersion);

    return this->BeginBuildComposeDeploymentType(
        activityId,
        composeFile,
        overridesFile,
        registryUserName,
        registryPassword,
        isPasswordEncrypted,
        appName,
        typeName,
        targetTypeVersion,
        timeout,
        callback,
        parent);
}

ErrorCode TestDockerComposeImageBuilderProxy::EndBuildComposeApplicationTypeForUpgrade(
    AsyncOperationSPtr const & operation,
    __out vector<ServiceModelServiceManifestDescription> & serviceManifests,
    __out wstring & applicationManifestId,
    __out wstring & applicationManifestContent,
    __out ApplicationHealthPolicy & healthPolicy,
    __out map<wstring, wstring> & defaultParamList,
    __out wstring & mergedComposeFile)
{
    return this->EndBuildComposeDeploymentType(
        operation,
        serviceManifests,
        applicationManifestId,
        applicationManifestContent,
        healthPolicy,
        defaultParamList,
        mergedComposeFile);
}
1,938
324
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jclouds.azurecompute.arm.domain.vpn;

import java.util.Map;

import org.jclouds.azurecompute.arm.util.GetEnumValue;
import org.jclouds.javax.annotation.Nullable;
import org.jclouds.json.SerializedNames;

import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableMap;

/**
 * Immutable (AutoValue) representation of an Azure ARM Virtual Network Gateway
 * Connection resource. Deserialized from JSON via the {@link SerializedNames}
 * factory method; unknown enum values map to Unknown/Unrecognized.
 */
@AutoValue
public abstract class VirtualNetworkGatewayConnection {

   /** Connection status as reported by ARM; Unknown is the fallback for unrecognized values. */
   public static enum Status {
      Connected, Connecting, NotConnected, Unknown;

      public static Status fromValue(final String text) {
         return (Status) GetEnumValue.fromValueOrDefault(text, Status.Unknown);
      }
   }

   /** Connection type; Unrecognized is the fallback for values this client does not know. */
   public static enum Type {
      ExpressRoute, IPsec, VPNClient, Vnet2Vnet, Unrecognized;

      public static Type fromValue(final String text) {
         return (Type) GetEnumValue.fromValueOrDefault(text, Type.Unrecognized);
      }
   }

   @Nullable
   public abstract String id();

   public abstract String name();

   public abstract String location();

   @Nullable
   public abstract String type();

   @Nullable
   public abstract Map<String, String> tags();

   @Nullable
   public abstract String etag();

   public abstract VirtualNetworkGatewayConnectionProperties properties();

   VirtualNetworkGatewayConnection() {

   }

   // JSON deserialization entry point; field order must match the serialized names.
   @SerializedNames({ "id", "name", "location", "type", "tags", "etag", "properties" })
   public static VirtualNetworkGatewayConnection create(String id, String name, String location, String type,
         Map<String, String> tags, String etag, VirtualNetworkGatewayConnectionProperties properties) {
      return builder(name, location, properties).id(id).type(type).tags(tags).etag(etag).build();
   }

   public abstract Builder toBuilder();

   public static Builder builder(String name, String location, VirtualNetworkGatewayConnectionProperties properties) {
      return new AutoValue_VirtualNetworkGatewayConnection.Builder().name(name).location(location)
            .properties(properties);
   }

   @AutoValue.Builder
   public abstract static class Builder {
      public abstract Builder id(String id);
      public abstract Builder name(String name);
      public abstract Builder location(String location);
      public abstract Builder type(String type);
      public abstract Builder tags(Map<String, String> tags);
      public abstract Builder etag(String etag);
      public abstract Builder properties(VirtualNetworkGatewayConnectionProperties properties);

      abstract Map<String, String> tags();

      abstract VirtualNetworkGatewayConnection autoBuild();

      // Snapshot tags into an immutable copy before building (null stays null).
      public VirtualNetworkGatewayConnection build() {
         tags(tags() != null ? ImmutableMap.copyOf(tags()) : null);
         return autoBuild();
      }
   }
}
1,025
4,071
<reponame>Ru-Xiang/x-deeplearning /* Copyright (C) 2016-2018 Alibaba Group Holding Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "gtest/gtest.h" #include <string> #include <thread> #include <unordered_map> #include "test/data_io/op/feature_op/test_data.h" #include "xdl/data_io/op/feature_op/feature_op.h" #include "xdl/data_io/op/feature_op/single_feature_op/single_feature_func/log_feature.h" using xdl::io::FeatureNameVec; using xdl::io::FeatureOP; using xdl::io::SampleGroup; using xdl::io::FeatureTable; using xdl::io::FeatureLine; using xdl::io::LogFeature; TEST(FeatureOpTest, Default) { bool is_sort = false; const int num = 3; std::string name_arr[] = {"ad_cate_id", "fake_id", "nick_cate_pv_14"}; int size_arr[] = {3, 5, 4}; std::vector<int64_t> key_arr[num] = { std::vector<int64_t>({2, 0, 5}), std::vector<int64_t>({3, 1, 4, 5, 2}), std::vector<int64_t>({0, 1, 2, 3}) }; std::vector<float> value_arr[num] = { std::vector<float>({0.1, 0.2, 0.3}), std::vector<float>({0.4, 0.1, 0.2, 0.5, 0.3}), std::vector<float>({0.1, 0.2, 0.3, 0.4}) }; std::vector<const Feature *> features; SampleGroup sample_group; FeatureTable *feature_table = sample_group.add_feature_tables(); FeatureLine *feature_line = feature_table->add_feature_lines(); FeatureLineTestData test_data; test_data.Generate(*feature_line, features, name_arr, size_arr, key_arr, value_arr, num, is_sort, false); std::vector<FeatureNameVec> feature_name_vecs(1); 
feature_name_vecs[0].push_back("ad_cate_id"); feature_name_vecs[0].push_back("nick_cate_pv_14"); std::vector<std::string> dsl_arr = { "Name=ad_cate_pv_14_match_sum; Expr=min(max(sum(match(ad_cate_id,log(nick_cate_pv_14))))); Type=Numeric;" }; // Expr = sum(match(0:0.2, 2:0.1 ~ 0:log(0.1), 2:log(0.3))) = 0.2*log(0.1) + 0.1*log(0.3) FeatureOP feature_op; feature_op.Init(dsl_arr, feature_name_vecs); feature_op.Run(&sample_group); const FeatureLine &result_feature_line = sample_group.feature_tables(0).feature_lines(0); EXPECT_EQ(result_feature_line.features_size(), 4); // check origin test data for (int i = 0; i < num; ++i) { const Feature &feature = result_feature_line.features(i); EXPECT_EQ(feature.name(), name_arr[i]); EXPECT_EQ(feature.values_size(), size_arr[i]); for (int j = 0; j < feature.values_size(); ++j) { test_data.ExpectKeyEq(feature.values(j).key(), key_arr[i][j], j); test_data.ExpectValueNear(feature.values(j).value(), value_arr[i][j], j); } } // check result const Feature &feature = result_feature_line.features(3); EXPECT_EQ(feature.name(), "ad_cate_pv_14_match_sum"); EXPECT_EQ(feature.values_size(), 1); EXPECT_FALSE(feature.values(0).has_key()); float expected_result = 0.2 * LogFeature::TransformValue(0.1) + 0.1 * LogFeature::TransformValue(0.3); test_data.ExpectValueNear(feature.values(0).value(), expected_result, 0); } class RunThread { public: void Init(int rank, FeatureOP *feature_op, std::string name_arr[], int size_arr[], bool is_sort) { rank_ = rank; feature_op_ = feature_op; name_arr_ = name_arr; size_arr_ = size_arr; for (int i = 0; i < num_; ++i) { for (size_t j = 0; j < value_arr_[i].size(); ++j) { value_arr_[i][j] = ChangeValue(value_arr_[i][j]); } } std::vector<const Feature *> features; FeatureTable *feature_table = sample_group_.add_feature_tables(); FeatureLine *feature_line = feature_table->add_feature_lines(); test_data_.Generate(*feature_line, features, name_arr, size_arr, key_arr_, value_arr_, num_, is_sort, false); } void Run() { 
feature_op_->Run(&sample_group_); } void Check() { const FeatureLine &result_feature_line = sample_group_.feature_tables(0).feature_lines(0); EXPECT_EQ(result_feature_line.features_size(), 4); // check origin test data for (int i = 0; i < num_; ++i) { const Feature &feature = result_feature_line.features(i); EXPECT_EQ(feature.name(), name_arr_[i]); EXPECT_EQ(feature.values_size(), size_arr_[i]); for (int j = 0; j < feature.values_size(); ++j) { test_data_.ExpectKeyEq(feature.values(j).key(), key_arr_[i][j], j); test_data_.ExpectValueNear(feature.values(j).value(), value_arr_[i][j], j); } } // check result const Feature &feature = result_feature_line.features(3); EXPECT_EQ(feature.name(), "ad_cate_pv_14_match_sum"); EXPECT_EQ(feature.values_size(), 1); EXPECT_FALSE(feature.values(0).has_key()); float expected_result = ChangeValue(0.2) * LogFeature::TransformValue(ChangeValue(0.1)) + ChangeValue(0.1) * LogFeature::TransformValue(ChangeValue(0.3)); test_data_.ExpectValueNear(feature.values(0).value(), expected_result, 0); } inline float ChangeValue(float value) const { return value + 0.1 * rank_; } private: int rank_; FeatureOP *feature_op_; SampleGroup sample_group_; FeatureLineTestData test_data_; const std::string *name_arr_; const int *size_arr_; static const int num_ = 3; std::vector<int64_t> key_arr_[num_] = { std::vector<int64_t>({2, 0, 5}), std::vector<int64_t>({3, 1, 4, 5, 2}), std::vector<int64_t>({0, 1, 2, 3}) }; std::vector<float> value_arr_[num_] = { std::vector<float>({0.1, 0.2, 0.3}), std::vector<float>({0.4, 0.1, 0.2, 0.5, 0.3}), std::vector<float>({0.1, 0.2, 0.3, 0.4}) }; }; TEST(FeatureOpTest, MultiThread) { bool is_sort = false; std::string name_arr[] = {"ad_cate_id", "fake_id", "nick_cate_pv_14"}; int size_arr[] = {3, 5, 4}; std::vector<FeatureNameVec> feature_name_vecs(1); feature_name_vecs[0].push_back("ad_cate_id"); feature_name_vecs[0].push_back("nick_cate_pv_14"); std::vector<std::string> dsl_arr = { "Name=ad_cate_pv_14_match_sum; 
Expr=sum(match(ad_cate_id,log(nick_cate_pv_14))); Type=Numeric;" }; // Expr = sum(match(0:0.2, 2:0.1 ~ 0:log(0.1), 2:log(0.3))) = 0.2*log(0.1) + 0.1*log(0.3) FeatureOP feature_op; feature_op.Init(dsl_arr, feature_name_vecs); const int thread_num = 30; RunThread r[thread_num]; std::thread *run_threads[thread_num]; for (int t = 0; t < thread_num; ++t) { r[t].Init(t, &feature_op, name_arr, size_arr, is_sort); run_threads[t] = new std::thread(&RunThread::Run, &r[t]); } for (int t = 0; t < thread_num; ++t) { run_threads[t]->join(); r[t].Check(); delete run_threads[t]; } }
3,168
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.netbeans.beaninfo.editors;

import java.awt.Component;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyEditorSupport;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import javax.swing.ButtonGroup;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JRadioButton;
import org.netbeans.core.UIExceptions;
import org.openide.explorer.propertysheet.ExPropertyEditor;
import org.openide.explorer.propertysheet.PropertyEnv;
import org.openide.util.NbBundle;
import org.openide.util.Lookup;
import org.openide.util.Utilities;

/** Defines editor for choosing of any object using lookup.
 *
 * @author <NAME>
 */
public final class ObjectEditor extends PropertyEditorSupport
implements ExPropertyEditor {
    /** Name of the custom property that can be passed in PropertyEnv.
     * Should contain superclass that is allowed to be
     */
    private static final String PROP_SUPERCLASS = "superClass"; // NOI18N
    /** Name of the custom property that can be passed in PropertyEnv.
     * Either Boolean.TRUE or a String, in such case the string represents
     * human readable name of the value.
     */
    /*package*/ static final String PROP_NULL = "nullValue"; // NOI18N
    /** Name of the custom property that can be passed in PropertyEnv.
     * A lookup to use to query for results.
     */
    private static final String PROP_LOOKUP = "lookup"; // NOI18N

    /** custom editor */
    private ObjectPanel customEditor;

    /** super class to search for */
    private Lookup.Template<Object> template;

    /** null or name to use for null value */
    private String nullValue;

    /** a special lookup to use or null */
    private Lookup lookup;

    /** Creates new ObjectEditor */
    public ObjectEditor() {
    }

    /**
     * This method is called by the IDE to pass
     * the environment to the property editor.
     * Reads the optional superclass, null-value and lookup hints from the
     * feature descriptor and disables textual editing when there is at most
     * one selectable item.
     */
    public synchronized void attachEnv(PropertyEnv env) {
        Object obj = env.getFeatureDescriptor ().getValue (PROP_SUPERCLASS);
        if (obj instanceof Class) {
            @SuppressWarnings("unchecked")
            Class<Object> clz = (Class<Object>)obj;
            template = new Lookup.Template<Object> (clz);
        } else {
            template = null;
        }

        obj = env.getFeatureDescriptor ().getValue (PROP_NULL);
        if (Boolean.TRUE.equals (obj)) {
            nullValue = NbBundle.getMessage (ObjectEditor.class, "CTL_NullValue");
        } else {
            if (obj instanceof String) {
                nullValue = (String)obj;
            } else {
                nullValue = null;
            }
        }

        obj = env.getFeatureDescriptor ().getValue (PROP_LOOKUP);
        lookup = obj instanceof Lookup ? (Lookup)obj : null;

        //Don't allow editing in the case only one item and tags are null
        //Compute the tags once: each getTags() call performs a full lookup query.
        String[] tags = getTags();
        if (tags == null || tags.length <= 1) {
            env.getFeatureDescriptor().setValue("canEditAsText",Boolean.FALSE); //NOI18N
        }
    }

    /** A lookup to work on.
     * @return the lookup supplied via PROP_LOOKUP, or the default lookup.
     */
    protected Lookup lookup () {
        Lookup l = lookup;
        return l == null ? Lookup.getDefault () : l;
    }

    /** A template to use.
     * @return the template supplied via PROP_SUPERCLASS, or an Object template.
     */
    protected Lookup.Template<Object> template () {
        if (template == null) {
            template = new Lookup.Template<Object> (Object.class);
        }
        return template;
    }

    /** Human readable name of the current value: the display name of the
     * matching lookup item, the configured null label for null, or a
     * placeholder when the value is not present in the lookup.
     */
    @Override
    public String getAsText() {
        Object value = getValue ();

        if (value == null) {
            return nullValue == null ?
                NbBundle.getMessage (ObjectEditor.class, "CTL_NullValue") : nullValue;
        }

        Lookup.Template<Object> t = new Lookup.Template<Object> (
                                 template ().getType (),
                                 template ().getId (),
                                 value // instance to search for
                             );
        Lookup.Item item = lookup ().lookupItem (t);

        if (item == null) {
            return NbBundle.getMessage (ObjectEditor.class, "CTL_NullItem");
        }

        return item.getDisplayName();
    }

    /** Searches between items whether there is one with the same display name.
     * @param str item name
     * @throws IllegalArgumentException if no item has that display name
     */
    @Override
    public void setAsText(java.lang.String str) throws java.lang.IllegalArgumentException {
        if (nullValue != null && nullValue.equals (str)) {
            setValue (null);
            return;
        }

        Collection allItems = lookup ().lookup (template ()).allItems ();

        Iterator it = allItems.iterator ();
        while (it.hasNext ()) {
            Lookup.Item item = (Lookup.Item)it.next ();

            if (item.getDisplayName().equals (str)) {
                setValue (item.getInstance ());
                firePropertyChange();
                return;
            }
        }

        IllegalArgumentException iae = new IllegalArgumentException (str);
        String msg = MessageFormat.format(
            NbBundle.getMessage(
                ObjectEditor.class, "FMT_EXC_GENERIC_BAD_VALUE"),  //NOI18N
                new Object[] {str});
        UIExceptions.annotateUser(iae, str, msg, null, new Date());
        throw iae;
    }

    /** List of all display names for items.
     * @return array of strings, or null when there is at most one item
     *         (which also disables textual and custom editing)
     */
    @Override
    public java.lang.String[] getTags() {
        Collection<? extends Lookup.Item<Object>> allItems = lookup ().lookup (template ()).allItems ();

        if (allItems.size() <= 1) {
            return null;
        }

        ArrayList<String> list = new ArrayList<String> (allItems.size () + 1);
        if (nullValue != null) {
            list.add (nullValue);
        }

        for (Lookup.Item<Object> item: allItems) {
            list.add (item.getDisplayName());
        }

        String[] retValue = new String[list.size()];
        list.toArray(retValue);

        return retValue;
    }

    /** Yes we have custom editor. */
    @Override
    public boolean supportsCustomEditor() {
        //Don't allow custom editor if there will be nothing to show
        //Compute the tags once: each getTags() call performs a full lookup query.
        String[] tags = getTags();
        return tags != null && tags.length > 1;
    }

    @Override
    public synchronized Component getCustomEditor () {
        if (!supportsCustomEditor()) {
            return null;
        }
        if (customEditor != null) {
            return customEditor;
        }
        Lookup.Result<Object> contents = lookup().lookup(template());
        ObjectPanel panel = new ObjectPanel(contents);
        return customEditor = panel;
    }

    /** Custom editor panel: one radio button per lookup item, with an
     * optional description label under each button.
     */
    private class ObjectPanel extends JPanel implements ActionListener {
        static final long serialVersionUID = 1L;

        public ObjectPanel(Lookup.Result<Object> res) {
            getAccessibleContext().setAccessibleName(
                NbBundle.getMessage(ObjectEditor.class, "ACSN_ObjectTree")); //NOI18N
            getAccessibleContext().setAccessibleDescription(
                NbBundle.getMessage(ObjectEditor.class, "ACSD_ObjectTree")); //NOI18N
            setLayout (new GridBagLayout());
            GridBagConstraints gbc = new GridBagConstraints();
            int row = 0;
            ButtonGroup bg = new ButtonGroup();

            Font bold;
            Font plain;
            if (Utilities.isMac()) {
                // don't use deriveFont() - see #49973 for details
                bold = new Font(getFont().getName(), Font.BOLD, getFont().getSize());
                //For default metal L&F where labels are by default bold
                // don't use deriveFont() - see #49973 for details
                plain = new Font(getFont().getName(), Font.PLAIN, getFont().getSize());
            } else {
                bold = getFont().deriveFont(Font.BOLD);
                plain = getFont().deriveFont(Font.PLAIN);
            }

            Collection<? extends Lookup.Item<Object>> c = res.allItems();
            Lookup.Item[] items = new Lookup.Item[c.size()];
            items = c.toArray(items);
            int BASE_LEFT_INSET=7;
            for (int i=0; i < items.length; i++) {
                JRadioButton rb = new ItemRadioButton(items[i], bold);
                Object inst = items[i].getInstance();
                if (inst != null && inst.equals(getValue())) {
                    rb.setSelected(true);
                }
                rb.addActionListener(this);
                bg.add(rb);
                String description = getDescription(items[i]);
                gbc.gridx=0;
                gbc.gridy=row;
                gbc.insets = new Insets(i==0 ? 7 : 0, BASE_LEFT_INSET,
                    description != null ? 1 : i==items.length-1 ? 7: 4, BASE_LEFT_INSET);
                gbc.fill=GridBagConstraints.HORIZONTAL;
                add(rb, gbc);
                row++;
                if (description != null) {
                    JLabel lbl = new JLabel(description);
                    lbl.setLabelFor(rb);
                    lbl.setFont(plain);
                    int left = rb.getIcon() != null ? rb.getIcon().getIconWidth() : 20;
                    gbc.insets = new Insets(0, BASE_LEFT_INSET + left,
                        4, BASE_LEFT_INSET + left);
                    gbc.gridx=0;
                    gbc.gridy=row;
                    add(lbl, gbc);
                    row++;
                }
            }
        }

        /** Short bean description for an item, or null when unavailable or
         * when it merely repeats the class's simple name.
         */
        private String getDescription (Lookup.Item item) {
            String result = null;
            try {
                result = Introspector.getBeanInfo(item.getInstance().getClass()).getBeanDescriptor().getShortDescription();
            } catch (IntrospectionException ie) {
                //do nothing
            }
            String toCheck = item.getInstance().getClass().getName();
            toCheck = toCheck.lastIndexOf('.')!=-1 ?
                toCheck.substring(toCheck.lastIndexOf('.')+1) : toCheck; //NOI18N
            if (toCheck.equals(result)) {
                result = null;
            }
            return result;
        }

        public void actionPerformed(ActionEvent ae) {
            Lookup.Item item = ((ItemRadioButton) ae.getSource()).item;
            // Fetch the instance once and reuse it (the previous code called
            // getInstance() twice and left the first result unused).
            Object o = item.getInstance();
            setValue (o);
            ObjectEditor.this.firePropertyChange();
        }
    }

    /** Radio button bound to one lookup item; accessible name/description
     * mirror the item's id and display name.
     */
    private static class ItemRadioButton extends JRadioButton {
        static final long serialVersionUID = 3L;
        Lookup.Item item;

        public ItemRadioButton(Lookup.Item item, Font font) {
            this.item = item;
            setName(item.getId());
            setText(item.getDisplayName());
            setFont(font);
            getAccessibleContext().setAccessibleName(getName());
            getAccessibleContext().setAccessibleDescription(
                getText());
        }
    }
}
5,446
507
<reponame>mjuenema/python-terrascript # terrascript/data/invidian/libvirt.py # Automatically generated by tools/makecode.py (24-Sep-2021 15:21:04 UTC) import terrascript class libvirt_network_dns_host_template(terrascript.Data): pass class libvirt_network_dns_srv_template(terrascript.Data): pass class libvirt_network_dnsmasq_options_template(terrascript.Data): pass __all__ = [ "libvirt_network_dns_host_template", "libvirt_network_dns_srv_template", "libvirt_network_dnsmasq_options_template", ]
204
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "media/gpu/vaapi/test/vp8_decoder.h" #include <va/va.h> #include <memory> #include "media/filters/ivf_parser.h" #include "media/gpu/vaapi/test/macros.h" #include "media/parsers/vp8_parser.h" namespace media { namespace vaapi_test { namespace { template <typename To, typename From> void CheckedMemcpy(To& to, From& from) { static_assert(std::is_array<To>::value, "First parameter must be an array"); static_assert(std::is_array<From>::value, "Second parameter must be an array"); static_assert(sizeof(to) == sizeof(from), "arrays must be of same size"); memcpy(&to, &from, sizeof(to)); } } // namespace Vp8Decoder::Vp8Decoder(std::unique_ptr<IvfParser> ivf_parser, const VaapiDevice& va_device, SharedVASurface::FetchPolicy fetch_policy) : VideoDecoder(std::move(ivf_parser), va_device, fetch_policy), va_config_( std::make_unique<ScopedVAConfig>(va_device_, VAProfile::VAProfileVP8Version0_3, VA_RT_FORMAT_YUV420)), vp8_parser_(std::make_unique<Vp8Parser>()), ref_frames_(kNumVp8ReferenceBuffers) { std::fill(ref_frames_.begin(), ref_frames_.end(), nullptr); } Vp8Decoder::~Vp8Decoder() { // We destroy the VA handles explicitly to ensure the correct order. 
// The configuration must be destroyed after the context so that the // configuration reference remains valid in the context, and surfaces can only // be destroyed after the context as per // https://github.com/intel/libva/blob/8c6126e67c446f4c7808cb51b609077e4b9bd8fe/va/va.h#L1549 va_context_.reset(); va_config_.reset(); last_decoded_surface_.reset(); ref_frames_.clear(); } Vp8Decoder::ParseResult Vp8Decoder::ReadNextFrame( Vp8FrameHeader& vp8_frame_header) { IvfFrameHeader ivf_frame_header{}; const uint8_t* ivf_frame_data; if (!ivf_parser_->ParseNextFrame(&ivf_frame_header, &ivf_frame_data)) return kEOStream; const bool result = vp8_parser_->ParseFrame( ivf_frame_data, ivf_frame_header.frame_size, &vp8_frame_header); return result ? kOk : kError; } // The implementation of method is mostly lifted from vaapi_utils.h // FillVP8DataStructures // (https://source.chromium.org/chromium/chromium/src/+/main:media/gpu/vaapi/vaapi_utils.cc;l=195;drc=9d70e034c6a4c2b1ed56c94aace3f3c8d2b1f771). void Vp8Decoder::FillVp8DataStructures(const Vp8FrameHeader& frame_hdr, VAIQMatrixBufferVP8& iq_matrix_buf, VAProbabilityDataBufferVP8& prob_buf, VAPictureParameterBufferVP8& pic_param, VASliceParameterBufferVP8& slice_param) { const Vp8SegmentationHeader& sgmnt_hdr = frame_hdr.segmentation_hdr; const Vp8QuantizationHeader& quant_hdr = frame_hdr.quantization_hdr; static_assert(base::size(decltype(iq_matrix_buf.quantization_index){}) == kMaxMBSegments, "incorrect quantization matrix segment size"); static_assert( base::size(decltype(iq_matrix_buf.quantization_index){}[0]) == 6, "incorrect quantization matrix Q index size"); for (size_t i = 0; i < kMaxMBSegments; ++i) { int q = quant_hdr.y_ac_qi; if (sgmnt_hdr.segmentation_enabled) { if (sgmnt_hdr.segment_feature_mode == Vp8SegmentationHeader::FEATURE_MODE_ABSOLUTE) { q = sgmnt_hdr.quantizer_update_value[i]; } else { q += sgmnt_hdr.quantizer_update_value[i]; } } #define CLAMP_Q(q) base::clamp(q, 0, 127) 
iq_matrix_buf.quantization_index[i][0] = CLAMP_Q(q); iq_matrix_buf.quantization_index[i][1] = CLAMP_Q(q + quant_hdr.y_dc_delta); iq_matrix_buf.quantization_index[i][2] = CLAMP_Q(q + quant_hdr.y2_dc_delta); iq_matrix_buf.quantization_index[i][3] = CLAMP_Q(q + quant_hdr.y2_ac_delta); iq_matrix_buf.quantization_index[i][4] = CLAMP_Q(q + quant_hdr.uv_dc_delta); iq_matrix_buf.quantization_index[i][5] = CLAMP_Q(q + quant_hdr.uv_ac_delta); #undef CLAMP_Q } const Vp8EntropyHeader& entr_hdr = frame_hdr.entropy_hdr; CheckedMemcpy(prob_buf.dct_coeff_probs, entr_hdr.coeff_probs); pic_param.frame_width = frame_hdr.width; pic_param.frame_height = frame_hdr.height; pic_param.last_ref_frame = ref_frames_[VP8_FRAME_LAST] ? ref_frames_[VP8_FRAME_LAST]->id() : VA_INVALID_SURFACE; pic_param.golden_ref_frame = ref_frames_[VP8_FRAME_GOLDEN] ? ref_frames_[VP8_FRAME_GOLDEN]->id() : VA_INVALID_SURFACE; pic_param.alt_ref_frame = ref_frames_[VP8_FRAME_ALTREF] ? ref_frames_[VP8_FRAME_ALTREF]->id() : VA_INVALID_SURFACE; const Vp8LoopFilterHeader& lf_hdr = frame_hdr.loopfilter_hdr; #define FHDR_TO_PP_PF(a, b) pic_param.pic_fields.bits.a = (b) FHDR_TO_PP_PF(key_frame, frame_hdr.IsKeyframe() ? 
0 : 1); FHDR_TO_PP_PF(version, frame_hdr.version); FHDR_TO_PP_PF(segmentation_enabled, sgmnt_hdr.segmentation_enabled); FHDR_TO_PP_PF(update_mb_segmentation_map, sgmnt_hdr.update_mb_segmentation_map); FHDR_TO_PP_PF(update_segment_feature_data, sgmnt_hdr.update_segment_feature_data); FHDR_TO_PP_PF(filter_type, lf_hdr.type); FHDR_TO_PP_PF(sharpness_level, lf_hdr.sharpness_level); FHDR_TO_PP_PF(loop_filter_adj_enable, lf_hdr.loop_filter_adj_enable); FHDR_TO_PP_PF(mode_ref_lf_delta_update, lf_hdr.mode_ref_lf_delta_update); FHDR_TO_PP_PF(sign_bias_golden, frame_hdr.sign_bias_golden); FHDR_TO_PP_PF(sign_bias_alternate, frame_hdr.sign_bias_alternate); FHDR_TO_PP_PF(mb_no_coeff_skip, frame_hdr.mb_no_skip_coeff); FHDR_TO_PP_PF(loop_filter_disable, lf_hdr.level == 0); #undef FHDR_TO_PP_PF CheckedMemcpy(pic_param.mb_segment_tree_probs, sgmnt_hdr.segment_prob); static_assert(std::extent<decltype(sgmnt_hdr.lf_update_value)>() == std::extent<decltype(pic_param.loop_filter_level)>(), "loop filter level arrays mismatch"); for (size_t i = 0; i < base::size(sgmnt_hdr.lf_update_value); ++i) { int lf_level = lf_hdr.level; if (sgmnt_hdr.segmentation_enabled) { if (sgmnt_hdr.segment_feature_mode == Vp8SegmentationHeader::FEATURE_MODE_ABSOLUTE) { lf_level = sgmnt_hdr.lf_update_value[i]; } else { lf_level += sgmnt_hdr.lf_update_value[i]; } } pic_param.loop_filter_level[i] = base::clamp(lf_level, 0, 63); } static_assert( std::extent<decltype(lf_hdr.ref_frame_delta)>() == std::extent<decltype(pic_param.loop_filter_deltas_ref_frame)>(), "loop filter deltas arrays size mismatch"); static_assert(std::extent<decltype(lf_hdr.mb_mode_delta)>() == std::extent<decltype(pic_param.loop_filter_deltas_mode)>(), "loop filter deltas arrays size mismatch"); static_assert(std::extent<decltype(lf_hdr.ref_frame_delta)>() == std::extent<decltype(lf_hdr.mb_mode_delta)>(), "loop filter deltas arrays size mismatch"); for (size_t i = 0; i < base::size(lf_hdr.ref_frame_delta); ++i) { 
pic_param.loop_filter_deltas_ref_frame[i] = lf_hdr.ref_frame_delta[i]; pic_param.loop_filter_deltas_mode[i] = lf_hdr.mb_mode_delta[i]; } #define FHDR_TO_PP(a) pic_param.a = frame_hdr.a FHDR_TO_PP(prob_skip_false); FHDR_TO_PP(prob_intra); FHDR_TO_PP(prob_last); FHDR_TO_PP(prob_gf); #undef FHDR_TO_PP CheckedMemcpy(pic_param.y_mode_probs, entr_hdr.y_mode_probs); CheckedMemcpy(pic_param.uv_mode_probs, entr_hdr.uv_mode_probs); CheckedMemcpy(pic_param.mv_probs, entr_hdr.mv_probs); pic_param.bool_coder_ctx.range = frame_hdr.bool_dec_range; pic_param.bool_coder_ctx.value = frame_hdr.bool_dec_value; pic_param.bool_coder_ctx.count = frame_hdr.bool_dec_count; slice_param.slice_data_size = frame_hdr.frame_size; slice_param.slice_data_offset = frame_hdr.first_part_offset; slice_param.slice_data_flag = VA_SLICE_DATA_FLAG_ALL; slice_param.macroblock_offset = frame_hdr.macroblock_bit_offset; // Number of DCT partitions plus control partition. slice_param.num_of_partitions = frame_hdr.num_of_dct_partitions + 1; // Per VAAPI, this size only includes the size of the macroblock data in // the first partition (in bytes), so we have to subtract the header size. 
slice_param.partition_size[0] = frame_hdr.first_part_size - ((frame_hdr.macroblock_bit_offset + 7) / 8); for (size_t i = 0; i < frame_hdr.num_of_dct_partitions; ++i) slice_param.partition_size[i + 1] = frame_hdr.dct_partition_sizes[i]; } // Based on update_reference_frames() in libvpx: vp8/encoder/onyx_if.c void Vp8Decoder::RefreshReferenceSlots(Vp8FrameHeader& frame_hdr, scoped_refptr<SharedVASurface> surface) { if (frame_hdr.IsKeyframe()) { ref_frames_[VP8_FRAME_LAST] = surface; ref_frames_[VP8_FRAME_GOLDEN] = surface; ref_frames_[VP8_FRAME_ALTREF] = surface; return; } if (frame_hdr.refresh_alternate_frame) { ref_frames_[VP8_FRAME_ALTREF] = surface; } else { switch (frame_hdr.copy_buffer_to_alternate) { case Vp8FrameHeader::COPY_LAST_TO_ALT: DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_LAST]); ref_frames_[VP8_FRAME_ALTREF] = ref_frames_[VP8_FRAME_LAST]; break; case Vp8FrameHeader::COPY_GOLDEN_TO_ALT: DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_GOLDEN]); ref_frames_[VP8_FRAME_ALTREF] = ref_frames_[VP8_FRAME_GOLDEN]; break; case Vp8FrameHeader::NO_ALT_REFRESH: DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_ALTREF]); break; } } if (frame_hdr.refresh_golden_frame) { ref_frames_[VP8_FRAME_GOLDEN] = surface; } else { switch (frame_hdr.copy_buffer_to_golden) { case Vp8FrameHeader::COPY_LAST_TO_GOLDEN: DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_LAST]); ref_frames_[VP8_FRAME_GOLDEN] = ref_frames_[VP8_FRAME_LAST]; break; case Vp8FrameHeader::COPY_ALT_TO_GOLDEN: DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_ALTREF]); ref_frames_[VP8_FRAME_GOLDEN] = ref_frames_[VP8_FRAME_ALTREF]; break; case Vp8FrameHeader::NO_GOLDEN_REFRESH: DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_GOLDEN]); break; } } if (frame_hdr.refresh_last) ref_frames_[VP8_FRAME_LAST] = surface; else DCHECK(ref_frames_[Vp8RefType::VP8_FRAME_LAST]); } VideoDecoder::Result Vp8Decoder::DecodeNextFrame() { // Parse next frame from stream. 
Vp8FrameHeader frame_hdr{}; const ParseResult parser_res = ReadNextFrame(frame_hdr); if (parser_res == kEOStream) return VideoDecoder::kEOStream; LOG_ASSERT(parser_res == kOk) << "Failed to parse next frame."; if (frame_hdr.IsKeyframe()) { const gfx::Size new_size(frame_hdr.width, frame_hdr.height); LOG_ASSERT(!new_size.IsEmpty()) << "New key frame size is empty."; if (!va_context_ || new_size != va_context_->size()) { va_context_ = std::make_unique<ScopedVAContext>(va_device_, *va_config_, new_size); } } else { frame_hdr.height = va_context_->size().height(); frame_hdr.width = va_context_->size().width(); } LOG_ASSERT(va_context_ != nullptr) << "VA Context not set. First frame was not a key frame."; VLOG_IF(2, !frame_hdr.show_frame) << "not displaying frame"; last_decoded_frame_visible_ = frame_hdr.show_frame; // Create surfaces for decode. VASurfaceAttrib attribute{}; attribute.type = VASurfaceAttribUsageHint; attribute.flags = VA_SURFACE_ATTRIB_SETTABLE; attribute.value.type = VAGenericValueTypeInteger; attribute.value.value.i = VA_SURFACE_ATTRIB_USAGE_HINT_DECODER; scoped_refptr<SharedVASurface> surface = SharedVASurface::Create( va_device_, va_config_->va_rt_format(), va_context_->size(), attribute); // Create the VP8 data structures. VAIQMatrixBufferVP8 iq_matrix_buf{}; VAProbabilityDataBufferVP8 prob_buf{}; VAPictureParameterBufferVP8 pic_param{}; VASliceParameterBufferVP8 slice_param{}; FillVp8DataStructures(frame_hdr, iq_matrix_buf, prob_buf, pic_param, slice_param); // Populate the VA API buffers. 
std::vector<VABufferID> buffers; VABufferID iq_matrix_id; VAStatus res = vaCreateBuffer(va_device_.display(), va_context_->id(), VAIQMatrixBufferType, sizeof(iq_matrix_buf), 1u, nullptr, &iq_matrix_id); VA_LOG_ASSERT(res, "vaCreateBuffer"); void* iq_matrix_data; res = vaMapBuffer(va_device_.display(), iq_matrix_id, &iq_matrix_data); VA_LOG_ASSERT(res, "vaMapBuffer"); memcpy(iq_matrix_data, &iq_matrix_buf, sizeof(iq_matrix_buf)); buffers.push_back(iq_matrix_id); VABufferID prob_buffer_id; res = vaCreateBuffer(va_device_.display(), va_context_->id(), VAProbabilityBufferType, sizeof(prob_buf), 1u, nullptr, &prob_buffer_id); VA_LOG_ASSERT(res, "vaCreateBuffer"); void* prob_buffer_data; res = vaMapBuffer(va_device_.display(), prob_buffer_id, &prob_buffer_data); VA_LOG_ASSERT(res, "vaMapBuffer"); memcpy(prob_buffer_data, &prob_buf, sizeof(prob_buf)); buffers.push_back(prob_buffer_id); VABufferID picture_params_id; res = vaCreateBuffer(va_device_.display(), va_context_->id(), VAPictureParameterBufferType, sizeof(pic_param), 1u, nullptr, &picture_params_id); VA_LOG_ASSERT(res, "vaCreateBuffer"); void* picture_params_data; res = vaMapBuffer(va_device_.display(), picture_params_id, &picture_params_data); VA_LOG_ASSERT(res, "vaMapBuffer"); memcpy(picture_params_data, &pic_param, sizeof(pic_param)); buffers.push_back(picture_params_id); VABufferID slice_params_id; res = vaCreateBuffer(va_device_.display(), va_context_->id(), VASliceParameterBufferType, sizeof(slice_param), 1u, nullptr, &slice_params_id); VA_LOG_ASSERT(res, "vaCreateBuffer"); void* slice_params_data; res = vaMapBuffer(va_device_.display(), slice_params_id, &slice_params_data); VA_LOG_ASSERT(res, "vaMapBuffer"); memcpy(slice_params_data, &slice_param, sizeof(pic_param)); buffers.push_back(slice_params_id); VABufferID encoded_data_id; res = vaCreateBuffer(va_device_.display(), va_context_->id(), VASliceDataBufferType, frame_hdr.frame_size, 1u, nullptr, &encoded_data_id); VA_LOG_ASSERT(res, "vaCreateBuffer"); 
void* encoded_data; res = vaMapBuffer(va_device_.display(), encoded_data_id, &encoded_data); VA_LOG_ASSERT(res, "vaMapBuffer"); memcpy(encoded_data, frame_hdr.data, frame_hdr.frame_size); buffers.push_back(encoded_data_id); // Time to render! res = vaBeginPicture(va_device_.display(), va_context_->id(), surface->id()); VA_LOG_ASSERT(res, "vaBeginPicture"); res = vaRenderPicture(va_device_.display(), va_context_->id(), buffers.data(), base::checked_cast<int>(buffers.size())); VA_LOG_ASSERT(res, "vaRenderPicture"); res = vaEndPicture(va_device_.display(), va_context_->id()); VA_LOG_ASSERT(res, "vaEndPicture"); RefreshReferenceSlots(frame_hdr, surface); last_decoded_surface_ = surface; for (const auto buffer_id : buffers) { res = vaUnmapBuffer(va_device_.display(), buffer_id); VA_LOG_ASSERT(res, "vaUnmapBuffer"); res = vaDestroyBuffer(va_device_.display(), buffer_id); VA_LOG_ASSERT(res, "vaDestroyBuffer"); } return VideoDecoder::kOk; } } // namespace vaapi_test } // namespace media
7,325
1,453
//------------------------------------------------------------------------------------------------------- // Copyright (C) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information. //------------------------------------------------------------------------------------------------------- #include "bsqassembly.h" #include "../core/bsqmemory.h" #include "../runtime/environment.h" RefMask jsonLoadRefMask(json val) { if(val.is_null()) { return nullptr; } else { auto mstr = val.get<std::string>(); if (Environment::g_stringmaskToDeclMap.find(mstr) == Environment::g_stringmaskToDeclMap.cend()) { auto rstr = (char*)malloc(mstr.size() + 1); GC_MEM_COPY(rstr, mstr.c_str(), mstr.size()); rstr[mstr.size()] = '\0'; Environment::g_stringmaskToDeclMap[mstr] = rstr; } return Environment::g_stringmaskToDeclMap.find(mstr)->second; } } BSQTypeSizeInfo jsonLoadTypeSizeInfo(json val) { uint64_t heapsize = val["heapsize"].get<uint64_t>(); uint64_t inlinedatasize = val["inlinedatasize"].get<uint64_t>(); uint64_t assigndatasize = val["assigndatasize"].get<uint64_t>(); RefMask hmask = jsonLoadRefMask(val["heapmask"]); RefMask imask = jsonLoadRefMask(val["inlinedmask"]); return BSQTypeSizeInfo{heapsize, inlinedatasize, assigndatasize, hmask, imask}; } BSQTypeID j_tkey(json v) { auto tstr = v["tkey"].get<std::string>(); return Environment::g_typenameToIDMap[tstr].first; } std::string j_name(json v) { return v["name"].get<std::string>(); } BSQTypeKind j_tkind(json v) { return v["tkind"].get<BSQTypeKind>(); } bool j_iskey(json v) { return v["iskey"].get<bool>(); } BSQTypeSizeInfo j_allocinfo(json v) { return jsonLoadTypeSizeInfo(v["allocinfo"]); } void j_vtable(std::map<BSQVirtualInvokeID, BSQInvokeID>& vtable, json v) { auto varray = v["vtable"]; for(size_t i = 0; i < varray.size(); ++i) { auto ventry = varray.at(i); auto vstr = ventry["vcall"].get<std::string>(); auto istr = ventry["inv"].get<std::string>(); 
vtable[Environment::g_vinvokenameToIDMap[vstr]] = Environment::g_invokenameToIDMap[istr]; } } const BSQType* jsonLoadValidatorType(json v) { auto re = bsqRegexJSONParse_impl(v["regex"]); return new BSQValidatorType(j_tkey(v), j_name(v), re); } const BSQType* jsonLoadStringOfType(json v) { auto vname = v["validator"].get<std::string>(); auto vtid = Environment::g_typenameToIDMap[vname].first; return new BSQStringOfType(j_tkey(v), j_name(v), vtid); } const BSQType* jsonLoadDataStringType(json v) { auto iname = v["chkinv"].get<std::string>(); auto inv = Environment::g_invokenameToIDMap[iname]; return new BSQDataStringType(j_tkey(v), j_name(v), inv); } const BSQType* jsonLoadTypedNumberType(json v) { auto primitive = Environment::g_typenameToIDMap[v["primitive"].get<std::string>()].first; auto underlying = Environment::g_typenameToIDMap[v["underlying"].get<std::string>()].first; switch(primitive) { case BSQ_TYPE_ID_BOOL: return new BSQTypedNumberType<BSQBool>(j_tkey(v), j_name(v), underlying, BSQType::g_typeBool); case BSQ_TYPE_ID_NAT: return new BSQTypedNumberType<BSQNat>(j_tkey(v), j_name(v), underlying, BSQType::g_typeNat); case BSQ_TYPE_ID_INT: return new BSQTypedNumberType<BSQInt>(j_tkey(v), j_name(v), underlying, BSQType::g_typeInt); case BSQ_TYPE_ID_BIGNAT: return new BSQTypedBigNumberType<BSQBigNat>(j_tkey(v), j_name(v), underlying, BSQType::g_typeBigNat); case BSQ_TYPE_ID_BIGINT: return new BSQTypedBigNumberType<BSQBigInt>(j_tkey(v), j_name(v), underlying, BSQType::g_typeBigInt); case BSQ_TYPE_ID_FLOAT: return new BSQTypedNumberType<BSQFloat>(j_tkey(v), j_name(v), underlying, BSQType::g_typeFloat); case BSQ_TYPE_ID_DECIMAL: return new BSQTypedNumberType<BSQDecimal>(j_tkey(v), j_name(v), underlying, BSQType::g_typeDecimal); case BSQ_TYPE_ID_RATIONAL: return new BSQTypedNumberType<BSQRational>(j_tkey(v), j_name(v), underlying, BSQType::g_typeRational); case BSQ_TYPE_ID_ISOTIME: return new BSQTypedNumberType<BSQISOTime>(j_tkey(v), j_name(v), underlying, 
BSQType::g_typeISOTime); case BSQ_TYPE_ID_LOGICALTIME: return new BSQTypedNumberType<BSQLogicalTime>(j_tkey(v), j_name(v), underlying, BSQType::g_typeLogicalTime); default: { assert(false); return nullptr; } } } const BSQType* jsonLoadEnumType(json v) { auto underlying = Environment::g_typenameToIDMap[v["underlying"].get<std::string>()].first; std::vector<std::pair<std::string, uint32_t>> enuminvs; auto jenuminvs = v["enuminvs"]; std::transform(jenuminvs.cbegin(), jenuminvs.cend(), std::back_inserter(enuminvs), [](json arg) { return std::make_pair(arg["enum"].get<std::string>(), arg["offset"].get<uint32_t>()); }); switch(underlying) { case BSQ_TYPE_ID_BOOL: return new BSQEnumType(j_tkey(v), j_name(v), BSQType::g_typeBool, enuminvs); case BSQ_TYPE_ID_NAT: return new BSQEnumType(j_tkey(v), j_name(v), BSQType::g_typeNat, enuminvs); case BSQ_TYPE_ID_INT: return new BSQEnumType(j_tkey(v), j_name(v), BSQType::g_typeInt, enuminvs); case BSQ_TYPE_ID_BIGNAT: return new BSQEnumType(j_tkey(v), j_name(v), BSQType::g_typeBigNat, enuminvs); case BSQ_TYPE_ID_BIGINT: return new BSQEnumType(j_tkey(v), j_name(v), BSQType::g_typeBigInt, enuminvs); case BSQ_TYPE_ID_STRING: return new BSQEnumType(j_tkey(v), j_name(v), BSQType::g_typeRational, enuminvs); default: { assert(false); return nullptr; } } } const BSQType* jsonLoadListType(json v) { BSQTypeID etype = Environment::g_typenameToIDMap[v["etype"].get<std::string>()].first; uint64_t esize = v["esize"].get<uint64_t>(); RefMask emask = jsonLoadRefMask(v["emask"]); BSQListFlatKType<4>* list4 = new BSQListFlatKType<4>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons4", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list4->name + std::string("]")] = {list4->tid, list4}; BSQListFlatKType<8>* list8 = new BSQListFlatKType<8>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons8", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list8->name + std::string("]")] = 
{list8->tid, list8}; BSQListFlatKType<12>* list12 = new BSQListFlatKType<12>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons12", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list12->name + std::string("]")] = {list12->tid, list12}; BSQListFlatKType<16>* list16 = new BSQListFlatKType<16>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons16", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list16->name + std::string("]")] = {list16->tid, list16}; BSQListFlatKType<24>* list24 = new BSQListFlatKType<24>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons24", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list24->name + std::string("]")] = {list24->tid, list24}; BSQListFlatKType<32>* list32 = new BSQListFlatKType<32>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons32", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list32->name + std::string("]")] = {list32->tid, list32}; BSQListFlatKType<40>* list40 = new BSQListFlatKType<40>((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_cons40", esize, emask); Environment::g_typenameToIDMap[std::string("[") + list40->name + std::string("]")] = {list40->tid, list40}; BSQListSliceType* slice = new BSQListSliceType((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_consslice"); Environment::g_typenameToIDMap[std::string("[") + slice->name + std::string("]")] = {slice->tid, slice}; BSQListConcatType* concat = new BSQListConcatType((BSQTypeID)Environment::g_typenameToIDMap.size(), j_name(v) + "_consconcat"); Environment::g_typenameToIDMap[std::string("[") + concat->name + std::string("]")] = {concat->tid, concat}; BSQListType* ltype = new BSQListType(j_tkey(v), j_name(v), esize, etype); BSQListType::g_listTypeMap[j_tkey(v)] = ListTypeConstructorInfo{ ltype, list4, list8, list12, list16, list24, list32, list40, slice, concat, {{4, list4}, {8, list8}, {12, list12}, 
{16, list16}, {24, list24}, {32, list32}, {40, list40}} }; return ltype; } const BSQType* jsonLoadTupleType(json v) { auto tid = j_tkey(v); auto tkind = j_tkind(v); auto allocinfo = j_allocinfo(v); std::map<BSQVirtualInvokeID, BSQInvokeID> vtable; j_vtable(vtable, v); BSQTupleIndex maxIndex = v["maxIndex"].get<BSQTupleIndex>(); std::vector<BSQTypeID> ttypes; auto ttlist = v["ttypes"]; std::transform(ttlist.cbegin(), ttlist.cend(), std::back_inserter(ttypes), [](json ttype) { return Environment::g_typenameToIDMap[ttype.get<std::string>()].first; }); std::vector<size_t> idxoffsets; auto idxlist = v["idxoffsets"]; std::transform(idxlist.cbegin(), idxlist.cend(), std::back_inserter(idxoffsets), [](json offset) { return offset.get<size_t>(); }); if(tkind == BSQTypeKind::Ref) { return new BSQTupleRefType(tid, allocinfo.heapsize, allocinfo.heapmask, vtable, j_name(v), maxIndex, ttypes, idxoffsets); } else { return new BSQTupleStructType(tid, allocinfo.inlinedatasize, allocinfo.inlinedmask, vtable, j_name(v), maxIndex, ttypes, idxoffsets); } } const BSQType* jsonLoadRecordType(json v) { auto tid = j_tkey(v); auto tkind = j_tkind(v); auto allocinfo = j_allocinfo(v); std::map<BSQVirtualInvokeID, BSQInvokeID> vtable; j_vtable(vtable, v); std::vector<BSQRecordPropertyID> propertynames; auto pnlist = v["propertynames"]; std::transform(pnlist.cbegin(), pnlist.cend(), std::back_inserter(propertynames), [](json prop) { return Environment::g_propertynameToIDMap[prop.get<std::string>()]; }); std::vector<BSQTypeID> propertytypes; auto ptlist = v["propertytypes"]; std::transform(ptlist.cbegin(), ptlist.cend(), std::back_inserter(propertytypes), [](json rtype) { return Environment::g_typenameToIDMap[rtype.get<std::string>()].first; }); std::vector<size_t> propertyoffsets; auto polist = v["propertyoffsets"]; std::transform(polist.cbegin(), polist.cend(), std::back_inserter(propertyoffsets), [](json offset) { return offset.get<size_t>(); }); if(tkind == BSQTypeKind::Ref) { return new 
BSQRecordRefType(tid, allocinfo.heapsize, allocinfo.heapmask, vtable, j_name(v), propertynames, propertytypes, propertyoffsets); } else { return new BSQRecordStructType(tid, allocinfo.inlinedatasize, allocinfo.inlinedmask, vtable, j_name(v), propertynames, propertytypes, propertyoffsets); } } const BSQType* jsonLoadEntityType(json v) { auto tid = j_tkey(v); auto tkind = j_tkind(v); auto allocinfo = j_allocinfo(v); std::map<BSQVirtualInvokeID, BSQInvokeID> vtable; j_vtable(vtable, v); std::vector<BSQFieldID> fieldnames; auto fnlist = v["fieldnames"]; std::transform(fnlist.cbegin(), fnlist.cend(), std::back_inserter(fieldnames), [](json fname) { return Environment::g_fieldnameToIDMap[fname.get<std::string>()]; }); std::vector<BSQTypeID> fieldtypes; auto ftlist = v["fieldtypes"]; std::transform(ftlist.cbegin(), ftlist.cend(), std::back_inserter(fieldtypes), [](json ftype) { return Environment::g_typenameToIDMap[ftype.get<std::string>()].first; }); std::vector<size_t> fieldoffsets; auto folist = v["fieldoffsets"]; std::transform(folist.cbegin(), folist.cend(), std::back_inserter(fieldoffsets), [](json offset) { return offset.get<size_t>(); }); if(tkind == BSQTypeKind::Ref) { return new BSQEntityRefType(tid, allocinfo.heapsize, allocinfo.heapmask, vtable, j_name(v), fieldnames, fieldtypes, fieldoffsets); } else { return new BSQEntityStructType(tid, allocinfo.inlinedatasize, allocinfo.inlinedmask, vtable, j_name(v), fieldnames, fieldtypes, fieldoffsets); } } const BSQType* jsonLoadEphemeralListType(json v) { auto allocinfo = j_allocinfo(v); std::vector<BSQTypeID> etypes; auto etlist = v["etypes"]; std::transform(etlist.cbegin(), etlist.cend(), std::back_inserter(etypes), [](json ttype) { return Environment::g_typenameToIDMap[ttype.get<std::string>()].first; }); std::vector<size_t> idxoffsets; auto eolist = v["idxoffsets"]; std::transform(eolist.cbegin(), eolist.cend(), std::back_inserter(idxoffsets), [](json offset) { return offset.get<size_t>(); }); return new 
BSQEphemeralListType(j_tkey(v), allocinfo.inlinedatasize, allocinfo.inlinedmask, j_name(v), etypes, idxoffsets); } const BSQType* jsonLoadInlineUnionType(json v) { auto allocinfo = j_allocinfo(v); std::vector<BSQTypeID> subtypes; auto stlist = v["subtypes"]; std::transform(stlist.cbegin(), stlist.cend(), std::back_inserter(subtypes), [](json ttype) { return Environment::g_typenameToIDMap[ttype.get<std::string>()].first; }); return new BSQUnionInlineType(j_tkey(v), allocinfo.inlinedatasize, allocinfo.inlinedmask, j_name(v), subtypes); } const BSQType* jsonLoadRefUnionType(json v) { std::vector<BSQTypeID> subtypes; auto stlist = v["subtypes"]; std::transform(stlist.cbegin(), stlist.cend(), std::back_inserter(subtypes), [](json ttype) { return Environment::g_typenameToIDMap[ttype.get<std::string>()].first; }); return new BSQUnionRefType(j_tkey(v), j_name(v), subtypes); } enum class ICPPParseTag { BuiltinTag = 0x0, ValidatorTag, StringOfTag, DataStringTag, TypedNumberTag, VectorTag, ListTag, StackTag, QueueTag, SetTag, MapTag, TupleTag, RecordTag, EntityTag, EphemeralListTag, EnumTag, InlineUnionTag, RefUnionTag }; void jsonLoadBSQTypeDecl(json v) { const BSQType* ttype = nullptr; ICPPParseTag ptag = v["ptag"].get<ICPPParseTag>(); switch(ptag) { case ICPPParseTag::ValidatorTag: ttype = jsonLoadValidatorType(v); break; case ICPPParseTag::StringOfTag: ttype = jsonLoadStringOfType(v); break; case ICPPParseTag::DataStringTag: ttype = jsonLoadDataStringType(v); break; case ICPPParseTag::TypedNumberTag: ttype = jsonLoadTypedNumberType(v); break; case ICPPParseTag::EnumTag: ttype = jsonLoadEnumType(v); break; case ICPPParseTag::ListTag: ttype = jsonLoadListType(v); break; case ICPPParseTag::TupleTag: ttype = jsonLoadTupleType(v); break; case ICPPParseTag::RecordTag: ttype = jsonLoadRecordType(v); break; case ICPPParseTag::EntityTag: ttype = jsonLoadEntityType(v); break; case ICPPParseTag::EphemeralListTag: ttype = jsonLoadEphemeralListType(v); break; case 
ICPPParseTag::InlineUnionTag: ttype = jsonLoadInlineUnionType(v); break; case ICPPParseTag::RefUnionTag: ttype = jsonLoadRefUnionType(v); break; default: assert(false); break; } Environment::g_typenameToIDMap[ttype->name].second = ttype; } void jsonLoadBSQLiteralDecl(json v, size_t& storageOffset, const BSQType*& gtype, std::string& lval) { storageOffset = v["offset"].get<size_t>(); gtype = BSQType::g_typetable[Environment::g_typenameToIDMap[v["storage"].get<std::string>()].first]; lval = v["value"].get<std::string>(); } void jsonLoadBSQConstantDecl(json v, size_t& storageOffset, BSQInvokeID& ikey, const BSQType*& gtype) { storageOffset = v["storageOffset"].get<size_t>(); ikey = Environment::g_invokenameToIDMap[v["valueInvoke"].get<std::string>()]; gtype = BSQType::g_typetable[Environment::g_typenameToIDMap[v["ctype"].get<std::string>()].first]; } void BSQInvokeDecl::jsonLoad(json v) { BSQInvokeDecl* dcl = nullptr; if(v["isbuiltin"].get<bool>()) { dcl = BSQInvokePrimitiveDecl::jsonLoad(v); } else { dcl = BSQInvokeBodyDecl::jsonLoad(v); } Environment::g_invokes[dcl->ikey] = dcl; } BSQInvokeBodyDecl* BSQInvokeBodyDecl::jsonLoad(json v) { auto ikey = Environment::g_invokenameToIDMap[v["ikey"].get<std::string>()]; auto srcfile = v["srcFile"].get<std::string>(); auto recursive = v["recursive"].get<bool>(); std::vector<BSQFunctionParameter> params; auto jparams = v["params"]; std::transform(jparams.cbegin(), jparams.cend(), std::back_inserter(params), [](json param) { auto ptype = BSQType::g_typetable[Environment::g_typenameToIDMap[param["ptype"].get<std::string>()].first]; return BSQFunctionParameter{j_name(param), ptype}; }); auto rtype = BSQType::g_typetable[Environment::g_typenameToIDMap[v["resultType"].get<std::string>()].first]; Argument resultArg = { v["resultArg"]["kind"].get<ArgumentTag>(), v["resultArg"]["location"].get<uint32_t>() }; const RefMask mask = jsonLoadRefMask(v["mixedStackMask"]); std::vector<InterpOp*> body; auto jbody = v["body"]; 
std::transform(jbody.cbegin(), jbody.cend(), std::back_inserter(body), [](json jop) { return InterpOp::jparse(jop); }); return new BSQInvokeBodyDecl(j_name(v), ikey, srcfile, j_sinfo(v), recursive, params, rtype, resultArg, v["scalarStackBytes"].get<size_t>(), v["mixedStackBytes"].get<size_t>(), mask, v["maskSlots"].get<uint32_t>(), body, v["argmaskSize"].get<uint32_t>()); } BSQInvokePrimitiveDecl* BSQInvokePrimitiveDecl::jsonLoad(json v) { auto ikey = Environment::g_invokenameToIDMap[v["ikey"].get<std::string>()]; auto srcfile = v["srcFile"].get<std::string>(); auto recursive = v["recursive"].get<bool>(); std::vector<BSQFunctionParameter> params; auto jparams = v["params"]; std::transform(jparams.cbegin(), jparams.cend(), std::back_inserter(params), [](json param) { auto ptype = BSQType::g_typetable[Environment::g_typenameToIDMap[param["ptype"].get<std::string>()].first]; return BSQFunctionParameter{j_name(param), ptype}; }); auto rtype = BSQType::g_typetable[Environment::g_typenameToIDMap[v["resultType"].get<std::string>()].first]; const RefMask mask = jsonLoadRefMask(v["mixedStackMask"]); const BSQType* enclosingtype = nullptr; if(v.contains("enclosingtype") && v["enclosingtype"].is_string()) { enclosingtype = BSQType::g_typetable[Environment::g_typenameToIDMap[v["enclosingtype"].get<std::string>()].first]; } std::string implkeyname = v["implkeyname"].get<std::string>(); BSQPrimitiveImplTag implkey = Environment::g_primitiveinvokenameToIDMap[implkeyname]; std::map<std::string, std::pair<uint32_t, const BSQType*>> scalaroffsetMap; auto jsom = v["scalaroffsetMap"]; std::for_each(jsom.cbegin(), jsom.cend(), [&scalaroffsetMap](json so) { auto name = so["name"].get<std::string>(); auto minfo = so["info"]; auto offset = minfo[0].get<uint32_t>(); auto otype = BSQType::g_typetable[Environment::g_typenameToIDMap[minfo[1].get<std::string>()].first]; scalaroffsetMap[name] = {offset, otype}; }); std::map<std::string, std::pair<uint32_t, const BSQType*>> mixedoffsetMap; auto 
jmom = v["mixedoffsetMap"]; std::for_each(jmom.cbegin(), jmom.cend(), [&mixedoffsetMap](json mo) { auto name = mo["name"].get<std::string>(); auto minfo = mo["info"]; auto offset = minfo[0].get<uint32_t>(); auto otype = BSQType::g_typetable[Environment::g_typenameToIDMap[minfo[1].get<std::string>()].first]; mixedoffsetMap[name] = {offset, otype}; }); std::map<std::string, const BSQType*> binds; auto jbinds = v["binds"]; std::for_each(jbinds.cbegin(), jbinds.cend(), [&binds](json b) { auto name = b["name"].get<std::string>(); auto otype = BSQType::g_typetable[Environment::g_typenameToIDMap[b["ttype"].get<std::string>()].first]; binds[name] = otype; }); std::map<std::string, BSQPCode*> pcodes; auto jpcodes = v["pcodes"]; std::for_each(jpcodes.cbegin(), jpcodes.cend(), [&pcodes](json pcode) { auto name = pcode["name"].get<std::string>(); auto pc = pcode["pc"]; auto code = Environment::g_invokenameToIDMap[pc["code"].get<std::string>()]; std::vector<Argument> cargs; auto jcargs = pc["cargs"]; std::transform(jcargs.cbegin(), jcargs.cend(), std::back_inserter(cargs), [](json carg) { return Argument{ carg["kind"].get<ArgumentTag>(), carg["location"].get<uint32_t>() }; }); pcodes[name] = new BSQPCode(code, cargs); }); return new BSQInvokePrimitiveDecl(j_name(v), ikey, srcfile, j_sinfo(v), recursive, params, rtype, v["scalarStackBytes"].get<size_t>(), v["mixedStackBytes"].get<size_t>(), mask, v["maskSlots"].get<uint32_t>(), enclosingtype, implkey, implkeyname, scalaroffsetMap, mixedoffsetMap, binds, pcodes); }
9,308
1,003
/* Copyright (c) 2017-2020 <NAME>
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

// Concrete factory wiring for the global manager singletons. This translation
// unit is the one place that knows about the concrete manager implementations;
// optional subsystems (audio, physics) are compiled in only when the
// corresponding HAVE_GRANITE_* macro is defined and their factory methods
// return nullptr otherwise.

#include "global_managers_init.hpp"
#include "global_managers.hpp"
#include "event.hpp"
#include "thread_group.hpp"
#include "filesystem.hpp"
#include "common_renderer_data.hpp"
#include "ui_manager.hpp"

#ifdef HAVE_GRANITE_AUDIO
#include "audio_mixer.hpp"
#include "audio_interface.hpp"
#endif

#ifdef HAVE_GRANITE_PHYSICS
#include "physics_system.hpp"
#endif

namespace Granite
{
namespace Global
{
// Implements the abstract Factory by constructing the default (concrete)
// manager for each interface. Ownership of the returned raw pointers is
// taken by the caller (the global-manager registry).
struct FactoryImplementation : Factory
{
	FilesystemInterface *create_filesystem() override
	{
		return new Filesystem;
	}

	EventManagerInterface *create_event_manager() override
	{
		return new EventManager;
	}

	ThreadGroupInterface *create_thread_group() override
	{
		return new ThreadGroup;
	}

	CommonRendererDataInterface *create_common_renderer_data() override
	{
		return new CommonRendererData;
	}

	UI::UIManagerInterface *create_ui_manager() override
	{
		return new UI::UIManager;
	}

	// Returns nullptr when the build has no audio support.
	Audio::MixerInterface *create_audio_mixer() override
	{
#ifdef HAVE_GRANITE_AUDIO
		return new Audio::Mixer;
#else
		return nullptr;
#endif
	}

	// Returns nullptr when audio is disabled or no mixer was provided.
	Audio::BackendInterface *create_audio_backend(Audio::MixerInterface *iface,
	                                              float sample_rate,
	                                              unsigned channels) override
	{
#ifdef HAVE_GRANITE_AUDIO
		if (iface)
			return Audio::create_default_audio_backend(static_cast<Audio::Mixer *>(iface), sample_rate, channels);
		else
			return nullptr;
#else
		// Silence unused-parameter warnings in audio-less builds.
		(void)iface;
		(void)sample_rate;
		(void)channels;
		return nullptr;
#endif
	}

	// Returns nullptr when the build has no physics support.
	PhysicsSystemInterface *create_physics_system() override
	{
#ifdef HAVE_GRANITE_PHYSICS
		return new PhysicsSystem;
#else
		return nullptr;
#endif
	}
};

// Single process-wide factory instance handed to the generic init() below.
static FactoryImplementation factory;

// Convenience entry point: initialize the requested global managers using the
// default factory. Forwards to the factory-taking overload declared elsewhere.
void init(ManagerFeatureFlags flags, unsigned max_threads)
{
	init(factory, flags, max_threads);
}
}
}
917
590
package com.camnter.easycountdowntextureview.demo; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v7.app.AppCompatActivity; import android.util.Log; import android.view.View; import com.camnter.easycountdowntextureview.EasyCountDownTextureView; /** * Description:SettingActivity * Created by:CaMnter * Time:2016-03-17 17:20 */ public class SettingActivity extends AppCompatActivity implements View.OnClickListener, EasyCountDownTextureView.EasyCountDownListener { private static final String TAG = "StyleActivity"; private EasyCountDownTextureView countdownText; @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); this.setContentView(R.layout.activity_setting); this.countdownText = (EasyCountDownTextureView) this.findViewById( R.id.setting_countdown_text); this.findViewById(R.id.setting_hour_button).setOnClickListener(this); this.findViewById(R.id.setting_minute_button).setOnClickListener(this); this.findViewById(R.id.setting_second_button).setOnClickListener(this); this.findViewById(R.id.setting_start_button).setOnClickListener(this); this.findViewById(R.id.setting_stop_button).setOnClickListener(this); this.countdownText.setEasyCountDownListener(this); } /** * Called when a view has been clicked. * * @param v The view that was clicked. 
*/ @Override public void onClick(View v) { switch (v.getId()) { case R.id.setting_hour_button: this.countdownText.setTimeHour(1); break; case R.id.setting_minute_button: this.countdownText.setTimeMinute(1); break; case R.id.setting_second_button: this.countdownText.setTimeSecond(1); break; case R.id.setting_start_button: this.countdownText.start(); break; case R.id.setting_stop_button: this.countdownText.stop(); break; } } /** * When count down start */ @Override public void onCountDownStart() { Log.i(TAG, "[" + TAG + "] [onCountDownStart]"); } /** * When count down time error */ @Override public void onCountDownTimeError() { } /** * When count down stop * * @param millisInFuture millisInFuture */ @Override public void onCountDownStop(long millisInFuture) { Log.i(TAG, "[" + TAG + "] [onCountDownStop]"); } /** * When count down completed */ @Override public void onCountDownCompleted() { } }
1,176
742
package org.support.project.knowledge.entity;

import org.support.project.knowledge.entity.gen.GenTokensEntity;

import java.util.List;
import java.util.Map;

import org.support.project.common.bean.ValidateError;
import org.support.project.di.Container;
import org.support.project.di.DI;
import org.support.project.di.Instance;

import java.sql.Timestamp;

/**
 * Authentication token entity.
 *
 * Thin, hand-edited wrapper over the generated {@link GenTokensEntity}; custom
 * behavior belongs here so it survives code regeneration. Registered in the DI
 * container as a prototype (a new instance per lookup).
 */
@DI(instance = Instance.Prototype)
public class TokensEntity extends GenTokensEntity {

    /** SerialVersion */
    private static final long serialVersionUID = 1L;

    /**
     * Get instance from DI container.
     *
     * @return a fresh instance (prototype scope)
     */
    public static TokensEntity get() {
        return Container.getComp(TokensEntity.class);
    }

    /**
     * Constructor.
     */
    public TokensEntity() {
        super();
    }

    /**
     * Constructor
     *
     * @param token TOKEN (the authentication token value; primary key of the row)
     */
    public TokensEntity(String token) {
        super(token);
    }
}
367
1,466
{ "/admin.js": "/admin.js", "/global.min.js": "/global.min.js", "/easemob.min.js": "/easemob.min.js" }
58
335
#!/usr/bin/env python

from __future__ import (
    absolute_import,
    division,
    print_function,
    unicode_literals,
)

from .helpers import bad_module_attribute_use


class BadHashlibUseLinter(bad_module_attribute_use.BadModuleAttributeUseLinter):
    """Flag insecure uses of the Python "hashlib" module.

    The md5 and sha1 algorithms have known hash collision weaknesses
    and should not be used where collision resistance matters.
    """
    off_by_default = False

    _code = 'DUO130'
    _error_tmpl = 'DUO130 insecure use of "hashlib" module'

    @property
    def illegal_module_attributes(self):
        # Digest constructors that are considered broken and must be flagged.
        weak_digests = [
            'md5',
            'sha1',
        ]
        return {'hashlib': weak_digests}
310
1,198
/* Copyright 2017-2019 Google Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing
permissions and limitations under the License.
*/

// GPU timing via the GL_EXT_disjoint_timer_query extension. Queries move
// through a small lifecycle: available_ -> active/pending_ -> ready_ (result
// resolved) or abandoned_ (caller no longer wants the result), and are then
// recycled back into available_.

#include "lullaby/systems/render/detail/gpu_profiler.h"

#include <string.h>

#include <EGL/egl.h>

#include "lullaby/util/logging.h"

// The FPL and Ion toolchains use different GL header include paths, so we need
// to include them explicitly for consistency.
#include "GLES2/gl2.h"

// The Android emulator can't resolve the disjoint timer extension functions
// when they're declared as prototypes, so we need to retrieve them ourselves
// from EGL.
#ifdef GL_GLEXT_PROTOTYPES
#undef GL_GLEXT_PROTOTYPES
#endif
#include "GLES2/gl2ext.h"

namespace lull {
namespace detail {

// Extension entry points resolved at runtime via eglGetProcAddress (see
// IsSupported below); NULL until the first IsSupported() call succeeds.
PFNGLGENQUERIESEXTPROC glGenQueriesEXT = NULL;
PFNGLDELETEQUERIESEXTPROC glDeleteQueriesEXT = NULL;
PFNGLISQUERYEXTPROC glIsQueryEXT = NULL;
PFNGLBEGINQUERYEXTPROC glBeginQueryEXT = NULL;
PFNGLENDQUERYEXTPROC glEndQueryEXT = NULL;
PFNGLQUERYCOUNTEREXTPROC glQueryCounterEXT = NULL;
PFNGLGETQUERYOBJECTIVEXTPROC glGetQueryObjectivEXT = NULL;
PFNGLGETQUERYOBJECTUI64VEXTPROC glGetQueryObjectui64vEXT = NULL;

GpuProfiler::GpuProfiler() {
  if (IsSupported()) {
    // Pre-generate a small pool of query objects so the first few markers and
    // timers don't have to call glGenQueriesEXT mid-frame.
    static const int kNumInitialQueries = 4;
    Query pool[kNumInitialQueries];
    glGenQueriesEXT(kNumInitialQueries, pool);
    for (int i = 0; i < kNumInitialQueries; ++i) {
      available_.push(pool[i]);
    }

    // Clear disjoint flag.
    // Reading GL_GPU_DISJOINT_EXT resets it, so stale disjoint events from
    // before profiler construction won't invalidate our first results.
    GLint disjoint = 0;
    glGetIntegerv(GL_GPU_DISJOINT_EXT, &disjoint);
  }
}

GpuProfiler::~GpuProfiler() {
  // Release every query object we own, whatever lifecycle stage it is in.
  while (!pending_.empty()) {
    Query query = pending_.front();
    glDeleteQueriesEXT(1, &query);
    pending_.pop_front();
  }

  while (!available_.empty()) {
    Query query = available_.front();
    glDeleteQueriesEXT(1, &query);
    available_.pop();
  }

  for (Query query : abandoned_) {
    glDeleteQueriesEXT(1, &query);
  }

  for (auto it : ready_) {
    glDeleteQueriesEXT(1, &it.first);
  }

  for (Query query : active_timers_) {
    glDeleteQueriesEXT(1, &query);
  }
}

// Returns true if |query| is a currently-open (BeginTimer'd, not yet ended) timer.
bool GpuProfiler::IsActiveTimer(Query query) const {
  bool found = false;
  for (size_t i = 0; !found && i < active_timers_.size(); ++i) {
    found = (active_timers_[i] == query);
  }
  return found;
}

// If |query|'s result has been resolved by PollQueries, writes it to
// |out_nanoseconds|, recycles the query, and returns true; otherwise false.
bool GpuProfiler::GetTime(Query query, uint64_t *out_nanoseconds) {
  auto it = ready_.find(query);
  if (it != ready_.end()) {
    *out_nanoseconds = it->second;
    ready_.erase(it);
    available_.push(query);
    return true;
  }
  return false;
}

// Declares that the caller no longer wants |query|'s result. A still-pending
// query is marked abandoned (recycled later by PollQueries); an already-ready
// query is recycled immediately.
void GpuProfiler::Abandon(Query query) {
  if (query == kInvalidQuery) {
    return;
  }

  DCHECK(!IsActiveTimer(query)) << "Can't abandon an active timer.";
  DCHECK(glIsQueryEXT(query));

  // Mark the query as abandoned only if it's still pending.
  if (std::find(pending_.begin(), pending_.end(), query) != pending_.end()) {
    abandoned_.emplace(query);
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    LOG(INFO) << "GpuProfiler abandoned pending query " << query;
#endif
  } else if (ready_.erase(query) > 0) {
    available_.push(query);
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    LOG(INFO) << "GpuProfiler abandoned ready query " << query;
#endif
  } else {
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    const char *adjective = "unknown";
    if (abandoned_.find(query) != abandoned_.end()) {
      adjective = "abandoned";
    }
    LOG(WARNING) << "GpuProfiler tried to abandon " << adjective << " query "
                 << query;
#endif
  }
}

// Pops a recycled query from the pool, generating a fresh one if the pool is empty.
GpuProfiler::Query GpuProfiler::GetAvailableQuery() {
  Query query = kInvalidQuery;
  if (!available_.empty()) {
    query = available_.front();
    available_.pop();
  } else {
    glGenQueriesEXT(1, &query);
  }
  return query;
}

// Records a timestamp query at the current point in the GL command stream.
GpuProfiler::Query GpuProfiler::SetMarker() {
  if (!IsSupported()) {
    return kInvalidQuery;
  }

  Query query = GetAvailableQuery();
  if (query != kInvalidQuery) {
    glQueryCounterEXT(query, GL_TIMESTAMP_EXT);
    pending_.push_back(query);
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    LOG(INFO) << "GpuProfiler set marker " << query;
#endif
  }
  return query;
}

// Starts an elapsed-time query. Timers nest as a stack: each BeginTimer must be
// matched by an EndTimer for the most recent timer (see CHECK in EndTimer).
GpuProfiler::Query GpuProfiler::BeginTimer() {
  if (!IsSupported()) {
    return kInvalidQuery;
  }

  Query query = GetAvailableQuery();
  if (query != kInvalidQuery) {
    glBeginQueryEXT(GL_TIME_ELAPSED_EXT, query);
    active_timers_.push_back(query);
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    LOG(INFO) << "GpuProfiler begin timer " << query;
#endif
  }
  return query;
}

// Ends the innermost active timer; |query| must be the value BeginTimer returned.
void GpuProfiler::EndTimer(Query query) {
  if (query != kInvalidQuery) {
    CHECK_EQ(query, active_timers_.back());
    glEndQueryEXT(GL_TIME_ELAPSED_EXT);
    active_timers_.pop_back();
    pending_.push_back(query);
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    LOG(INFO) << "GpuProfiler end timer " << query;
#endif
  }
}

// Drains every pending query whose result is available, moving results into
// ready_ (or recycling abandoned queries). Stops at the first query whose
// result is not yet available, since results complete in submission order.
void GpuProfiler::PollQueries() {
  // Adapted from ion::gfxprofile::GpuProfiler::PollGlTimerQueries.
  if (!IsSupported()) {
    return;
  }

  bool has_checked_disjoint = false;
  bool was_disjoint = false;
  for (;;) {
    if (pending_.empty()) {
      // No queries pending.
      return;
    }

    Query query = pending_.front();
    GLint available = 0;
    glGetQueryObjectivEXT(query, GL_QUERY_RESULT_AVAILABLE_EXT, &available);
    if (!available) {
      // No queries available.
      return;
    }

    // Found an available query, remove it from pending queue.
    pending_.pop_front();

    if (!has_checked_disjoint) {
      // Check if we need to ignore the result of the timer query because
      // of some kind of disjoint GPU event such as heat throttling.
      // If so, we ignore all events that are available during this loop.
      has_checked_disjoint = true;
      GLint disjoint_occurred = 0;
      glGetIntegerv(GL_GPU_DISJOINT_EXT, &disjoint_occurred);
      was_disjoint = (disjoint_occurred != 0);
      if (was_disjoint) {
        LOG(WARNING) << "Skipping disjoint GPU events";
      }
    }

    auto abandoned_it = abandoned_.find(query);
    if (abandoned_it != abandoned_.end()) {
      // Nobody wants this result; recycle the query without recording it.
      available_.push(query);
      abandoned_.erase(abandoned_it);
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
      LOG(INFO) << "GpuProfiler finished abandoned query " << query;
#endif
      continue;
    }

    // A disjoint event invalidates the measurement; report 0 in that case.
    uint64_t elapsed = 0;
    if (!was_disjoint) {
      glGetQueryObjectui64vEXT(query, GL_QUERY_RESULT_EXT, &elapsed);
    }
    ready_[query] = elapsed;
#ifdef LULLABY_GPU_PROFILER_LOG_USAGE
    LOG(INFO) << "GpuProfiler resolved query " << query;
#endif
  }
}

void GpuProfiler::BeginFrame() { PollQueries(); }

void GpuProfiler::EndFrame() {}

// Resolves an extension entry point by name through EGL.
#define LOOKUP_GL_FUNC(type, name) name = (type)eglGetProcAddress(#name)

// One-time (thread-safe static init) capability check: profiling is compiled
// out in release builds (NDEBUG) and otherwise requires the
// GL_EXT_disjoint_timer_query extension, whose entry points are resolved here.
bool GpuProfiler::IsSupported() {
  static const bool available = []() {
#ifdef NDEBUG
    // Profiling is a debug-only facility; always report unsupported in release.
    return false;
#endif
    const GLubyte *ext = glGetString(GL_EXTENSIONS);
    if (!ext || strstr(reinterpret_cast<const char *>(ext),
                       "GL_EXT_disjoint_timer_query") == NULL) {
      return false;
    }

    LOG(INFO) << "Found disjoint timer extension.";

    LOOKUP_GL_FUNC(PFNGLGENQUERIESEXTPROC, glGenQueriesEXT);
    LOOKUP_GL_FUNC(PFNGLDELETEQUERIESEXTPROC, glDeleteQueriesEXT);
    LOOKUP_GL_FUNC(PFNGLISQUERYEXTPROC, glIsQueryEXT);
    LOOKUP_GL_FUNC(PFNGLBEGINQUERYEXTPROC, glBeginQueryEXT);
    LOOKUP_GL_FUNC(PFNGLENDQUERYEXTPROC, glEndQueryEXT);
    LOOKUP_GL_FUNC(PFNGLQUERYCOUNTEREXTPROC, glQueryCounterEXT);
    LOOKUP_GL_FUNC(PFNGLGETQUERYOBJECTIVEXTPROC, glGetQueryObjectivEXT);
    LOOKUP_GL_FUNC(PFNGLGETQUERYOBJECTUI64VEXTPROC, glGetQueryObjectui64vEXT);

    return true;
  }();
  return available;
}

}  // namespace detail
}  // namespace lull
11,094
# Parse the string value and add an integer offset.
#
# The original code did `a = a + 2` while `a` was still the string "1",
# which raises `TypeError: can only concatenate str (not "int") to str`
# in Python 3. Convert to int before the arithmetic so the script prints 3.
a = "1"
a = int(a) + 2
print(a)
16
2,872
<filename>Ryven/packages/auto_generated/mimetypes/nodes.py from NENV import * import mimetypes class NodeBase(Node): pass class _Default_Mime_Types_Node(NodeBase): """ """ title = '_default_mime_types' type_ = 'mimetypes' init_inputs = [ ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes._default_mime_types()) class _Main_Node(NodeBase): """ """ title = '_main' type_ = 'mimetypes' init_inputs = [ ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes._main()) class Add_Type_Node(NodeBase): """ Add a mapping between a type and an extension. When the extension is already known, the new type will replace the old one. When the type is already known the extension will be added to the list of known extensions. If strict is true, information will be added to list of standard types, else to the list of non-standard types. """ title = 'add_type' type_ = 'mimetypes' init_inputs = [ NodeInputBP(label='type'), NodeInputBP(label='ext'), NodeInputBP(label='strict', dtype=dtypes.Data(default=True, size='s')), ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes.add_type(self.input(0), self.input(1), self.input(2))) class Guess_All_Extensions_Node(NodeBase): """ Guess the extensions for a file based on its MIME type. Return value is a list of strings giving the possible filename extensions, including the leading dot ('.'). The extension is not guaranteed to have been associated with any particular data stream, but would be mapped to the MIME type `type' by guess_type(). If no extension can be guessed for `type', None is returned. Optional `strict' argument when false adds a bunch of commonly found, but non-standard types. 
""" title = 'guess_all_extensions' type_ = 'mimetypes' init_inputs = [ NodeInputBP(label='type'), NodeInputBP(label='strict', dtype=dtypes.Data(default=True, size='s')), ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes.guess_all_extensions(self.input(0), self.input(1))) class Guess_Extension_Node(NodeBase): """ Guess the extension for a file based on its MIME type. Return value is a string giving a filename extension, including the leading dot ('.'). The extension is not guaranteed to have been associated with any particular data stream, but would be mapped to the MIME type `type' by guess_type(). If no extension can be guessed for `type', None is returned. Optional `strict' argument when false adds a bunch of commonly found, but non-standard types. """ title = 'guess_extension' type_ = 'mimetypes' init_inputs = [ NodeInputBP(label='type'), NodeInputBP(label='strict', dtype=dtypes.Data(default=True, size='s')), ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes.guess_extension(self.input(0), self.input(1))) class Guess_Type_Node(NodeBase): """ Guess the type of a file based on its URL. Return value is a tuple (type, encoding) where type is None if the type can't be guessed (no or unknown suffix) or a string of the form type/subtype, usable for a MIME Content-type header; and encoding is None for no encoding or the name of the program used to encode (e.g. compress or gzip). The mappings are table driven. Encoding suffixes are case sensitive; type suffixes are first tried case sensitive, then case insensitive. The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped to ".tar.gz". (This is table-driven too, using the dictionary suffix_map). Optional `strict' argument when false adds a bunch of commonly found, but non-standard types. 
""" title = 'guess_type' type_ = 'mimetypes' init_inputs = [ NodeInputBP(label='url'), NodeInputBP(label='strict', dtype=dtypes.Data(default=True, size='s')), ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes.guess_type(self.input(0), self.input(1))) class Init_Node(NodeBase): """ """ title = 'init' type_ = 'mimetypes' init_inputs = [ NodeInputBP(label='files', dtype=dtypes.Data(default=None, size='s')), ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes.init(self.input(0))) class Read_Mime_Types_Node(NodeBase): """ """ title = 'read_mime_types' type_ = 'mimetypes' init_inputs = [ NodeInputBP(label='file'), ] init_outputs = [ NodeOutputBP(type_='data'), ] color = '#32DA22' def update_event(self, inp=-1): self.set_output_val(0, mimetypes.read_mime_types(self.input(0))) export_nodes( _Default_Mime_Types_Node, _Main_Node, Add_Type_Node, Guess_All_Extensions_Node, Guess_Extension_Node, Guess_Type_Node, Init_Node, Read_Mime_Types_Node, )
2,411
4,224
<filename>src/drivers/optical_flow/paw3902/PAW3902.hpp<gh_stars>1000+ /**************************************************************************** * * Copyright (c) 2019 PX4 Development Team. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * 3. Neither the name PX4 nor the names of its contributors may be * used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * ****************************************************************************/ /** * @file PAW3902.hpp * * Driver for the Pixart PAW3902 & PAW3903 optical flow sensors connected via SPI. 
*/ #pragma once #include "PixArt_PAW3902JF_Registers.hpp" #include <px4_platform_common/px4_config.h> #include <px4_platform_common/defines.h> #include <px4_platform_common/getopt.h> #include <px4_platform_common/i2c_spi_buses.h> #include <drivers/device/spi.h> #include <conversion/rotation.h> #include <lib/perf/perf_counter.h> #include <lib/parameters/param.h> #include <drivers/drv_hrt.h> #include <uORB/PublicationMulti.hpp> #include <uORB/topics/optical_flow.h> using namespace time_literals; using namespace PixArt_PAW3902JF; #define DIR_WRITE(a) ((a) | (1 << 7)) #define DIR_READ(a) ((a) & 0x7f) class PAW3902 : public device::SPI, public I2CSPIDriver<PAW3902> { public: PAW3902(const I2CSPIDriverConfig &config); virtual ~PAW3902(); static void print_usage(); int init() override; void print_status() override; void RunImpl(); private: void exit_and_cleanup() override; int probe() override; static int DataReadyInterruptCallback(int irq, void *context, void *arg); void DataReady(); bool DataReadyInterruptConfigure(); bool DataReadyInterruptDisable(); uint8_t RegisterRead(uint8_t reg, int retries = 2); void RegisterWrite(uint8_t reg, uint8_t data); bool RegisterWriteVerified(uint8_t reg, uint8_t data, int retries = 1); void EnableLed(); void ModeBright(); void ModeLowLight(); void ModeSuperLowLight(); bool ChangeMode(Mode newMode, bool force = false); void ResetAccumulatedData(); uORB::PublicationMulti<optical_flow_s> _optical_flow_pub{ORB_ID(optical_flow)}; perf_counter_t _sample_perf{perf_alloc(PC_ELAPSED, MODULE_NAME": read")}; perf_counter_t _interval_perf{perf_alloc(PC_INTERVAL, MODULE_NAME": interval")}; perf_counter_t _comms_errors{perf_alloc(PC_COUNT, MODULE_NAME": com err")}; perf_counter_t _false_motion_perf{perf_alloc(PC_COUNT, MODULE_NAME": false motion report")}; perf_counter_t _register_write_fail_perf{perf_alloc(PC_COUNT, MODULE_NAME": verified register write failed")}; perf_counter_t _mode_change_bright_perf{perf_alloc(PC_COUNT, MODULE_NAME": mode change 
bright (0)")}; perf_counter_t _mode_change_low_light_perf{perf_alloc(PC_COUNT, MODULE_NAME": mode change low light (1)")}; perf_counter_t _mode_change_super_low_light_perf{perf_alloc(PC_COUNT, MODULE_NAME": mode change super low light (2)")}; static constexpr uint64_t COLLECT_TIME{15000}; // 15 milliseconds, optical flow data publish rate const spi_drdy_gpio_t _drdy_gpio; uint64_t _previous_collect_timestamp{0}; uint64_t _flow_dt_sum_usec{0}; uint8_t _flow_sample_counter{0}; uint16_t _flow_quality_sum{0}; matrix::Dcmf _rotation; int _discard_reading{3}; int _flow_sum_x{0}; int _flow_sum_y{0}; Mode _mode{Mode::LowLight}; uint32_t _scheduled_interval_us{SAMPLE_INTERVAL_MODE_1}; int _bright_to_low_counter{0}; int _low_to_superlow_counter{0}; int _low_to_bright_counter{0}; int _superlow_to_low_counter{0}; int _valid_count{0}; bool _data_ready_interrupt_enabled{false}; hrt_abstime _last_good_publish{0}; hrt_abstime _last_reset{0}; };
1,745
5,169
{ "name": "LYPaymentField", "version": "1.2.2", "summary": "可以定制密码支付框或使用已经封装好的UI.You can customize the password payment box or use the already encapsulated controls.", "homepage": "https://github.com/cityleaf/LYPaymentField", "license": "MIT", "authors": { "cityleaf": "<EMAIL>" }, "platforms": { "ios": null }, "source": { "git": "https://github.com/cityleaf/LYPaymentField.git", "tag": "1.2.2" }, "source_files": [ "PaymentField", "PaymentField/**/*.{h,m}" ], "frameworks": "UIKit", "requires_arc": true }
256
679
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef _TARGETDRAGCONTEXT_HXX_
#define _TARGETDRAGCONTEXT_HXX_

#include <cppuhelper/implbase1.hxx>
#include <com/sun/star/datatransfer/dnd/XDropTargetDragContext.hpp>
#include <com/sun/star/datatransfer/DataFlavor.hpp>

#include "target.hxx"

using namespace ::com::sun::star::datatransfer;
using namespace ::com::sun::star::datatransfer::dnd;
using namespace ::cppu;
using namespace ::com::sun::star::uno;
using namespace ::com::sun::star::lang;

// UNO service object implementing XDropTargetDragContext. It is handed to
// drop-target listeners during a drag so they can accept or reject the
// current drag operation; the actual work is forwarded to the owning
// DropTarget instance.
class TargetDragContext: public WeakImplHelper1<XDropTargetDragContext>
{
    // Non-owning pointer to the drop target that created this context.
    // Calls to acceptDrag/rejectDrag are delegated to non-interface
    // functions of m_pDropTarget.
    DropTarget* m_pDropTarget;

    // Default construction, copying, and assignment are disallowed:
    // a context is only meaningful when bound to a DropTarget.
    TargetDragContext();
    TargetDragContext( const TargetDragContext&);
    TargetDragContext &operator= ( const TargetDragContext&);

public:
    // Binds the context to the DropTarget that forwards drag events.
    TargetDragContext( DropTarget* pTarget);
    ~TargetDragContext();

    // Accept the drag with the given operation (see the
    // com.sun.star.datatransfer.dnd DNDConstants for dragOperation values).
    virtual void SAL_CALL acceptDrag( sal_Int8 dragOperation )
        throw( RuntimeException);
    // Reject the current drag; no drop will take place on this target.
    virtual void SAL_CALL rejectDrag( )
        throw( RuntimeException);
};

#endif
560
3,227
// Copyright (c) 1997
// Utrecht University (The Netherlands),
// ETH Zurich (Switzerland),
// INRIA Sophia-Antipolis (France),
// Max-Planck-Institute Saarbruecken (Germany),
// and Tel-Aviv University (Israel).  All rights reserved.
//
// This file is part of CGAL (www.cgal.org)
//
// $URL$
// $Id$
// SPDX-License-Identifier: LGPL-3.0-or-later OR LicenseRef-Commercial
//
//
// Author(s)     : <NAME> <<EMAIL>>

#ifndef CGAL_IO_ISTREAM_ITERATOR_H
#define CGAL_IO_ISTREAM_ITERATOR_H

#include <CGAL/circulator.h>

namespace CGAL {

/*!
\ingroup PkgStreamSupportRef

The class `Istream_iterator` is an input iterator adaptor for the
input stream class `Stream` and value type `T`. It is particularly
useful for classes that are similar but not compatible to `std::istream`.

\cgalModels `InputIterator`
*/
template <class T, class Stream>
class Istream_iterator
{
protected:
  // Pointer to the adapted stream; a null pointer marks the
  // end-of-stream (past-the-end) state. Two iterators compare equal
  // iff they point to the same stream (or are both end-of-stream).
  Stream* stream;
  // The most recently extracted value, returned by operator*.
  T value;

  // Extract the next value from the stream. If extraction fails (or
  // the stream was already bad), the iterator degrades to the
  // end-of-stream state by nulling `stream`, so it compares equal to
  // a default-constructed iterator.
  void read()
  {
    if(stream)
    {
      if(*stream)
      {
        *stream >> value;
        if(! *stream)
          stream = 0;
      }
      else
      {
        stream = 0;
      }
    }
  }

public:
  typedef T                           value_type;
  typedef const T&                    reference;
  typedef const T&                    const_reference;
  typedef const T*                    pointer;
  typedef const T*                    const_pointer;
  typedef std::size_t                 size_type;
  typedef std::ptrdiff_t              difference_type;
  typedef std::input_iterator_tag     iterator_category;
  typedef Istream_iterator<T,Stream>  Self;

  /// \name Creation
  /// @{

  /*!
  creates an end-of-stream iterator. This is a past-the-end iterator,
  and it is useful when constructing a range.
  */
  Istream_iterator() : stream(0) {}

  /*!
  creates an input iterator reading from `s`. When `s` reaches end of
  stream, this iterator will compare equal to an end-of-stream iterator
  created using the default constructor.
  */
  // Note: reads the first value eagerly so that operator* is valid
  // immediately after construction.
  Istream_iterator(Stream& s) : stream(&s) { read(); }

  /// @}

  bool operator==( const Self& i) const { return stream == i.stream; }
  bool operator!=( const Self& i) const { return stream != i.stream; }

  // Returns the value read by the most recent extraction.
  reference operator*() const { return value; }

#ifdef CGAL_ARROW_OPERATOR
  pointer operator->() const { return &(operator*()); }
#endif

  // Pre-increment: advance by extracting the next value.
  Self& operator++()
  {
    read();
    return *this;
  }

  // Post-increment: return a copy holding the current value, then advance.
  Self operator++(int)
  {
    Self tmp = *this;
    read();
    return tmp;
  }
};

} // namespace CGAL

#endif // CGAL_IO_ISTREAM_ITERATOR_H
1,040
497
<gh_stars>100-1000 // // Copyright (c) 2008-2014 the Urho3D project. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // #pragma once #include "RefCounted.h" #include <cassert> #include <cstddef> namespace Urho3D { /// Shared pointer template class with intrusive reference counting. template <class T> class SharedPtr { public: /// Construct a null shared pointer. SharedPtr() : ptr_(0) { } /// Copy-construct from another shared pointer. SharedPtr(const SharedPtr<T>& rhs) : ptr_(rhs.ptr_) { AddRef(); } /// Construct from a raw pointer. explicit SharedPtr(T* ptr) : ptr_(ptr) { AddRef(); } /// Destruct. Release the object reference. ~SharedPtr() { ReleaseRef(); } /// Assign from another shared pointer. SharedPtr<T>& operator = (const SharedPtr<T>& rhs) { if (ptr_ == rhs.ptr_) return *this; ReleaseRef(); ptr_ = rhs.ptr_; AddRef(); return *this; } /// Assign from a raw pointer. 
SharedPtr<T>& operator = (T* ptr) { if (ptr_ == ptr) return *this; ReleaseRef(); ptr_ = ptr; AddRef(); return *this; } /// Point to the object. T* operator -> () const { assert(ptr_); return ptr_; } /// Dereference the object. T& operator * () const { assert(ptr_); return *ptr_; } /// Subscript the object if applicable. T& operator [] (const int index) { assert(ptr_); return ptr_[index]; } /// Test for less than with another shared pointer. bool operator < (const SharedPtr<T>& rhs) const { return ptr_ < rhs.ptr_; } /// Test for equality with another shared pointer. bool operator == (const SharedPtr<T>& rhs) const { return ptr_ == rhs.ptr_; } /// Test for inequality with another shared pointer. bool operator != (const SharedPtr<T>& rhs) const { return ptr_ != rhs.ptr_; } /// Reset to null and release the object reference. void Reset() { ReleaseRef(); } /// Detach without destroying the object even if the refcount goes zero. To be used for scripting language interoperation. void Detach() { if (ptr_) { RefCount* refCount = RefCountPtr(); ++refCount->refs_; // 2 refs Reset(); // 1 ref --refCount->refs_; // 0 refs } } /// Perform a static cast from a shared pointer of another type. template <class U> void StaticCast(const SharedPtr<U>& rhs) { ReleaseRef(); ptr_ = static_cast<T*>(rhs.Get()); AddRef(); } /// Perform a dynamic cast from a shared pointer of another type. template <class U> void DynamicCast(const SharedPtr<U>& rhs) { ReleaseRef(); ptr_ = dynamic_cast<T*>(rhs.Get()); AddRef(); } /// Check if the pointer is null. bool Null() const { return ptr_ == 0; } /// Check if the pointer is not null. bool NotNull() const { return ptr_ != 0; } /// Return the raw pointer. T* Get() const { return ptr_; } /// Return the object's reference count, or 0 if the pointer is null. int Refs() const { return ptr_ ? ptr_->Refs() : 0; } /// Return the object's weak reference count, or 0 if the pointer is null. int WeakRefs() const { return ptr_ ? 
ptr_->WeakRefs() : 0; } /// Return pointer to the RefCount structure. RefCount* RefCountPtr() const { return ptr_ ? ptr_->RefCountPtr() : 0; } /// Return hash value for HashSet & HashMap. unsigned ToHash() const; private: /// Prevent direct assignment from a shared pointer of another type. template <class U> SharedPtr<T>& operator = (const SharedPtr<U>& rhs); /// Add a reference to the object pointed to. void AddRef() { if (ptr_) ptr_->AddRef(); } /// Release the object reference and delete it if necessary. void ReleaseRef() { if (ptr_) { ptr_->ReleaseRef(); ptr_ = 0; } } /// Pointer to the object. T* ptr_; }; /// Perform a static cast from one shared pointer type to another. template <class T, class U> SharedPtr<T> StaticCast(const SharedPtr<U>& ptr) { SharedPtr<T> ret; ret.StaticCast(ptr); return ret; } /// Perform a dynamic cast from one weak pointer type to another. template <class T, class U> SharedPtr<T> DynamicCast(const SharedPtr<U>& ptr) { SharedPtr<T> ret; ret.DynamicCast(ptr); return ret; } /// Weak pointer template class with intrusive reference counting. Does not keep the object pointed to alive. template <class T> class WeakPtr { public: /// Construct a null weak pointer. WeakPtr() : ptr_(0), refCount_(0) { } /// Copy-construct from another weak pointer. WeakPtr(const WeakPtr<T>& rhs) : ptr_(rhs.ptr_), refCount_(rhs.refCount_) { AddRef(); } /// Construct from a shared pointer. WeakPtr(const SharedPtr<T>& rhs) : ptr_(rhs.Get()), refCount_(rhs.RefCountPtr()) { AddRef(); } /// Construct from a raw pointer. explicit WeakPtr(T* ptr) : ptr_(ptr), refCount_(ptr ? ptr->RefCountPtr() : 0) { AddRef(); } /// Destruct. Release the weak reference to the object. ~WeakPtr() { ReleaseRef(); } /// Assign from a shared pointer. WeakPtr<T>& operator = (const SharedPtr<T>& rhs) { if (ptr_ == rhs.Get() && refCount_ == rhs.RefCountPtr()) return *this; ReleaseRef(); ptr_ = rhs.Get(); refCount_ = rhs.RefCountPtr(); AddRef(); return *this; } /// Assign from a weak pointer. 
WeakPtr<T>& operator = (const WeakPtr<T>& rhs) { if (ptr_ == rhs.ptr_ && refCount_ == rhs.refCount_) return *this; ReleaseRef(); ptr_ = rhs.ptr_; refCount_ = rhs.refCount_; AddRef(); return *this; } /// Assign from a raw pointer. WeakPtr<T>& operator = (T* ptr) { RefCount* refCount = ptr ? ptr->RefCountPtr() : 0; if (ptr_ == ptr && refCount_ == refCount) return *this; ReleaseRef(); ptr_ = ptr; refCount_ = refCount; AddRef(); return *this; } /// Convert to a shared pointer. If expired, return a null shared pointer. SharedPtr<T> Lock() const { if (Expired()) return SharedPtr<T>(); else return SharedPtr<T>(ptr_); } /// Return raw pointer. If expired, return null. T* Get() const { if (Expired()) return 0; else return ptr_; } /// Point to the object. T* operator -> () const { T* rawPtr = Get(); assert(rawPtr); return rawPtr; } /// Dereference the object. T& operator * () const { T* rawPtr = Get(); assert(rawPtr); return *rawPtr; } /// Subscript the object if applicable. T& operator [] (const int index) { T* rawPtr = Get(); assert(rawPtr); return (*rawPtr)[index]; } /// Test for equality with another weak pointer. bool operator == (const WeakPtr<T>& rhs) const { return ptr_ == rhs.ptr_ && refCount_ == rhs.refCount_; } /// Test for inequality with another weak pointer. bool operator != (const WeakPtr<T>& rhs) const { return ptr_ != rhs.ptr_ || refCount_ != rhs.refCount_; } /// Test for less than with another weak pointer. bool operator < (const WeakPtr<T>& rhs) const { return ptr_ < rhs.ptr_; } /// Reset to null and release the weak reference. void Reset() { ReleaseRef(); } /// Perform a static cast from a weak pointer of another type. template <class U> void StaticCast(const WeakPtr<U>& rhs) { ReleaseRef(); ptr_ = static_cast<T*>(rhs.Get()); refCount_ = rhs.refCount_; AddRef(); } /// Perform a dynamic cast from a weak pointer of another type. 
template <class U> void DynamicCast(const WeakPtr<U>& rhs) { ReleaseRef(); ptr_ = dynamic_cast<T*>(rhs.Get()); if (ptr_) { refCount_ = rhs.refCount_; AddRef(); } else refCount_ = 0; } /// Check if the pointer is null. bool Null() const { return refCount_ == 0; } /// Check if the pointer is not null. bool NotNull() const { return refCount_ != 0; } /// Return the object's reference count, or 0 if null pointer or if object has expired. int Refs() const { return (refCount_ && refCount_->refs_ >= 0) ? refCount_->refs_ : 0; } /// Return the object's weak reference count. int WeakRefs() const { if (!Expired()) return ptr_->WeakRefs(); else return refCount_ ? refCount_->weakRefs_ : 0; } /// Return whether the object has expired. If null pointer, always return true. bool Expired() const { return refCount_ ? refCount_->refs_ < 0 : true; } /// Return pointer to the RefCount structure. RefCount* RefCountPtr() const { return refCount_; } /// Return hash value for HashSet & HashMap. unsigned ToHash() const; private: /// Prevent direct assignment from a weak pointer of different type. template <class U> WeakPtr<T>& operator = (const WeakPtr<U>& rhs); /// Add a weak reference to the object pointed to. void AddRef() { if (refCount_) { assert(refCount_->weakRefs_ >= 0); ++(refCount_->weakRefs_); } } /// Release the weak reference. Delete the Refcount structure if necessary. void ReleaseRef() { if (refCount_) { assert(refCount_->weakRefs_ > 0); --(refCount_->weakRefs_); if (Expired() && !refCount_->weakRefs_) delete refCount_; } ptr_ = 0; refCount_ = 0; } /// Pointer to the object. T* ptr_; /// Pointer to the RefCount structure. RefCount* refCount_; }; /// Perform a static cast from one weak pointer type to another. template <class T, class U> WeakPtr<T> StaticCast(const WeakPtr<U>& ptr) { WeakPtr<T> ret; ret.StaticCast(ptr); return ret; } /// Perform a dynamic cast from one weak pointer type to another. 
template <class T, class U> WeakPtr<T> DynamicCast(const WeakPtr<U>& ptr) { WeakPtr<T> ret; ret.DynamicCast(ptr); return ret; } }
4,697
308
package com.hexagonaldemo.ticketapi.ticket.event;

import com.hexagonaldemo.ticketapi.common.model.Event;
import com.hexagonaldemo.ticketapi.common.util.CurrentTimeFactory;
import com.hexagonaldemo.ticketapi.payment.model.Payment;
import com.hexagonaldemo.ticketapi.ticket.model.Ticket;
import lombok.*;

import java.math.BigDecimal;
import java.time.LocalDateTime;

/**
 * Domain event emitted when a ticket reservation has completed.
 *
 * <p>Carries a snapshot of the reserved ticket (id, account, meetup,
 * reservation date, price, count) together with the id of the payment
 * that settled it. Lombok generates the boilerplate: accessors
 * ({@code @Getter}), a builder ({@code @Builder}), constructors, and
 * {@code equals}/{@code hashCode}/{@code toString}.
 */
@ToString
@Getter
@Builder
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode
public class TicketReservedEvent implements Event {

    // Timestamp at which this event object was created (set via
    // CurrentTimeFactory.now() in the factory method below), not the
    // reservation date itself.
    private LocalDateTime eventCreatedAt;

    // Identifier of the reserved ticket.
    private Long id;
    // Account that made the reservation.
    private Long accountId;
    // Meetup the ticket belongs to.
    private Long meetupId;
    // Date/time for which the ticket was reserved.
    private LocalDateTime reserveDate;
    // Price of the reservation.
    private BigDecimal price;
    // Number of tickets reserved.
    private Integer count;
    // Payment that settled this reservation.
    private Long paymentId;

    /**
     * Builds the event from a reserved ticket and the payment that paid for it.
     *
     * @param ticket  the reserved ticket whose fields are copied into the event
     * @param payment the associated payment; only its id is recorded
     * @return a new event stamped with the current time
     */
    public static TicketReservedEvent from(Ticket ticket, Payment payment) {
        return TicketReservedEvent.builder()
                .eventCreatedAt(CurrentTimeFactory.now())
                .id(ticket.getId())
                .accountId(ticket.getAccountId())
                .meetupId(ticket.getMeetupId())
                .reserveDate(ticket.getReserveDate())
                .price(ticket.getPrice())
                .count(ticket.getCount())
                .paymentId(payment.getId())
                .build();
    }
}
535
343
/* $Id$ $Revision$ */
/* vim:set shiftwidth=4 ts=8: */

/*************************************************************************
 * Copyright (c) 2011 AT&T Intellectual Property
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors: See CVS logs. Details at http://www.graphviz.org/
 *************************************************************************/

#ifdef __cplusplus
extern "C" {
#endif

#ifndef XLAYOUT_H
#define XLAYOUT_H

#include <fdp.h>

/* Tuning parameters for the fdp x-layout pass.
 * NOTE(review): field semantics are not defined in this header; the
 * names suggest a simulated-annealing/force-directed scheme — confirm
 * against the implementation in xlayout.c.
 */
typedef struct {
    int numIters;   /* number of layout iterations to run */
    double T0;      /* presumably the initial temperature — TODO confirm */
    double K;       /* presumably ideal edge length / spring constant — TODO confirm */
    double C;       /* presumably repulsive-force scale factor — TODO confirm */
    int loopcnt;    /* loop count; exact role unclear from this header */
} xparams;

/* Run the x-layout pass on the given graph using the supplied parameters. */
extern void fdp_xLayout(graph_t *, xparams *);

#endif

#ifdef __cplusplus
}
#endif
267
5,169
{ "name": "SBugly", "version": "0.0.1", "summary": "I am bugly summary.", "homepage": "http://gitlab.xingmentech.com/alita/sdkunit/sbugly.git", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "<NAME>": "<EMAIL>" }, "platforms": { "ios": "9.0" }, "source": { "git": "http://gitlab.xingmentech.com/alita/sdkunit/sbugly.git", "tag": "0.0.1" }, "vendored_frameworks": "Frameworks/Bugly.framework", "frameworks": [ "SystemConfiguration", "Security" ], "libraries": [ "c++", "z" ], "requires_arc": true }
276
349
# import waitGPU # import setGPU # waitGPU.wait(utilization=20, available_memory=10000, interval=60) # waitGPU.wait(gpu_ids=[1,3], utilization=20, available_memory=10000, interval=60) import torch import torch.nn as nn import torch.optim as optim import torch.nn.functional as F from torch.autograd import Variable import torch.backends.cudnn as cudnn cudnn.benchmark = True import torchvision.transforms as transforms import torchvision.datasets as datasets import random import setproctitle import problems as pblm from trainer import * import math import numpy def select_model(m): if m == 'large': # raise ValueError model = pblm.cifar_model_large().cuda() elif m == 'resnet': model = pblm.cifar_model_resnet(N=1, factor=1).cuda() else: model = pblm.cifar_model().cuda() return model torch.manual_seed(0) torch.cuda.manual_seed_all(0) random.seed(0) numpy.random.seed(0) if __name__ == "__main__": args = pblm.argparser_evaluate(epsilon = 0.0347, norm='l1') print("saving file to {}".format(args.output)) setproctitle.setproctitle(args.output) test_log = open(args.output, "w") _, test_loader = pblm.cifar_loaders(1) d = torch.load(args.load) model = [] for sd in d['state_dict']: m = select_model(args.model) m.load_state_dict(sd) model.append(m) best_err = 1 epsilon = args.epsilon # robust cascade training err = evaluate_robust_cascade(test_loader, model, args.epsilon, 0, test_log, args.verbose, norm_type=args.norm, bounded_input=False, proj=args.proj)
669
346
<reponame>ja8zyjits/universe { "images": { "icon-small": "https://downloads.mesosphere.com/assets/universe/000/redis-icon-small.png", "icon-medium": "https://downloads.mesosphere.com/assets/universe/000/redis-icon-medium.png", "icon-large": "https://downloads.mesosphere.com/assets/universe/000/redis-icon-large.png", "screenshots": [ "https://redis.io/images/redis-white.png" ] }, "assets": { "container": { "docker": { "redis": "redis:3.2.9" } } } }
233
15,947
<reponame>ChaseKnowlden/airflow # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import os from contextlib import contextmanager from typing import List, Optional import pytest from airflow.models import Connection from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.utils import db from tests.test_utils import AIRFLOW_MAIN_FOLDER from tests.test_utils.logging_command_executor import get_executor from tests.test_utils.system_tests_class import SystemTest AWS_DAG_FOLDER = os.path.join(AIRFLOW_MAIN_FOLDER, "airflow", "providers", "amazon", "aws", "example_dags") AWS_EKS_KEY = "aws_eks.json" @contextmanager def provide_aws_context(key_file_path: Optional[str] = None): """ Authenticates the context to be able use aws resources. Falls back to awscli default authentication methods via `.aws`` folder. 
""" # TODO: Implement more authentication methods yield @contextmanager def provide_aws_s3_bucket(name): AmazonSystemTest.create_aws_s3_bucket(name) yield AmazonSystemTest.delete_aws_s3_bucket(name) @pytest.mark.system("amazon") class AmazonSystemTest(SystemTest): @staticmethod def _region_name(): return os.environ.get("REGION_NAME") @staticmethod def _registry_id(): return os.environ.get("REGISTRY_ID") @staticmethod def _image(): return os.environ.get("IMAGE") @staticmethod def _execution_role_arn(): return os.environ.get("EXECUTION_ROLE_ARN") @staticmethod def _remove_resources(): # remove all created/existing resources flag return os.environ.get("REMOVE_RESOURCES", False) @classmethod def execute_with_ctx(cls, cmd: List[str]): """ Executes command with context created by provide_aws_context. """ executor = get_executor() with provide_aws_context(): executor.execute_cmd(cmd=cmd) @staticmethod def create_connection(aws_conn_id: str, region: str) -> None: """ Create aws connection with region :param aws_conn_id: id of the aws connection to create :type aws_conn_id: str :param region: aws region name to use in extra field of the aws connection :type region: str """ db.merge_conn( Connection( conn_id=aws_conn_id, conn_type="aws", extra=f'{{"region_name": "{region}"}}', ), ) @classmethod def create_aws_s3_bucket(cls, name: str) -> None: """ Creates the aws bucket with the given name. :param name: name of the bucket """ cmd = ["aws", "s3api", "create-bucket", "--bucket", name] cls.execute_with_ctx(cmd) @classmethod def delete_aws_s3_bucket(cls, name: str) -> None: """ Deletes the aws bucket with the given name. It needs to empty the bucket before it can be deleted. 
:param name: name of the bucket """ cmd = ["aws", "s3", "rm", f"s3://{name}", "--recursive"] cls.execute_with_ctx(cmd) cmd = ["aws", "s3api", "delete-bucket", "--bucket", name] cls.execute_with_ctx(cmd) @classmethod def create_emr_default_roles(cls) -> None: """Create EMR Default roles for running system test This will create the default IAM roles: - `EMR_EC2_DefaultRole` - `EMR_DefaultRole` """ cmd = ["aws", "emr", "create-default-roles"] cls.execute_with_ctx(cmd) @staticmethod def create_ecs_cluster(aws_conn_id: str, cluster_name: str) -> None: """ Create ecs cluster with given name If specified cluster exists, it doesn't change and new cluster will not be created. :param aws_conn_id: id of the aws connection to use when creating boto3 client/resource :type aws_conn_id: str :param cluster_name: name of the cluster to create in aws ecs :type cluster_name: str """ hook = AwsBaseHook( aws_conn_id=aws_conn_id, client_type="ecs", ) hook.conn.create_cluster( clusterName=cluster_name, capacityProviders=[ "FARGATE_SPOT", "FARGATE", ], defaultCapacityProviderStrategy=[ { "capacityProvider": "FARGATE_SPOT", "weight": 1, "base": 0, }, { "capacityProvider": "FARGATE", "weight": 1, "base": 0, }, ], ) @staticmethod def delete_ecs_cluster(aws_conn_id: str, cluster_name: str) -> None: """ Delete ecs cluster with given short name or full Amazon Resource Name (ARN) :param aws_conn_id: id of the aws connection to use when creating boto3 client/resource :type aws_conn_id: str :param cluster_name: name of the cluster to delete in aws ecs :type cluster_name: str """ hook = AwsBaseHook( aws_conn_id=aws_conn_id, client_type="ecs", ) hook.conn.delete_cluster( cluster=cluster_name, ) @staticmethod def create_ecs_task_definition( aws_conn_id: str, task_definition: str, container: str, image: str, execution_role_arn: str, awslogs_group: str, awslogs_region: str, awslogs_stream_prefix: str, ) -> None: """ Create ecs task definition with given name :param aws_conn_id: id of the aws connection 
to use when creating boto3 client/resource :type aws_conn_id: str :param task_definition: family name for task definition to create in aws ecs :type task_definition: str :param container: name of the container :type container: str :param image: image used to start a container, format: `registry_id`.dkr.ecr.`region`.amazonaws.com/`repository_name`:`tag` :type image: str :param execution_role_arn: task execution role that the Amazon ECS container agent can assume, format: arn:aws:iam::`registry_id`:role/`role_name` :type execution_role_arn: str :param awslogs_group: awslogs group option in log configuration :type awslogs_group: str :param awslogs_region: awslogs region option in log configuration :type awslogs_region: str :param awslogs_stream_prefix: awslogs stream prefix option in log configuration :type awslogs_stream_prefix: str """ hook = AwsBaseHook( aws_conn_id=aws_conn_id, client_type="ecs", ) hook.conn.register_task_definition( family=task_definition, executionRoleArn=execution_role_arn, networkMode="awsvpc", containerDefinitions=[ { "name": container, "image": image, "cpu": 256, "memory": 512, # hard limit "memoryReservation": 512, # soft limit "logConfiguration": { "logDriver": "awslogs", "options": { "awslogs-group": awslogs_group, "awslogs-region": awslogs_region, "awslogs-stream-prefix": awslogs_stream_prefix, }, }, }, ], requiresCompatibilities=[ "FARGATE", ], cpu="256", # task cpu limit (total of all containers) memory="512", # task memory limit (total of all containers) ) @staticmethod def delete_ecs_task_definition(aws_conn_id: str, task_definition: str) -> None: """ Delete all revisions of given ecs task definition :param aws_conn_id: id of the aws connection to use when creating boto3 client/resource :type aws_conn_id: str :param task_definition: family prefix for task definition to delete in aws ecs :type task_definition: str """ hook = AwsBaseHook( aws_conn_id=aws_conn_id, client_type="ecs", ) response = hook.conn.list_task_definitions( 
familyPrefix=task_definition, status="ACTIVE", sort="ASC", maxResults=100, ) revisions = [arn.split(":")[-1] for arn in response["taskDefinitionArns"]] for revision in revisions: hook.conn.deregister_task_definition( taskDefinition=f"{task_definition}:{revision}", ) @staticmethod def is_ecs_task_definition_exists(aws_conn_id: str, task_definition: str) -> bool: """ Check whether given task definition exits in ecs :param aws_conn_id: id of the aws connection to use when creating boto3 client/resource :type aws_conn_id: str :param task_definition: family prefix for task definition to check in aws ecs :type task_definition: str """ hook = AwsBaseHook( aws_conn_id=aws_conn_id, client_type="ecs", ) response = hook.conn.list_task_definition_families( familyPrefix=task_definition, status="ACTIVE", maxResults=100, ) return task_definition in response["families"]
4,699
856
package org.lionsoul.jcseg.test;

import java.io.IOException;
import java.io.StringReader;

import org.lionsoul.jcseg.sentence.Sentence;
import org.lionsoul.jcseg.sentence.SentenceSeg;

/**
 * sentence seg test program
 *
 * <p>Feeds a sample Chinese news paragraph to {@link SentenceSeg} and prints
 * each detected sentence on its own line.
 *
 * @author chenxin<<EMAIL>>
 */
public class SentenceSegTest 
{
    /**
     * Runs sentence segmentation over the built-in sample document.
     *
     * <p>Fix: the original caught {@code IOException} and only called
     * {@code printStackTrace()}, so a failure still exited with status 0.
     * Letting the exception propagate from {@code main} keeps the stack trace
     * on stderr while producing a non-zero exit code.
     *
     * @param args unused
     * @throws IOException if reading from the underlying reader fails
     */
    public static void main(String[] args) throws IOException 
    {
        // Sample document kept verbatim from the original demo.
        String doc = "冰岛时间7月1日,正在当地拍片的汤姆·克鲁斯通过发言人承认,他与第三任妻子凯蒂·赫尔墨斯(第一二任妻子分别为咪咪·罗杰斯、妮可·基德曼)的婚姻即将结束。"
                + "讽刺的是,3个女人都是在33岁离开这位“碟中谍”英雄的。"
                + "“三进三出”的婚姻令阿汤哥昔日“万人迷”的形象遭受严重冲击,也让公众不解,阿汤哥为什么找不到一个能长相依的爱人呢?"
                + "记者调查发现,阿汤哥的超强控制欲和生活中的种种“怪异行为”是导致其婚姻屡屡失败的原因。";
        SentenceSeg seg = new SentenceSeg(new StringReader(doc));
        // next() returns null once the input is exhausted.
        Sentence sen = null;
        while ( (sen = seg.next()) != null ) 
        {
            System.out.println(sen);
        }
    }
}
811
2,989
package com.linkedin.databus.core; /* * * Copyright 2013 LinkedIn Corp. All rights reserved * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.OutputStream; import java.io.Serializable; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.HashMap; import java.util.Map; import org.apache.log4j.Logger; import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.JsonParseException; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import com.linkedin.databus.core.util.Fnv1aHashImpl; /** * This class represents the state of a consumer consuming events from a Databus server for * a single timeline (physical partition). There are two main types of checkpoints: * * <ul> * <li>Online consumption - used for consuming events a relay * <li>Bootstrap - used for consuming events from a bootstrap server * </ul> * * The type of a checkpoint is determined by the consumptionMode parameter * {@link #getConsumptionMode()}. * * <p><b>Online-Consumption checkpoints</b> * * Online-consumption checkpoints {@link DbusClientMode#ONLINE_CONSUMPTION} represent the state of * a consumer of event stream from a relay. 
The main properties of the checkpoint are:
 *
 * <ul>
 * <li> {@code consumption_mode} - must be {@link DbusClientMode#ONLINE_CONSUMPTION}
 * <li> {@code windowScn} - the sequence number (SCN) of the current window; -1 denotes
 *      "flexible" checkpoint (see below). If the SCN is 0, and tsNsecs is greater than 0
 *      then the relay may (if capable) stream events that have timestamp greater than
 *      or equal to tsNsecs. However, the relay MUST ensure that it does not miss any
 *      events that have a timestamp greater than or equal to tsNsecs.
 *      TODO: Until we have this capability in the relays we don't have to define the exact behavior
 * <li> {@code prevScn} - the sequence number (SCN) of the window before current; -1 means
 *      "unknown".
 * <li> {@code windowOffset} - the number of events processed from the current window;
 *      -1 means the entire window has been processed including the end-of-window event and
 *      prevScn must be equal to windowScn.
 * <li> {@code tsNsecs} - optional value that is set to the timestamp of the EOP event in the window of
 *      events with the highest SCN that has been successfully consumed. If tsNsecs is
 *      greater than 0 then the value of windowScn must not be -1 (see discussion on
 *      flexible checkpoints below).
 * </ul>
 *
 * <i>Flexible online-consumption checkpoints</i>
 *
 * Used by consumers which do not care from where they start consuming. The relay will make the
 * best effort to serve whatever data it has. Flexible checkpoints can be created using
 * {@link #createFlexibleCheckpoint()} or invoking {@link #setFlexible()} on an existing Checkpoint.
 * If a flexible checkpoint has tsNsecs set, the value of tsNsecs must be -1 (unset value).
 *
 * <p><b>Bootstrap checkpoints</b>
 *
 * These are common fields for snapshot and catchup bootstrap checkpoints (see below).
 *
 * <ul>
 * <li> {@code bootstrap_since_scn} - the SCN of the last fully-consumed window or 0 for a full bootstrap. Must be >= 0.
* <li> {@code bootstrap_start_scn} - the last SCN written to the snapshot when the snapshot started; must be set * before any data is read, i.e. {@code bootstrap_snapshot_source_index} > 0 or * {@code snapshot_offset} > 0 or {@code bootstrap_target_scn} != -1 * <li> {@code bootstrap_target_scn} - the last SCN written to the log tables when the snapshot of the last source * completed. It provides an upper bound of how much dirty data might have been * read while snapshotting. It specifies the SCN up to which catch-up should be * performed to guarantee consistency of the bootstrap results. * <li> {@code bootstrap_start_tsnsecs} * - (optional) the timestamp of the EOP event of the highest window successfully * processed by the client before the client fell off the relay. This value * is optionally set by the bootstrap client before bootstrapping begins, and * is never changed during the entire bootstrap sequence * (snapshot and catchup phases). * </ul> * * <p><b>Bootstrap snapshot checkpoints</b> * * Bootstrap snapshot checkpoints ({@link DbusClientMode#BOOTSTRAP_SNAPSHOT}) represent a consumer in the SNAPSHOT * phase of bootstrapping. 
The main properties of checkpoints are:
 *
 * <ul>
 * <li> {@code consumption_mode} - must be {@link DbusClientMode#BOOTSTRAP_SNAPSHOT}
 * <li> {@code bootstrap_snapshot_source_index} - the index of the current source being snapshotted or the index
 *      of the next source if {@code bootstrap_snapshot_offset} == -1
 * <li> {@code snapshot_source} - the name of the current source being snapshotted or the name of the next source
 *      if {@code bootstrap_snapshot_offset} == -1
 * <li> {@code bootstrap_snapshot_offset} - number of rows successfully read for the current snapshot source; if -1,
 *      all rows have been read
 * <li> {@code bootstrap_snapshot_file_record_offset} - Applicable for V3 bootstrap only; refers to the offset of the
 *      record within the AVRO block
 * <li> {@code storage_cluster_name} - Applicable for V3 bootstrap only; refers to name of Espresso storage cluster
 *
 * </ul>
 *
 * <p><b>Bootstrap catchup checkpoints</b>
 *
 * Bootstrap catchup checkpoints ({@link DbusClientMode#BOOTSTRAP_CATCHUP}) represent a consumer in the CATCHUP
 * phase of bootstrapping.
The main properties of checkpoints are: * * <ul> * <li> {@code consumption_mode} - must be {@link DbusClientMode#BOOTSTRAP_CATCHUP} * <li> {@code bootstrap_snapshot_source_index} - the index of the last snapshot source * <li> {@code bootstrap_snapshot_offset} - should always be -1 * <li> {@code catchup_source} - the name of the current catch-up source or the name of the next source * if {@code windowScn} == -1 * <li> {@code windowScn} - the sequence number (SCN) of the current window being caught-up; * <li> {@code windowOffset} - the number of events processed from the current window; * </ul> * * @see CheckpointMult for multi-partition checkpoints * */ public class Checkpoint extends InternalDatabusEventsListenerAbstract implements Serializable, Cloneable { private static final long serialVersionUID = 1L; public static final String MODULE = Checkpoint.class.getName(); public static final Logger LOG = Logger.getLogger(MODULE); public static final long UNSET_BOOTSTRAP_START_NSECS = -1; public static final long UNSET_TS_NSECS = -1; public static final long UNSET_BOOTSTRAP_START_SCN = -1; public static final long UNSET_BOOTSTRAP_SINCE_SCN = -1; public static final long UNSET_BOOTSTRAP_TARGET_SCN = -1; public static final int UNSET_BOOTSTRAP_INDEX = 0; public static final long UNSET_ONLINE_PREVSCN = -1; /** * A checkpoint has the tuple (SCN, Timestamp-of-highest-scn) to indicate the point of successful * consumption -- The SCN and timestamp being that of the EOW event consumed successfully. * However, it is possible to create a checkpoint (e.g. by the operator as a run-book procedure) that * has only a timestamp to indicate the last consumption point, but does not have the corresponding SCN. 
* For now, we restrict these checkpoints to have an SCN of 0 (definitely not -1, since -1 will indicate * a 'flexible checkpoint') */ public static final long WINDOW_SCN_FOR_PURE_TIMEBASED_CKPT = 0; public static final String NO_SOURCE_NAME = ""; /** The window offset value for a full-consumed window*/ public static final Long FULLY_CONSUMED_WINDOW_OFFSET = -1L; public static final Long DEFAULT_SNAPSHOT_FILE_RECORD_OFFSET = -1L; private static final String TS_NSECS = "tsNsecs"; private static final String WINDOW_SCN = "windowScn"; // which window scn have we processed completely private static final String WINDOW_OFFSET = "windowOffset"; // when non-zero: within a window, how many messages have been processed /** * the last window we have completely processed */ private static final String PREV_SCN = "prevScn"; // Bootstrap Checkpoint // The checkpoint consists of // 1. The phase in the bootstrap (snapshot/catchup) // 2. Start SCN - the max. scn of a source (min of bootstrap_applier_state) when bootstrap process is initiated for it // 3. Target SCN - the max. scn of a source (bootstrap_producer_state) at the beginning of bootstrap catchup // 4. The source we are currently snapshotting // 5. The row_offset for the source being snapshotted // 6. The source involved for catchup // 7. The window scn for the catchup [Similar to regular consumption mode] // 8. The window offset for the catchup [Similar to regular consumption mode] // 9. Since SCN - the SCN at which bootstrap process is initiated. // 10. Bootstrap Server Coordinates // 11. (V3 only) Bootstrap server side file record offset. // (i) snapshot_offset field has the avro block number to seek within the avro file in v3 bootstrap // (ii) snapshot_file_record_offset is used to skip records // 12. 
(V3 only) When storage in on Espresso, this refers to storage cluster name private static final String CONSUMPTION_MODE = "consumption_mode"; private static final String BOOTSTRAP_START_SCN = "bootstrap_start_scn"; private static final String SNAPSHOT_SOURCE = "snapshot_source"; private static final String SNAPSHOT_OFFSET = "snapshot_offset"; private static final String CATCHUP_SOURCE = "catchup_source"; private static final String BOOTSTRAP_TARGET_SCN = "bootstrap_target_scn"; private static final String BOOTSTRAP_SINCE_SCN = "bootstrap_since_scn"; private static final String BOOTSTRAP_SNAPSHOT_SOURCE_INDEX = "bootstrap_snapshot_source_index"; private static final String BOOTSTRAP_CATCHUP_SOURCE_INDEX = "bootstrap_catchup_source_index"; public static final String BOOTSTRAP_SERVER_INFO = "bootstrap_server_info"; public static final String SNAPSHOT_FILE_RECORD_OFFSET = "bootstrap_snapshot_file_record_offset"; public static final String STORAGE_CLUSTER_NAME = "storage_cluster_name"; public static final String BOOTSTRAP_START_TSNSECS = "bootstrap_start_tsnsecs"; private static final ObjectMapper mapper = new ObjectMapper(); private final Map<String, Object> internalData; private long currentWindowScn; private long prevWindowScn; private long currentWindowOffset; private long snapShotOffset; // TODO ALERT XXX WARNING: Do NOT add any more member variables. See DDSDBUS-3070. It is ok to add to internalData @SuppressWarnings("unchecked") public Checkpoint(String serializedCheckpoint) throws JsonParseException, JsonMappingException, IOException { this(); internalData.putAll(mapper.readValue(new ByteArrayInputStream(serializedCheckpoint.getBytes(Charset.defaultCharset())), Map.class)); // copy from map to local state variables mapToInternalState(); } private void mapToInternalState() { currentWindowScn = (internalData.get(WINDOW_SCN) != null) ? ((Number) internalData.get(WINDOW_SCN)).longValue() : -1; prevWindowScn = (internalData.get(PREV_SCN) != null) ? 
((Number) internalData.get(PREV_SCN)).longValue() : -1; currentWindowOffset = (internalData.get(WINDOW_OFFSET) != null) ? ((Number) internalData.get(WINDOW_OFFSET)).longValue() : FULLY_CONSUMED_WINDOW_OFFSET; snapShotOffset = (internalData.get(SNAPSHOT_OFFSET) != null) ? ((Number) internalData.get(SNAPSHOT_OFFSET)).longValue() : -1; } private void internalStateToMap( ) { internalData.put(WINDOW_SCN, currentWindowScn); internalData.put(PREV_SCN, prevWindowScn); internalData.put(WINDOW_OFFSET, currentWindowOffset); internalData.put(SNAPSHOT_OFFSET, snapShotOffset); } public Checkpoint() { internalData = new HashMap<String, Object>(); init(); } /** Clears the checkpoint. */ public void init() { currentWindowScn = -1L; prevWindowScn = -1L; currentWindowOffset = FULLY_CONSUMED_WINDOW_OFFSET; snapShotOffset = -1; internalData.clear(); setConsumptionMode(DbusClientMode.INIT); } public void setTsNsecs(long nsecs) { internalData.put(TS_NSECS, Long.valueOf(nsecs)); } public long getTsNsecs() { return number2Long((Number)internalData.get(TS_NSECS), UNSET_TS_NSECS); } public void setBootstrapStartNsecs(long nsecs) { internalData.put(BOOTSTRAP_START_TSNSECS, Long.valueOf(nsecs)); } public long getBootstrapStartNsecs() { return number2Long((Number)internalData.get(BOOTSTRAP_START_TSNSECS), UNSET_BOOTSTRAP_START_NSECS); } public void setBootstrapSnapshotSourceIndex(int index) { internalData.put(BOOTSTRAP_SNAPSHOT_SOURCE_INDEX, index); } public void setBootstrapCatchupSourceIndex(int index) { internalData.put(BOOTSTRAP_CATCHUP_SOURCE_INDEX, index); } int nextBootstrapSnapshotSourceIndex() { int index = getBootstrapSnapshotSourceIndex(); return index + 1; } int nextBootstrapCatchupSourceIndex() { int index = getBootstrapCatchupSourceIndex(); return index + 1; } public void setBootstrapServerInfo(String serverInfoStr) { internalData.put(BOOTSTRAP_SERVER_INFO, serverInfoStr); } public String getBootstrapServerInfo() { Object obj = internalData.get(BOOTSTRAP_SERVER_INFO); if ( null == 
obj) return null; return (String)obj; } public void setWindowScn(Long windowScn) { if (DbusClientMode.BOOTSTRAP_CATCHUP == getConsumptionMode() && !isBootstrapTargetScnSet()) { throw new InvalidCheckpointException("target SCN must be set for catchup to proceed", this); } currentWindowScn = windowScn; } public void setPrevScn(Long windowScn) { prevWindowScn = windowScn; } public long getPrevScn() { return prevWindowScn; } public void setWindowOffset(long windowOffset) { if (DbusClientMode.BOOTSTRAP_CATCHUP == getConsumptionMode() && !isBootstrapTargetScnSet()) { throw new InvalidCheckpointException("target SCN must be set for catchup to proceed", this); } currentWindowOffset = windowOffset; } @Deprecated /** @deprecated Please use {@link #setWindowOffset(long)} */ public void setWindowOffset(Integer windowOffset) { currentWindowOffset = windowOffset.longValue(); } public void setConsumptionMode(DbusClientMode mode) { internalData.put(CONSUMPTION_MODE, mode.toString()); } public void setBootstrapStartScn(Long bootstrapStartScn) { if (isBootstrapStartScnSet() && bootstrapStartScn.longValue() != UNSET_BOOTSTRAP_START_SCN) { throw new InvalidCheckpointException("bootstrap_start_scn is already set", this); } if (bootstrapStartScn.longValue() != UNSET_BOOTSTRAP_START_SCN && DbusClientMode.BOOTSTRAP_SNAPSHOT != getConsumptionMode()) { throw new InvalidCheckpointException("not in bootstrap snapshot mode", this); } if (bootstrapStartScn.longValue() != UNSET_BOOTSTRAP_START_SCN && bootstrapStartScn.longValue() < 0) { throw new InvalidCheckpointException("invalid bootstra_start_scn value:" + bootstrapStartScn, this); } internalData.put(BOOTSTRAP_START_SCN, bootstrapStartScn); } public void setSnapshotSource(int sourceIndex, String sourceName) { internalData.put(SNAPSHOT_SOURCE, sourceName); setBootstrapSnapshotSourceIndex(sourceIndex); } public void setSnapshotOffset(long snapshotOffset) { if (snapshotOffset != 0 && !isBootstrapStartScnSet()) { throw new 
InvalidCheckpointException("cannot snapshot without bootstrap_start_scn", this); } internalData.put(SNAPSHOT_OFFSET, Long.valueOf(snapshotOffset)); this.snapShotOffset = snapshotOffset; } protected void clearSnapshotOffset() { internalData.put(SNAPSHOT_OFFSET, FULLY_CONSUMED_WINDOW_OFFSET); this.snapShotOffset = FULLY_CONSUMED_WINDOW_OFFSET; } @Deprecated /** @deprecated Please use #setSnapshotOffset(long) */ public void setSnapshotOffset(Integer snapshotOffset) { internalData.put(SNAPSHOT_OFFSET, Long.valueOf(snapshotOffset)); } protected void setCatchupSource(int sourceIndex, String sourceName) { setBootstrapCatchupSourceIndex(sourceIndex); internalData.put(CATCHUP_SOURCE, sourceName); } public void setCatchupOffset(Integer catchupOffset) { setWindowOffset(catchupOffset.longValue()); // There is no separate field called CATCHUP_OFFSET in checkpoint. // WINDOW_OFFSET is used to store the catchupOffset internalData.put(WINDOW_OFFSET, catchupOffset); } public void setBootstrapTargetScn(Long targetScn) { if (UNSET_BOOTSTRAP_TARGET_SCN != targetScn.longValue()) { if (targetScn < getBootstrapStartScn()) { throw new InvalidCheckpointException("bootstrap_target_scn cannot be smaller than bootstrap_start_scn", this); } if (!isSnapShotSourceCompleted()) { throw new InvalidCheckpointException("snapshot should be complete before setting bootstrap_target_scn", this); } } internalData.put(BOOTSTRAP_TARGET_SCN, targetScn); } public void setBootstrapSinceScn(Long sinceScn) { internalData.put(BOOTSTRAP_SINCE_SCN, sinceScn); } public long getWindowScn() { return currentWindowScn; } public Long getWindowOffset() { return currentWindowOffset; } public DbusClientMode getConsumptionMode() { return (DbusClientMode.valueOf((String) internalData.get(CONSUMPTION_MODE))); } public String getSnapshotSource() { return (String) internalData.get(SNAPSHOT_SOURCE); } public long getSnapshotFileRecordOffset() { return number2Long((Number)internalData.get(SNAPSHOT_FILE_RECORD_OFFSET), 
DEFAULT_SNAPSHOT_FILE_RECORD_OFFSET); } public void setSnapshotFileRecordOffset(long snapshotFileRecordOffset) { internalData.put(SNAPSHOT_FILE_RECORD_OFFSET, snapshotFileRecordOffset); } public String getStorageClusterName() { return (String) internalData.get(STORAGE_CLUSTER_NAME); } public void setStorageClusterName(String storageClusterName) { internalData.put(STORAGE_CLUSTER_NAME, storageClusterName); } private static Long number2Long(Number n, Long nullValue) { return (null == n) ? nullValue : (n instanceof Long) ? (Long)n : n.longValue(); } private static Integer number2Integer(Number n, Integer nullValue) { return (null == n) ? nullValue : (n instanceof Integer) ? (Integer)n : n.intValue(); } public Long getSnapshotOffset() { return number2Long((Number)internalData.get(SNAPSHOT_OFFSET), FULLY_CONSUMED_WINDOW_OFFSET); } public String getCatchupSource() { return (String) internalData.get(CATCHUP_SOURCE); } public Long getBootstrapStartScn() { Number n = ((Number) internalData.get(BOOTSTRAP_START_SCN)); return number2Long(n, UNSET_BOOTSTRAP_START_SCN); } public Long getBootstrapTargetScn() { Number n = ((Number) internalData.get(BOOTSTRAP_TARGET_SCN)); return number2Long(n, UNSET_BOOTSTRAP_TARGET_SCN); } public Integer getBootstrapSnapshotSourceIndex() { Number n = ((Number) internalData.get(BOOTSTRAP_SNAPSHOT_SOURCE_INDEX)); return number2Integer(n, UNSET_BOOTSTRAP_INDEX); } public Integer getBootstrapCatchupSourceIndex() { Number n = ((Number) internalData.get(BOOTSTRAP_CATCHUP_SOURCE_INDEX)); return number2Integer(n, UNSET_BOOTSTRAP_INDEX); } public Long getBootstrapSinceScn() { Number n = ((Number) internalData.get(BOOTSTRAP_SINCE_SCN)); return number2Long(n, UNSET_BOOTSTRAP_SINCE_SCN); } // TODO Deprecate and remove this method. See DDSDBUS-3070. 
// See toString() public void serialize(OutputStream outStream) throws JsonGenerationException, JsonMappingException, IOException { internalStateToMap(); mapper.writeValue(outStream, internalData); } // This is the method used by databus components to "serialize" a checkpoint for on-the-wire // transmission. @Override public String toString() { internalStateToMap(); try { return (mapper.writeValueAsString(internalData)); } catch (JsonGenerationException e) { LOG.error("JSON generation error: " + e.getMessage(), e); return ("JsonGenerationException while printing Checkpoint."); } catch (JsonMappingException e) { LOG.error("JSON mapping error: " + e.getMessage(), e); return ("JsonMappingException while printing Checkpoint."); } catch (IOException e) { LOG.error("JSON IO error: " + e.getMessage(), e); return ("IOException while printing Checkpoint."); } } public void startEvent() { } @Override public void onEvent(DbusEvent e, long offset, int size) { // Checkpoint doesn't use the offset in the buffer for anything (yet) onEvent(e); } public void onEvent(DbusEvent e) { if (e.isEndOfPeriodMarker()) { prevWindowScn = e.sequence(); endEvents(e.sequence(), e.timestampInNanos()); } else if (e.isCheckpointMessage()) { Checkpoint ckpt = null; try { ByteBuffer tmpBuffer = e.value(); byte[] valueBytes = new byte[tmpBuffer.limit()]; tmpBuffer.get(valueBytes); ckpt = new Checkpoint(new String(valueBytes, "UTF-8")); switch (this.getConsumptionMode()) { case BOOTSTRAP_SNAPSHOT: copyBootstrapSnapshotCheckpoint(ckpt); break; case BOOTSTRAP_CATCHUP: copyBootstrapCatchupCheckpoint(ckpt); break; case ONLINE_CONSUMPTION: copyOnlineCheckpoint(ckpt); break; default: throw new RuntimeException("Invalid checkpoint message received: " + this); } } catch (Exception exception) { LOG.error("Exception encountered while reading checkpiont from bootstrap service", exception); } finally { if (null != ckpt) ckpt.close(); } } else // regular dbusEvent { if (currentWindowScn == e.sequence()) { 
++currentWindowOffset; } else { currentWindowScn = e.sequence(); currentWindowOffset = 1L; } } if (LOG.isDebugEnabled()) LOG.info("CurrentWindowSCN : " + currentWindowScn + ", currentWindowOffset :" + currentWindowOffset + ", PrevSCN :" + prevWindowScn); } /** Copy data about bootstrap catchup consumption from another checkpoint */ protected void copyBootstrapCatchupCheckpoint(Checkpoint ckpt) { setWindowScn(ckpt.getWindowScn()); setWindowOffset(ckpt.getWindowOffset()); //setCatchupSource(ckpt.getCatchupSource()); //setBootstrapCatchupSourceIndex(ckpt.getBootstrapCatchupSourceIndex()); // Update file record offset. The storage_cluster_name is not updated, as it // is meant to be an invariant, once set setSnapshotFileRecordOffset(ckpt.getSnapshotFileRecordOffset()); } /** Copy data about bootstrap snapshot consumption from another checkpoint * TODO : This seems to be used only on the eventBuffer and on client side, * the lastCheckpoint is saved and then this method is invoked on itself. * This seems to be a no-op */ protected void copyBootstrapSnapshotCheckpoint(Checkpoint ckpt) { setSnapshotOffset(ckpt.getSnapshotOffset()); setSnapshotSource(ckpt.getBootstrapSnapshotSourceIndex(), ckpt.getSnapshotSource()); //setBootstrapSnapshotSourceIndex(ckpt.getBootstrapSnapshotSourceIndex()); // Update file record offset. 
The storage_cluster_name is not updated, as it // is meant to be an invariant, once set setSnapshotFileRecordOffset(ckpt.getSnapshotFileRecordOffset()); } /** Copy data about online consumption from another checkpoint */ private void copyOnlineCheckpoint(Checkpoint fromCkpt) { setWindowScn(fromCkpt.getWindowScn()); setWindowOffset(fromCkpt.getWindowOffset()); } private void endEvents(long endWindowScn, long nsecs) { setFullyConsumed(endWindowScn); setTsNsecs(nsecs); } private void setFullyConsumed(long endWindowScn) { currentWindowOffset = FULLY_CONSUMED_WINDOW_OFFSET; this.clearWindowOffset(); this.setWindowScn(endWindowScn); } public void onSnapshotEvent(long snapshotOffset) { snapShotOffset = snapshotOffset; } public void onCatchupEvent(long eventWindowScn, long catchupOffset) { currentWindowScn = eventWindowScn; currentWindowOffset = catchupOffset; } public void startSnapShotSource() { setSnapshotOffset(0); } public void endSnapShotSource() { this.setSnapshotOffset(-1); } public boolean isSnapShotSourceCompleted() { return ((this.getSnapshotOffset() == -1) ? 
true : false); } public void startCatchupSource() { setWindowOffset(0); setWindowScn(getBootstrapStartScn()); } public void endCatchupSource() { setFullyConsumed(currentWindowScn); this.setWindowOffset(FULLY_CONSUMED_WINDOW_OFFSET); } public boolean isCatchupSourceCompleted() { return (this.getWindowOffset() == FULLY_CONSUMED_WINDOW_OFFSET); } public void bootstrapCheckPoint() { if (this.getConsumptionMode() == DbusClientMode.BOOTSTRAP_CATCHUP) { this.setWindowOffset(currentWindowOffset); this.setWindowScn(currentWindowScn); } else if (this.getConsumptionMode() == DbusClientMode.BOOTSTRAP_SNAPSHOT) { this.setSnapshotOffset(snapShotOffset); } } private void clearWindowOffset() { internalData.remove(WINDOW_OFFSET); } public void checkPoint() { if (currentWindowScn >= 0) { this.setWindowScn(currentWindowScn); } if (currentWindowOffset >= 0) { this.setWindowOffset(currentWindowOffset); } } public boolean isPartialWindow() { return currentWindowOffset >= 0; } /** @deprecated Please use {@link Checkpoint#init()}*/ @Deprecated public void setInit() { setConsumptionMode(DbusClientMode.INIT); } /** Checks if the checkpoint is in initialized state, i.e. empty. */ public boolean getInit() { return (getConsumptionMode() == DbusClientMode.INIT); } /** Converts a checkpoint to a flexible online-consumption checkpoint. 
*/ public void setFlexible() { setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION); setWindowScn(-1L); setTsNsecs(UNSET_TS_NSECS); } public boolean getFlexible() { if ((getConsumptionMode() == DbusClientMode.ONLINE_CONSUMPTION) && (getWindowScn() < 0) && getTsNsecs() == UNSET_TS_NSECS) { return true; } else { return false; } } public void clearBootstrapStartTsNsecs() { setBootstrapStartNsecs(UNSET_BOOTSTRAP_START_NSECS); } public void clearBootstrapSinceScn() { setBootstrapSinceScn(Long.valueOf(UNSET_BOOTSTRAP_SINCE_SCN)); } public void clearBootstrapStartScn() { setBootstrapStartScn(Long.valueOf(UNSET_BOOTSTRAP_START_SCN)); } public void clearBootstrapTargetScn() { setBootstrapTargetScn(Long.valueOf(UNSET_BOOTSTRAP_TARGET_SCN)); } public boolean isBootstrapStartScnSet() { return (null != getBootstrapStartScn() && UNSET_BOOTSTRAP_START_SCN != getBootstrapStartScn().longValue()); } public boolean isBootstrapTargetScnSet() { return (null != getBootstrapTargetScn() && UNSET_BOOTSTRAP_TARGET_SCN != getBootstrapTargetScn().longValue()); } public boolean isBootstrapSinceScnSet() { return (null != getBootstrapSinceScn() && UNSET_BOOTSTRAP_SINCE_SCN != getBootstrapSinceScn().longValue()); } /* * reset bootstrap specific values in the checkpoint */ public void resetBootstrap() { clearBootstrapSinceScn(); clearSnapshotOffset(); setWindowOffset(FULLY_CONSUMED_WINDOW_OFFSET); clearBootstrapStartScn(); clearBootstrapTargetScn(); setBootstrapSnapshotSourceIndex(UNSET_BOOTSTRAP_INDEX); setBootstrapCatchupSourceIndex(UNSET_BOOTSTRAP_INDEX); setBootstrapServerInfo(null); setSnapshotFileRecordOffset(DEFAULT_SNAPSHOT_FILE_RECORD_OFFSET); setStorageClusterName(""); clearBootstrapStartTsNsecs(); } /** * Resets the bootstrap checkpoint to consume events from a new bootstrap server * This method must be invoked on the client whenever a connection is made to a * bootstrap server that is different from the one serving so far. 
*/ protected void resetForServerChange() { setConsumptionMode(DbusClientMode.BOOTSTRAP_SNAPSHOT); setSnapshotOffset(0L); setWindowOffset(FULLY_CONSUMED_WINDOW_OFFSET); setWindowScn(getBootstrapSinceScn()); clearBootstrapStartScn(); clearBootstrapTargetScn(); setBootstrapSnapshotSourceIndex(UNSET_BOOTSTRAP_INDEX); setBootstrapCatchupSourceIndex(UNSET_BOOTSTRAP_INDEX); setBootstrapServerInfo(null); setSnapshotFileRecordOffset(DEFAULT_SNAPSHOT_FILE_RECORD_OFFSET); setStorageClusterName(""); } /** Remove IOException javac warnings */ @Override public void close() { } @Override public Checkpoint clone() { Checkpoint ckpt = new Checkpoint(); ckpt.currentWindowOffset = currentWindowOffset; ckpt.currentWindowScn = currentWindowScn; ckpt.prevWindowScn = prevWindowScn; ckpt.snapShotOffset = snapShotOffset; for (Map.Entry<String, Object> srcEntry: internalData.entrySet()) { ckpt.internalData.put(srcEntry.getKey(), srcEntry.getValue()); } return ckpt; } /* Helper factory methods */ /** * Creates a time-based checkpoint. * * A very nice API to have for the clients, when we provide the use case for a registration to * start receiving relay events X hours before registration time (i,e. neither from the beginning of * buffer, nor from latest point). public static Checkpoint createTimeBasedCheckpoint(long nsecs) throws DatabusRuntimeException { if (nsecs <= UNSET_TS_NSECS) { throw new DatabusRuntimeException("Invalid value for timestamp:" + nsecs); } Checkpoint cp = new Checkpoint(); cp.setTsNsecs(nsecs); cp.setWindowScn(WINDOW_SCN_FOR_PURE_TIMEBASED_CKPT); return cp; } */ /** * Creates a flexible online-consumption checkpoint. * @return the new checkpoint */ public static Checkpoint createFlexibleCheckpoint() { Checkpoint cp = new Checkpoint(); cp.setFlexible(); return cp; } /** * Creates a simple online-consumption checkpoint for a given SCN. 
* @param lastConsumedScn the sequence number of the last fully consumed window * @return the new checkpoint */ public static Checkpoint createOnlineConsumptionCheckpoint(long lastConsumedScn) { if (lastConsumedScn < 0) { throw new InvalidCheckpointException("scn must be non-negative: " + lastConsumedScn, null); } Checkpoint cp = new Checkpoint(); cp.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION); cp.setWindowScn(lastConsumedScn); cp.setPrevScn(lastConsumedScn); cp.setWindowOffset(FULLY_CONSUMED_WINDOW_OFFSET); return cp; } /** * Creates an online checkpoint with timestamp and SCN. See DDSDBUS-3332 * @param lastConsumedScn the sequence number of the last fully consumed window * @param tsNanos the timestamp, if available, of the last fully consumed window. */ public static Checkpoint createOnlineConsumptionCheckpoint(long lastConsumedScn, long tsNanos) { Checkpoint cp = createOnlineConsumptionCheckpoint(lastConsumedScn); cp.setTsNsecs(tsNanos); return cp; } @Override public boolean equals(Object other) { if (null == other) return false; if (this == other) return true; if (!(other instanceof Checkpoint)) return false; Checkpoint otherCp = (Checkpoint)other; boolean success = (currentWindowScn == otherCp.currentWindowScn && prevWindowScn == otherCp.prevWindowScn && currentWindowOffset == otherCp.currentWindowOffset && snapShotOffset == otherCp.getSnapshotOffset()); if (success) { //Unfortunately, we cannot use the the Map.equals() method. //If a checkpoint is deserialized from a string, the ObjectMapper may create Integer objects for some fields while //the other checkpoint may have Longs. For java, Integer(-1) != Long(-1). Go figure. 
for (Map.Entry<String, Object> e: internalData.entrySet()) { String k = e.getKey(); Object v = e.getValue(); Object otherV = otherCp.internalData.get(k); if (v instanceof Number) { success = (otherV instanceof Number) && (((Number) v).longValue() == ((Number)otherV).longValue()); } else { success = v.equals(otherV); } if (!success) break; } } return success; } @Override public int hashCode() { long lhash = Fnv1aHashImpl.init32(); final DbusClientMode mode = getConsumptionMode(); lhash = Fnv1aHashImpl.addInt32(lhash, mode.ordinal()); lhash = Fnv1aHashImpl.addLong32(lhash, currentWindowScn); lhash = Fnv1aHashImpl.addLong32(lhash, prevWindowScn); lhash = Fnv1aHashImpl.addLong32(lhash, currentWindowOffset); lhash = Fnv1aHashImpl.addLong32(lhash, getTsNsecs()); if (DbusClientMode.BOOTSTRAP_CATCHUP == mode || DbusClientMode.BOOTSTRAP_SNAPSHOT == mode) { lhash = Fnv1aHashImpl.addLong32(lhash, snapShotOffset); lhash = Fnv1aHashImpl.addLong32(lhash, getBootstrapSinceScn().longValue()); lhash = Fnv1aHashImpl.addLong32(lhash, getBootstrapStartScn().longValue()); lhash = Fnv1aHashImpl.addLong32(lhash, getBootstrapTargetScn().longValue()); lhash = Fnv1aHashImpl.addLong32(lhash, getBootstrapCatchupSourceIndex()); lhash = Fnv1aHashImpl.addLong32(lhash, getBootstrapSnapshotSourceIndex()); lhash = Fnv1aHashImpl.addLong32(lhash, getSnapshotFileRecordOffset()); lhash = Fnv1aHashImpl.addLong32(lhash, getBootstrapStartNsecs()); } return Fnv1aHashImpl.getHash32(lhash); } /** * Checks invariants for a checkpoint. 
* @return true; this is so one can write "assert assertCheckpoint()" if they want control if the assert is to be run * @throws InvalidCheckpointException if the validation fails */ public boolean assertCheckpoint() { switch (getConsumptionMode()) { case INIT: return true; case ONLINE_CONSUMPTION: return assertOnlineCheckpoint(); case BOOTSTRAP_SNAPSHOT: return assertSnapshotCheckpoint(); case BOOTSTRAP_CATCHUP: return assertCatchupCheckpoint(); default: throw new InvalidCheckpointException("unknown checkpoint type", this); } } private boolean assertCatchupCheckpoint() { assertCatchupSourceIndex(); if (! isBootstrapSinceScnSet()) { throw new InvalidCheckpointException("bootstrap_since_scn must be set", this); } if (! isBootstrapStartScnSet()) { throw new InvalidCheckpointException("bootstrap_start_scn must be set", this); } if (! isBootstrapTargetScnSet()) { throw new InvalidCheckpointException("bootstrap_target_scn must be set", this); } if (! isSnapShotSourceCompleted()) { throw new InvalidCheckpointException("bootstrap_snapshot_offset must be -1 for CATCHUP checkpoints", this); } if (getBootstrapTargetScn() < getBootstrapStartScn()) { throw new InvalidCheckpointException("bootstrap_target_scn < getbootstrap_start_scn", this); } // If offset is set, then clusterName cannot be empty if (getSnapshotFileRecordOffset() != DEFAULT_SNAPSHOT_FILE_RECORD_OFFSET) { if (getStorageClusterName().isEmpty()) { throw new InvalidCheckpointException("snapshot file record offset cannot be set when storage cluster name is empty", this); } } return true; } private boolean assertSnapshotCheckpoint() { if (0 != getBootstrapCatchupSourceIndex()) { throw new InvalidCheckpointException("bootstrap_catchup_source_index must be 0", this); } if (! isBootstrapSinceScnSet()) { throw new InvalidCheckpointException("bootstrap_since_scn must be set", this); } if (! 
isBootstrapStartScnSet()) { //we allow bootstrap_start_scn not to be set only in the beginning of the bootstrap before any //data has been read if (0 != getBootstrapSnapshotSourceIndex()) { throw new InvalidCheckpointException("bootstrap_snapshot_source_index must be 0 when bootstrap_start_scn is not set", this); } if (0 != getSnapshotOffset()) { throw new InvalidCheckpointException("snapshot_offset must be 0 when bootstrap_start_scn is not set", this); } if (isBootstrapTargetScnSet()) { throw new InvalidCheckpointException("bootstrap_target_scn cannot be set when bootstrap_start_scn is not set", this); } } // If offset is set, then clusterName cannot be empty if (getSnapshotFileRecordOffset() != DEFAULT_SNAPSHOT_FILE_RECORD_OFFSET) { if (getStorageClusterName().isEmpty()) { throw new InvalidCheckpointException("snapshot file record offset cannot be set when storage cluster name is empty", this); } } return true; } private boolean assertOnlineCheckpoint() { if (getFlexible()) { long tsNsecs = getTsNsecs(); // tsNsecs should be unset. 
if (tsNsecs != UNSET_TS_NSECS) { throw new InvalidCheckpointException("unexpected tsNsecs:" + tsNsecs, this); } return true; } if (getWindowScn() < 0) { throw new InvalidCheckpointException("unexpected windowScn: " + getWindowScn(), this); } final long ofs = getWindowOffset(); if (ofs < 0 && FULLY_CONSUMED_WINDOW_OFFSET != ofs) { throw new InvalidCheckpointException("unexpected windowOfs: " + getWindowOffset(), this); } if (FULLY_CONSUMED_WINDOW_OFFSET == ofs && UNSET_ONLINE_PREVSCN != getPrevScn() && getPrevScn() != getWindowScn()) { throw new InvalidCheckpointException("prevScn != windowScn for a fully consumed window ", this); } if (getPrevScn() > getWindowScn()) { throw new InvalidCheckpointException("prevScn > windowScn", this); } return true; } private void assertCatchupSourceIndex() { final int catchupSourceIndex = getBootstrapCatchupSourceIndex(); final int snapshotSourceIndex = getBootstrapSnapshotSourceIndex(); if (0 > catchupSourceIndex || catchupSourceIndex > snapshotSourceIndex) { throw new InvalidCheckpointException("invalid catchup source index for using sources ", this); } } }
15,174
3,372
/* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.sagemaker.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Specifies an authentication configuration for the private docker registry where your model image is hosted. Specify a * value for this property only if you specified <code>Vpc</code> as the value for the <code>RepositoryAccessMode</code> * field of the <code>ImageConfig</code> object that you passed to a call to <code>CreateModel</code> and the private * Docker registry where the model image is hosted requires authentication. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/RepositoryAuthConfig" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class RepositoryAuthConfig implements Serializable, Cloneable, StructuredPojo { /** * <p> * The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. For information about how to create * an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda function * with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. 
* </p> */ private String repositoryCredentialsProviderArn; /** * <p> * The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. For information about how to create * an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda function * with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. * </p> * * @param repositoryCredentialsProviderArn * The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. For information about how to * create an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda * function with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. */ public void setRepositoryCredentialsProviderArn(String repositoryCredentialsProviderArn) { this.repositoryCredentialsProviderArn = repositoryCredentialsProviderArn; } /** * <p> * The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. For information about how to create * an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda function * with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. * </p> * * @return The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. 
For information about how * to create an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda * function with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. */ public String getRepositoryCredentialsProviderArn() { return this.repositoryCredentialsProviderArn; } /** * <p> * The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. For information about how to create * an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda function * with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. * </p> * * @param repositoryCredentialsProviderArn * The Amazon Resource Name (ARN) of an Amazon Web Services Lambda function that provides credentials to * authenticate to the private Docker registry where your model image is hosted. For information about how to * create an Amazon Web Services Lambda function, see <a * href="https://docs.aws.amazon.com/lambda/latest/dg/getting-started-create-function.html">Create a Lambda * function with the console</a> in the <i>Amazon Web Services Lambda Developer Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public RepositoryAuthConfig withRepositoryCredentialsProviderArn(String repositoryCredentialsProviderArn) { setRepositoryCredentialsProviderArn(repositoryCredentialsProviderArn); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getRepositoryCredentialsProviderArn() != null) sb.append("RepositoryCredentialsProviderArn: ").append(getRepositoryCredentialsProviderArn()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof RepositoryAuthConfig == false) return false; RepositoryAuthConfig other = (RepositoryAuthConfig) obj; if (other.getRepositoryCredentialsProviderArn() == null ^ this.getRepositoryCredentialsProviderArn() == null) return false; if (other.getRepositoryCredentialsProviderArn() != null && other.getRepositoryCredentialsProviderArn().equals(this.getRepositoryCredentialsProviderArn()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getRepositoryCredentialsProviderArn() == null) ? 0 : getRepositoryCredentialsProviderArn().hashCode()); return hashCode; } @Override public RepositoryAuthConfig clone() { try { return (RepositoryAuthConfig) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.sagemaker.model.transform.RepositoryAuthConfigMarshaller.getInstance().marshall(this, protocolMarshaller); } }
2,731
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_slideshow.hxx" // must be first #include <canvas/debug.hxx> #include <tools/diagnose_ex.h> #include "viewbackgroundshape.hxx" #include "tools.hxx" #include <rtl/logfile.hxx> #include <rtl/math.hxx> #include <comphelper/anytostring.hxx> #include <cppuhelper/exc_hlp.hxx> #include <basegfx/polygon/b2dpolygontools.hxx> #include <basegfx/polygon/b2dpolygon.hxx> #include <basegfx/numeric/ftools.hxx> #include <basegfx/matrix/b2dhommatrix.hxx> #include <basegfx/matrix/b2dhommatrixtools.hxx> #include <com/sun/star/rendering/XCanvas.hpp> #include <canvas/verbosetrace.hxx> #include <canvas/canvastools.hxx> #include <cppcanvas/vclfactory.hxx> #include <cppcanvas/basegfxfactory.hxx> #include <cppcanvas/renderer.hxx> #include <cppcanvas/bitmap.hxx> using namespace ::com::sun::star; namespace slideshow { namespace internal { bool ViewBackgroundShape::prefetch( const ::cppcanvas::CanvasSharedPtr& rDestinationCanvas, const GDIMetaFileSharedPtr& rMtf ) const { RTL_LOGFILE_CONTEXT( aLog, 
"::presentation::internal::ViewBackgroundShape::prefetch()" ); ENSURE_OR_RETURN_FALSE( rMtf, "ViewBackgroundShape::prefetch(): no valid metafile!" ); const ::basegfx::B2DHomMatrix& rCanvasTransform( mpViewLayer->getTransformation() ); if( !mxBitmap.is() || rMtf != mpLastMtf || rCanvasTransform != maLastTransformation ) { // buffered bitmap is invalid, re-create // determine transformed page bounds ::basegfx::B2DRectangle aTmpRect; ::canvas::tools::calcTransformedRectBounds( aTmpRect, maBounds, rCanvasTransform ); // determine pixel size of bitmap (choose it one pixel // larger, as polygon rendering takes one pixel more // to the right and to the bottom) const ::basegfx::B2ISize aBmpSizePixel( ::basegfx::fround( aTmpRect.getRange().getX() + 1), ::basegfx::fround( aTmpRect.getRange().getY() + 1) ); // create a bitmap of appropriate size ::cppcanvas::BitmapSharedPtr pBitmap( ::cppcanvas::BaseGfxFactory::getInstance().createBitmap( rDestinationCanvas, aBmpSizePixel ) ); ENSURE_OR_THROW( pBitmap, "ViewBackgroundShape::prefetch(): Cannot create background bitmap" ); ::cppcanvas::BitmapCanvasSharedPtr pBitmapCanvas( pBitmap->getBitmapCanvas() ); ENSURE_OR_THROW( pBitmapCanvas, "ViewBackgroundShape::prefetch(): Cannot create background bitmap canvas" ); // clear bitmap initSlideBackground( pBitmapCanvas, aBmpSizePixel ); // apply linear part of destination canvas transformation (linear means in this context: // transformation without any translational components) ::basegfx::B2DHomMatrix aLinearTransform( rCanvasTransform ); aLinearTransform.set( 0, 2, 0.0 ); aLinearTransform.set( 1, 2, 0.0 ); pBitmapCanvas->setTransformation( aLinearTransform ); const basegfx::B2DHomMatrix aShapeTransform(basegfx::tools::createScaleTranslateB2DHomMatrix( maBounds.getWidth(), maBounds.getHeight(), maBounds.getMinX(), maBounds.getMinY())); ::cppcanvas::RendererSharedPtr pRenderer( ::cppcanvas::VCLFactory::getInstance().createRenderer( pBitmapCanvas, *rMtf.get(), 
::cppcanvas::Renderer::Parameters() ) ); ENSURE_OR_RETURN_FALSE( pRenderer, "ViewBackgroundShape::prefetch(): Could not create Renderer" ); pRenderer->setTransformation( aShapeTransform ); pRenderer->draw(); mxBitmap = pBitmap->getUNOBitmap(); } mpLastMtf = rMtf; maLastTransformation = rCanvasTransform; return mxBitmap.is(); } ViewBackgroundShape::ViewBackgroundShape( const ViewLayerSharedPtr& rViewLayer, const ::basegfx::B2DRectangle& rShapeBounds ) : mpViewLayer( rViewLayer ), mxBitmap(), mpLastMtf(), maLastTransformation(), maBounds( rShapeBounds ) { ENSURE_OR_THROW( mpViewLayer, "ViewBackgroundShape::ViewBackgroundShape(): Invalid View" ); ENSURE_OR_THROW( mpViewLayer->getCanvas(), "ViewBackgroundShape::ViewBackgroundShape(): Invalid ViewLayer canvas" ); } ViewLayerSharedPtr ViewBackgroundShape::getViewLayer() const { return mpViewLayer; } bool ViewBackgroundShape::render( const GDIMetaFileSharedPtr& rMtf ) const { RTL_LOGFILE_CONTEXT( aLog, "::presentation::internal::ViewBackgroundShape::draw()" ); const ::cppcanvas::CanvasSharedPtr& rDestinationCanvas( mpViewLayer->getCanvas() ); if( !prefetch( rDestinationCanvas, rMtf ) ) return false; ENSURE_OR_RETURN_FALSE( mxBitmap.is(), "ViewBackgroundShape::draw(): Invalid background bitmap" ); ::basegfx::B2DHomMatrix aTransform( mpViewLayer->getTransformation() ); // invert the linear part of the view transformation // (i.e. the view transformation without translational // components), to be able to leave the canvas // transformation intact (would otherwise destroy possible // clippings, as the clip polygon is relative to the view // coordinate system). 
// Zero out the translational components, then invert, leaving only the
// inverse of the linear (scale/rotate/shear) part of the view transform.
aTransform.set(0,2, 0.0 );
aTransform.set(1,2, 0.0 );
aTransform.invert();

// Carry the inverse linear view transform in the render state, so the
// pre-rendered background bitmap is drawn 1:1 in device pixels while the
// canvas' own view state (and thus any clip) stays intact.
rendering::RenderState aRenderState;
::canvas::tools::initRenderState( aRenderState );
::canvas::tools::setRenderStateTransform( aRenderState, aTransform );

try
{
    rDestinationCanvas->getUNOCanvas()->drawBitmap( mxBitmap, rDestinationCanvas->getViewState(), aRenderState );
}
catch( uno::Exception& )
{
    // Log the caught UNO exception's text and report failure to the caller.
    OSL_ENSURE( false, rtl::OUStringToOString( comphelper::anyToString( cppu::getCaughtException() ), RTL_TEXTENCODING_UTF8 ).getStr() );

    return false;
}

return true;
}
} // namespace internal
} // namespace slideshow
4,032
1,144
package de.metas.handlingunits.attribute;

/*
 * #%L
 * de.metas.handlingunits.base
 * %%
 * Copyright (C) 2015 metas GmbH
 * %%
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as
 * published by the Free Software Foundation, either version 2 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public
 * License along with this program. If not, see
 * <http://www.gnu.org/licenses/gpl-2.0.html>.
 * #L%
 */

import java.util.List;

import org.eevolution.model.I_PP_Cost_Collector;

import de.metas.handlingunits.model.I_M_HU_Attribute;
import de.metas.handlingunits.model.I_PP_Order_ProductAttribute;
import de.metas.handlingunits.model.I_PP_Order_Qty;
import de.metas.util.ISingletonService;

/**
 * DAO for {@link I_PP_Order_ProductAttribute} records, i.e. product attribute
 * values captured for a manufacturing (PP) order.
 */
public interface IPPOrderProductAttributeDAO extends ISingletonService
{
	/**
	 * Add PP_OrderProductAttributes for the cost collector, based on the values from the huAttributes.
	 *
	 * @param costCollector cost collector the new attribute records are linked to
	 * @param huAttributes HU attributes whose values are copied into the new records
	 */
	void addPPOrderProductAttributes(I_PP_Cost_Collector costCollector, List<I_M_HU_Attribute> huAttributes);

	/**
	 * Add PP_OrderProductAttributes based on the given huAttributes, linked to the
	 * given issue candidate (presumably analogous to
	 * {@link #addPPOrderProductAttributes(I_PP_Cost_Collector, List)} — confirm in implementation).
	 */
	void addPPOrderProductAttributesFromIssueCandidate(I_PP_Order_Qty issueCandidate, List<I_M_HU_Attribute> huAttributes);

	/**
	 * @param ppOrderId PP order whose attribute records shall be loaded
	 * @return the PP_Order_ProductAttribute entries for the given ppOrder if exist, EmptyList otherwise
	 */
	List<I_PP_Order_ProductAttribute> retrieveProductAttributesForPPOrder(int ppOrderId);

	/**
	 * Deactivate all PP_Order_ProductAttributes for the given cost collector.
	 *
	 * @param costCollectorId cost collector whose attribute records are deactivated
	 */
	void deactivateForCostCollector(int costCollectorId);

	/**
	 * Delete the attribute records matching both the given PP order and HU.
	 */
	void deleteForHU(int ppOrderId, int huId);
}
615
2,984
<gh_stars>1000+
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.example.maze;

import org.apache.calcite.linq4j.Enumerator;

import java.io.PrintWriter;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;

/** Maze generator. */
class Maze {
  private final int width;
  final int height;
  // Union-find parent array; cell i belongs to region(regions[i]).
  private final int[] regions;
  // ups[c] is true when the wall ABOVE cell c has been removed.
  private final boolean[] ups;
  // lefts[c] is true when the wall LEFT of cell c has been removed.
  private final boolean[] lefts;

  static final boolean DEBUG = false;
  // Shuffle biases (see shuffle()): favor long horizontal corridors /
  // spiral-ish layouts. Both disabled by default.
  private final boolean horizontal = false;
  private final boolean spiral = false;

  Maze(int width, int height) {
    this.width = width;
    this.height = height;
    // Initially every cell is its own region (all walls present).
    this.regions = new int[width * height];
    for (int i = 0; i < regions.length; i++) {
      regions[i] = i;
    }
    this.ups = new boolean[width * height + width];
    this.lefts = new boolean[width * height + 1];
  }

  // Finds the representative region of a cell, with path compression
  // (re-parents the cell directly under the root as it recurses).
  private int region(int cell) {
    int region = regions[cell];
    if (region == cell) {
      return region;
    }
    return regions[cell] = region(region);
  }

  /** Prints the maze. Results are like this:
   *
   * <blockquote>
   * +--+--+--+--+--+
   * |        |     |
   * +--+  +--+--+  +
   * |  |  |  |     |
   * +  +--+  +--+  +
   * |              |
   * +--+--+--+--+--+
   * </blockquote>
   *
   * @param pw Print writer
   * @param space Whether to put a space in each cell; if false, prints the
   *              region number of the cell */
  public void print(PrintWriter pw, boolean space) {
    pw.println();
    final StringBuilder b = new StringBuilder();
    final StringBuilder b2 = new StringBuilder();
    final CellContent cellContent;
    if (space) {
      cellContent = c -> "  ";
    } else {
      // Show the (left-padded, 2-char) region number of each cell.
      cellContent = c -> {
        String s = region(c) + "";
        return s.length() == 1 ? " " + s : s;
      };
    }
    for (int y = 0; y < height; y++) {
      // Each maze row becomes two text lines: wall line and cell line.
      row(cellContent, b, b2, y);
      pw.println(b.toString());
      pw.println(b2.toString());
      b.setLength(0);
      b2.setLength(0);
    }
    // Bottom border.
    for (int x = 0; x < width; x++) {
      pw.print("+--");
    }
    pw.println('+');
    pw.flush();
  }

  /** Generates a list of lines representing the maze in text form.
   *
   * <p>If {@code solutionSet} is non-null, cells on the solution path are
   * marked with {@code "* "}. */
  public Enumerator<String> enumerator(final Set<Integer> solutionSet) {
    final CellContent cellContent;
    if (solutionSet == null) {
      cellContent = CellContent.SPACE;
    } else {
      cellContent = c -> solutionSet.contains(c) ? "* " : "  ";
    }
    return new Enumerator<String>() {
      // i counts emitted text lines; even i = wall line, odd i = cell line.
      int i = -1;
      final StringBuilder b = new StringBuilder();
      final StringBuilder b2 = new StringBuilder();

      @Override public String current() {
        return i % 2 == 0 ? b.toString() : b2.toString();
      }

      @Override public boolean moveNext() {
        if (i >= height * 2) {
          return false;
        }
        ++i;
        if (i % 2 == 0) {
          // Compute the next maze row lazily, two text lines at a time.
          b.setLength(0);
          b2.setLength(0);
          row(cellContent, b, b2, i / 2);
        }
        return true;
      }

      @Override public void reset() {
        i = -1;
      }

      @Override public void close() {}
    };
  }

  /** Returns a pair of strings representing a row of the maze. */
  private void row(CellContent cellContent, StringBuilder b, StringBuilder b2,
      int y) {
    final int c0 = y * width;
    for (int x = 0; x < width; x++) {
      b.append('+');
      b.append(ups[c0 + x] ? "  " : "--");
    }
    b.append('+');
    // y == height is the (border-only) row below the last cell row; it has
    // no cell line. Reached only via enumerator().
    if (y == height) {
      return;
    }
    for (int x = 0; x < width; x++) {
      b2.append(lefts[c0 + x] ? ' ' : '|')
          .append(cellContent.get(c0 + x));
    }
    b2.append('|');
  }

  // Carves the maze: considers every interior wall in random order and
  // removes it iff the two cells it separates are in different regions
  // (classic randomized-Kruskal maze generation). Walls are encoded as
  // (cell << 1) | isUpWall in the candidates array. Returns this.
  public Maze layout(Random random, PrintWriter pw) {
    int[] candidates =
        new int[width * height - width
            + width * height - height];
    int z = 0;
    for (int y = 0, c = 0; y < height; y++) {
      for (int x = 0; x < width; x++) {
        if (x > 0) {
          candidates[z++] = c;
        }
        ++c;
        if (y > 0) {
          candidates[z++] = c;
        }
        ++c;
      }
    }
    assert z == candidates.length;
    shuffle(random, candidates);

    for (int candidate : candidates) {
      final boolean up = (candidate & 1) != 0;
      final int c = candidate >> 1;
      if (up) {
        int region = region(c - width);
        // make sure we are not joining the same region, that is, making
        // a cycle
        if (region(c) != region) {
          ups[c] = true;
          regions[regions[c]] = region;
          regions[c] = region;
          if (DEBUG) {
            pw.println("up " + c);
          }
        } else {
          if (DEBUG) {
            pw.println("cannot remove top wall at " + c);
          }
        }
      } else {
        int region = region(c - 1);
        // make sure we are not joining the same region, that is, making
        // a cycle
        if (region(c) != region) {
          lefts[c] = true;
          regions[regions[c]] = region;
          regions[c] = region;
          if (DEBUG) {
            pw.println("left " + c);
          }
        } else {
          if (DEBUG) {
            pw.println("cannot remove left wall at " + c);
          }
        }
      }
      if (DEBUG) {
        print(pw, false);
        print(pw, true);
      }
    }
    return this;
  }

  // Iterative depth-first search from cell (x, y) to the bottom-right cell.
  // Tries directions in the fixed order UP, LEFT, DOWN, RIGHT; BACKTRACK
  // pops the explicit stacks when all directions from a cell are exhausted.
  // Returns the cells on the found path, in order.
  Set<Integer> solve(int x, int y) {
    int c = y * width + x;
    final int target = regions.length - 1;
    Direction d = Direction.UP;
    final List<Integer> list = new ArrayList<>();
    final Deque<Direction> fromStack = new ArrayDeque<>();
    final Deque<Direction> directionStack = new ArrayDeque<>();
    // 'from' is the direction we entered the current cell from; it prevents
    // immediately walking back the way we came.
    Direction from = Direction.BACKTRACK;
    int cNext = 0;
    Direction dNext = Direction.UP;
    boolean move = false;
    for (;;) {
      switch (d) {
      case UP:
        // try to go up
        move = from != Direction.DOWN && ups[c];
        cNext = c - width;
        dNext = Direction.LEFT;
        break;
      case LEFT:
        // try to go left
        move = from != Direction.RIGHT && lefts[c];
        cNext = c - 1;
        dNext = Direction.DOWN;
        break;
      case DOWN:
        // try to go down
        move = from != Direction.UP
            && c + width < regions.length
            && ups[c + width];
        cNext = c + width;
        dNext = Direction.RIGHT;
        break;
      case RIGHT:
        move = from != Direction.LEFT
            && c % width < width - 1
            && lefts[c + 1];
        cNext = c + 1;
        dNext = Direction.BACKTRACK;
        break;
      case BACKTRACK:
        // Dead end: unwind until a cell with an untried direction remains.
        move = false;
        do {
          c = list.remove(list.size() - 1);
          dNext = directionStack.pop();
          from = fromStack.pop();
        } while (dNext == Direction.BACKTRACK);
        break;
      default:
        break;
      }
      if (move) {
        directionStack.push(dNext);
        fromStack.push(from);
        list.add(c);
        if (cNext == target) {
          list.add(cNext);
          return new LinkedHashSet<>(list);
        }
        from = d;
        d = Direction.UP;
        c = cNext;
      } else {
        d = dNext;
      }
    }
  }

  /** Direction. */
  private enum Direction {
    UP, LEFT, DOWN, RIGHT, BACKTRACK
  }

  /**
   * Randomly permutes the members of an array. Based on the Fisher-Yates
   * algorithm.
   *
   * @param random Random number generator
   * @param ints Array of integers to shuffle
   */
  private void shuffle(Random random, int[] ints) {
    for (int i = ints.length - 1; i > 0; i--) {
      int j = random.nextInt(i + 1);
      int t = ints[j];
      ints[j] = ints[i];
      ints[i] = t;
    }

    // move even walls (left) towards the start, so we end up with
    // long horizontal corridors
    if (horizontal) {
      for (int i = 2; i < ints.length; i++) {
        if (ints[i] % 2 == 0) {
          int j = random.nextInt(i);
          int t = ints[j];
          ints[j] = ints[i];
          ints[i] = t;
        }
      }
    }

    // move walls towards the edges towards the start
    if (spiral) {
      for (int z = 0; z < 5; z++) {
        for (int i = 2; i < ints.length; i++) {
          int x = ints[i] / 2 % width;
          int y = ints[i] / 2 / width;
          int xMin = Math.min(x, width - x);
          int yMin = Math.min(y, height - y);
          if (ints[i] % 2 == (xMin < yMin ? 1 : 0)) {
            int j = random.nextInt(i);
            int t = ints[j];
            ints[j] = ints[i];
            ints[i] = t;
          }
        }
      }
    }
  }

  /** Callback to get what to print in a particular cell. Must be two characters
   * long, usually two spaces. */
  interface CellContent {
    CellContent SPACE = c -> "  ";

    String get(int c);
  }
}
4,269
27,296
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef NW_APP_PROTOCOL_HANDLER_H_ #define NW_APP_PROTOCOL_HANDLER_H_ #include "base/basictypes.h" #include "base/compiler_specific.h" #include "base/files/file_path.h" #include "net/url_request/url_request_job_factory.h" class GURL; namespace net { class NetworkDelegate; class URLRequestJob; // Implements a ProtocolHandler for File jobs. If |network_delegate_| is NULL, // then all file requests will fail with ERR_ACCESS_DENIED. class AppProtocolHandler : public URLRequestJobFactory::ProtocolHandler { public: AppProtocolHandler(const base::FilePath& root); URLRequestJob* MaybeCreateJob( URLRequest* request, NetworkDelegate* network_delegate) const override; bool IsSafeRedirectTarget(const GURL& location) const override; private: base::FilePath root_path_; DISALLOW_COPY_AND_ASSIGN(AppProtocolHandler); }; } // namespace net #endif
348
356
<reponame>dilanbhalla/security-advisories<filename>ecosystem/i18next/325.json
{
  "id": 325,
  "created_at": "2017-03-14",
  "updated_at": "2017-03-14",
  "title": "Cross-Site Scripting",
  "author": {
    "name": "<NAME>",
    "website": null,
    "username": null
  },
  "module_name": "i18next",
  "publish_date": "2017-03-14",
  "cves": [],
  "vulnerable_versions": "<=1.10.2",
  "patched_versions": ">=1.10.3",
  "overview": "i18next is a language translation framework.\n\nBecause of how the interpolation is implemented, making replacements from the dictionary one at a time, untrusted user input can use the name of one of the dictionary keys to inject script into the browser.\n\nExample: \n```\nvar init = i18n.init({debug: true}, function(){\n var test = i18n.t('__firstName__ __lastName__', {\n escapeInterpolation: true,\n firstName: '__lastNameHTML__',\n lastName: '<script>',\n });\n console.log(test);\n});\n// equals \"<script> &lt;script&gt;\"\n```",
  "recommendation": "Upgrade to v1.10.3 or greater.",
  "references": [
    "https://github.com/i18next/i18next/pull/443"
  ],
  "cvss_vector": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:L/A:N",
  "cvss_score": 6.5,
  "coordinating_vendor": "^Lift Security"
}
512
449
<filename>trilinear_c/src/trilinear.h
/*
 * C interface for trilinear 3D-LUT interpolation on Torch TH float tensors.
 *
 * NOTE(review): semantics below are inferred from parameter names only —
 * confirm against the implementation in trilinear.c.
 */

/* Forward pass: presumably applies the 3D lookup table |lut| (of edge size
 * |lut_dim|, bin width |binsize|) to |image| and writes the result to
 * |output|. |width|/|height|/|batch| describe the image tensor. Returns an
 * int status code. */
int trilinear_forward(THFloatTensor * lut, THFloatTensor * image, THFloatTensor * output,
                      int lut_dim, int shift, float binsize,
                      int width, int height, int batch);

/* Backward pass: presumably accumulates the gradient w.r.t. the LUT into
 * |lut_grad| given the input |image| and upstream gradient |image_grad|.
 * Returns an int status code. */
int trilinear_backward(THFloatTensor * image, THFloatTensor * image_grad, THFloatTensor * lut_grad,
                       int lut_dim, int shift, float binsize,
                       int width, int height, int batch);
176
1,711
/**
 *
 */
package org.squirrelframework.foundation.fsm;

import static org.junit.Assert.assertEquals;

import java.util.concurrent.CyclicBarrier;

import org.junit.Test;
import org.squirrelframework.foundation.fsm.annotation.StateMachineParameters;
import org.squirrelframework.foundation.fsm.impl.AbstractUntypedStateMachine;
import org.squirrelframework.foundation.fsm.threadsafe.BarrierThread;

/**
 * Performance smoke tests for the untyped state machine: fires a large number
 * of transitions on two machines from two barrier-synchronized threads,
 * optionally with a {@link StateMachinePerformanceMonitor} attached, and
 * verifies the recorded transition/action counts.
 */
public class PerformanceTest {

    /** Events cycling the test machine D -> A -> B -> C -> D. */
    enum FSMEvent {
        ToA, ToB, ToC, ToD
    }

    @StateMachineParameters(stateType = String.class, eventType = FSMEvent.class, contextType = Integer.class)
    static class StateMachineSample extends AbstractUntypedStateMachine {
    }

    @Test(timeout = 10000)
    public void manyTransitions() {
        performTest(10000, false, null);
    }

    @Test
    public void manyTransitionWithMonitor() {
        performTest(1000, true, null);
    }

    /**
     * Runs {@code 4 * iterTimes} transitions on each of two state machines,
     * each machine driven from its own thread.
     *
     * @param iterTimes number of full A-B-C-D cycles fired per machine
     * @param addPerfMonitor whether to attach a performance monitor and
     *            verify the invocation counts afterwards
     * @param task optional extra work executed inside every transition action
     */
    void performTest(final int iterTimes, final boolean addPerfMonitor, final Runnable task) {
        UntypedStateMachineBuilder builder = StateMachineBuilderFactory.create(StateMachineSample.class);
        UntypedAnonymousAction action = new UntypedAnonymousAction() {
            @Override
            public void execute(Object from, Object to, Object event, Object context,
                    UntypedStateMachine stateMachine) {
                if (task != null)
                    task.run();
            }
        };
        builder.externalTransition().from("D").to("A").on(FSMEvent.ToA).perform(action);
        builder.externalTransition().from("A").to("B").on(FSMEvent.ToB).perform(action);
        builder.externalTransition().from("B").to("C").on(FSMEvent.ToC).perform(action);
        builder.externalTransition().from("C").to("D").on(FSMEvent.ToD).perform(action);

        final UntypedStateMachine fsm1 = builder.newStateMachine("D");
        final UntypedStateMachine fsm2 = builder.newStateMachine("D");

        Runnable showPerfResult = null;
        if (addPerfMonitor) {
            final StateMachinePerformanceMonitor performanceMonitor =
                    new StateMachinePerformanceMonitor(fsm1.getClass().getName());
            fsm1.addDeclarativeListener(performanceMonitor);
            fsm2.addDeclarativeListener(performanceMonitor);
            showPerfResult = new Runnable() {
                @Override
                public void run() {
                    fsm1.removeDeclarativeListener(performanceMonitor);
                    fsm2.removeDeclarativeListener(performanceMonitor);
                    StateMachinePerformanceModel perfModel = performanceMonitor.getPerfModel();
                    // 2 machines x 4 transitions per cycle x iterTimes cycles.
                    // Long arithmetic avoids int overflow for large iterTimes.
                    long totalTimes = 2L * 4L * iterTimes;
                    // JUnit convention is assertEquals(expected, actual); the
                    // original had the arguments swapped, which garbles
                    // failure messages.
                    assertEquals(totalTimes, perfModel.getTotalTransitionInvokedTimes());
                    assertEquals(totalTimes, perfModel.getTotalActionInvokedTimes());
                }
            };
        }

        // entryBarrier synchronizes the two worker threads' start;
        // exitBarrier additionally includes this thread, so await() below
        // blocks until both workers are done.
        CyclicBarrier entryBarrier = new CyclicBarrier(2);
        CyclicBarrier exitBarrier = new CyclicBarrier(3);
        new BarrierThread(newFireTask(fsm1, iterTimes), "Performance-Test-Thread-1",
                entryBarrier, exitBarrier).start();
        new BarrierThread(newFireTask(fsm2, iterTimes), "Performance-Test-Thread-2",
                entryBarrier, exitBarrier).start();

        try {
            exitBarrier.await();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        if (showPerfResult != null) {
            showPerfResult.run();
        }
    }

    /** Returns a task firing {@code iterTimes} full A-B-C-D cycles on {@code fsm}. */
    private static Runnable newFireTask(final UntypedStateMachine fsm, final int iterTimes) {
        return new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < iterTimes; i++) {
                    fsm.fire(FSMEvent.ToA, 10);
                    fsm.fire(FSMEvent.ToB, 10);
                    fsm.fire(FSMEvent.ToC, 10);
                    fsm.fire(FSMEvent.ToD, 10);
                }
            }
        };
    }
}
14,668
<gh_stars>1000+
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "gpu/vulkan/vulkan_image.h"

#include "base/logging.h"
#include "build/build_config.h"
#include "gpu/config/gpu_info_collector.h"
#include "gpu/config/gpu_test_config.h"
#include "gpu/ipc/service/gpu_memory_buffer_factory.h"
#include "gpu/vulkan/tests/basic_vulkan_test.h"
#include "gpu/vulkan/vulkan_device_queue.h"
#include "gpu/vulkan/vulkan_function_pointers.h"
#include "ui/gfx/geometry/rect.h"

#if defined(OS_ANDROID)
#include "base/android/android_hardware_buffer_compat.h"
#endif

namespace gpu {

namespace {

// Image formats exercised by every test below.
// TODO(penghuang): add more formats used by chrome.
const VkFormat kFormats[] = {
    VK_FORMAT_R8G8B8A8_UNORM,
    VK_FORMAT_B8G8R8A8_UNORM,
};

}  // namespace

using VulkanImageTest = BasicVulkanTest;

// Creates a plain (non-external) VkImage for each format and checks the
// properties VulkanImage reports.
TEST_F(VulkanImageTest, Create) {
  constexpr gfx::Size size(100, 100);
  constexpr VkImageUsageFlags usage =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  auto* device_queue = GetDeviceQueue();
  for (auto format : kFormats) {
    auto image = VulkanImage::Create(device_queue, size, format, usage);
    EXPECT_TRUE(image);
    EXPECT_EQ(image->size(), size);
    EXPECT_EQ(image->format(), format);
    EXPECT_GT(image->device_size(), 0u);
    EXPECT_EQ(image->image_tiling(), VK_IMAGE_TILING_OPTIMAL);
    EXPECT_NE(image->image(), static_cast<VkImage>(VK_NULL_HANDLE));
    EXPECT_NE(image->device_memory(),
              static_cast<VkDeviceMemory>(VK_NULL_HANDLE));
    // A non-external image advertises no external-memory handle types.
    EXPECT_EQ(image->handle_types(), 0u);
    image->Destroy();
  }
}

// Creates an image backed by exportable external memory and verifies that a
// handle of each OS-appropriate external-memory type can be extracted.
TEST_F(VulkanImageTest, CreateWithExternalMemory) {
  {
    // TODO(crbug.com/1069516) : Fails on current driver version on this bot.
    if (GPUTestBotConfig::CurrentConfigMatches("Win10"))
      return;
  }
  constexpr gfx::Size size(100, 100);
  constexpr VkImageUsageFlags usage =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  auto* device_queue = GetDeviceQueue();
  for (auto format : kFormats) {
    auto image =
        VulkanImage::CreateWithExternalMemory(device_queue, size, format,
                                              usage);
    EXPECT_TRUE(image);
    EXPECT_EQ(image->size(), size);
    EXPECT_EQ(image->format(), format);
    EXPECT_GT(image->device_size(), 0u);
    EXPECT_EQ(image->image_tiling(), VK_IMAGE_TILING_OPTIMAL);
    EXPECT_NE(image->image(), static_cast<VkImage>(VK_NULL_HANDLE));
    EXPECT_NE(image->device_memory(),
              static_cast<VkDeviceMemory>(VK_NULL_HANDLE));
#if defined(OS_POSIX)
    EXPECT_TRUE(image->handle_types() &
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT)
        << std::hex << "handle_types = 0x" << image->handle_types();
    const VkExternalMemoryHandleTypeFlagBits kHandleTypes[] = {
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
    };
    // Get fd for all supported types.
    for (auto handle_type : kHandleTypes) {
      if ((image->handle_types() & handle_type) == 0)
        continue;
      base::ScopedFD scoped_fd = image->GetMemoryFd(handle_type);
      EXPECT_TRUE(scoped_fd.is_valid())
          << std::hex << " handle_types = 0x" << image->handle_types()
          << " handle_type = 0x" << handle_type;
    }
#elif defined(OS_WIN)
    EXPECT_TRUE(image->handle_types() &
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT)
        << std::hex << "handle_types = 0x" << image->handle_types();
    const VkExternalMemoryHandleTypeFlagBits kHandleTypes[] = {
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
        VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
    };
    // Get fd for all supported types.
    for (auto handle_type : kHandleTypes) {
      if ((image->handle_types() & handle_type) == 0)
        continue;
      base::win::ScopedHandle scoped_handle = image->GetMemoryHandle(
          static_cast<VkExternalMemoryHandleTypeFlagBits>(handle_type));
      EXPECT_TRUE(scoped_handle.IsValid())
          << std::hex << " handle_types = 0x" << image->handle_types()
          << " handle_type = 0x" << handle_type;
    }
#elif defined(OS_FUCHSIA)
    EXPECT_TRUE(image->handle_types() &
                VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA);
    zx::vmo handle = image->GetMemoryZirconHandle();
    EXPECT_TRUE(handle);
#endif
    image->Destroy();
  }
}

#if defined(OS_ANDROID)
// Imports an AndroidHardwareBuffer-backed GpuMemoryBuffer into a VulkanImage
// for several buffer/VkFormat pairs. Skips (with a log) when AHB or the
// required Vulkan extension is unavailable.
TEST_F(VulkanImageTest, CreateFromGpuMemoryBufferHandle) {
  if (!base::AndroidHardwareBufferCompat::IsSupportAvailable()) {
    LOG(ERROR) << "AndroidHardwareBuffer is not supported";
    return;
  }
  auto* device_queue = GetDeviceQueue();
  if (!gfx::HasExtension(
          device_queue->enabled_extensions(),
          VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME)) {
    LOG(ERROR) << "Vulkan extension "
                  "VK_ANDROID_external_memory_android_hardware_buffer is not "
                  "supported";
    return;
  }

  auto factory = GpuMemoryBufferFactory::CreateNativeType(
      /*viz::VulkanContextProvider=*/nullptr);
  EXPECT_TRUE(factory);
  constexpr gfx::Size size(100, 100);
  constexpr VkImageUsageFlags usage =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  // gfx::BufferFormat and the VkFormat it is expected to map to.
  const struct {
    gfx::BufferFormat buffer;
    VkFormat vk;
  } formats[] = {
      {gfx::BufferFormat::RGBA_8888, VK_FORMAT_R8G8B8A8_UNORM},
      {gfx::BufferFormat::BGR_565, VK_FORMAT_R5G6B5_UNORM_PACK16},
      {gfx::BufferFormat::RGBA_F16, VK_FORMAT_R16G16B16A16_SFLOAT},
      {gfx::BufferFormat::RGBX_8888, VK_FORMAT_R8G8B8A8_UNORM},
      {gfx::BufferFormat::RGBA_1010102, VK_FORMAT_A2B10G10R10_UNORM_PACK32},
  };
  for (const auto format : formats) {
    gfx::GpuMemoryBufferId id(1);
    gfx::BufferUsage buffer_usage = gfx::BufferUsage::SCANOUT;
    int client_id = 1;
    auto gmb_handle = factory->CreateGpuMemoryBuffer(
        id, size,
        /*framebuffer_size=*/size, format.buffer, buffer_usage, client_id,
        kNullSurfaceHandle);
    EXPECT_TRUE(!gmb_handle.is_null());
    EXPECT_EQ(gmb_handle.type,
              gfx::GpuMemoryBufferType::ANDROID_HARDWARE_BUFFER);
    auto image = VulkanImage::CreateFromGpuMemoryBufferHandle(
        device_queue, std::move(gmb_handle), size, format.vk, usage,
        /*flags=*/0,
        /*image_tiling=*/VK_IMAGE_TILING_OPTIMAL,
        /*queue_family_index=*/VK_QUEUE_FAMILY_EXTERNAL);
    EXPECT_TRUE(image);
    EXPECT_EQ(image->size(), size);
    EXPECT_EQ(image->format(), format.vk);
    EXPECT_GT(image->device_size(), 0u);
    EXPECT_EQ(image->image_tiling(), VK_IMAGE_TILING_OPTIMAL);
    EXPECT_NE(image->image(), static_cast<VkImage>(VK_NULL_HANDLE));
    EXPECT_NE(image->device_memory(),
              static_cast<VkDeviceMemory>(VK_NULL_HANDLE));
    image->Destroy();
    factory->DestroyGpuMemoryBuffer(id, client_id);
  }
}
#endif

}  // namespace gpu
3,235
1,299
<gh_stars>1000+
/*
 * Copyright 2016 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package io.reactivex.netty.examples.tcp.loadbalancing;

import io.reactivex.netty.channel.Connection;
import io.reactivex.netty.client.ConnectionProvider;
import io.reactivex.netty.client.HostConnector;
import io.reactivex.netty.client.events.ClientEventListener;
import io.reactivex.netty.client.loadbalancer.HostHolder;
import io.reactivex.netty.client.loadbalancer.LoadBalancingStrategy;
import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.functions.Func1;

import java.net.SocketException;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Base class for "power of two choices" load balancing: for every connection
 * request two hosts are sampled and the one with the higher weight, as
 * reported by {@link #getWeight(ClientEventListener)}, is used. Connection
 * attempts failing with a {@link SocketException} are retried up to 3 times.
 *
 * @param <W> type of objects written on the connections
 * @param <R> type of objects read from the connections
 */
public abstract class AbstractLoadBalancer<W, R> implements LoadBalancingStrategy<W, R> {

    /** Maps the current host-list size to the next index to sample. */
    private final Func1<Integer, Integer> nextIndexFinder;

    protected AbstractLoadBalancer(Func1<Integer, Integer> nextIndexFinder) {
        this.nextIndexFinder = nextIndexFinder;
    }

    protected AbstractLoadBalancer() {
        this(new Func1<Integer, Integer>() {

            private final AtomicInteger nextIndex = new AtomicInteger();

            @Override
            public Integer call(Integer maxValue) {
                // Math.floorMod keeps the index in [0, maxValue) even after
                // the counter wraps past Integer.MAX_VALUE; the plain %
                // operator used previously yields a negative index once
                // nextIndex overflows, causing an
                // ArrayIndexOutOfBoundsException in hosts.get(...).
                return Math.floorMod(nextIndex.incrementAndGet(), maxValue);
            }
        });
    }

    /**
     * Builds a connection provider that, on every subscription, samples two
     * hosts and connects to the better-weighted one.
     *
     * NOTE(review): assumes {@code hosts} is non-empty; with an empty list
     * the index computation would fail — confirm callers guarantee this.
     */
    @Override
    public ConnectionProvider<W, R> newStrategy(final List<HostHolder<W, R>> hosts) {
        final int size = hosts.size();
        return () -> Observable.create((OnSubscribe<Connection<R, W>>) subscriber -> {
            ConnectionProvider<W, R> hostToUse;
            // Power-of-two-choices: pick two random hosts, use the better one.
            HostHolder<W, R> host1 = hosts.get(nextIndexFinder.call(size));
            HostHolder<W, R> host2 = hosts.get(nextIndexFinder.call(size));

            long weight1 = getWeight(host1.getEventListener());
            long weight2 = getWeight(host2.getEventListener());

            if (weight1 >= weight2) {
                hostToUse = host1.getConnector().getConnectionProvider();
            } else {
                hostToUse = host2.getConnector().getConnectionProvider();
            }

            hostToUse.newConnectionRequest().unsafeSubscribe(subscriber);
        }).retry((count, th) -> count < 3 && th instanceof SocketException);
    }

    @Override
    public final HostHolder<W, R> toHolder(HostConnector<W, R> connector) {
        return new HostHolder<>(connector, newListener());
    }

    /** Creates the per-host event listener used to derive the host's weight. */
    protected abstract ClientEventListener newListener();

    /** Returns the weight of a host; higher means more desirable. */
    protected abstract long getWeight(ClientEventListener eventListener);
}
1,091
1,144
/***************************************************************************** ** ** Name: app_wav.c ** ** Description: WAV file related functions ** ** Copyright (c) 2009-2012, Broadcom Corp., All Rights Reserved. ** Broadcom Bluetooth Core. Proprietary and confidential. ** *****************************************************************************/ #include <stdio.h> #include <sys/types.h> #include <sys/stat.h> #include <unistd.h> #include <fcntl.h> #include <string.h> #include <errno.h> #include "app_utils.h" #include "app_wav.h" typedef struct { BOOLEAN is_be; } tAPP_WAV_CB; tAPP_WAV_CB app_wav_cb = { FALSE }; #define APP_WAVE_HDR_SIZE 44 const unsigned char app_wav_hdr[APP_WAVE_HDR_SIZE] = { 'R', 'I', 'F', 'F', /* Chunk ID : "RIFF" */ '\0', '\0', '\0', '\0', /* Chunk size = file size - 8 */ 'W', 'A', 'V', 'E', /* Chunk format : "WAVE" */ 'f', 'm', 't', ' ', /* Subchunk ID : "fmt " */ 0x10, 0x00, 0x00, 0x00, /* Subchunk size : 16 for PCM format */ 0x01, 0x00, /* Audio format : 1 means PCM linear */ '\0', '\0', /* Number of channels */ '\0', '\0', '\0', '\0', /* Sample rate */ '\0', '\0', '\0', '\0', /* Byte rate = SampleRate * NumChannels * BitsPerSample/8 */ '\0', '\0', /* Blockalign = NumChannels * BitsPerSample/8 */ '\0', '\0', /* Bitpersample */ 'd', 'a', 't', 'a', /* Subchunk ID : "data" */ '\0', '\0', '\0', '\0' /* Subchunk size = NumSamples * NumChannels * BitsPerSample/8 */ }; /******************************************************************************* ** ** Function app_wav_format ** ** Description Read the WAV file header from an unopened file ** ** Parameters p_fname: name the file to parse ** p_format: format to fill with parsed values ** ** Returns 0 if successful, -1 in case of error ** *******************************************************************************/ int app_wav_format(const char *p_fname, tAPP_WAV_FILE_FORMAT *p_format) { int fd; fd = app_wav_open_file(p_fname, p_format); if (fd >= 0) { close(fd); return 0; } return -1; } 
/*******************************************************************************
 **
 ** Function        app_wav_read_data
 **
 ** Description     Read data from the WAV file
 **
 ** Parameters      fd: file descriptor of the file to read from
 **                 p_format: WAV format returned by app_wav_read_format
 **                 p_data: buffer to fill with the data
 **                 len: length to read
 **
 ** Returns         Number of bytes read, -1 in case of error, 0 if end of file
 **
 *******************************************************************************/
int app_wav_read_data(int fd, const tAPP_WAV_FILE_FORMAT *p_format, char *p_data, int len)
{
    int nbytes;
    int index;
    int bytes_per_sample = (p_format->bits_per_sample + 7)/8;
    unsigned short tmp16;
    unsigned short *tmp16_ptr;
    unsigned long tmp32;
    unsigned long *tmp32_ptr;

    nbytes = read(fd, p_data, len);

    /* Check that the buffer is aligned */
    if (((bytes_per_sample == 2) && ((size_t)p_data & 1)) ||
        ((bytes_per_sample == 4) && ((size_t)p_data & 3)))
    {
        APP_DEBUG1("Audio buffer start is not aligned on PCM sample (%d bytes) word boundary, this could degrade system perf (%p)",
                bytes_per_sample, p_data);
    }

    /* WAV stores PCM little-endian; on a big-endian host (is_be), swap each
     * sample in place after reading. */
    if (nbytes > 0 && p_format->codec == BSA_AV_CODEC_PCM)
    {
        switch (bytes_per_sample)
        {
        case 1:
            /* Single-byte samples need no swapping. */
            break;
        case 2:
            if (nbytes & 1)
            {
                APP_DEBUG1("Number of PCM samples read not multiple of sample size(%d)", bytes_per_sample);
            }
            if (app_wav_cb.is_be)
            {
                tmp16_ptr = (unsigned short *)p_data;
                for (index = 0; index < nbytes; index += 2)
                {
                    tmp16 = *tmp16_ptr;
                    *tmp16_ptr = (tmp16 << 8 | tmp16 >> 8);
                    tmp16_ptr++;
                }
            }
            break;
        case 4:
            if (nbytes & 3)
            {
                APP_DEBUG1("Number of PCM samples read not multiple of sample size(%d)", bytes_per_sample);
            }
            if (app_wav_cb.is_be)
            {
                tmp32_ptr = (unsigned long *)p_data;
                for (index = 0; index < nbytes; index += 4)
                {
                    /* Full 32-bit byte reversal. */
                    tmp32 = *tmp32_ptr;
                    *tmp32_ptr = (tmp32 >> 24) |
                                 ((tmp32 >> 8) & 0xff00) |
                                 ((tmp32 << 8) & 0xff0000) |
                                 ((tmp32 << 24) & 0xff000000);
                    tmp32_ptr++;
                }
            }
            break;
        default:
            APP_ERROR1("Sample size is not supported (%d)", bytes_per_sample);
            break;
        }
    }

    return nbytes;
}

/*******************************************************************************
 **
 ** Function        app_wav_open_file
 **
 ** Description     Open a WAV file and parse the header
 **
 ** Parameters      p_fname: name the file to parse
 **                 p_format: format to fill with parsed values
 **
 ** Returns         file descriptor if successful, -1 in case of error
 **
 *******************************************************************************/
int app_wav_open_file(const char *p_fname, tAPP_WAV_FILE_FORMAT *p_format)
{
    int fd;
    ssize_t totalsize;
    ssize_t size;
    unsigned char riff[12];
    unsigned char fmt_params[40];
    unsigned char data[8];
    off_t file_size;
    struct stat stat_info;
    unsigned long tmpU32;
    unsigned short tmpU16;

    /* Check dynamically if computer is little or big endian */
    tmpU32 = 1;
    if (*((char *)(&tmpU32)) == 0)
        app_wav_cb.is_be = TRUE;

    if (p_format == NULL)
    {
        APP_ERROR0("format is NULL");
        return -1;
    }
    if (p_fname == NULL)
    {
        APP_ERROR0("no filename");
        return -1;
    }

    if ((fd = open(p_fname, O_RDONLY)) < 0)
    {
        APP_ERROR1("open(%s) failed: %d", p_fname, errno);
        return -1;
    }

    if (fstat(fd, &stat_info) < 0)
    {
        APP_ERROR1("stat(%s) failed: %d", p_fname, errno);
        goto app_wav_open_file_error;
    }
    file_size = stat_info.st_size;

    /* Read the RIFF header */
    size = read(fd, riff, sizeof(riff));
    if (size < 0)
    {
        APP_ERROR1("read(%s) failed: %d", p_fname, errno);
        goto app_wav_open_file_error;
    }
    if (size != sizeof(riff))
    {
        APP_ERROR1("Length read does not match RIFF header (%d != %d)", (int)size, sizeof(riff));
        goto app_wav_open_file_error;
    }

    /* Check ChunkId field */
    if (memcmp(&riff[0], "RIFF", 4))
    {
        APP_ERROR0("RIFF not found");
        goto app_wav_open_file_error;
    }

    /* Read ChunkSize field (little-endian) */
    tmpU32 = riff[4];
    tmpU32 |= (riff[5] << 8);
    tmpU32 |= (riff[6] << 16);
    tmpU32 |= (unsigned long) (riff[7] << 24);

    /* Check ChunkSize field */
    if ((tmpU32 + 8) != (UINT32)file_size)
    {
        APP_DEBUG1("WARNING: RIFF chunk size (%lu + 8) does not match file size(%lu)", tmpU32, file_size);
    }

    /* Check WAVE ID field */
    if
(memcmp(&riff[8], "WAVE", 4)) { APP_ERROR0("WAVE not found"); goto app_wav_open_file_error; } /* RIFF header */ totalsize = 12; /* Invalid value to indicate format chunk was not parsed */ p_format->sample_rate = 0; /* Read all the chunks until data is found */ do { /* Read next chunk header */ size = read(fd, data, sizeof(data)); if (size < 0) { APP_ERROR1("read(%s) failed: %d", p_fname, errno); goto app_wav_open_file_error; } if (size != sizeof(data)) { APP_ERROR1("Length read does not match header (%d != %d)", (int)size, sizeof(data)); goto app_wav_open_file_error; } /* ChunkSize */ tmpU32 = data[4]; tmpU32 |= (data[5] << 8); tmpU32 |= (data[6] << 16); tmpU32 |= (unsigned long) (data[7] << 24); totalsize += 8 + tmpU32; /* Check ChunkId field */ if (!memcmp(&data[0], "fmt ", 4)) { if ((tmpU32 != 16) && (tmpU32 != 18) && (tmpU32 != 40)) { APP_DEBUG1("WARNING: format chunk size is not supported (%lu != (16, 18, 40))", tmpU32); goto app_wav_open_file_error; } /* Read the FMT params */ size = read(fd, fmt_params, tmpU32); if (size < 0) { APP_ERROR1("read(%s) failed: %d", p_fname, errno); goto app_wav_open_file_error; } if ((UINT32)size != tmpU32) { APP_ERROR1("Length read does not match FMT params (%d != %d)", (int)size, (int)tmpU32); goto app_wav_open_file_error; } /* Read AudioFormat field */ tmpU16 = fmt_params[0]; tmpU16 |= (fmt_params[1] << 8); /* Check AudioFormat field => 0x01 = PCM LINEAR, 0x25 = apt-X */ if (tmpU16 == 0x01) { p_format->codec = BSA_AV_CODEC_PCM; } else if (tmpU16 == 0x25) { p_format->codec = BSA_AV_CODEC_APTX; } else { APP_ERROR1("WAV audio format is not supported (%u)", tmpU16); goto app_wav_open_file_error; } /* Read NumChannels field */ tmpU16 = fmt_params[2]; tmpU16 |= (fmt_params[3] << 8); /* Check NumChannels field => 1:Mono 2:Stereo */ if (p_format->codec == BSA_AV_CODEC_APTX) { if (tmpU16 > 3) { APP_ERROR1("number of channels not supported in apt-X (%u)", tmpU16); goto app_wav_open_file_error; } if (tmpU16 > 1) { p_format->stereo_mode 
= tmpU16; tmpU16 = 2; } } else if (tmpU16 > 2) { APP_ERROR1("bad number of channels not supported (%u)", tmpU16); goto app_wav_open_file_error; } p_format->nb_channels = tmpU16; /* Read SampleRate field */ tmpU32 = fmt_params[4]; tmpU32 |= (fmt_params[5] << 8); tmpU32 |= (fmt_params[6] << 16); tmpU32 |= (unsigned long) (fmt_params[7] << 24); p_format->sample_rate = tmpU32; /* Read BitsPerSample field (read before ByteRate and BlockAlign) */ tmpU16 = fmt_params[14]; tmpU16 |= (fmt_params[15] << 8); p_format->bits_per_sample = tmpU16; /* Read ByteRate field */ tmpU32 = fmt_params[8]; tmpU32 |= (fmt_params[9] << 8); tmpU32 |= (fmt_params[10] << 16); tmpU32 |= (unsigned long)(fmt_params[11] << 24); /* Check ByteRate field: should be = SampleRate * NumChannels * BitsPerSample/8 */ if (tmpU32 != (p_format->sample_rate * p_format->nb_channels * p_format->bits_per_sample / 8)) { APP_DEBUG1("WARNING: byte rate does not match PCM rate calculation (%lu != (%lu * %u * %u / 8))", tmpU32, p_format->sample_rate, p_format->nb_channels, p_format->bits_per_sample); } /* Read BlockAlign field */ tmpU16 = fmt_params[12]; tmpU16 |= (fmt_params[13] << 8); /* Check BlockAlign field: should be NumChannels * BitsPerSample/8 */ if (tmpU16 != (p_format->nb_channels * p_format->bits_per_sample / 8)) { APP_ERROR1("Block alignment does not match calculation (%u != (%u * %u / 8))", tmpU16, p_format->nb_channels, p_format->bits_per_sample); goto app_wav_open_file_error; } } else if (!memcmp(&data[0], "fact", 4)) { if (tmpU32 > 8) { APP_ERROR1("FACT chunk size is not supported (%d)", (int)tmpU32); goto app_wav_open_file_error; } /* Bypass */ size = read(fd, data, tmpU32); if (size < 0) { APP_ERROR1("read(%s) failed: %d", p_fname, errno); goto app_wav_open_file_error; } if ((unsigned long)size != tmpU32) { APP_ERROR1("Length read does not match FACT chunk (%d != %d)", (int)size, sizeof(data)); goto app_wav_open_file_error; } } else if (!memcmp(&data[0], "LIST", 4) || !memcmp(&data[0], "cue ", 4)) { 
/* Bypass */ if (lseek(fd, tmpU32, SEEK_CUR) == -1) { APP_ERROR1("Failed bypassing the LIST chunk (%u)", tmpU32); goto app_wav_open_file_error; } if (size != sizeof(data)) { APP_ERROR1("Length read does not match LIST chunk (%d != %d)", (int)size, sizeof(data)); goto app_wav_open_file_error; } } else if (!memcmp(&data[0], "data", 4)) { /* data chunk specific: added byte not included in chunk size*/ if (tmpU32 & 1) { totalsize++; } /* Check size matches file size */ if (totalsize != file_size) { APP_DEBUG1("WARNING: RIFF size does not match file size (%lu != %lu))", file_size, totalsize); } } else { APP_ERROR1("unsupported chunk (%c%c%c%c)", data[0], data[1], data[2], data[3]); goto app_wav_open_file_error; } } while (memcmp(&data[0], "data", 4)); if (!p_format->sample_rate) { APP_ERROR0("Format chunk was not found in WAV file"); goto app_wav_open_file_error; } return fd; app_wav_open_file_error: close(fd); return -1; } /******************************************************************************* ** ** Function app_wav_create_file ** ** Description Create a wave file with proper header ** ** Parameters p_fname: name of the file to create ** flags: extra flags to add to O_RDWR and O_CREAT (O_EXCL for ** example if you want to prevent overwriting a file) ** ** Returns The file descriptor of the file created (-1 in case of error) ** *******************************************************************************/ int app_wav_create_file(const char *p_fname, int flags) { int fd, dummy; /* Create file in read/write mode, reset the length field */ flags |= O_RDWR | O_CREAT | O_TRUNC; fd = open(p_fname, flags, 0666); if (fd < 0) { if (!(flags & O_EXCL)) { APP_ERROR1("open(%s) failed: %d", p_fname, errno); } } else { APP_DEBUG1("created WAV file %s", p_fname); dummy = write(fd, app_wav_hdr, sizeof(app_wav_hdr)); (void)dummy; } return fd; } /******************************************************************************* ** ** Function app_wav_close_file ** ** Description 
Update WAV file header and close file ** ** Parameters fd: file descriptor ** p_format: structure containing the format of the samples ** ** Returns void ** *******************************************************************************/ void app_wav_close_file(int fd, const tAPP_WAV_FILE_FORMAT *p_format) { int data_size, dummy; unsigned char header[APP_WAVE_HDR_SIZE]; int chunk_size; int byte_rate; short block_align; if (fd < 0) { APP_ERROR0("Bad file descriptor"); return; } /* Copy the standard header */ memcpy(header, app_wav_hdr, sizeof(header)); /* Retrieve the size of the file */ data_size = lseek(fd, 0, SEEK_CUR); if (data_size < 0) { APP_ERROR0("Read current pointer failed"); data_size = APP_WAVE_HDR_SIZE; } /* Remove the standard header from the size of the file */ data_size -= APP_WAVE_HDR_SIZE; chunk_size = data_size + 36; if (p_format->bits_per_sample == 8) { byte_rate = p_format->nb_channels * sizeof(char) * p_format->sample_rate; block_align = p_format->nb_channels * sizeof(char); } else { byte_rate = p_format->nb_channels * sizeof(short) * p_format->sample_rate; block_align = p_format->nb_channels * sizeof(short); } header[4] = (unsigned char)chunk_size; header[5] = (unsigned char)(chunk_size >> 8); header[6] = (unsigned char)(chunk_size >> 16); header[7] = (unsigned char)(chunk_size >> 24); header[22] = (unsigned char)p_format->nb_channels; header[23] = 0; /* p_format->nb_channels is coded on 1 byte only */ header[24] = (unsigned char)p_format->sample_rate; header[25] = (unsigned char)(p_format->sample_rate >> 8); header[26] = (unsigned char)(p_format->sample_rate >> 16); header[27] = (unsigned char)(p_format->sample_rate >> 24); header[28] = (unsigned char)byte_rate; header[29] = (unsigned char)(byte_rate >> 8); header[30] = (unsigned char)(byte_rate >> 16); header[31] = (unsigned char)(byte_rate >> 24); header[32] = (unsigned char)block_align; header[33] = (unsigned char)(block_align >> 8); header[34] = (unsigned char)p_format->bits_per_sample; 
header[35] = (unsigned char)(p_format->bits_per_sample >> 8); header[40] = (unsigned char)data_size; header[41] = (unsigned char)(data_size >> 8); header[42] = (unsigned char)(data_size >> 16); header[43] = (unsigned char)(data_size >> 24); lseek(fd, 0, SEEK_SET); dummy = write(fd, header, sizeof(header)); (void)dummy; close(fd); }
9,004
5,788
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.shardingsphere.infra.config.datasource.creator.impl; import org.apache.shardingsphere.infra.config.datasource.DataSourceConfiguration; import javax.sql.DataSource; import java.lang.reflect.Method; import java.util.Map.Entry; /** * Default data source creator. */ public final class DefaultDataSourceCreator extends AbstractDataSourceCreator { @Override public DataSourceConfiguration createDataSourceConfiguration(final DataSource dataSource) { return buildDataSourceConfig(dataSource); } @Override public DataSource createDataSource(final DataSourceConfiguration dataSourceConfig) { DataSource result = buildDataSource(dataSourceConfig.getDataSourceClassName()); Method[] methods = result.getClass().getMethods(); for (Entry<String, Object> entry : dataSourceConfig.getAllProps().entrySet()) { setField(result, methods, entry.getKey(), entry.getValue()); } return result; } @Override public String getType() { return "Default"; } @Override public boolean isDefault() { return true; } }
596
550
package com.qiniu.rtc.service;

import com.qiniu.common.QiniuException;
import com.qiniu.http.Response;
import com.qiniu.rtc.model.AppParam;
import com.qiniu.util.Auth;

/**
 * Service wrapper for the QRTC "app" management endpoints.
 */
public class AppService extends AbstractService {

    /**
     * Initialize the service with the credentials used to sign API calls.
     *
     * @param auth credential provider
     */
    public AppService(Auth auth) {
        super(auth);
    }

    /**
     * Create an app.
     * <p>
     * Fields of {@code appParam}:
     * hub - optional live-streaming hub bound to the app, used for publishing
     * etc.; the hub and the app must belong to the same Qiniu account.
     * title - optional app name; note that the title is NOT a unique key, so
     * repeated create calls produce multiple apps.
     * maxUsers - optional int, maximum number of concurrent users in a room.
     * noAutoKickUser - optional bool, disables automatic kicking (stream
     * takeover). Defaults to false, i.e. a new request from the same identity
     * (app/room/user) succeeds and the old connection is closed.
     *
     * @return Response - if you do not read its body, remember to call close()
     * @throws QiniuException on API failure
     */
    public Response createApp(AppParam appParam) throws QiniuException {
        final String path = "/v3/apps";
        return postCall(appParam, path);
    }

    /**
     * Fetch the app information.
     *
     * @param appId id of the app that owns the rooms
     * @return Response - if you do not read its body, remember to call close()
     * @throws QiniuException on API failure
     */
    public Response getApp(String appId) throws QiniuException {
        final String path = "/v3/apps/%s";
        return getCall(path, appId);
    }

    /**
     * Delete an app.
     *
     * @param appId id of the app to delete
     * @throws QiniuException on API failure
     */
    public Response deleteApp(String appId) throws QiniuException {
        final String path = "/v3/apps/%s";
        return deleteCall(null, path, appId);
    }

    /**
     * Update app settings.
     * Note: the change only affects rooms created after this call; existing
     * rooms must be closed and re-created before the new settings apply.
     *
     * @param appParam new settings
     * @return Response - if you do not read its body, remember to call close()
     * @throws QiniuException on API failure
     */
    public Response updateApp(AppParam appParam) throws QiniuException {
        final String path = "/v3/apps";
        return postCall(appParam, path);
    }
}
1,212
308
<filename>OpenEdXMobile/src/main/java/org/edx/mobile/base/RuntimeApplication.java
package org.edx.mobile.base;

/**
 * Put any custom application configuration here.
 * This file will not be edited by edX unless absolutely necessary.
 */
public class RuntimeApplication extends MainApplication {

    /**
     * Runs once at process start, after the base {@link MainApplication}
     * initialization; the designated place for fork-specific setup.
     */
    @Override
    public void onCreate() {
        super.onCreate();
        // If you have any custom extensions, add them here.
    }
}
133
455
#!/usr/bin/env python3
# <reponame>gromit1811/network-lab<gh_stars>100-1000

"""Execute a bunch of HTTP requests using provided source/destinations and
log the result.

Should be run with a list of source/destination tuples as arguments. For
example::

    ./httprequests.py \
         203.0.113.105:38447,203.0.113.15:80 \
         203.0.113.106:38457,203.0.113.15:80 \
         203.0.113.106:31447,203.0.113.15:80 \
         203.0.113.106:28447,203.0.113.15:80 \
         203.0.113.105:38487,203.0.113.15:80 \
         203.0.113.105:38147,203.0.113.15:80 \
         203.0.113.105:31210,203.0.113.15:80

Could be used with ``xargs``::

    for i in $(seq 120 125); do
        for c in $(seq 1 2000); do
            echo 203.0.113.$i:$((RANDOM%10000 + 30000)),203.0.113.15:80
        done
    done \
        | sort | uniq \
        | xargs -n1000 ./httprequests.py \
        | awk '{print $NF}' \
        | sort | uniq -c

"""

import asyncio
import sys
import time

import aiohttp


async def fetch(source, destination):
    """HTTP request to the provided destination (IP, port) using the
    provided source (IP, port).

    Prints one result line with elapsed milliseconds, HTTP status and the
    first line of the response body.
    """
    host, port = destination
    url = f'http://{host}:{port}/'
    # Bind the client socket to the requested source address/port.
    conn = aiohttp.TCPConnector(local_addr=source)
    async with aiohttp.ClientSession(connector=conn) as client:
        # perf_counter() replaces the old module-global loop.time(): it is
        # monotonic and does not depend on the running event loop.
        start = time.perf_counter()
        async with client.get(url) as resp:
            status = resp.status
            got = await resp.text()
            got = got.splitlines()[0]
        end = time.perf_counter()
        elapsed = (end - start) * 1000
        print(f'{source[0]}:{source[1]} → {host}:{port} : '
              f'{int(elapsed)}ms {status} {got}')


async def main(pairs):
    """Run all requests concurrently.

    asyncio.gather() accepts coroutines directly; the previous
    asyncio.wait([...]) call stopped accepting bare coroutines in
    Python 3.11 and would raise TypeError there.
    """
    await asyncio.gather(*(fetch(source, destination)
                           for source, destination in pairs))


def parse_pair(pair):
    """Parse ``'ip:port,ip:port'`` into ``((ip, port), (ip, port))``."""
    source, destination = pair.split(",")
    ip, port = source.split(":")
    source = (ip, int(port))
    ip, port = destination.split(":")
    destination = (ip, int(port))
    return (source, destination)


pairs = (parse_pair(arg) for arg in sys.argv[1:])
# asyncio.run() owns loop creation/teardown; get_event_loop() +
# run_until_complete is the deprecated pre-3.7 pattern.
asyncio.run(main(pairs))
954
2,151
<reponame>zipated/src<filename>components/cryptauth/remote_device.h<gh_stars>1000+
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef COMPONENTS_CRYPTAUTH_REMOTE_DEVICE_H_
#define COMPONENTS_CRYPTAUTH_REMOTE_DEVICE_H_

#include <map>
#include <string>
#include <vector>

#include "components/cryptauth/proto/cryptauth_api.pb.h"
#include "components/cryptauth/software_feature_state.h"

namespace cryptauth {

// Value type describing a remote device known to CryptAuth, keyed by its
// public key (see GenerateDeviceId / GetDeviceId).
struct RemoteDevice {
 public:
  // Generates the device ID for a device given its public key.
  static std::string GenerateDeviceId(const std::string& public_key);

  std::string user_id;
  std::string name;
  // Identity key; also the basis of the device ID and of operator<.
  std::string public_key;
  std::string persistent_symmetric_key;
  bool unlock_key;
  bool supports_mobile_hotspot;
  int64_t last_update_time_millis;
  // Per-feature enablement/support state for this device.
  std::map<SoftwareFeature, SoftwareFeatureState> software_features;

  // Note: To save space, the BeaconSeeds may not necessarily be included in
  // this object. |are_beacon_seeds_loaded| is only set by LoadBeaconSeeds().
  bool are_beacon_seeds_loaded = false;
  std::vector<BeaconSeed> beacon_seeds;

  RemoteDevice();
  RemoteDevice(
      const std::string& user_id,
      const std::string& name,
      const std::string& public_key,
      const std::string& persistent_symmetric_key,
      bool unlock_key,
      bool supports_mobile_hotspot,
      int64_t last_update_time_millis,
      const std::map<SoftwareFeature, SoftwareFeatureState>& software_features);
  RemoteDevice(const RemoteDevice& other);
  ~RemoteDevice();

  // Loads a vector of BeaconSeeds for the RemoteDevice.
  void LoadBeaconSeeds(const std::vector<BeaconSeed>& beacon_seeds);

  std::string GetDeviceId() const;

  bool operator==(const RemoteDevice& other) const;

  // Compares devices via their public keys. Note that this function is
  // necessary in order to use |RemoteDevice| as a key of a std::map.
  bool operator<(const RemoteDevice& other) const;
};

typedef std::vector<RemoteDevice> RemoteDeviceList;

}  // namespace cryptauth

#endif  // COMPONENTS_CRYPTAUTH_REMOTE_DEVICE_H_
692
1,695
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.sql.planner.iterative.rule;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.trino.Session;
import io.trino.metadata.Metadata;
import io.trino.metadata.ResolvedFunction;
import io.trino.operator.scalar.ArrayDistinctFunction;
import io.trino.operator.scalar.ArraySortFunction;
import io.trino.spi.type.Type;
import io.trino.sql.PlannerContext;
import io.trino.sql.planner.FunctionCallBuilder;
import io.trino.sql.planner.iterative.Rule;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.ExpressionTreeRewriter;
import io.trino.sql.tree.FunctionCall;
import io.trino.sql.tree.QualifiedName;
import io.trino.sql.tree.SymbolReference;

import java.util.List;
import java.util.Set;

import static com.google.common.collect.Iterables.getOnlyElement;

/**
 * Expression rewrite rule set that transforms
 * {@code array_distinct(array_sort(x [, comparator]))} into
 * {@code array_sort(array_distinct(x) [, comparator])}, i.e. de-duplicates
 * before sorting (sorting a de-duplicated array cannot be more work than
 * sorting the original, and the result is identical).
 */
public class ArraySortAfterArrayDistinct
        extends ExpressionRewriteRuleSet
{
    public ArraySortAfterArrayDistinct(PlannerContext plannerContext)
    {
        super((expression, context) -> rewrite(expression, context, plannerContext.getMetadata()));
    }

    @Override
    public Set<Rule<?>> rules()
    {
        // Apply the rewrite in every plan-node context that can hold scalar
        // expressions.
        return ImmutableSet.of(
                projectExpressionRewrite(),
                filterExpressionRewrite(),
                joinExpressionRewrite(),
                valuesExpressionRewrite(),
                patternRecognitionExpressionRewrite());
    }

    private static Expression rewrite(Expression expression, Rule.Context context, Metadata metadata)
    {
        // Bare column references cannot contain function calls; skip the walk.
        if (expression instanceof SymbolReference) {
            return expression;
        }
        Session session = context.getSession();
        return ExpressionTreeRewriter.rewriteWith(new Visitor(metadata, session), expression);
    }

    /**
     * Bottom-up rewriter that recognizes the array_distinct(array_sort(...))
     * shape and swaps the two calls.
     */
    private static class Visitor
            extends io.trino.sql.tree.ExpressionRewriter<Void>
    {
        private final Metadata metadata;
        private final Session session;

        public Visitor(Metadata metadata, Session session)
        {
            this.metadata = metadata;
            this.session = session;
        }

        @Override
        public Expression rewriteFunctionCall(FunctionCall node, Void context, ExpressionTreeRewriter<Void> treeRewriter)
        {
            // Rewrite children first so nested occurrences are handled too.
            FunctionCall rewritten = treeRewriter.defaultRewrite(node, context);
            // Match: outer call is array_distinct and its single argument is
            // itself a function call.
            if (metadata.decodeFunction(rewritten.getName()).getSignature().getName().equals(ArrayDistinctFunction.NAME) &&
                    getOnlyElement(rewritten.getArguments()) instanceof FunctionCall) {
                Expression expression = getOnlyElement(rewritten.getArguments());
                FunctionCall functionCall = (FunctionCall) expression;
                ResolvedFunction resolvedFunction = metadata.decodeFunction(functionCall.getName());
                // Match: the inner call is array_sort (1-arg or 2-arg form).
                if (resolvedFunction.getSignature().getName().equals(ArraySortFunction.NAME)) {
                    List<Expression> arraySortArguments = functionCall.getArguments();
                    List<Type> arraySortArgumentsTypes = resolvedFunction.getSignature().getArgumentTypes();
                    // Build array_distinct over array_sort's array argument...
                    FunctionCall arrayDistinctCall = FunctionCallBuilder.resolve(session, metadata)
                            .setName(QualifiedName.of(ArrayDistinctFunction.NAME))
                            .setArguments(
                                    ImmutableList.of(arraySortArgumentsTypes.get(0)),
                                    ImmutableList.of(arraySortArguments.get(0)))
                            .build();
                    // ...and wrap it in array_sort, preserving the optional
                    // comparator (second argument) when present.
                    FunctionCallBuilder arraySortCallBuilder = FunctionCallBuilder.resolve(session, metadata)
                            .setName(QualifiedName.of(ArraySortFunction.NAME))
                            .addArgument(arraySortArgumentsTypes.get(0), arrayDistinctCall);
                    if (arraySortArguments.size() == 2) {
                        arraySortCallBuilder.addArgument(arraySortArgumentsTypes.get(1), arraySortArguments.get(1));
                    }
                    return arraySortCallBuilder.build();
                }
            }
            return rewritten;
        }
    }
}
1,877
619
/*
 * Author: <NAME> <<EMAIL>>
 * Copyright (c) 2016 Intel Corporation.
 *
 * This program and the accompanying materials are made available under the
 * terms of the The MIT License which is available at
 * https://opensource.org/licenses/MIT.
 *
 * SPDX-License-Identifier: MIT
 */

import upm_bh1750.BH1750;

/**
 * UPM example: polls a BH1750 ambient-light sensor once per second and
 * prints the measured illuminance in lux.  Runs until the process is killed.
 */
public class BH1750_Example
{
    public static void main(String[] args) throws InterruptedException
    {
        // ! [Interesting]   <-- doxygen snippet marker, keep as-is
        System.out.println("Initializing...");

        // Instantiate a BH1750 sensor using defaults (I2C bus (0), using
        // the default I2C address (0x23), and setting the mode to highest
        // resolution, lowest power mode).
        BH1750 sensor = new BH1750();

        // Every second, sample the BH1750 and output the measured lux
        // value
        while (true)
        {
            System.out.println("Detected Light Level (lux): " + sensor.getLux());
            Thread.sleep(1000);
        }
        // ! [Interesting]
    }
}
423
530
/* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at http://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package org.openjdk.jmc.ui.accessibility;

import org.eclipse.swt.accessibility.AccessibleAdapter;
import org.eclipse.swt.accessibility.AccessibleEvent;
import org.eclipse.swt.accessibility.AccessibleListener;

/**
 * Class that can be used to give information for an accessibility client.
 * All properties are optional; a getter callback only fills in the event
 * result when the corresponding property has been set.
 */
public class MCAccessibleListener extends AccessibleAdapter {
	private String m_name;
	private String m_description;
	private String m_help;
	private String m_componentType;

	/**
	 * Creates an {@link AccessibleListener} with a name and a help text.
	 *
	 * @param name
	 *            the name of the control. E.g. "Table"
	 * @param help
	 *            the help text, typically the tooltip text
	 * @return a listener reporting the given name and help text
	 */
	public static AccessibleListener createNameHelp(String name, String help) {
		MCAccessibleListener accessibleListener = new MCAccessibleListener();
		accessibleListener.setName(name);
		accessibleListener.setHelp(help);
		return accessibleListener;
	}

	/**
	 * Sets the name. E.g. "Table", "ExpandableComposite".
	 *
	 * @param name
	 *            of the control
	 */
	final public void setName(String name) {
		m_name = name;
	}

	/**
	 * Sets the description. E.g. "A graph showing heap usage."
	 *
	 * @param description
	 *            the description of the control
	 */
	final public void setDescription(String description) {
		m_description = description;
	}

	/**
	 * Set the help text. Typically the tooltip text.
	 *
	 * @param help
	 *            the help text
	 */
	final public void setHelp(String help) {
		m_help = help;
	}

	/* See {@link AccessibleAdapter#getDescription(AccessibleEvent)} */
	@Override
	final public void getDescription(AccessibleEvent e) {
		if (m_description != null) {
			e.result = m_description;
		}
	}

	/* See {@link AccessibleAdapter#getHelp(AccessibleEvent)} */
	@Override
	final public void getHelp(AccessibleEvent e) {
		if (m_help != null) {
			e.result = m_help;
		}
	}

	/**
	 * Sets what kind of type the control is. This is typically read after the name.
	 */
	public void setComponentType(String componentType) {
		m_componentType = componentType;
	}

	/* See {@link AccessibleAdapter#getName(AccessibleEvent)};
	 * appends the component type, when set, after the name. */
	@Override
	final public void getName(AccessibleEvent e) {
		if (m_name != null) {
			e.result = m_name;
			if (m_componentType != null) {
				e.result += ' ' + m_componentType;
			}
		}
	}

	/**
	 * Returns the name this listener will use.
	 *
	 * @return the name, or null if not set
	 */
	public String getName() {
		return m_name;
	}
}
1,337
906
# <reponame>xiejiachen/CDial-GPT<filename>od/utils/data_utils.py
"""Small I/O helpers for pickle, text and JSON files (UTF-8 throughout)."""
import pickle
import json


def load_pkl(path):
    """Load and return a pickled object from ``path``."""
    with open(path, 'rb') as f:
        data = pickle.load(f)
    return data


def load_txt(path):
    """Read a UTF-8 text file and return its non-blank lines, stripped.

    Fix: the previous filter ``if len(i) > 0`` tested the raw line, which
    always contains at least its trailing newline, so blank lines were
    returned as empty strings instead of being skipped as evidently
    intended.  The filter now tests the stripped line.
    """
    with open(path, encoding='UTF-8', errors='ignore') as f:
        data = [line.strip() for line in f if line.strip()]
    return data


def save_txt(data, path):
    """Write the string ``data`` to ``path`` as UTF-8 text."""
    with open(path, 'w', encoding='UTF-8') as f:
        f.write(data)


def load_json(path):
    """Load and return a JSON document from ``path``."""
    # 'UTF_8' (underscore) is an accepted codec alias; use the canonical
    # spelling for consistency with the other helpers (same codec).
    with open(path, 'r', encoding='UTF-8') as f:
        return json.load(f)


def save_json(data, path, indent=0):
    """Serialize ``data`` to ``path`` as JSON, keeping non-ASCII literal."""
    with open(path, 'w', encoding='UTF-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=indent)
308
841
/*
 * Copyright 2021 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jbpm.bpmn2.xml;

import java.util.HashMap;
import java.util.Map;

import org.drools.core.xml.BaseAbstractHandler;
import org.drools.core.xml.ExtensibleXmlParser;
import org.drools.core.xml.Handler;
import org.jbpm.bpmn2.core.CorrelationSubscription;
import org.jbpm.bpmn2.core.Expression;
import org.jbpm.ruleflow.core.RuleFlowProcess;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;

/**
 * SAX handler for the BPMN2 {@code <correlationSubscription>} element.
 * On start it registers a {@link CorrelationSubscription} on the enclosing
 * {@link RuleFlowProcess}; on end it parses the nested
 * {@code <correlationPropertyBinding>}/{@code <dataPath>} children into
 * property expressions.
 */
public class CorrelationSubscriptionHandler extends BaseAbstractHandler implements Handler {

    /**
     * Builds the subscription from the element attributes (id, name,
     * correlationKeyRef) and registers it on the parent process.
     */
    @Override
    public Object start(String uri, String localName, Attributes attrs, ExtensibleXmlParser parser) throws SAXException {
        parser.startElementBuilder( localName, attrs );

        String correlationSubscriptionPropertyId = attrs.getValue("id");
        String correlationSubscriptionPropertyName = attrs.getValue("name");
        String correlationSubscriptionRef = attrs.getValue("correlationKeyRef");

        CorrelationSubscription correlationSubscription = new CorrelationSubscription();
        correlationSubscription.setId(correlationSubscriptionPropertyId);
        correlationSubscription.setName(correlationSubscriptionPropertyName);
        correlationSubscription.setCorrelationKeyRef(correlationSubscriptionRef);

        RuleFlowProcess process = (RuleFlowProcess) parser.getParent();
        // NOTE(review): "correlationSuscription" (missing 'b') is the spelling
        // of the external HandlerUtil API -- cannot be fixed here alone.
        HandlerUtil.correlationSuscription(process).put(correlationSubscriptionPropertyId, correlationSubscription);
        return correlationSubscription;
    }

    /**
     * Completes the element: collects all property bindings declared as
     * children and attaches them to the subscription created in start().
     */
    @Override
    public Object end(String uri, String localName, ExtensibleXmlParser parser) throws SAXException {
        Element element = parser.endElementBuilder();
        CorrelationSubscription correlationSubscription = (CorrelationSubscription) parser.getCurrent();
        correlationSubscription.getPropertyExpressions().putAll(buildPropertyProcessBindings(element.getChildNodes(), parser));
        return null;
    }

    /**
     * Maps each {@code <correlationPropertyBinding>} child to its binding
     * expression, keyed by the {@code correlationPropertyRef} attribute.
     */
    private Map<String, Expression> buildPropertyProcessBindings(NodeList childNodes, ExtensibleXmlParser parser) {
        Map<String, Expression> correlationKeys = new HashMap<>();
        for(int i = 0; i < childNodes.getLength(); i++) {
            Node node = childNodes.item(i);
            if("correlationPropertyBinding".equals(node.getNodeName())) {
                Element elementBinding = (Element) node;
                correlationKeys.put(elementBinding.getAttribute("correlationPropertyRef"),
                        buildBindingExpression(elementBinding.getChildNodes(), parser));
            }
        }
        return correlationKeys;
    }

    /**
     * Extracts the first {@code <dataPath>} child as an {@link Expression};
     * the outcome type is resolved through the item definitions already
     * collected by the parser.  Throws if no {@code <dataPath>} is present.
     */
    private Expression buildBindingExpression(NodeList childNodes, ExtensibleXmlParser parser) {
        for(int i = 0; i < childNodes.getLength(); i++) {
            Node node = childNodes.item(i);
            if("dataPath".equals(node.getNodeName())) {
                Element expressionElement = (Element) node;
                Expression expression = new Expression();
                expression.setId(expressionElement.getAttribute("id"));
                expression.setLang(expressionElement.getAttribute("language"));
                expression.setScript(expressionElement.getTextContent());
                expression.setOutcomeType(HandlerUtil.definitions(parser).get(expressionElement.getAttribute("evaluatesToTypeRef")).getStructureRef());
                return expression;
            }
        }
        throw new RuntimeException("message Path not found for correlation property " + parser.getCurrent());
    }

    @Override
    public Class<?> generateNodeFor() {
        return CorrelationSubscription.class;
    }
}
1,436
3,212
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.toolkit.cli.impl.command.session; import org.apache.commons.cli.CommandLine; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.toolkit.cli.api.CommandException; import org.apache.nifi.toolkit.cli.api.SessionException; import org.apache.nifi.toolkit.cli.impl.command.AbstractCommand; import org.apache.nifi.toolkit.cli.impl.result.VoidResult; /** * Sets a variable in the session. */ public class SetVariable extends AbstractCommand<VoidResult> { public static final String NAME = "set"; public SetVariable() { super(NAME, VoidResult.class); } @Override public String getDescription() { return "Sets the given variable in the session. 
" + "Use the 'keys' command to show the variable names that are supported."; } @Override public VoidResult execute(final CommandLine commandLine) throws CommandException { final String[] args = commandLine.getArgs(); if (args == null || args.length < 2 || StringUtils.isBlank(args[0]) || StringUtils.isBlank(args[1])) { throw new CommandException("Incorrect number of arguments, should be: <var> <value>"); } try { getContext().getSession().set(args[0], args[1]); return VoidResult.getInstance(); } catch (SessionException se) { throw new CommandException(se.getMessage(), se); } } }
726
1,665
"""
Summarize Module
"""
import warnings
from itertools import chain

import numpy as np
import pandas as pd


def summarize_el(dataframe, mask, prefix):
    """
    Compute a summarized matrix: keep, per row, only the cells selected by
    ``mask`` and left-align them into numbered columns.

    Parameters
    ----------
    dataframe: pd.DataFrame
        Matrix containing contributions, label or feature names that will be summarized
    mask: pd.DataFrame
        Boolean mask (same shape as ``dataframe``) to apply during the summary step
    prefix: str
        Prefix used for the generated column names (``prefix1``, ``prefix2``, ...)

    Returns
    -------
    pd.DataFrame
        Result of the summarize step, padded with NaN to a rectangular shape.
    """
    # Cells where the mask is False become NaN, then are dropped row by row.
    matrix = dataframe.where(mask.to_numpy()).values.tolist()
    summarized_matrix = [[x for x in row if str(x) != 'nan'] for row in matrix]
    # Pad every row to the longest one so a rectangular DataFrame can be built.
    # default=0 keeps this safe when the input has no rows (previously a ValueError).
    max_length = max((len(row) for row in summarized_matrix), default=0)
    for elem in summarized_matrix:
        elem.extend([np.nan] * (max_length - len(elem)))
    col_list = [prefix + str(x + 1) for x in range(max_length)]
    df_summarized_matrix = pd.DataFrame(summarized_matrix,
                                        index=list(dataframe.index),
                                        columns=col_list,
                                        dtype=object)
    return df_summarized_matrix


def compute_features_import(dataframe):
    """
    Compute relative feature importance: the sum of absolute contribution
    values per feature, normalized so all importances sum to 1.

    Parameters
    ----------
    dataframe: pd.DataFrame
        Matrix containing all contributions.

    Returns
    -------
    pd.Series
        Feature importance, one row per feature, indexed by
        ``dataframe.columns``, sorted ascending.
    """
    feat_imp = dataframe.abs().sum().sort_values(ascending=True)
    tot = feat_imp.sum()
    return feat_imp / tot


def summarize(s_contrib, var_dict, x_sorted, mask, columns_dict, features_dict):
    """
    Compute the summarized contributions of features.

    Parameters
    ----------
    s_contrib: pd.DataFrame
        Matrix containing contributions that will be summarized
    var_dict: pd.DataFrame
        Matrix of feature indices that will be summarized
    x_sorted: pd.DataFrame
        Matrix containing the value of each feature
    mask: pd.DataFrame
        Mask to apply during the summary step
    columns_dict:
        Dict of column names, matches column num with column name
    features_dict:
        Dict of column labels, matches column name with column label

    Returns
    -------
    pd.DataFrame
        One ``feature_i`` / ``value_i`` / ``contribution_i`` column triple
        per kept feature, interleaved in that order.
    """
    contrib_sum = summarize_el(s_contrib, mask, 'contribution_')
    # Feature indices are mapped to human-readable labels; NaN padding cells
    # are passed through unchanged.
    var_dict_sum = summarize_el(var_dict, mask, 'feature_').applymap(
        lambda x: features_dict[columns_dict[x]] if not np.isnan(x) else x)
    x_sorted_sum = summarize_el(x_sorted, mask, 'value_')
    # Concatenate, then interleave columns as feature/value/contribution triples.
    summary = pd.concat([contrib_sum, var_dict_sum, x_sorted_sum], axis=1)
    # chain.from_iterable replaces the private pandas.core.common.flatten helper.
    ordered_columns = list(chain.from_iterable(
        zip(var_dict_sum.columns, x_sorted_sum.columns, contrib_sum.columns)))
    summary = summary[ordered_columns]
    return summary


def group_contributions(contributions, features_groups):
    """
    Regroup contributions according to the ``features_groups`` parameter.

    Parameters
    ----------
    contributions : pd.DataFrame
        Contributions of each unique feature.
    features_groups : dict
        Maps each group name to the list of features it aggregates.

    Returns
    -------
    contributions : pd.DataFrame
        Contributions with grouped features: one summed column per group,
        member columns dropped.
    """
    new_contributions = contributions.copy()
    # Each group column is the sum of its member features' contributions.
    for group_name in features_groups.keys():
        new_contributions[group_name] = new_contributions[features_groups[group_name]].sum(axis=1)
    # Drop the individual features that are now represented by a group.
    for features_grouped in features_groups.values():
        new_contributions = new_contributions.drop(features_grouped, axis=1)
    return new_contributions


def project_feature_values_1d(feature_values, col, x_pred, x_init, preprocessing, features_dict, how='tsne'):
    """
    Project feature values of a group of features in 1 dimension.

    If ``feature_values`` contains categorical features, ``preprocessing`` is
    used to substitute the corresponding encoded variables from ``x_init``.

    Parameters
    ----------
    feature_values : pd.DataFrame
        DataFrame that contains the feature values.
    col : str
        Name of the group of features.
    x_pred : pd.DataFrame
        Pandas dataframe before preprocessing transformations.
    x_init : pd.DataFrame
        Pandas dataframe after preprocessing transformations.
    preprocessing : category_encoders, ColumnTransformer, list, dict, optional
        Preprocessing used to encode categorical variables.
    features_dict: dict
        Dictionary mapping technical feature names to domain names.
    how : str
        Method used to compute groups of features values in one column
        ('tsne' or 'dict_of_values').

    Returns
    -------
    feature_values : pd.Series
        Series containing the projected feature values.
    """
    # Imported lazily: this pulls in heavy optional dependencies, mirroring the
    # existing local `phik` import in compute_corr.
    from shapash.utils.transform import get_features_transform_mapping

    # Replace categorical features with their encoded representation so the
    # projection operates on numeric data only.
    encoding_mapping = get_features_transform_mapping(x_pred, x_init, preprocessing)
    col_names_in_xinit = []
    for c in feature_values.columns:
        col_names_in_xinit.extend(encoding_mapping.get(c, [c]))
    feature_values = x_init.loc[feature_values.index, col_names_in_xinit]

    if how == 'tsne':
        try:
            from sklearn.manifold import TSNE  # lazy: optional dependency
            feature_values_proj_1d = TSNE(n_components=1, random_state=1).fit_transform(feature_values)
            feature_values = pd.Series(feature_values_proj_1d[:, 0], name=col, index=feature_values.index)
        except Exception as e:
            # Best-effort fallback: keep the first column if the projection fails.
            warnings.warn(f'Could not project group features values : {e}', UserWarning)
            feature_values = pd.Series(feature_values.iloc[:, 0], name=col, index=feature_values.index)
    elif how == 'dict_of_values':
        feature_values.columns = [features_dict.get(x, x) for x in feature_values.columns]
        feature_values = pd.Series(feature_values.apply(lambda x: x.to_dict(), axis=1),
                                   name=col, index=feature_values.index)
    else:
        raise NotImplementedError(f'Unknown method : {how}')
    return feature_values


def compute_corr(df, compute_method):
    """
    Compute correlations between features of a given dataframe.

    Parameters
    ----------
    df : pd.DataFrame
        DataFrame used to compute correlations.
    compute_method : str
        Method used to compute correlations ('phik' or 'pearson').

    Returns
    -------
    pd.DataFrame
    """
    # NOTE(review): this silences warnings process-wide, not just for this
    # call (kept for behavioral compatibility — the intent is to hide
    # "not enough values to compute correlation" warnings).
    warnings.filterwarnings("ignore")
    if compute_method == 'phik':
        from phik import phik_matrix
        return phik_matrix(df, verbose=False)
    elif compute_method == 'pearson':
        return df.corr()
    else:
        raise NotImplementedError(f'Not implemented correlation method : {compute_method}')


def create_grouped_features_values(
        x_pred,
        x_init,
        preprocessing,
        features_groups,
        features_dict,
        how='tsne'
) -> pd.DataFrame:
    """
    Replace each declared group of features by a single projected column.

    Parameters
    ----------
    x_pred : pd.DataFrame
        x_init dataset with inverse transformation and eventual
        postprocessing modifications.
    x_init : pd.DataFrame
        Preprocessed dataset used by the model to perform the prediction.
    preprocessing : category_encoders, ColumnTransformer, list, dict, optional
        Preprocessing used to encode categorical variables.
    features_groups : dict
        Group names and their corresponding lists of features.
    features_dict: dict
        Dictionary mapping technical feature names to domain names.
    how : str
        Method used to compute groups of features values in one column.

    Returns
    -------
    df : pd.DataFrame
        Feature values with one projected column per group.
    """
    df = x_pred.copy()
    for group in features_groups.keys():
        if not isinstance(features_groups[group], list):
            raise ValueError(f'features_groups[{group}] should be a list of features')
        features_values = x_pred[features_groups[group]]
        df[group] = project_feature_values_1d(
            features_values,
            col=group,
            x_pred=x_pred,
            x_init=x_init,
            preprocessing=preprocessing,
            features_dict=features_dict,
            how=how
        )
        # Drop the member columns now represented by the group column.
        for f in features_groups[group]:
            if f in df.columns:
                df.drop(f, axis=1, inplace=True)
    return df
3,442
1,478
<reponame>greytao/incubator-seatunnel
/*
 * Copyright (C) 2011-2012 Typesafe Inc. <http://typesafe.com>
 */
package org.apache.seatunnel.shade.com.typesafe.config.impl;

import org.apache.seatunnel.shade.com.typesafe.config.ConfigException;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigObject;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigOrigin;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigRenderOptions;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigValue;

import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * The standard map-backed {@link ConfigObject} implementation of the shaded
 * Typesafe Config library. Instances are effectively immutable: every
 * "mutating" operation returns a new object sharing unchanged children.
 *
 * <p>NOTE(review): the control flow in several methods (do/while loops over
 * iterators, {@code varN} names) looks decompiler-generated; it is documented
 * here but deliberately left untouched.
 */
final class SimpleConfigObject extends AbstractConfigObject implements Serializable {
    private static final long serialVersionUID = 2L;
    // Child values keyed by (single, unquoted) key. Never null.
    private final Map<String, AbstractConfigValue> value;
    // Cached: true when no child is in UNRESOLVED state.
    private final boolean resolved;
    // When true, withFallback() merges are no-ops for this object.
    private final boolean ignoresFallbacks;
    private static final SimpleConfigObject EMPTY_INSTANCE = empty(SimpleConfigOrigin.newSimple("empty config"));
    // Multiplier/seed for mapHash(); see hashCode().
    private static final int HASH_CODE = 41;

    SimpleConfigObject(ConfigOrigin origin, Map<String, AbstractConfigValue> value,
                       ResolveStatus status, boolean ignoresFallbacks) {
        super(origin);
        if (value == null) {
            throw new ConfigException.BugOrBroken("creating config object with null map");
        } else {
            this.value = value;
            this.resolved = status == ResolveStatus.RESOLVED;
            this.ignoresFallbacks = ignoresFallbacks;
            // Sanity check: the caller-supplied status must match the children.
            if (status != ResolveStatus.fromValues(value.values())) {
                throw new ConfigException.BugOrBroken("Wrong resolved status on " + this);
            }
        }
    }

    SimpleConfigObject(ConfigOrigin origin, Map<String, AbstractConfigValue> value) {
        this(origin, value, ResolveStatus.fromValues(value.values()), false);
    }

    /** Returns a copy containing only the given top-level key (may be empty). */
    public SimpleConfigObject withOnlyKey(String key) {
        return this.withOnlyPath(Path.newKey(key));
    }

    /** Returns a copy without the given top-level key. */
    public SimpleConfigObject withoutKey(String key) {
        return this.withoutPath(Path.newKey(key));
    }

    /** Like withOnlyPath but returns null (instead of an empty object) when the path is absent. */
    protected SimpleConfigObject withOnlyPathOrNull(Path path) {
        String key = path.first();
        Path next = path.remainder();
        AbstractConfigValue v = this.value.get(key);
        if (next != null) {
            if (v instanceof AbstractConfigObject) {
                v = ((AbstractConfigObject) v).withOnlyPathOrNull(next);
            } else {
                // Path descends into a non-object: nothing to keep.
                v = null;
            }
        }
        return v == null ? null : new SimpleConfigObject(this.origin(),
                Collections.singletonMap(key, v), v.resolveStatus(), this.ignoresFallbacks);
    }

    /** Returns a copy containing only the subtree at {@code path}; empty object if absent. */
    SimpleConfigObject withOnlyPath(Path path) {
        SimpleConfigObject o = this.withOnlyPathOrNull(path);
        return o == null ? new SimpleConfigObject(this.origin(), Collections.emptyMap(),
                ResolveStatus.RESOLVED, this.ignoresFallbacks) : o;
    }

    /** Returns a copy with the subtree at {@code path} removed (this object if absent). */
    SimpleConfigObject withoutPath(Path path) {
        String key = path.first();
        Path next = path.remainder();
        AbstractConfigValue v = this.value.get(key);
        HashMap<String, AbstractConfigValue> smaller;
        if (next != null && v instanceof AbstractConfigObject) {
            // Recurse into the child object and replace it.
            v = ((AbstractConfigObject) v).withoutPath(next);
            smaller = new HashMap<>(this.value);
            smaller.put(key, v);
            return new SimpleConfigObject(this.origin(), smaller,
                    ResolveStatus.fromValues(smaller.values()), this.ignoresFallbacks);
        } else if (next == null && v != null) {
            // Removing a direct child: copy everything except the key.
            smaller = new HashMap<>(this.value.size() - 1);
            for (Entry<String, AbstractConfigValue> stringAbstractConfigValueEntry : this.value.entrySet()) {
                if (!stringAbstractConfigValueEntry.getKey().equals(key)) {
                    smaller.put(stringAbstractConfigValueEntry.getKey(), stringAbstractConfigValueEntry.getValue());
                }
            }
            return new SimpleConfigObject(this.origin(), smaller,
                    ResolveStatus.fromValues(smaller.values()), this.ignoresFallbacks);
        } else {
            // Path not present (or descends into a non-object): no change.
            return this;
        }
    }

    /** Returns a copy with {@code key} bound to {@code v} (v must not be null). */
    public SimpleConfigObject withValue(String key, ConfigValue v) {
        if (v == null) {
            throw new ConfigException.BugOrBroken("Trying to store null ConfigValue in a ConfigObject");
        } else {
            Map newMap;
            if (this.value.isEmpty()) {
                newMap = Collections.singletonMap(key, (AbstractConfigValue) v);
            } else {
                newMap = new HashMap<>(this.value);
                newMap.put(key, v);
            }
            return new SimpleConfigObject(this.origin(), newMap,
                    ResolveStatus.fromValues(newMap.values()), this.ignoresFallbacks);
        }
    }

    /** Returns a copy with the value at {@code path} set, creating intermediate objects as needed. */
    SimpleConfigObject withValue(Path path, ConfigValue v) {
        String key = path.first();
        Path next = path.remainder();
        if (next == null) {
            return this.withValue(key, v);
        } else {
            AbstractConfigValue child = this.value.get(key);
            if (child instanceof AbstractConfigObject) {
                return this.withValue(key, ((AbstractConfigObject) child).withValue(next, v));
            } else {
                // Existing child is not an object (or missing): build the nested
                // structure from the value itself and overwrite.
                SimpleConfig subtree = ((AbstractConfigValue) v).atPath(
                        SimpleConfigOrigin.newSimple("withValue(" + next.render() + ")"), next);
                return this.withValue(key, subtree.root());
            }
        }
    }

    protected AbstractConfigValue attemptPeekWithPartialResolve(String key) {
        return this.value.get(key);
    }

    // Copy with the same children but different status/origin/fallback flag.
    private SimpleConfigObject newCopy(ResolveStatus newStatus, ConfigOrigin newOrigin, boolean newIgnoresFallbacks) {
        return new SimpleConfigObject(newOrigin, this.value, newStatus, newIgnoresFallbacks);
    }

    protected SimpleConfigObject newCopy(ResolveStatus newStatus, ConfigOrigin newOrigin) {
        return this.newCopy(newStatus, newOrigin, this.ignoresFallbacks);
    }

    protected SimpleConfigObject withFallbacksIgnored() {
        return this.ignoresFallbacks ? this : this.newCopy(this.resolveStatus(), this.origin(), true);
    }

    ResolveStatus resolveStatus() {
        return ResolveStatus.fromBoolean(this.resolved);
    }

    /**
     * Returns a copy where the (identity-compared) child is replaced, or
     * removed when {@code replacement} is null. Throws if the child is absent.
     */
    public SimpleConfigObject replaceChild(AbstractConfigValue child, AbstractConfigValue replacement) {
        Map<String, AbstractConfigValue> newChildren = new HashMap<>(this.value);
        Iterator<Entry<String, AbstractConfigValue>> var4 = newChildren.entrySet().iterator();
        Entry<String, AbstractConfigValue> old;
        // Linear scan for the entry whose value is identical (==) to `child`.
        do {
            if (!var4.hasNext()) {
                throw new ConfigException.BugOrBroken("SimpleConfigObject.replaceChild did not find " + child + " in " + this);
            }
            old = var4.next();
        } while (old.getValue() != child);
        if (replacement != null) {
            old.setValue(replacement);
        } else {
            newChildren.remove(old.getKey());
        }
        return new SimpleConfigObject(this.origin(), newChildren,
                ResolveStatus.fromValues(newChildren.values()), this.ignoresFallbacks);
    }

    /**
     * True if {@code descendant} is (identity-compared) a direct child, or a
     * descendant of any child Container. Direct children are checked first,
     * then the recursive pass — the nested do/while structure encodes exactly
     * those two sequential scans.
     */
    public boolean hasDescendant(AbstractConfigValue descendant) {
        Iterator<AbstractConfigValue> var2 = this.value.values().iterator();
        AbstractConfigValue child;
        do {
            if (!var2.hasNext()) {
                // Second pass: recurse into child containers.
                var2 = this.value.values().iterator();
                do {
                    if (!var2.hasNext()) {
                        return false;
                    }
                    child = var2.next();
                } while (!(child instanceof Container) || !((Container) child).hasDescendant(descendant));
                return true;
            }
            child = var2.next();
        } while (child != descendant);
        return true;
    }

    protected boolean ignoresFallbacks() {
        return this.ignoresFallbacks;
    }

    /** Recursively converts this object to a plain {@code Map<String, Object>}. */
    public Map<String, Object> unwrapped() {
        Map<String, Object> m = new HashMap<>();
        for (Entry<String, AbstractConfigValue> stringAbstractConfigValueEntry : this.value.entrySet()) {
            m.put(stringAbstractConfigValueEntry.getKey(), stringAbstractConfigValueEntry.getValue().unwrapped());
        }
        return m;
    }

    /**
     * Key-by-key merge with a fallback object: this object's values win;
     * values present in both are merged via {@code withFallback}.
     */
    protected SimpleConfigObject mergedWithObject(AbstractConfigObject abstractFallback) {
        this.requireNotIgnoringFallbacks();
        if (!(abstractFallback instanceof SimpleConfigObject)) {
            throw new ConfigException.BugOrBroken("should not be reached (merging non-SimpleConfigObject)");
        } else {
            SimpleConfigObject fallback = (SimpleConfigObject) abstractFallback;
            boolean changed = false;
            boolean allResolved = true;
            Map<String, AbstractConfigValue> merged = new HashMap<>();
            Set<String> allKeys = new HashSet<>();
            allKeys.addAll(this.keySet());
            allKeys.addAll(fallback.keySet());
            for (String key : allKeys) {
                AbstractConfigValue first = this.value.get(key);
                AbstractConfigValue second = fallback.value.get(key);
                AbstractConfigValue kept;
                if (first == null) {
                    kept = second;
                } else if (second == null) {
                    kept = first;
                } else {
                    kept = first.withFallback(second);
                }
                merged.put(key, kept);
                // Identity change means the merge actually produced something new.
                if (first != kept) {
                    changed = true;
                }
                if (kept.resolveStatus() == ResolveStatus.UNRESOLVED) {
                    allResolved = false;
                }
            }
            ResolveStatus newResolveStatus = ResolveStatus.fromBoolean(allResolved);
            boolean newIgnoresFallbacks = fallback.ignoresFallbacks();
            if (changed) {
                return new SimpleConfigObject(mergeOrigins(this, fallback), merged, newResolveStatus, newIgnoresFallbacks);
            } else if (newResolveStatus == this.resolveStatus() && newIgnoresFallbacks == this.ignoresFallbacks()) {
                // Nothing changed at all: reuse this instance.
                return this;
            } else {
                return this.newCopy(newResolveStatus, this.origin(), newIgnoresFallbacks);
            }
        }
    }

    // Wrapper turning checked exceptions from modifyMayThrow into BugOrBroken.
    private SimpleConfigObject modify(NoExceptionsModifier modifier) {
        try {
            return this.modifyMayThrow(modifier);
        } catch (RuntimeException var3) {
            throw var3;
        } catch (Exception var4) {
            throw new ConfigException.BugOrBroken("unexpected checked exception", var4);
        }
    }

    /**
     * Applies {@code modifier} to every child; returns this object unchanged
     * when no child changed. A null result from the modifier removes the key.
     */
    private SimpleConfigObject modifyMayThrow(Modifier modifier) throws Exception {
        // First pass: collect only the children that actually changed.
        Map<String, AbstractConfigValue> changes = null;
        for (String k : this.keySet()) {
            AbstractConfigValue v = this.value.get(k);
            AbstractConfigValue modified = modifier.modifyChildMayThrow(k, v);
            if (modified != v) {
                if (changes == null) {
                    changes = new HashMap<>();
                }
                changes.put(k, modified);
            }
        }
        if (changes == null) {
            return this;
        } else {
            // Second pass: rebuild the map, tracking the aggregate resolve status.
            Map<String, AbstractConfigValue> modified = new HashMap<>();
            boolean sawUnresolved = false;
            for (String k : this.keySet()) {
                AbstractConfigValue newValue;
                if (changes.containsKey(k)) {
                    newValue = changes.get(k);
                    if (newValue != null) {
                        modified.put(k, newValue);
                        if (newValue.resolveStatus() == ResolveStatus.UNRESOLVED) {
                            sawUnresolved = true;
                        }
                    }
                    // null newValue: key intentionally dropped.
                } else {
                    newValue = this.value.get(k);
                    modified.put(k, newValue);
                    if (newValue.resolveStatus() == ResolveStatus.UNRESOLVED) {
                        sawUnresolved = true;
                    }
                }
            }
            return new SimpleConfigObject(this.origin(), modified,
                    sawUnresolved ? ResolveStatus.UNRESOLVED : ResolveStatus.RESOLVED,
                    this.ignoresFallbacks());
        }
    }

    /** Resolves substitutions in all children, threading the ResolveContext through. */
    ResolveResult<? extends AbstractConfigObject> resolveSubstitutions(ResolveContext context, ResolveSource source) throws NotPossibleToResolve {
        if (this.resolveStatus() == ResolveStatus.RESOLVED) {
            return ResolveResult.make(context, this);
        } else {
            ResolveSource sourceWithParent = source.pushParent(this);
            try {
                SimpleConfigObject.ResolveModifier modifier = new SimpleConfigObject.ResolveModifier(context, sourceWithParent);
                AbstractConfigValue value = this.modifyMayThrow(modifier);
                // The modifier carries the (possibly updated) context forward.
                return ResolveResult.make(modifier.context, value).asObjectResult();
            } catch (NotPossibleToResolve | RuntimeException var6) {
                throw var6;
            } catch (Exception var8) {
                throw new ConfigException.BugOrBroken("unexpected checked exception", var8);
            }
        }
    }

    /** Re-bases substitution paths in all children relative to {@code prefix}. */
    SimpleConfigObject relativized(final Path prefix) {
        return this.modify(new NoExceptionsModifier() {
            public AbstractConfigValue modifyChild(String key, AbstractConfigValue v) {
                return v.relativized(prefix);
            }
        });
    }

    /**
     * Renders this object as HOCON/JSON text into {@code sb}, honoring the
     * formatting, comment, and origin-comment options.
     */
    protected void render(StringBuilder sb, int indent, boolean atRoot, ConfigRenderOptions options) {
        if (this.isEmpty()) {
            sb.append("{}");
        } else {
            // HOCON allows omitting braces at the root; JSON never does.
            boolean outerBraces = options.getJson() || !atRoot;
            int innerIndent;
            if (outerBraces) {
                innerIndent = indent + 1;
                sb.append("{");
                if (options.getFormatted()) {
                    sb.append('\n');
                }
            } else {
                innerIndent = indent;
            }
            // Number of trailing chars ("," and/or "\n") to strip after the loop.
            int separatorCount = 0;
            String[] keys = this.keySet().toArray(new String[0]);
            for (String k : keys) {
                AbstractConfigValue v = this.value.get(k);
                if (options.getOriginComments()) {
                    String[] lines = v.origin().description().split("\n");
                    for (String l : lines) {
                        indent(sb, indent + 1, options);
                        sb.append('#');
                        if (!l.isEmpty()) {
                            sb.append(' ');
                        }
                        sb.append(l);
                        sb.append("\n");
                    }
                }
                if (options.getComments()) {
                    for (String comment : v.origin().comments()) {
                        indent(sb, innerIndent, options);
                        sb.append("#");
                        if (!comment.startsWith(" ")) {
                            sb.append(' ');
                        }
                        sb.append(comment);
                        sb.append("\n");
                    }
                }
                indent(sb, innerIndent, options);
                v.render(sb, innerIndent, false, k, options);
                if (options.getFormatted()) {
                    if (options.getJson()) {
                        sb.append(",");
                        separatorCount = 2;
                    } else {
                        separatorCount = 1;
                    }
                    sb.append('\n');
                } else {
                    sb.append(",");
                    separatorCount = 1;
                }
            }
            // Drop the separator after the last entry.
            sb.setLength(sb.length() - separatorCount);
            if (outerBraces) {
                if (options.getFormatted()) {
                    sb.append('\n');
                    indent(sb, indent, options);
                }
                sb.append("}");
            }
        }
        if (atRoot && options.getFormatted()) {
            sb.append('\n');
        }
    }

    public AbstractConfigValue get(Object key) {
        return this.value.get(key);
    }

    // Deep equality over two config maps (used because `value` maps may differ
    // in concrete type from other ConfigObject implementations).
    private static boolean mapEquals(Map<String, ConfigValue> a, Map<String, ConfigValue> b) {
        if (a == b) {
            return true;
        } else {
            Set<String> aKeys = a.keySet();
            Set<String> bKeys = b.keySet();
            if (aKeys.equals(bKeys)) {
                Iterator<String> var4 = aKeys.iterator();
                String key;
                do {
                    if (!var4.hasNext()) {
                        return true;
                    }
                    key = var4.next();
                } while (a.get(key).equals(b.get(key)));
            }
            return false;
        }
    }

    // Order-independent hash: keys sorted, value hashes summed.
    @SuppressWarnings("magicnumber")
    private static int mapHash(Map<String, ConfigValue> m) {
        List<String> keys = new ArrayList<>(m.keySet());
        Collections.sort(keys);
        int valuesHash = 0;
        String k;
        for (Iterator<String> var3 = keys.iterator(); var3.hasNext(); valuesHash += m.get(k).hashCode()) {
            k = var3.next();
        }
        return HASH_CODE * (HASH_CODE + keys.hashCode()) + valuesHash;
    }

    protected boolean canEqual(Object other) {
        return other instanceof ConfigObject;
    }

    public boolean equals(Object other) {
        if (!(other instanceof ConfigObject)) {
            return false;
        } else {
            return this.canEqual(other) && mapEquals(this, (ConfigObject) other);
        }
    }

    public int hashCode() {
        return mapHash(this);
    }

    public boolean containsKey(Object key) {
        return this.value.containsKey(key);
    }

    public Set<String> keySet() {
        return this.value.keySet();
    }

    public boolean containsValue(Object v) {
        return this.value.containsValue(v);
    }

    /** Defensive copy: entries are immutable snapshots, detached from the backing map. */
    public Set<Entry<String, ConfigValue>> entrySet() {
        HashSet<Entry<String, ConfigValue>> entries = new HashSet<>();
        for (Entry<String, AbstractConfigValue> stringAbstractConfigValueEntry : this.value.entrySet()) {
            entries.add(new AbstractMap.SimpleImmutableEntry<>(stringAbstractConfigValueEntry.getKey(), stringAbstractConfigValueEntry.getValue()));
        }
        return entries;
    }

    public boolean isEmpty() {
        return this.value.isEmpty();
    }

    public int size() {
        return this.value.size();
    }

    public Collection<ConfigValue> values() {
        return new HashSet<>(this.value.values());
    }

    static SimpleConfigObject empty() {
        return EMPTY_INSTANCE;
    }

    static SimpleConfigObject empty(ConfigOrigin origin) {
        return origin == null ? empty() : new SimpleConfigObject(origin, Collections.emptyMap());
    }

    static SimpleConfigObject emptyMissing(ConfigOrigin baseOrigin) {
        return new SimpleConfigObject(SimpleConfigOrigin.newSimple(baseOrigin.description() + " (not found)"), Collections.emptyMap());
    }

    // Java serialization proxy (see SerializedConfigValue).
    private Object writeReplace() throws ObjectStreamException {
        return new SerializedConfigValue(this);
    }

    /**
     * Modifier used during substitution resolution. Stateful: it threads the
     * updated ResolveContext across children via the {@code context} field.
     */
    private static final class ResolveModifier implements Modifier {
        final Path originalRestrict;
        ResolveContext context;
        final ResolveSource source;

        ResolveModifier(ResolveContext context, ResolveSource source) {
            this.context = context;
            this.source = source;
            this.originalRestrict = context.restrictToChild();
        }

        public AbstractConfigValue modifyChildMayThrow(String key, AbstractConfigValue v) throws NotPossibleToResolve {
            if (this.context.isRestrictedToChild()) {
                // Only the restricted child (if any) is resolved; others pass through.
                if (key.equals(this.context.restrictToChild().first())) {
                    Path remainder = this.context.restrictToChild().remainder();
                    if (remainder != null) {
                        ResolveResult<? extends AbstractConfigValue> result = this.context.restrict(remainder).resolve(v, this.source);
                        this.context = result.context.unrestricted().restrict(this.originalRestrict);
                        return result.value;
                    } else {
                        return v;
                    }
                } else {
                    return v;
                }
            } else {
                // Unrestricted: resolve every child.
                ResolveResult<? extends AbstractConfigValue> result = this.context.unrestricted().resolve(v, this.source);
                this.context = result.context.unrestricted().restrict(this.originalRestrict);
                return result.value;
            }
        }
    }
}
9,579
778
<reponame>qiongqiong-wu/XPush
/*
 * Copyright (C) 2019 xuexiangjys(<EMAIL>)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.xuexiang.keeplive.whitelist.impl;

import android.app.Application;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.os.PowerManager;

import com.xuexiang.keeplive.KeepLive;
import com.xuexiang.keeplive.whitelist.IWhiteListProvider;
import com.xuexiang.keeplive.whitelist.IntentType;
import com.xuexiang.keeplive.whitelist.WhiteList;
import com.xuexiang.keeplive.whitelist.WhiteListIntentWrapper;

import java.util.ArrayList;
import java.util.List;

import static android.provider.Settings.ACTION_REQUEST_IGNORE_BATTERY_OPTIMIZATIONS;

/**
 * Default provider of the white-list navigation intents: builds the list of
 * vendor-specific settings screens where the user can whitelist the app from
 * battery optimization / auto-start restrictions.
 *
 * <p>NOTE(review): the component/action names below are hard-coded per OEM and
 * may not exist on a given device — callers are expected to handle intents
 * that cannot be resolved.
 *
 * @author xuexiang
 * @since 2019-09-02 21:44
 */
public class DefaultWhiteListProvider implements IWhiteListProvider {

    @Override
    public List<WhiteListIntentWrapper> getWhiteList(Application application) {
        List<WhiteListIntentWrapper> intentWrappers = new ArrayList<>();
        // Android 7.0+ Doze mode: request to ignore battery optimizations,
        // only when the app is not already exempted.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            PowerManager pm = (PowerManager) application.getSystemService(Context.POWER_SERVICE);
            boolean ignoringBatteryOptimizations = pm.isIgnoringBatteryOptimizations(application.getPackageName());
            if (!ignoringBatteryOptimizations) {
                Intent dozeIntent = new Intent(ACTION_REQUEST_IGNORE_BATTERY_OPTIMIZATIONS);
                dozeIntent.setData(Uri.parse("package:" + application.getPackageName()));
                intentWrappers.add(new WhiteListIntentWrapper(dozeIntent, IntentType.DOZE));
            }
        }
        // Huawei: auto-start management
        Intent huaweiIntent = new Intent();
        huaweiIntent.setAction("huawei.intent.action.HSM_BOOTAPP_MANAGER");
        intentWrappers.add(new WhiteListIntentWrapper(huaweiIntent, IntentType.HUAWEI));
        // Huawei: lock-screen cleanup (protected apps)
        Intent huaweiGodIntent = new Intent();
        huaweiGodIntent.setComponent(new ComponentName("com.huawei.systemmanager", "com.huawei.systemmanager.optimize.process.ProtectActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(huaweiGodIntent, IntentType.HUAWEI_GOD));
        // Xiaomi: auto-start management
        Intent xiaomiIntent = new Intent();
        xiaomiIntent.setAction("miui.intent.action.OP_AUTO_START");
        xiaomiIntent.addCategory(Intent.CATEGORY_DEFAULT);
        intentWrappers.add(new WhiteListIntentWrapper(xiaomiIntent, IntentType.XIAOMI));
        // Xiaomi: "invisible mode" (MIUI background power-saving restrictions)
        Intent xiaomiGodIntent = new Intent();
        xiaomiGodIntent.setComponent(new ComponentName("com.miui.powerkeeper", "com.miui.powerkeeper.ui.HiddenAppsConfigActivity"));
        xiaomiGodIntent.putExtra("package_name", application.getPackageName());
        xiaomiGodIntent.putExtra("package_label", WhiteList.getApplicationName(KeepLive.getApplication()));
        intentWrappers.add(new WhiteListIntentWrapper(xiaomiGodIntent, IntentType.XIAOMI_GOD));
        // Samsung 5.0/5.1: auto-run application manager
        Intent samsungLIntent = application.getPackageManager().getLaunchIntentForPackage("com.samsung.android.sm");
        if (samsungLIntent != null) {
            intentWrappers.add(new WhiteListIntentWrapper(samsungLIntent, IntentType.SAMSUNG_L));
        }
        // Samsung 6.0+: unmonitored applications manager
        Intent samsungMIntent = new Intent();
        samsungMIntent.setComponent(new ComponentName("com.samsung.android.sm_cn", "com.samsung.android.sm.ui.battery.BatteryActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(samsungMIntent, IntentType.SAMSUNG_M));
        // Meizu: auto-start management
        Intent meizuIntent = new Intent("com.meizu.safe.security.SHOW_APPSEC");
        meizuIntent.addCategory(Intent.CATEGORY_DEFAULT);
        meizuIntent.putExtra("packageName", application.getPackageName());
        intentWrappers.add(new WhiteListIntentWrapper(meizuIntent, IntentType.MEIZU));
        // Meizu: standby power management
        Intent meizuGodIntent = new Intent();
        meizuGodIntent.setComponent(new ComponentName("com.meizu.safe", "com.meizu.safe.powerui.PowerAppPermissionActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(meizuGodIntent, IntentType.MEIZU_GOD));
        // Oppo: auto-start management
        Intent oppoIntent = new Intent();
        oppoIntent.setComponent(new ComponentName("com.coloros.safecenter", "com.coloros.safecenter.permission.startup.StartupAppListActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(oppoIntent, IntentType.OPPO));
        // Oppo: auto-start management (older system versions)
        Intent oppoOldIntent = new Intent();
        oppoOldIntent.setComponent(new ComponentName("com.color.safecenter", "com.color.safecenter.permission.startup.StartupAppListActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(oppoOldIntent, IntentType.OPPO_OLD));
        // Vivo: high background power consumption
        Intent vivoGodIntent = new Intent();
        vivoGodIntent.setComponent(new ComponentName("com.vivo.abe", "com.vivo.applicationbehaviorengine.ui.ExcessivePowerManagerActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(vivoGodIntent, IntentType.VIVO_GOD));
        // Gionee: application auto-start
        Intent gioneeIntent = new Intent();
        gioneeIntent.setComponent(new ComponentName("com.gionee.softmanager", "com.gionee.softmanager.MainActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(gioneeIntent, IntentType.GIONEE));
        // LeEco (Letv): auto-start management
        Intent letvIntent = new Intent();
        letvIntent.setComponent(new ComponentName("com.letv.android.letvsafe", "com.letv.android.letvsafe.AutobootManageActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(letvIntent, IntentType.LETV));
        // LeEco (Letv): app protection
        Intent letvGodIntent = new Intent();
        letvGodIntent.setComponent(new ComponentName("com.letv.android.letvsafe", "com.letv.android.letvsafe.BackgroundAppManageActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(letvGodIntent, IntentType.LETV_GOD));
        // Coolpad: auto-start management
        Intent coolpadIntent = new Intent();
        coolpadIntent.setComponent(new ComponentName("com.yulong.android.security", "com.yulong.android.seccenter.tabbarmain"));
        intentWrappers.add(new WhiteListIntentWrapper(coolpadIntent, IntentType.COOLPAD));
        // Lenovo: background management
        Intent lenovoIntent = new Intent();
        lenovoIntent.setComponent(new ComponentName("com.lenovo.security", "com.lenovo.security.purebackground.PureBackgroundActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(lenovoIntent, IntentType.LENOVO));
        // Lenovo: background power optimization
        Intent lenovoGodIntent = new Intent();
        lenovoGodIntent.setComponent(new ComponentName("com.lenovo.powersetting", "com.lenovo.powersetting.ui.Settings$HighPowerApplicationsActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(lenovoGodIntent, IntentType.LENOVO_GOD));
        // ZTE: auto-start management
        Intent zteIntent = new Intent();
        zteIntent.setComponent(new ComponentName("com.zte.heartyservice", "com.zte.heartyservice.autorun.AppAutoRunManager"));
        intentWrappers.add(new WhiteListIntentWrapper(zteIntent, IntentType.ZTE));
        // ZTE: lock-screen acceleration protected apps
        Intent zteGodIntent = new Intent();
        zteGodIntent.setComponent(new ComponentName("com.zte.heartyservice", "com.zte.heartyservice.setting.ClearAppSettingsActivity"));
        intentWrappers.add(new WhiteListIntentWrapper(zteGodIntent, IntentType.ZTE_GOD));
        return intentWrappers;
    }
}
2,441
# Regression check: exercise JIT compilation of functions created at runtime.
def test_jit_func():
    """Define a fresh function via exec() and call it enough times to trigger the JIT."""
    namespace = {}
    exec("def f(x): x", namespace)
    for value in range(1000):
        namespace["f"](value)


# Repeat the whole exercise so many distinct functions get compiled.
for _ in range(1000):
    test_jit_func()
97
693
<reponame>Shahzeena/DragRecyclerView
package alessandro.draganddrop.model;

/**
 * Plain mutable data holder for a single list entry: an image resource id,
 * a name and a description.
 *
 * <p>Created by Alessandro on 12/01/2016.
 */
public class Item {

    // Drawable resource id used as the entry's image.
    private int idImage;
    private String name,description;

    /**
     * @param idImage     drawable resource id for the entry's image
     * @param name        display name of the entry
     * @param description display description of the entry
     */
    public Item(int idImage, String name, String description) {
        this.idImage = idImage;
        this.name = name;
        this.description = description;
    }

    /** @return the drawable resource id of the entry's image */
    public int getIdImage() {
        return idImage;
    }

    /** @param idImage drawable resource id to use as the entry's image */
    public void setIdImage(int idImage) {
        this.idImage = idImage;
    }

    /** @return the display name */
    public String getName() {
        return name;
    }

    /** @param name the display name to set */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the display description */
    public String getDescription() {
        return description;
    }

    /** @param description the display description to set */
    public void setDescription(String description) {
        this.description = description;
    }
}
320
20,996
{ "index": 41, "lineNumber": 1, "column": 42, "message": "Error: Line 1: Postfix increment/decrement may not have eval or arguments operand in strict mode" }
63
480
<filename>ck/repo/module/package/template-git-soft-customize.py # # Automatically generated # import os ############################################################################## # setup environment def setup(i): s='' cus=i['customize'] env=i['env'] fp=cus.get('full_path','') ep=cus.get('env_prefix','') if ep=='': return {'return':1, 'error':'environment prefix is not defined'} if fp=='': return {'return':1, 'error':''} # p1=os.path.dirname(fp) env[ep]=os.path.join(fp, 'src') # You can extend environent variables for your soft here: # env[ep+'_EXTENSION']=xyz return {'return':0, 'bat':s}
319