max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
3,655 | {
"scripts": {
"build": "mkdir -p public && echo 'hello from index.txt' > public/index.txt"
}
}
| 42 |
5,385 | <gh_stars>1000+
/**
* Copyright 2006-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.eclipse.tests.harness.tests;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
import static org.mybatis.generator.eclipse.tests.harness.Utilities.getCompilationUnitSummaryFromResource;
import static org.mybatis.generator.eclipse.tests.harness.matchers.Matchers.*;
import java.io.IOException;
import java.io.InputStream;
import org.junit.Test;
import org.mybatis.generator.eclipse.tests.harness.summary.AbstractSummary;
import org.mybatis.generator.eclipse.tests.harness.summary.AnnotationSummary;
import org.mybatis.generator.eclipse.tests.harness.summary.ClassSummary;
import org.mybatis.generator.eclipse.tests.harness.summary.CompilationUnitSummary;
import org.mybatis.generator.eclipse.tests.harness.summary.EnumSummary;
import org.mybatis.generator.eclipse.tests.harness.summary.InterfaceSummary;
/**
 * Tests for the test-harness summarizer. Each test parses a fixture source file
 * (*.src resource) into a {@code CompilationUnitSummary} and verifies that the
 * summary captures imports, top-level types, members, and nested types exactly.
 * All four fixtures share the same set of inner types, checked by verifyInners.
 */
public class SummarizerTest {

    private static final String IMPORT_JAVA_IO_SERIALIZABLE = "import java.io.Serializable";
    private static final String SERIALIZABLE = "Serializable";

    @Test
    public void testAnnotationSummarizer() throws IOException {
        // OuterAnnotation.src declares two top-level annotations and no other types.
        InputStream resource = getClass().getResourceAsStream("/org/mybatis/generator/eclipse/tests/harness/tests/resources/OuterAnnotation.src");
        CompilationUnitSummary cuSummary = getCompilationUnitSummaryFromResource(resource);

        assertThat(cuSummary, hasImportCount(4));
        assertThat(cuSummary, hasImport("import java.lang.annotation.ElementType"));
        assertThat(cuSummary, hasImport("import java.lang.annotation.Retention"));
        assertThat(cuSummary, hasImport("import java.lang.annotation.RetentionPolicy"));
        assertThat(cuSummary, hasImport("import java.lang.annotation.Target"));
        assertThat(cuSummary, hasAnnotationCount(2));
        assertThat(cuSummary, hasAnnotation("OuterAnnotation"));
        assertThat(cuSummary, hasAnnotation("SecondOuterAnnotation"));
        assertThat(cuSummary, hasClassCount(0));
        assertThat(cuSummary, hasEnumCount(0));
        assertThat(cuSummary, hasInterfaceCount(0));

        // Annotation members (e.g. name()) are reported separately from fields.
        AnnotationSummary annotationSummary = cuSummary.getAnnotationSummary("OuterAnnotation");
        assertThat(annotationSummary, hasAnnotationMember("name"));
        assertThat(annotationSummary, hasAnnotationMemberCount(1));
        assertThat(annotationSummary, hasField("id", ofType("int")));
        assertThat(annotationSummary, hasField("id2", ofType("int")));
        assertThat(annotationSummary, hasField("name", ofType("int")));
        assertThat(annotationSummary, hasFieldCount(3));

        verifyInners(annotationSummary);
    }

    @Test
    public void testClassSummarizer() throws IOException {
        // OuterClass.src declares two top-level classes; OuterClass extends
        // ArrayList<String> and implements Serializable.
        InputStream resource = getClass().getResourceAsStream("/org/mybatis/generator/eclipse/tests/harness/tests/resources/OuterClass.src");
        CompilationUnitSummary cuSummary = getCompilationUnitSummaryFromResource(resource);

        assertThat(cuSummary, hasImportCount(2));
        assertThat(cuSummary, hasImport(IMPORT_JAVA_IO_SERIALIZABLE));
        assertThat(cuSummary, hasImport("import java.util.ArrayList"));
        assertThat(cuSummary, hasClassCount(2));
        assertThat(cuSummary, hasClass("SecondOuterClass"));
        assertThat(cuSummary, hasClass("OuterClass"));
        assertThat(cuSummary, hasEnumCount(0));
        assertThat(cuSummary, hasInterfaceCount(0));

        ClassSummary classSummary = cuSummary.getClassSummary("OuterClass");
        assertThat(classSummary, hasSuperClass("ArrayList<String>"));
        assertThat(classSummary, hasSuperInterface(SERIALIZABLE));
        assertThat(classSummary, hasSuperInterfaceCount(1));
        assertThat(classSummary, hasField("serialVersionUID", withFieldType("long")));
        assertThat(classSummary, hasField("unreliableCount"));
        assertThat(classSummary, hasFieldCount(2));
        // Method signatures include parameter types, e.g. "add(String)".
        assertThat(classSummary, hasMethod("add(String)"));
        assertThat(classSummary, hasMethod("getUnreliableCount()"));
        assertThat(classSummary, hasMethodCount(2));

        verifyInners(classSummary);
    }

    @Test
    public void testEnumSummarizer() throws IOException {
        // OuterEnum.src declares two top-level enums; OuterEnum has four
        // constants, a constructor, and implements Serializable.
        InputStream resource = getClass().getResourceAsStream("/org/mybatis/generator/eclipse/tests/harness/tests/resources/OuterEnum.src");
        CompilationUnitSummary cuSummary = getCompilationUnitSummaryFromResource(resource);

        assertThat(cuSummary, hasImportCount(1));
        assertThat(cuSummary, hasImport(IMPORT_JAVA_IO_SERIALIZABLE));
        assertThat(cuSummary, hasClassCount(0));
        assertThat(cuSummary, hasEnumCount(2));
        assertThat(cuSummary, hasEnum("SecondOuterEnum"));
        assertThat(cuSummary, hasEnum("OuterEnum"));
        assertThat(cuSummary, hasInterfaceCount(0));

        EnumSummary enumSummary = cuSummary.getEnumSummary("OuterEnum");
        assertThat(enumSummary, hasEnumConstant("FRED"));
        assertThat(enumSummary, hasEnumConstant("WILMA"));
        assertThat(enumSummary, hasEnumConstant("BARNEY"));
        assertThat(enumSummary, hasEnumConstant("BETTY"));
        assertThat(enumSummary, hasEnumConstantCount(4));
        assertThat(enumSummary, hasSuperInterface(SERIALIZABLE));
        assertThat(enumSummary, hasSuperInterfaceCount(1));
        assertThat(enumSummary, hasField("name"));
        assertThat(enumSummary, hasFieldCount(1));
        // Enum constructors are summarized like methods.
        assertThat(enumSummary, hasMethod("OuterEnum(String)"));
        assertThat(enumSummary, hasMethod("getName()"));
        assertThat(enumSummary, hasMethodCount(2));

        verifyInners(enumSummary);
    }

    @Test
    public void testInterfaceSummarizer() throws IOException {
        // OuterInterface.src declares two top-level interfaces; OuterInterface
        // extends Serializable and declares one constant and two methods.
        InputStream resource = getClass().getResourceAsStream("/org/mybatis/generator/eclipse/tests/harness/tests/resources/OuterInterface.src");
        CompilationUnitSummary cuSummary = getCompilationUnitSummaryFromResource(resource);

        assertThat(cuSummary, hasImportCount(1));
        assertThat(cuSummary, hasImport(IMPORT_JAVA_IO_SERIALIZABLE));
        assertThat(cuSummary, hasClassCount(0));
        assertThat(cuSummary, hasEnumCount(0));
        assertThat(cuSummary, hasInterfaceCount(2));
        assertThat(cuSummary, hasInterface("SecondOuterInterface"));
        assertThat(cuSummary, hasInterface("OuterInterface"));

        InterfaceSummary interfaceSummary = cuSummary.getInterfaceSummary("OuterInterface");
        assertThat(interfaceSummary, hasSuperInterface(SERIALIZABLE));
        assertThat(interfaceSummary, hasSuperInterfaceCount(1));
        assertThat(interfaceSummary, hasField("MY_NAME"));
        assertThat(interfaceSummary, hasFieldCount(1));
        assertThat(interfaceSummary, hasMethod("doSomething(int)"));
        assertThat(interfaceSummary, hasMethod("doSomethingElse()"));
        assertThat(interfaceSummary, hasMethodCount(2));

        verifyInners(interfaceSummary);
    }

    /**
     * Every fixture's outer type nests the same four inner types; verify each
     * one is present exactly once and delegate detailed member checks.
     */
    private void verifyInners(AbstractSummary summary) {
        assertThat(summary, hasClass("InnerClass"));
        verifyInnerClass(summary.getClassSummary("InnerClass"));
        assertThat(summary, hasClassCount(1));
        assertThat(summary, hasInterface("InnerInterface"));
        verifyInnerInterface(summary.getInterfaceSummary("InnerInterface"));
        assertThat(summary, hasInterfaceCount(1));
        assertThat(summary, hasEnum("InnerEnum"));
        verifyInnerEnum(summary.getEnumSummary("InnerEnum"));
        assertThat(summary, hasEnumCount(1));
        assertThat(summary, hasAnnotation("InnerAnnotation"));
        verifyInnerAnnotation(summary.getAnnotationSummary("InnerAnnotation"));
        assertThat(summary, hasAnnotationCount(1));
    }

    // InnerAnnotation has a single member ("amount") and no nested types.
    private void verifyInnerAnnotation(AnnotationSummary annotationSummary) {
        assertThat(annotationSummary, hasAnnotationMember("amount"));
        assertThat(annotationSummary, hasAnnotationMemberCount(1));
        assertThat(annotationSummary, hasFieldCount(0));
        assertThat(annotationSummary, hasClassCount(0));
        assertThat(annotationSummary, hasEnumCount(0));
        assertThat(annotationSummary, hasAnnotationCount(0));
        assertThat(annotationSummary, hasInterfaceCount(0));
    }

    // InnerEnum has two constants, one field, a constructor plus getter,
    // and no nested types or superinterfaces.
    private void verifyInnerEnum(EnumSummary enumSummary) {
        assertThat(enumSummary, hasEnumConstant("GEORGE"));
        assertThat(enumSummary, hasEnumConstant("JANE"));
        assertThat(enumSummary, hasEnumConstantCount(2));
        assertThat(enumSummary, hasField("index"));
        assertThat(enumSummary, hasFieldCount(1));
        assertThat(enumSummary, hasMethod("InnerEnum(int)"));
        assertThat(enumSummary, hasMethod("getIndex()"));
        assertThat(enumSummary, hasMethodCount(2));
        assertThat(enumSummary, hasClassCount(0));
        assertThat(enumSummary, hasEnumCount(0));
        assertThat(enumSummary, hasAnnotationCount(0));
        assertThat(enumSummary, hasInterfaceCount(0));
        assertThat(enumSummary, hasSuperInterfaceCount(0));
    }

    // InnerInterface declares a single method and nothing else.
    private void verifyInnerInterface(InterfaceSummary interfaceSummary) {
        assertThat(interfaceSummary, hasMethod("execute()"));
        assertThat(interfaceSummary, hasMethodCount(1));
        assertThat(interfaceSummary, hasFieldCount(0));
        assertThat(interfaceSummary, hasClassCount(0));
        assertThat(interfaceSummary, hasEnumCount(0));
        assertThat(interfaceSummary, hasAnnotationCount(0));
        assertThat(interfaceSummary, hasInterfaceCount(0));
        assertThat(interfaceSummary, hasSuperInterfaceCount(0));
    }

    // InnerClass is a plain bean: one field with getter/setter, no supertypes.
    private void verifyInnerClass(ClassSummary classSummary) {
        assertThat(classSummary, hasMethod("getDescription()"));
        assertThat(classSummary, hasMethod("setDescription(String)"));
        assertThat(classSummary, hasMethodCount(2));
        assertThat(classSummary, hasField("description"));
        assertThat(classSummary, hasFieldCount(1));
        assertThat(classSummary, hasClassCount(0));
        assertThat(classSummary, hasEnumCount(0));
        assertThat(classSummary, hasAnnotationCount(0));
        assertThat(classSummary, hasInterfaceCount(0));
        assertThat(classSummary, hasSuperInterfaceCount(0));
        assertThat(classSummary, hasSuperClass(nullValue()));
    }
}
| 4,097 |
2,542 | <gh_stars>1000+
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace Common;
using namespace std;
using namespace Naming;
using namespace Store;
using namespace Management::ClusterManager;
StringLiteral const TraceComponent("FabricProvisionContextAsyncOperation");
// Drives a FabricProvisionContext through rollout: runs the ImageBuilder
// provision/cleanup work in OnStart and then commits the resulting context
// state to the store. Construction only captures the context reference; all
// work begins in OnStart.
ProcessFabricProvisionContextAsyncOperation::ProcessFabricProvisionContextAsyncOperation(
    __in RolloutManager & rolloutManager,
    __in FabricProvisionContext & context,
    TimeSpan const timeout,
    AsyncCallback const & callback,
    AsyncOperationSPtr const & root)
    : ProcessRolloutContextAsyncOperationBase(
        rolloutManager,
        context.ReplicaActivityId,
        timeout,
        callback,
        root)
    , context_(context)
{
}
// Runs the ImageBuilder work for this context (provision installs the
// requested code/manifest; unprovision removes the version), then persists
// the completed state through a store transaction.
//
// Note: the original wrapped the ImageBuilder calls in
// `if (error.IsSuccess())` immediately after constructing `error` as
// Success — a dead guard that is removed here; behavior is unchanged.
void ProcessFabricProvisionContextAsyncOperation::OnStart(AsyncOperationSPtr const & thisSPtr)
{
    ErrorCode error(ErrorCodeValue::Success);

    if (context_.IsProvision)
    {
        error = this->ImageBuilder.ProvisionFabric(
            context_.CodeFilepath,
            context_.ClusterManifestFilepath,
            this->RemainingTimeout);
    }
    else
    {
        error = this->ImageBuilder.CleanupFabricVersion(
            context_.Version,
            this->RemainingTimeout);

        // Some cleanup failures are benign (IsImageBuilderSuccessOnCleanup);
        // treat those as success so unprovisioning is effectively idempotent.
        if (!error.IsSuccess() && this->IsImageBuilderSuccessOnCleanup(error))
        {
            error = ErrorCodeValue::Success;
        }
    }

    if (error.IsSuccess())
    {
        // Record completion in the store; the commit may finish synchronously,
        // so OnCommitComplete is invoked with the expected-sync flag pattern.
        auto storeTx = this->CreateTransaction();

        if (context_.IsProvision)
        {
            error = context_.CompleteProvisioning(storeTx);
        }
        else
        {
            error = context_.CompleteUnprovisioning(storeTx);
        }

        if (error.IsSuccess())
        {
            auto operation = StoreTransaction::BeginCommit(
                move(storeTx),
                context_,
                [this](AsyncOperationSPtr const & operation) { this->OnCommitComplete(operation, false); },
                thisSPtr);
            this->OnCommitComplete(operation, true);
        }
    }

    if (!error.IsSuccess())
    {
        this->TryComplete(thisSPtr, error);
    }
}
// Completion handler for the store commit. BeginCommit's callback fires this
// with expectedCompletedSynchronously=false and OnStart calls it again with
// true; the guard below ensures exactly one of the two invocations proceeds.
void ProcessFabricProvisionContextAsyncOperation::OnCommitComplete(AsyncOperationSPtr const & operation, bool expectedCompletedSynchronously)
{
    if (operation->CompletedSynchronously != expectedCompletedSynchronously)
    {
        return;
    }

    ErrorCode error = StoreTransaction::EndCommit(operation);

    this->TryComplete(operation->Parent, error);
}
// Standard async-operation End: waits for/validates completion of the given
// operation and surfaces its terminal error code to the caller.
ErrorCode ProcessFabricProvisionContextAsyncOperation::End(AsyncOperationSPtr const & operation)
{
    return AsyncOperation::End<ProcessFabricProvisionContextAsyncOperation>(operation)->Error;
}
| 1,169 |
1,305 | /*
* Copyright (c) 1998, 2001, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package org.omg.CORBA;
/**
* A <tt>PolicyErrorCode</tt> which would be filled if the value
* requested for the <tt>Policy</tt> is of a
* valid type and within the valid range for that type, but this valid value
* is not currently supported.
*
* @author rip-dev
*/
public interface UNSUPPORTED_POLICY_VALUE {
    /**
     * The error code for the {@code PolicyError} exception: the value
     * requested for the {@code Policy} is of a valid type and within the
     * valid range, but is not currently supported.
     *
     * <p>Interface fields are implicitly {@code public static final}, so the
     * redundant {@code final} modifier and the {@code (short) (4L)} cast of
     * the original are dropped; the constant value is unchanged.
     */
    short value = 4;
}
| 228 |
14,668 | <gh_stars>1000+
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/media_galleries/fileapi/mtp_device_async_delegate.h"
#include "net/base/io_buffer.h"
// Bundles the parameters of an asynchronous ReadBytes call (target file, the
// caller-owned buffer, the byte range, and the success/error callbacks) so
// the request can be queued and serviced later.
MTPDeviceAsyncDelegate::ReadBytesRequest::ReadBytesRequest(
    uint32_t file_id,
    net::IOBuffer* buf,
    int64_t offset,
    int buf_len,
    ReadBytesSuccessCallback success_callback,
    ErrorCallback error_callback)
    : file_id(file_id),
      buf(buf),
      offset(offset),
      buf_len(buf_len),
      success_callback(std::move(success_callback)),
      error_callback(std::move(error_callback)) {}

// The request holds move-only callbacks, so it is movable but not copyable.
MTPDeviceAsyncDelegate::ReadBytesRequest::ReadBytesRequest(
    ReadBytesRequest&& other) = default;

MTPDeviceAsyncDelegate::ReadBytesRequest::~ReadBytesRequest() = default;
| 313 |
1,273 | <filename>p2m/utils.py
# Copyright (C) 2019 <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Fudan University
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
def construct_feed_dict(pkl, placeholders):
"""Construct feed dictionary."""
coord = pkl[0]
pool_idx = pkl[4]
faces = pkl[5]
# laplace = pkl[6]
lape_idx = pkl[7]
edges = []
for i in range(1,4):
adj = pkl[i][1]
edges.append(adj[0])
feed_dict = dict()
feed_dict.update({placeholders['features']: coord})
feed_dict.update({placeholders['edges'][i]: edges[i] for i in range(len(edges))})
feed_dict.update({placeholders['faces'][i]: faces[i] for i in range(len(faces))})
feed_dict.update({placeholders['pool_idx'][i]: pool_idx[i] for i in range(len(pool_idx))})
feed_dict.update({placeholders['lape_idx'][i]: lape_idx[i] for i in range(len(lape_idx))})
feed_dict.update({placeholders['support1'][i]: pkl[1][i] for i in range(len(pkl[1]))})
feed_dict.update({placeholders['support2'][i]: pkl[2][i] for i in range(len(pkl[2]))})
feed_dict.update({placeholders['support3'][i]: pkl[3][i] for i in range(len(pkl[3]))})
return feed_dict
| 625 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-g4wc-2gqq-5g7v",
"modified": "2021-12-22T00:01:34Z",
"published": "2021-12-18T00:00:42Z",
"aliases": [
"CVE-2021-40853"
],
"details": "TCMAN GIM does not perform an authorization check when trying to access determined resources. A remote attacker could exploit this vulnerability to access URL that require privileges without having them. The exploitation of this vulnerability might allow a remote attacker to obtain sensible information.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-40853"
},
{
"type": "WEB",
"url": "https://www.incibe-cert.es/en/early-warning/security-advisories/tcman-gim-missing-authorization-vulnerability"
}
],
"database_specific": {
"cwe_ids": [
"CWE-862"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 378 |
2,743 | <reponame>eshbeata/open-paperless
from __future__ import unicode_literals
from common.classes import PropertyHelper
class DocumentMetadataHelper(PropertyHelper):
    """PropertyHelper subclass that resolves attribute-style lookups of a
    document's metadata values by metadata type name."""

    @staticmethod
    @property
    def constructor(*args, **kwargs):
        # NOTE(review): stacking @staticmethod on top of @property is
        # suspicious -- accessed via the class this yields a staticmethod
        # wrapping a property object, not a plain callable. Confirm how
        # PropertyHelper consumes `constructor` before changing it.
        return DocumentMetadataHelper(*args, **kwargs)

    def get_result(self, name):
        # Return the value of the instance's metadata entry whose
        # metadata type name matches `name`.
        return self.instance.metadata.get(metadata_type__name=name).value
class MetadataLookup(object):
    """Registry of named metadata lookup entries.

    Instantiating an entry appends it to the class-level registry; the class
    methods render the registry either as a template context dictionary or as
    a human-readable help string.
    """

    _registry = []

    @classmethod
    def get_as_context(cls):
        """Return {name: resolved value} for every registered entry.

        Callable values are invoked; plain values are used as-is (a TypeError
        from calling a non-callable triggers the fallback).
        """
        context = {}
        for lookup in cls._registry:
            try:
                resolved = lookup.value()
            except TypeError:
                resolved = lookup.value
            context[lookup.name] = resolved
        return context

    @classmethod
    def get_as_help_text(cls):
        """Return a space-joined '{{ name }} = "description"' summary."""
        fragments = [
            '{{{{ {0} }}}} = "{1}"'.format(lookup.name, lookup.description)
            for lookup in cls._registry
        ]
        return ' '.join(fragments)

    def __init__(self, description, name, value):
        self.description = description
        self.name = name
        self.value = value
        # Registration is a side effect of construction.
        self.__class__._registry.append(self)
| 502 |
577 | /**
* Copyright 2009, Google Inc. All rights reserved.
* Licensed to PSF under a Contributor Agreement.
*/
package org.python.indexer.ast;
import org.python.indexer.Scope;
import org.python.indexer.types.NType;
import org.python.indexer.types.NUnknownType;
import java.util.List;
/**
 * AST node for one comprehension clause -- the {@code for <target> in <iter>
 * [if ...]} part of a Python list/set/dict comprehension or generator
 * expression.
 */
public class NComprehension extends NNode {

    static final long serialVersionUID = -598250664243757218L;

    public NNode target;    // loop variable(s) bound by this clause
    public NNode iter;      // the iterable being traversed
    public List<NNode> ifs; // zero or more filter conditions

    public NComprehension(NNode target, NNode iter, List<NNode> ifs) {
        // Default to a trivial (0,1) source range.
        this(target, iter, ifs, 0, 1);
    }

    public NComprehension(NNode target, NNode iter, List<NNode> ifs, int start, int end) {
        super(start, end);
        this.target = target;
        this.iter = iter;
        this.ifs = ifs;
        addChildren(target, iter);
        addChildren(ifs);
    }

    /** A comprehension introduces name bindings for its loop target. */
    @Override
    public boolean bindsName() {
        return true;
    }

    @Override
    protected void bindNames(Scope s) throws Exception {
        bindNames(s, target, NameBinder.make());
    }

    // Recursively bind plain-name and (possibly nested) sequence targets,
    // e.g. "for (a, b) in ..." binds both a and b. Other target kinds are
    // silently ignored here.
    private void bindNames(Scope s, NNode target, NameBinder binder) throws Exception {
        if (target instanceof NName) {
            binder.bind(s, (NName)target, new NUnknownType());
            return;
        }
        if (target instanceof NSequence) {
            for (NNode n : ((NSequence)target).getElements()) {
                bindNames(s, n, binder);
            }
        }
    }

    @Override
    public NType resolve(Scope s) throws Exception {
        // Bind the target from the iterable, resolve the filters, and take
        // the target's type as the clause's type.
        NameBinder.make().bindIter(s, target, iter);
        resolveList(ifs, s);
        return setType(target.getType());
    }

    @Override
    public String toString() {
        return "<Comprehension:" + start() + ":" + target + ":" + iter + ":" + ifs + ">";
    }

    @Override
    public void visit(NNodeVisitor v) {
        if (v.visit(this)) {
            visitNode(target, v);
            visitNode(iter, v);
            visitNodeList(ifs, v);
        }
    }
}
| 862 |
335 | # ifndef CPPAD_SPEED_DET_BY_LU_HPP
# define CPPAD_SPEED_DET_BY_LU_HPP
/* --------------------------------------------------------------------------
CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-17 <NAME>
CppAD is distributed under multiple licenses. This distribution is under
the terms of the
GNU General Public License Version 3.
A copy of this license is included in the COPYING file of this distribution.
Please visit http://www.coin-or.org/CppAD/ for information on other licenses.
-------------------------------------------------------------------------- */
/*
$begin det_by_lu$$
$spell
CppAD
cppad
lu
hpp
typedef
const
hpp
Det
CPPAD_TESTVECTOR
namespace
$$
$section Determinant Using Expansion by Lu Factorization$$
$mindex det_by_lu factor$$
$head Syntax$$
$codei%# include <cppad/speed/det_by_lu.hpp>
%$$
$codei%det_by_lu<%Scalar%> %det%(%n%)
%$$
$icode%d% = %det%(%a%)
%$$
$head Inclusion$$
The template class $code det_by_lu$$ is defined in the $code CppAD$$
namespace by including
the file $code cppad/speed/det_by_lu.hpp$$
(relative to the CppAD distribution directory).
$head Constructor$$
The syntax
$codei%
det_by_lu<%Scalar%> %det%(%n%)
%$$
constructs the object $icode det$$ which can be used for
evaluating the determinant of $icode n$$ by $icode n$$ matrices
using LU factorization.
$head Scalar$$
The type $icode Scalar$$ can be any
$cref NumericType$$
$head n$$
The argument $icode n$$ has prototype
$codei%
size_t %n%
%$$
$head det$$
The syntax
$codei%
%d% = %det%(%a%)
%$$
returns the determinant of the matrix $latex A$$ using LU factorization.
$subhead a$$
The argument $icode a$$ has prototype
$codei%
const %Vector% &%a%
%$$
It must be a $icode Vector$$ with length $latex n * n$$ and with
elements of type $icode Scalar$$.
The elements of the $latex n \times n$$ matrix $latex A$$ are defined,
for $latex i = 0 , \ldots , n-1$$ and $latex j = 0 , \ldots , n-1$$, by
$latex \[
A_{i,j} = a[ i * m + j]
\] $$
$subhead d$$
The return value $icode d$$ has prototype
$codei%
%Scalar% %d%
%$$
$head Vector$$
If $icode y$$ is a $icode Vector$$ object,
it must support the syntax
$codei%
%y%[%i%]
%$$
where $icode i$$ has type $code size_t$$ with value less than $latex n * n$$.
This must return a $icode Scalar$$ value corresponding to the $th i$$
element of the vector $icode y$$.
This is the only requirement of the type $icode Vector$$.
$children%
speed/example/det_by_lu.cpp%
omh/det_by_lu_hpp.omh
%$$
$head Example$$
The file
$cref det_by_lu.cpp$$
contains an example and test of $code det_by_lu.hpp$$.
It returns true if it succeeds and false otherwise.
$head Source Code$$
The file
$cref det_by_lu.hpp$$
contains the source for this template function.
$end
---------------------------------------------------------------------------
*/
// BEGIN C++
# include <cppad/utility/vector.hpp>
# include <cppad/utility/lu_solve.hpp>
// BEGIN CppAD namespace
namespace CppAD {
// Functor that evaluates the determinant of an n x n matrix using LU
// factorization via CppAD::LuSolve. Scalar may be any NumericType.
template <class Scalar>
class det_by_lu {
private:
    const size_t m_;          // number of right-hand sides (0: no solve needed)
    const size_t n_;          // matrix dimension
    CppAD::vector<Scalar> A_; // working copy of the matrix, length n_ * n_
    CppAD::vector<Scalar> B_; // right-hand side (unused since m_ == 0)
    CppAD::vector<Scalar> X_; // solution vector (unused since m_ == 0)
public:
    // Construct an evaluator for n x n matrices.
    det_by_lu(size_t n) : m_(0), n_(n), A_(n * n)
    { }

    // Return det(A), where the row-major matrix A is stored in x
    // (length n_ * n_, element (i,j) at x[i * n + j]).
    template <class Vector>
    inline Scalar operator()(const Vector &x)
    {
        Scalar logdet;
        Scalar det;
        int signdet;
        size_t i;

        // copy matrix so it is not overwritten
        for(i = 0; i < n_ * n_; i++)
            A_[i] = x[i];

        // compute the log of the absolute determinant and its sign
        signdet = CppAD::LuSolve(
            n_, m_, A_, B_, X_, logdet);
        /*
        // Do not do this for speed test because it makes floating
        // point operation sequence very simple.
        if( signdet == 0 )
            det = 0;
        else det = Scalar( signdet ) * exp( logdet );
        */
        // convert to determinant
        det = Scalar( signdet ) * exp( logdet );
# ifdef FADBAD
        // Fadbad requires temporaries to be set to constants
        for(i = 0; i < n_ * n_; i++)
            A_[i] = 0;
# endif
        return det;
    }
};
} // END CppAD namespace
// END C++
# endif
| 1,567 |
3,428 | <gh_stars>1000+
{"id":"00486","group":"spam-2","checksum":{"type":"MD5","value":"7f5cde6ad9f34dcbe56a1fd73138b351"},"text":"From <EMAIL> Mon Jun 24 17:06:55 2002\nReturn-Path: mal<EMAIL>.ru\nDelivery-Date: Tue May 28 04:58:04 2002\nReceived: from mandark.labs.netnoteinc.com ([213.105.180.140]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g4S3w4e21191 for\n <<EMAIL>>; Tue, 28 May 2002 04:58:04 +0100\nReceived: from dns2.geo.net.co (dns2-la.geo.net.co [216.226.238.12]) by\n mandark.labs.netnoteinc.com (8.11.2/8.11.2) with ESMTP id g4S3vp727900;\n Tue, 28 May 2002 04:57:53 +0100\nReceived: from integracion.com.co [216.226.238.33] by dns2.geo.net.co\n (SMTPD32-7.06) id A59AFD01E4; Mon, 27 May 2002 17:39:22 -0500\nReceived: from 172.16.31.10 ([217.164.58.237]) by integracion.com.co;\n Mon, 27 May 2002 17:41:06 -0500\nMessage-Id: <0000623628de$00001c5a$00005fa8@>\nTo: <<EMAIL>>\nFrom: \"<NAME>\" <<EMAIL>>\nSubject: 6 Figures from homeYLWLNMAN\nDate: Mon, 27 May 2002 16:50:28 -1600\nMIME-Version: 1.0\nX-Priority: 1\nX-Mailer: Microsoft Outlook, Build 10.0.3416\nX-Msmail-Priority: High\nX-Keywords: \nContent-Type: text/html; charset=\"iso-8859-1\"\nContent-Transfer-Encoding: quoted-printable\n\n<html><head></head><body bgcolor=3D#FFFFFF text=3D#000000 leftmargin=3D0 to=\npmargin=3D0 marginwidth=3D0 marginheight=3D0 link=3D#008080 vlink=3D#00CCF=\nF alink=3D#00FFFF><HR color=3D#050C87 noshade width=3D531><table cellpaddi=\nng=3D5 cellspacing=3D0 border=3D0 width=3D100%><tr><td valign=3Dbottom ali=\ngn=3Dcenter><font size=3D1 face=3Darial color=3D#000000></font><center><ta=\nble width=3D600 border=3D0 cellpadding=3D0 cellspacing=3D0><tr><img src=3D=\nhttp://www.wannaberich.net/biz/img2/header1.gif width=3D600 height=3D145><=\ntd width=3D149 height=3D351 align=3Dleft valign=3Dtop rowspan=3D6><table w=\nidth=3D149 border=3D0 cellspacing=3D0 cellpadding=3D0><tr><td><img src=3Dh=\nttp://www.wannaberich.net/biz/img2/index_strip.gif width=3D149 
height=3D78=\n3></td></tr></table></td></tr><tr><td width=3D600 height=3D46 align=3Dleft=\n valign=3Dtop><center><b><font face=3D\"Verdana, Arial, Helvetica, sans-ser=\nif\" size=3D4>Let <a href=3Dhttp://www.wannaberich.net/biz/><b>Work From Ho=\nme</b></a> help you, our program has made more Millionaires than\nany other home business opportunity in existance.\nWe have a proven 20 year track record and will help you succeed.</font><br=\n><br><font face=3D\"Verdana, Arial, Helvetica, sans-serif\"><i>If a former r=\nealtor\nand former decorator<br>(John and <NAME>)<br>can quickly build res=\nidual checks like these...<br><br> </i></font></b>\n<a href=3Dhttp://www.wannaberich.net/biz/><img src=3Dhttp://www.wannaberic=\nh.net/biz/img2/checks.gif border=3D0 width=3D454 height=3D201></a>\n<b><font face=3D\"Verdana, Arial, Helvetica, sans-serif\" size=3D5>...THEN S=\nO CAN YOU!<br></font><font face=3D\"Verdana, Arial, Helvetica, sans-serif\" =\nsize=3D3>All you\nhave to do is follow the roadmap to success,<br>with the AUTOMATED DIRECT =\nDATA SYSTEM.<br><font color=3Dred><a href=3Dhttp://www.wannaberich.net/biz=\n/>CLICK HERE to begin your journey.</a></font></font></b></center></td></t=\nr></table><HR color=3D#050C87 noshade width=3D531><p><font size=3D3 color=3D=\nnavy><TABLE BORDER=3D1 ALIGN=3DCENTER><TR><TD>If you do not desire to incu=\nr further suggestion from us, please<br><A HREF=3Dhttp://wannaberich.net>E=\nnter here</A></TD></TR></TABLE></p></BODY></html>\n\n\n\n"} | 1,510 |
325 | import glob
import os
import pathlib
import plistlib
import sqlite3
import scripts.artifacts.artGlobals #use to get iOS version -> iOSversion = scripts.artifacts.artGlobals.versionf
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, is_platform_windows
def get_dhcphp(files_found, report_folder, seeker):
    """Parse a DHCP hotspot client leases file and report each client block.

    The source file contains one ``{ key = value ... }`` block per hotspot
    client; each block is rendered as a two-column HTML table and collected
    into data_list for the HTML/TSV artifact reports.

    Fixes over the original: lines without '=' (notably blank separator
    lines, which the commented-out branch hinted at) no longer raise
    IndexError, and values containing '=' are no longer truncated because
    only the first '=' splits key from value.
    """
    file_found = str(files_found[0])
    data_list = []
    reportval = ''

    with open(file_found, "r") as filefrom:
        for line in filefrom:
            cline = line.strip()
            if cline == "{":
                # Start of a client block: open a new key/value table.
                reportval = reportval + ("<table><tr><td>Key</td><td>Values</td></tr>")
            elif cline == "}":
                # End of block: close the table and store the finished entry.
                reportval = reportval + ("</table>")
                data_list.append((reportval,))
                reportval = ''
            elif cline == '':
                # Blank separator lines carry no data.
                continue
            else:
                # Split on the first '=' only; a line with no '=' yields an
                # empty value cell instead of crashing.
                key, _, value = cline.partition("=")
                reportval = reportval + (f"<tr><td>{key}</td>")
                reportval = reportval + (f"<td>{value}</td></tr>")

    if len(data_list) > 0:
        report = ArtifactHtmlReport('DHCP Hotspot Clients')
        report.start_artifact_report(report_folder, 'Hotspot Clients')
        report.add_script()
        data_headers = ('Hotspot Clients',)
        # html_escape=False: the cells already contain trusted generated HTML.
        report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False)
        report.end_artifact_report()

        tsvname = 'DHCP Hotspot Clients'
        tsv(report_folder, data_headers, data_list, tsvname)
    else:
        logfunc('No data available')
    return
13,889 | <reponame>VICTORVICKIE/kivy<gh_stars>1000+
'''
Clipboard xclip: an implementation of the Clipboard using xclip
command line tool.
'''
__all__ = ('ClipboardXclip', )
from kivy.utils import platform
from kivy.core.clipboard._clipboard_ext import ClipboardExternalBase
if platform != 'linux':
    raise SystemError('unsupported platform for xclip clipboard')

# Probe for the xclip binary at import time; if the executable is missing,
# Popen raises and this provider is rejected. The original wrapped this in
# `try: ... except: raise`, which is a no-op wrapper and is removed here.
import subprocess
p = subprocess.Popen(['xclip', '-version'], stdout=subprocess.PIPE,
                     stderr=subprocess.DEVNULL)
p.communicate()
class ClipboardXclip(ClipboardExternalBase):
    """Clipboard backend that shells out to the ``xclip`` command-line tool."""

    @staticmethod
    def _clip(inout, selection):
        # 'in' wires stdin (copy into the clipboard); 'out' wires stdout
        # (paste from the clipboard) -- only the matching stream is piped.
        command = ['xclip', '-' + inout, '-selection', selection]
        stream_kwargs = {'std' + inout: subprocess.PIPE}
        return subprocess.Popen(command, **stream_kwargs)
| 300 |
335 | <filename>B/Belvedere_noun.json
{
"word": "Belvedere",
"definitions": [
"A summer house or open-sided gallery, typically at rooftop level, commanding a fine view."
],
"parts-of-speech": "Noun"
} | 86 |
2,151 | <gh_stars>1000+
// Copyright 2017 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "dawn_native/Texture.h"
#include <algorithm>
#include "common/Assert.h"
#include "common/Constants.h"
#include "common/Math.h"
#include "dawn_native/Device.h"
#include "dawn_native/PassResourceUsage.h"
#include "dawn_native/ValidationUtils_autogen.h"
namespace dawn_native {
namespace {
// TODO(<EMAIL>): implement texture view format compatibility rule
// Until the full compatibility rule exists, a view must use exactly the
// texture's own format; any mismatch is a validation error.
MaybeError ValidateTextureViewFormatCompatibility(const TextureBase* texture,
                                                  const TextureViewDescriptor* descriptor) {
    if (texture->GetFormat().format != descriptor->format) {
        return DAWN_VALIDATION_ERROR(
            "The format of texture view is not compatible to the original texture");
    }

    return {};
}
// TODO(<EMAIL>): support validation on all texture view dimensions
// Returns true when a view of the given dimension may be created from a
// texture of the given dimension. Only 2D textures (and the view dimensions
// derivable from them) are handled; other view dimensions hit UNREACHABLE.
bool IsTextureViewDimensionCompatibleWithTextureDimension(
    wgpu::TextureViewDimension textureViewDimension,
    wgpu::TextureDimension textureDimension) {
    switch (textureViewDimension) {
        case wgpu::TextureViewDimension::e2D:
        case wgpu::TextureViewDimension::e2DArray:
        case wgpu::TextureViewDimension::Cube:
        case wgpu::TextureViewDimension::CubeArray:
            return textureDimension == wgpu::TextureDimension::e2D;
        default:
            UNREACHABLE();
            return false;
    }
}
// TODO(<EMAIL>): support validation on all texture view dimensions
// Checks that the view's array layer count matches what the view dimension
// requires: exactly 1 for 2D, exactly 6 for a cube, a multiple of 6 for a
// cube array, and anything for a 2D array.
bool IsArrayLayerValidForTextureViewDimension(
    wgpu::TextureViewDimension textureViewDimension,
    uint32_t textureViewArrayLayer) {
    switch (textureViewDimension) {
        case wgpu::TextureViewDimension::e2D:
            return textureViewArrayLayer == 1u;
        case wgpu::TextureViewDimension::e2DArray:
            return true;
        case wgpu::TextureViewDimension::Cube:
            return textureViewArrayLayer == 6u;
        case wgpu::TextureViewDimension::CubeArray:
            return textureViewArrayLayer % 6 == 0;
        default:
            UNREACHABLE();
            return false;
    }
}
// Cube and cube-array views require square textures; 2D and 2D-array views
// accept any size. Other view dimensions hit UNREACHABLE.
bool IsTextureSizeValidForTextureViewDimension(
    wgpu::TextureViewDimension textureViewDimension,
    const Extent3D& textureSize) {
    switch (textureViewDimension) {
        case wgpu::TextureViewDimension::Cube:
        case wgpu::TextureViewDimension::CubeArray:
            return textureSize.width == textureSize.height;
        case wgpu::TextureViewDimension::e2D:
        case wgpu::TextureViewDimension::e2DArray:
            return true;
        default:
            UNREACHABLE();
            return false;
    }
}
// TODO(<EMAIL>): support more sample count.
// Validates the descriptor's sample count, and — for multisampled textures —
// enforces the additional restrictions: single mip level, depth of 1,
// uncompressed format, and no storage usage.
MaybeError ValidateSampleCount(const TextureDescriptor* descriptor, const Format* format) {
    if (!IsValidSampleCount(descriptor->sampleCount)) {
        return DAWN_VALIDATION_ERROR("The sample count of the texture is not supported.");
    }

    if (descriptor->sampleCount > 1) {
        if (descriptor->mipLevelCount > 1) {
            return DAWN_VALIDATION_ERROR(
                "The mipmap level count of a multisampled texture must be 1.");
        }

        // Multisampled 2D array texture is not supported because on Metal it requires the
        // version of macOS be greater than 10.14.
        if (descriptor->size.depth > 1) {
            return DAWN_VALIDATION_ERROR(
                "Multisampled textures with depth > 1 are not supported.");
        }

        if (format->isCompressed) {
            return DAWN_VALIDATION_ERROR(
                "The sample counts of the textures in BC formats must be 1.");
        }

        if (descriptor->usage & wgpu::TextureUsage::Storage) {
            return DAWN_VALIDATION_ERROR(
                "The sample counts of the storage textures must be 1.");
        }
    }

    return {};
}
MaybeError ValidateTextureViewDimensionCompatibility(
const TextureBase* texture,
const TextureViewDescriptor* descriptor) {
if (!IsArrayLayerValidForTextureViewDimension(descriptor->dimension,
descriptor->arrayLayerCount)) {
return DAWN_VALIDATION_ERROR(
"The dimension of the texture view is not compatible with the layer count");
}
if (!IsTextureViewDimensionCompatibleWithTextureDimension(descriptor->dimension,
texture->GetDimension())) {
return DAWN_VALIDATION_ERROR(
"The dimension of the texture view is not compatible with the dimension of the"
"original texture");
}
if (!IsTextureSizeValidForTextureViewDimension(descriptor->dimension,
texture->GetSize())) {
return DAWN_VALIDATION_ERROR(
"The dimension of the texture view is not compatible with the size of the"
"original texture");
}
return {};
}
    // Checks the texture extent and mip level count against the implementation
    // limits and the (possibly block-compressed) format.
    MaybeError ValidateTextureSize(const TextureDescriptor* descriptor, const Format* format) {
        // Zero-sized textures are rejected earlier, in ValidateTextureDescriptor.
        ASSERT(descriptor->size.width != 0 && descriptor->size.height != 0);

        if (descriptor->size.width > kMaxTextureSize ||
            descriptor->size.height > kMaxTextureSize) {
            return DAWN_VALIDATION_ERROR("Texture max size exceeded");
        }

        // A full mip chain ends at 1x1, so at most log2(max(w, h)) + 1 levels exist.
        if (Log2(std::max(descriptor->size.width, descriptor->size.height)) + 1 <
            descriptor->mipLevelCount) {
            return DAWN_VALIDATION_ERROR("Texture has too many mip levels");
        }

        // Compressed textures must be a whole number of blocks at level 0.
        if (format->isCompressed && (descriptor->size.width % format->blockWidth != 0 ||
                                     descriptor->size.height % format->blockHeight != 0)) {
            return DAWN_VALIDATION_ERROR(
                "The size of the texture is incompatible with the texture format");
        }

        // For 2D textures, size.depth is the array layer count.
        if (descriptor->dimension == wgpu::TextureDimension::e2D &&
            descriptor->size.depth > kMaxTexture2DArrayLayers) {
            return DAWN_VALIDATION_ERROR("Texture 2D array layer count exceeded");
        }
        if (descriptor->mipLevelCount > kMaxTexture2DMipLevels) {
            return DAWN_VALIDATION_ERROR("Max texture 2D mip level exceeded");
        }

        return {};
    }
    // Checks that the requested usage flags are allowed for the given format.
    MaybeError ValidateTextureUsage(const TextureDescriptor* descriptor, const Format* format) {
        DAWN_TRY(dawn_native::ValidateTextureUsage(descriptor->usage));

        // Compressed formats can only be sampled or copied, never rendered to
        // or used as storage.
        constexpr wgpu::TextureUsage kValidCompressedUsages = wgpu::TextureUsage::Sampled |
                                                              wgpu::TextureUsage::CopySrc |
                                                              wgpu::TextureUsage::CopyDst;
        if (format->isCompressed && (descriptor->usage & (~kValidCompressedUsages))) {
            return DAWN_VALIDATION_ERROR(
                "Compressed texture format is incompatible with the texture usage");
        }

        if (!format->isRenderable &&
            (descriptor->usage & wgpu::TextureUsage::OutputAttachment)) {
            return DAWN_VALIDATION_ERROR(
                "Non-renderable format used with OutputAttachment usage");
        }

        if (!format->supportsStorageUsage &&
            (descriptor->usage & wgpu::TextureUsage::Storage)) {
            return DAWN_VALIDATION_ERROR("Format cannot be used in storage textures");
        }

        return {};
    }
} // anonymous namespace
    // Top-level validation for texture creation: rejects null/chained
    // descriptors, then validates usage, dimension, sample count and size.
    MaybeError ValidateTextureDescriptor(const DeviceBase* device,
                                         const TextureDescriptor* descriptor) {
        if (descriptor == nullptr) {
            return DAWN_VALIDATION_ERROR("Texture descriptor is nullptr");
        }
        if (descriptor->nextInChain != nullptr) {
            return DAWN_VALIDATION_ERROR("nextInChain must be nullptr");
        }

        // Resolves the wgpu format into the device's internal Format record.
        const Format* format;
        DAWN_TRY_ASSIGN(format, device->GetInternalFormat(descriptor->format));

        DAWN_TRY(ValidateTextureUsage(descriptor, format));
        DAWN_TRY(ValidateTextureDimension(descriptor->dimension));
        DAWN_TRY(ValidateSampleCount(descriptor, format));

        // TODO(<EMAIL>): check stuff based on the dimension
        if (descriptor->size.width == 0 || descriptor->size.height == 0 ||
            descriptor->size.depth == 0 || descriptor->mipLevelCount == 0) {
            return DAWN_VALIDATION_ERROR("Cannot create an empty texture");
        }

        // Only 2D textures are implemented so far.
        if (descriptor->dimension != wgpu::TextureDimension::e2D) {
            return DAWN_VALIDATION_ERROR("Texture dimension must be 2D (for now)");
        }

        DAWN_TRY(ValidateTextureSize(descriptor, format));

        return {};
    }
    // Top-level validation for texture view creation against its (already
    // validated) parent texture.
    MaybeError ValidateTextureViewDescriptor(const TextureBase* texture,
                                             const TextureViewDescriptor* descriptor) {
        if (descriptor->nextInChain != nullptr) {
            return DAWN_VALIDATION_ERROR("nextInChain must be nullptr");
        }

        // Parent texture should have been already validated.
        ASSERT(texture);
        ASSERT(!texture->IsError());
        if (texture->GetTextureState() == TextureBase::TextureState::Destroyed) {
            return DAWN_VALIDATION_ERROR("Destroyed texture used to create texture view");
        }

        DAWN_TRY(ValidateTextureViewDimension(descriptor->dimension));
        // Mirrors the texture-side "2D only (for now)" restriction.
        if (descriptor->dimension == wgpu::TextureViewDimension::e1D ||
            descriptor->dimension == wgpu::TextureViewDimension::e3D) {
            return DAWN_VALIDATION_ERROR("Texture view dimension must be 2D compatible.");
        }

        DAWN_TRY(ValidateTextureFormat(descriptor->format));

        DAWN_TRY(ValidateTextureAspect(descriptor->aspect));
        if (descriptor->aspect != wgpu::TextureAspect::All) {
            return DAWN_VALIDATION_ERROR("Texture aspect must be 'all'");
        }

        // TODO(<EMAIL>): check stuff based on resource limits
        if (descriptor->arrayLayerCount == 0 || descriptor->mipLevelCount == 0) {
            return DAWN_VALIDATION_ERROR("Cannot create an empty texture view");
        }

        // 64-bit arithmetic keeps base + count from overflowing uint32_t.
        if (uint64_t(descriptor->baseArrayLayer) + uint64_t(descriptor->arrayLayerCount) >
            uint64_t(texture->GetArrayLayers())) {
            return DAWN_VALIDATION_ERROR("Texture view array-layer out of range");
        }
        if (uint64_t(descriptor->baseMipLevel) + uint64_t(descriptor->mipLevelCount) >
            uint64_t(texture->GetNumMipLevels())) {
            return DAWN_VALIDATION_ERROR("Texture view mip-level out of range");
        }

        DAWN_TRY(ValidateTextureViewFormatCompatibility(texture, descriptor));
        DAWN_TRY(ValidateTextureViewDimensionCompatibility(texture, descriptor));

        return {};
    }
    // Fills in unset (zero/Undefined) fields of a texture view descriptor from
    // the parent texture. A null |descriptor| defaults every field.
    TextureViewDescriptor GetTextureViewDescriptorWithDefaults(
        const TextureBase* texture,
        const TextureViewDescriptor* descriptor) {
        ASSERT(texture);

        TextureViewDescriptor desc = {};
        if (descriptor) {
            desc = *descriptor;
        }

        // The default value for the view dimension depends on the texture's dimension with a
        // special case for 2DArray being chosen automatically if arrayLayerCount is unspecified.
        if (desc.dimension == wgpu::TextureViewDimension::Undefined) {
            switch (texture->GetDimension()) {
                case wgpu::TextureDimension::e1D:
                    desc.dimension = wgpu::TextureViewDimension::e1D;
                    break;

                case wgpu::TextureDimension::e2D:
                    if (texture->GetArrayLayers() > 1u && desc.arrayLayerCount == 0) {
                        desc.dimension = wgpu::TextureViewDimension::e2DArray;
                    } else {
                        desc.dimension = wgpu::TextureViewDimension::e2D;
                    }
                    break;

                case wgpu::TextureDimension::e3D:
                    desc.dimension = wgpu::TextureViewDimension::e3D;
                    break;

                default:
                    UNREACHABLE();
            }
        }

        // Undefined format means "same format as the texture".
        if (desc.format == wgpu::TextureFormat::Undefined) {
            desc.format = texture->GetFormat().format;
        }
        // Zero counts mean "everything from the base to the end".
        if (desc.arrayLayerCount == 0) {
            desc.arrayLayerCount = texture->GetArrayLayers() - desc.baseArrayLayer;
        }
        if (desc.mipLevelCount == 0) {
            desc.mipLevelCount = texture->GetNumMipLevels() - desc.baseMipLevel;
        }
        return desc;
    }
    // Deprecation shim: folds the legacy arrayLayerCount field into size.depth,
    // emitting a deprecation warning when the old field is used.
    ResultOrError<TextureDescriptor> FixTextureDescriptor(DeviceBase* device,
                                                          const TextureDescriptor* desc) {
        TextureDescriptor fixedDesc = *desc;

        if (desc->arrayLayerCount != 1) {
            // The two ways of specifying the layer count are mutually exclusive.
            if (desc->size.depth != 1) {
                return DAWN_VALIDATION_ERROR("arrayLayerCount and size.depth cannot both be != 1");
            } else {
                fixedDesc.size.depth = fixedDesc.arrayLayerCount;
                fixedDesc.arrayLayerCount = 1;
                device->EmitDeprecationWarning(
                    "wgpu::TextureDescriptor::arrayLayerCount is deprecated in favor of "
                    "::size::depth");
            }
        }

        return {std::move(fixedDesc)};
    }
bool IsValidSampleCount(uint32_t sampleCount) {
switch (sampleCount) {
case 1:
case 4:
return true;
default:
return false;
}
}
    // static
    // Builds a range covering exactly one mip level of one array layer.
    SubresourceRange SubresourceRange::SingleSubresource(uint32_t baseMipLevel,
                                                         uint32_t baseArrayLayer) {
        return {baseMipLevel, 1, baseArrayLayer, 1};
    }
    // TextureBase

    // Builds a texture from an already-validated descriptor. |state| records
    // whether the backing memory is owned internally or externally.
    TextureBase::TextureBase(DeviceBase* device,
                             const TextureDescriptor* descriptor,
                             TextureState state)
        : ObjectBase(device),
          mDimension(descriptor->dimension),
          mFormat(device->GetValidInternalFormat(descriptor->format)),
          mSize(descriptor->size),
          mMipLevelCount(descriptor->mipLevelCount),
          mSampleCount(descriptor->sampleCount),
          mUsage(descriptor->usage),
          mState(state) {
        // One lazily-set "content initialized" flag per (mip level, layer) pair.
        uint32_t subresourceCount = GetSubresourceCount();
        mIsSubresourceContentInitializedAtIndex = std::vector<bool>(subresourceCount, false);

        // Add readonly storage usage if the texture has a storage usage. The validation rules in
        // ValidatePassResourceUsage will make sure we don't use both at the same time.
        if (mUsage & wgpu::TextureUsage::Storage) {
            mUsage |= kReadonlyStorageTexture;
        }
    }
    // Placeholder the error-texture constructor binds mFormat to, since mFormat
    // is a reference and must refer to something.
    static Format kUnusedFormat;

    TextureBase::TextureBase(DeviceBase* device, ObjectBase::ErrorTag tag)
        : ObjectBase(device, tag), mFormat(kUnusedFormat) {
    }

    // static
    // Creates a texture tagged as an error object.
    TextureBase* TextureBase::MakeError(DeviceBase* device) {
        return new TextureBase(device, ObjectBase::kError);
    }
    wgpu::TextureDimension TextureBase::GetDimension() const {
        ASSERT(!IsError());
        return mDimension;
    }

    // TODO(<EMAIL>): return more information about texture format
    const Format& TextureBase::GetFormat() const {
        ASSERT(!IsError());
        return mFormat;
    }

    const Extent3D& TextureBase::GetSize() const {
        ASSERT(!IsError());
        return mSize;
    }

    uint32_t TextureBase::GetWidth() const {
        ASSERT(!IsError());
        return mSize.width;
    }

    // Height is only meaningful for 2D and 3D textures.
    uint32_t TextureBase::GetHeight() const {
        ASSERT(!IsError());
        ASSERT(mDimension == wgpu::TextureDimension::e2D ||
               mDimension == wgpu::TextureDimension::e3D);
        return mSize.height;
    }

    // Depth is only meaningful for 3D textures; 2D textures reuse size.depth as
    // the array layer count (see GetArrayLayers).
    uint32_t TextureBase::GetDepth() const {
        ASSERT(!IsError());
        ASSERT(mDimension == wgpu::TextureDimension::e3D);
        return mSize.depth;
    }

    uint32_t TextureBase::GetArrayLayers() const {
        ASSERT(!IsError());
        // TODO(<EMAIL>): Update for 1D / 3D textures when they are supported.
        ASSERT(mDimension == wgpu::TextureDimension::e2D);
        return mSize.depth;
    }

    uint32_t TextureBase::GetNumMipLevels() const {
        ASSERT(!IsError());
        return mMipLevelCount;
    }

    // Returns the range spanning every mip level of every array layer.
    SubresourceRange TextureBase::GetAllSubresources() const {
        ASSERT(!IsError());
        return {0, mMipLevelCount, 0, GetArrayLayers()};
    }

    uint32_t TextureBase::GetSampleCount() const {
        ASSERT(!IsError());
        return mSampleCount;
    }

    // Total number of (mip level, array layer) pairs.
    uint32_t TextureBase::GetSubresourceCount() const {
        ASSERT(!IsError());
        return mMipLevelCount * mSize.depth;
    }

    wgpu::TextureUsage TextureBase::GetUsage() const {
        ASSERT(!IsError());
        return mUsage;
    }

    TextureBase::TextureState TextureBase::GetTextureState() const {
        ASSERT(!IsError());
        return mState;
    }
    // Flattens (mipLevel, arraySlice) into a single index; all mip levels of a
    // layer are contiguous.
    uint32_t TextureBase::GetSubresourceIndex(uint32_t mipLevel, uint32_t arraySlice) const {
        ASSERT(arraySlice <= kMaxTexture2DArrayLayers);
        ASSERT(mipLevel <= kMaxTexture2DMipLevels);
        // Guards the multiplication below against uint32_t overflow.
        static_assert(kMaxTexture2DMipLevels <=
                          std::numeric_limits<uint32_t>::max() / kMaxTexture2DArrayLayers,
                      "texture size overflows uint32_t");
        return GetNumMipLevels() * arraySlice + mipLevel;
    }
bool TextureBase::IsSubresourceContentInitialized(const SubresourceRange& range) const {
ASSERT(!IsError());
for (uint32_t arrayLayer = range.baseArrayLayer;
arrayLayer < range.baseArrayLayer + range.layerCount; ++arrayLayer) {
for (uint32_t mipLevel = range.baseMipLevel;
mipLevel < range.baseMipLevel + range.levelCount; ++mipLevel) {
uint32_t subresourceIndex = GetSubresourceIndex(mipLevel, arrayLayer);
ASSERT(subresourceIndex < mIsSubresourceContentInitializedAtIndex.size());
if (!mIsSubresourceContentInitializedAtIndex[subresourceIndex]) {
return false;
}
}
}
return true;
}
void TextureBase::SetIsSubresourceContentInitialized(bool isInitialized,
const SubresourceRange& range) {
ASSERT(!IsError());
for (uint32_t arrayLayer = range.baseArrayLayer;
arrayLayer < range.baseArrayLayer + range.layerCount; ++arrayLayer) {
for (uint32_t mipLevel = range.baseMipLevel;
mipLevel < range.baseMipLevel + range.levelCount; ++mipLevel) {
uint32_t subresourceIndex = GetSubresourceIndex(mipLevel, arrayLayer);
ASSERT(subresourceIndex < mIsSubresourceContentInitializedAtIndex.size());
mIsSubresourceContentInitializedAtIndex[subresourceIndex] = isInitialized;
}
}
}
    // A destroyed texture may not be referenced by a submitted command buffer.
    MaybeError TextureBase::ValidateCanUseInSubmitNow() const {
        ASSERT(!IsError());
        if (mState == TextureState::Destroyed) {
            return DAWN_VALIDATION_ERROR("Destroyed texture used in a submit");
        }
        return {};
    }

    bool TextureBase::IsMultisampledTexture() const {
        ASSERT(!IsError());
        return mSampleCount > 1;
    }
    // Size of mip |level| before any block-compression padding: each applicable
    // dimension is halved per level and clamped to 1. Dimensions that do not
    // apply (height for 1D, depth for 1D/2D) stay 1.
    Extent3D TextureBase::GetMipLevelVirtualSize(uint32_t level) const {
        Extent3D extent = {std::max(mSize.width >> level, 1u), 1u, 1u};
        if (mDimension == wgpu::TextureDimension::e1D) {
            return extent;
        }

        extent.height = std::max(mSize.height >> level, 1u);
        if (mDimension == wgpu::TextureDimension::e2D) {
            return extent;
        }

        extent.depth = std::max(mSize.depth >> level, 1u);
        return extent;
    }
    // Size of mip |level| including compressed-format padding: the virtual size
    // rounded up to whole blocks.
    Extent3D TextureBase::GetMipLevelPhysicalSize(uint32_t level) const {
        Extent3D extent = GetMipLevelVirtualSize(level);

        // Compressed Textures will have paddings if their width or height is not a multiple of
        // 4 at non-zero mipmap levels.
        if (mFormat.isCompressed) {
            // TODO(<EMAIL>): check if there are any overflows.
            uint32_t blockWidth = mFormat.blockWidth;
            uint32_t blockHeight = mFormat.blockHeight;
            // Round up to a whole number of blocks.
            extent.width = (extent.width + blockWidth - 1) / blockWidth * blockWidth;
            extent.height = (extent.height + blockHeight - 1) / blockHeight * blockHeight;
        }

        return extent;
    }
Extent3D TextureBase::ClampToMipLevelVirtualSize(uint32_t level,
const Origin3D& origin,
const Extent3D& extent) const {
const Extent3D virtualSizeAtLevel = GetMipLevelVirtualSize(level);
uint32_t clampedCopyExtentWidth = (origin.x + extent.width > virtualSizeAtLevel.width)
? (virtualSizeAtLevel.width - origin.x)
: extent.width;
uint32_t clampedCopyExtentHeight = (origin.y + extent.height > virtualSizeAtLevel.height)
? (virtualSizeAtLevel.height - origin.y)
: extent.height;
return {clampedCopyExtentWidth, clampedCopyExtentHeight, extent.depth};
}
    TextureViewBase* TextureBase::CreateView(const TextureViewDescriptor* descriptor) {
        // Delegates to the device so each backend can build its own view type.
        return GetDevice()->CreateTextureView(this, descriptor);
    }

    void TextureBase::Destroy() {
        // Validation failures are reported through the device, not thrown.
        if (GetDevice()->ConsumedError(ValidateDestroy())) {
            return;
        }
        ASSERT(!IsError());
        DestroyInternal();
    }

    void TextureBase::DestroyImpl() {
        // No-op here. NOTE(review): presumably overridden by backend subclasses
        // to release GPU resources — confirm against the backends.
    }

    void TextureBase::DestroyInternal() {
        DestroyImpl();
        mState = TextureState::Destroyed;
    }

    MaybeError TextureBase::ValidateDestroy() const {
        DAWN_TRY(GetDevice()->ValidateObject(this));
        return {};
    }
    // TextureViewBase

    // Builds a view from an already-defaulted/validated descriptor; keeps a
    // reference to the parent texture and snapshots the subresource range.
    TextureViewBase::TextureViewBase(TextureBase* texture, const TextureViewDescriptor* descriptor)
        : ObjectBase(texture->GetDevice()),
          mTexture(texture),
          mAspect(descriptor->aspect),
          mFormat(GetDevice()->GetValidInternalFormat(descriptor->format)),
          mDimension(descriptor->dimension),
          mRange({descriptor->baseMipLevel, descriptor->mipLevelCount, descriptor->baseArrayLayer,
                  descriptor->arrayLayerCount}) {
    }

    TextureViewBase::TextureViewBase(DeviceBase* device, ObjectBase::ErrorTag tag)
        : ObjectBase(device, tag), mFormat(kUnusedFormat) {
    }

    // static
    // Creates a texture view tagged as an error object.
    TextureViewBase* TextureViewBase::MakeError(DeviceBase* device) {
        return new TextureViewBase(device, ObjectBase::kError);
    }
    const TextureBase* TextureViewBase::GetTexture() const {
        ASSERT(!IsError());
        return mTexture.Get();
    }

    TextureBase* TextureViewBase::GetTexture() {
        ASSERT(!IsError());
        return mTexture.Get();
    }

    wgpu::TextureAspect TextureViewBase::GetAspect() const {
        ASSERT(!IsError());
        return mAspect;
    }

    const Format& TextureViewBase::GetFormat() const {
        ASSERT(!IsError());
        return mFormat;
    }

    wgpu::TextureViewDimension TextureViewBase::GetDimension() const {
        ASSERT(!IsError());
        return mDimension;
    }

    uint32_t TextureViewBase::GetBaseMipLevel() const {
        ASSERT(!IsError());
        return mRange.baseMipLevel;
    }

    uint32_t TextureViewBase::GetLevelCount() const {
        ASSERT(!IsError());
        return mRange.levelCount;
    }

    uint32_t TextureViewBase::GetBaseArrayLayer() const {
        ASSERT(!IsError());
        return mRange.baseArrayLayer;
    }

    uint32_t TextureViewBase::GetLayerCount() const {
        ASSERT(!IsError());
        return mRange.layerCount;
    }

    // The (levels, layers) window of the parent texture this view covers.
    const SubresourceRange& TextureViewBase::GetSubresourceRange() const {
        ASSERT(!IsError());
        return mRange;
    }
} // namespace dawn_native
| 11,745 |
2,151 | <gh_stars>1000+
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito.internal.util.junit;
import org.junit.runner.notification.Failure;
import org.mockito.internal.exceptions.ExceptionIncludingMockitoWarnings;
import org.mockito.internal.util.reflection.Whitebox;
public class JUnitFailureHacker {
public void appendWarnings(Failure failure, String warnings) {
if (isEmpty(warnings)) {
return;
}
//TODO: this has to protect the use in case jUnit changes and this internal state logic fails
Throwable throwable = (Throwable) Whitebox.getInternalState(failure, "fThrownException");
String newMessage = "contains both: actual test failure *and* Mockito warnings.\n" +
warnings + "\n *** The actual failure is because of: ***\n";
ExceptionIncludingMockitoWarnings e = new ExceptionIncludingMockitoWarnings(newMessage, throwable);
e.setStackTrace(throwable.getStackTrace());
Whitebox.setInternalState(failure, "fThrownException", e);
}
private boolean isEmpty(String warnings) {
return warnings == null || "".equals(warnings); // isEmpty() is in JDK 6+
}
} | 446 |
14,668 | <reponame>zealoussnow/chromium<filename>chrome/browser/enterprise/connectors/device_trust/signals/signals_service_factory.h
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_ENTERPRISE_CONNECTORS_DEVICE_TRUST_SIGNALS_SIGNALS_SERVICE_FACTORY_H_
#define CHROME_BROWSER_ENTERPRISE_CONNECTORS_DEVICE_TRUST_SIGNALS_SIGNALS_SERVICE_FACTORY_H_
#include <memory>
class PolicyBlocklistService;
class Profile;
namespace enterprise_connectors {

class SignalsService;

// Returns a SignalsService instance properly configured for the current
// environment.
// NOTE(review): |profile| and |policy_blocklist_service| are raw pointers;
// presumably they must outlive the returned service — confirm with the
// implementation.
std::unique_ptr<SignalsService> CreateSignalsService(
    Profile* profile,
    PolicyBlocklistService* policy_blocklist_service);

}  // namespace enterprise_connectors
#endif // CHROME_BROWSER_ENTERPRISE_CONNECTORS_DEVICE_TRUST_SIGNALS_SIGNALS_SERVICE_FACTORY_H_
| 312 |
import torch
import torch.nn as nn
from torch import Tensor
from typing import Any
from typing import List
from typing import Callable
from typing import Optional
from .....modules.blocks import Conv2d
def conv3x3(
    in_channels: int,
    out_channels: int,
    stride: int = 1,
    groups: int = 1,
    dilation: int = 1,
) -> Conv2d:
    """3x3 convolution without bias; padding equals dilation so the spatial
    size is preserved at stride 1."""
    conv_kwargs = dict(
        kernel_size=3,
        stride=stride,
        padding=dilation,
        groups=groups,
        bias=False,
        dilation=dilation,
    )
    return Conv2d(in_channels, out_channels, **conv_kwargs)
def conv1x1(in_channels: int, out_channels: int, stride: int = 1) -> Conv2d:
    """1x1 (pointwise) convolution without bias."""
    return Conv2d(
        in_channels,
        out_channels,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
class BasicBlock(nn.Module):
    """Two-convolution residual block (ResNet-18/34 style): conv3x3 -> bn ->
    relu -> conv3x3 -> bn, added to an (optionally downsampled) identity."""

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        stride: int = 1,
        downsample: Optional[nn.Module] = None,
        groups: int = 1,
        base_width: int = 64,
        dilation: int = 1,
        norm_layer: Optional[Callable[[int], nn.Module]] = None,
    ) -> None:
        super().__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        # BasicBlock has no bottleneck, so grouped/wide variants are rejected.
        if groups != 1 or base_width != 64:
            raise ValueError("BasicBlock only supports groups=1 and base_width=64")
        if dilation > 1:
            raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
        self.conv1 = conv3x3(in_channels, out_channels, stride)
        self.bn1 = norm_layer(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(out_channels, out_channels)
        self.bn2 = norm_layer(out_channels)
        self.downsample = downsample
        self.stride = stride

    def forward(self, net: Tensor) -> Tensor:
        identity = net
        out = self.bn1(self.conv1(net))
        out = self.relu(out)
        out = self.bn2(self.conv2(out))
        # Project the identity when the main path changed shape.
        if self.downsample is not None:
            identity = self.downsample(identity)
        out = self.relu(out + identity)
        return out
class ResNet(nn.Module):
    """ResNet-style backbone whose 2x downsampling is done with max pooling
    (``return_indices=True``) instead of strided convolutions.

    NOTE(review): the pooling indices are computed but discarded in
    ``forward``; presumably a decoder/unpooling variant consumes them
    elsewhere — confirm.
    """

    def __init__(
        self,
        layers: List[int],
        zero_init_residual: bool = False,
        groups: int = 1,
        width_per_group: int = 64,
        replace_stride_with_dilation: Optional[List[bool]] = None,
        norm_layer: Optional[Callable[[int], nn.Module]] = None,
    ):
        super().__init__()
        if norm_layer is None:
            norm_layer = nn.BatchNorm2d
        self._norm_layer = norm_layer
        # Running channel count, advanced by _make_layer.
        self.num_channels = 64
        self.dilation = 1
        if replace_stride_with_dilation is None:
            replace_stride_with_dilation = [False, False, False]
        if len(replace_stride_with_dilation) != 3:
            raise ValueError(
                "replace_stride_with_dilation should be None "
                "or a 3-element tuple, got {}".format(replace_stride_with_dilation)
            )
        self.groups = groups
        self.base_width = width_per_group
        # Stem: 7x7 convolution at stride 1; downsampling is left to the pools.
        self.conv1 = Conv2d(
            3,
            self.num_channels,
            kernel_size=7,
            stride=1,
            padding=3,
            bias=False,
        )
        self.bn1 = norm_layer(self.num_channels)
        self.relu = nn.ReLU(inplace=True)
        # Five identical 2x downsampling pools; indices are returned so they
        # could be reused (e.g. for max-unpooling).
        self.maxpool1 = nn.MaxPool2d(
            kernel_size=3,
            stride=2,
            padding=1,
            return_indices=True,
        )
        self.maxpool2 = nn.MaxPool2d(
            kernel_size=3,
            stride=2,
            padding=1,
            return_indices=True,
        )
        self.maxpool3 = nn.MaxPool2d(
            kernel_size=3,
            stride=2,
            padding=1,
            return_indices=True,
        )
        self.maxpool4 = nn.MaxPool2d(
            kernel_size=3,
            stride=2,
            padding=1,
            return_indices=True,
        )
        self.maxpool5 = nn.MaxPool2d(
            kernel_size=3,
            stride=2,
            padding=1,
            return_indices=True,
        )
        # Residual stages; strides stay 1 because pooling handles downsampling.
        self.layer1 = self._make_layer(64, layers[0])
        self.layer2 = self._make_layer(
            128,
            layers[1],
            stride=1,
            dilate=replace_stride_with_dilation[0],
        )
        self.layer3 = self._make_layer(
            256,
            layers[2],
            stride=1,
            dilate=replace_stride_with_dilation[1],
        )
        self.layer4 = self._make_layer(
            512,
            layers[3],
            stride=1,
            dilate=replace_stride_with_dilation[2],
        )
        # Classification head (hard-coded to 1000 classes).
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512, 1000)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
        # Zero-init the last BN of each block so it starts as identity.
        if zero_init_residual:
            for m in self.modules():
                if isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)

    def _make_layer(
        self,
        num_channels: int,
        num_blocks: int,
        stride: int = 1,
        dilate: bool = False,
    ) -> nn.Sequential:
        # Builds one residual stage of num_blocks BasicBlocks; only the first
        # block may change stride/channels (via the downsample projection).
        downsample = None
        norm_layer = self._norm_layer
        previous_dilation = self.dilation
        if dilate:
            self.dilation *= stride
            stride = 1
        if stride != 1 or self.num_channels != num_channels:
            downsample = nn.Sequential(
                conv1x1(self.num_channels, num_channels, stride),
                norm_layer(num_channels),
            )
        layers = [
            BasicBlock(
                self.num_channels,
                num_channels,
                stride,
                downsample,
                self.groups,
                self.base_width,
                previous_dilation,
                norm_layer,
            )
        ]
        self.num_channels = num_channels
        for _ in range(1, num_blocks):
            layers.append(
                BasicBlock(
                    self.num_channels,
                    num_channels,
                    groups=self.groups,
                    base_width=self.base_width,
                    dilation=self.dilation,
                    norm_layer=norm_layer,
                )
            )
        return nn.Sequential(*layers)

    def forward(self, net: Tensor) -> Tensor:
        # Stem.
        x1 = self.conv1(net)
        x1 = self.bn1(x1)
        x1 = self.relu(x1)
        x1, idx1 = self.maxpool1(x1)
        # Alternating pool -> residual stage; pooling indices are unused here.
        x2, idx2 = self.maxpool2(x1)
        x2 = self.layer1(x2)
        x3, idx3 = self.maxpool3(x2)
        x3 = self.layer2(x3)
        x4, idx4 = self.maxpool4(x3)
        x4 = self.layer3(x4)
        x5, idx5 = self.maxpool5(x4)
        x5 = self.layer4(x5)
        # Classification head.
        x_cls = self.avgpool(x5)
        x_cls = torch.flatten(x_cls, 1)
        x_cls = self.fc(x_cls)
        return x_cls
def resnet34_mp(**kwargs: Any) -> ResNet:
    """Build the ResNet-34 layout (3-4-6-3 blocks) of the max-pool variant."""
    return ResNet([3, 4, 6, 3], **kwargs)


__all__ = ["resnet34_mp"]
| 3,964 |
435 | <gh_stars>100-1000
// #include "util/recordio.h"
// #include <zlib.h>
// #include <string>
// #include "glog/logging.h"
// namespace PS {
// const int RecordWriter::kMagicNumber = 0x3ed7230a;
// std::string RecordWriter::Compress(std::string const& s) const {
// const unsigned long source_size = s.size(); // NOLINT
// const char* source = s.c_str();
// unsigned long dsize = source_size + (source_size * 0.1f) + 16; // NOLINT
// std::unique_ptr<char[]> destination(new char[dsize]);
// // Use compress() from zlib.h.
// const int result =
// compress(reinterpret_cast<unsigned char*>(destination.get()), &dsize,
// reinterpret_cast<const unsigned char*>(source), source_size);
// if (result != Z_OK) {
// LOG(FATAL) << "Compress error occured! Error code: " << result;
// }
// return std::string(destination.get(), dsize);
// }
// void RecordReader::Uncompress(const char* const source, uint64 source_size,
// char* const output_buffer,
// uint64 output_size) const {
// unsigned long result_size = output_size; // NOLINT
// // Use uncompress() from zlib.h
// const int result =
// uncompress(reinterpret_cast<unsigned char*>(output_buffer), &result_size,
// reinterpret_cast<const unsigned char*>(source), source_size);
// if (result != Z_OK) {
// LOG(FATAL) << "Uncompress error occured! Error code: " << result;
// }
// CHECK_LE(result_size, static_cast<unsigned long>(output_size)); // NOLINT
// }
// } // namespace PS
| 629 |
619 | <gh_stars>100-1000
// Copyright <NAME> 2017.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#include "gtest/gtest.h"
#include "react/state.h"
#include "react/observer.h"
#include <thread>
#include <chrono>
using namespace react;
// Copy/move construction for the three state handle kinds; handles referring
// to the same underlying node compare equal.
TEST(StateTest, Construction)
{
    Group g;

    // State variable
    {
        auto t1 = StateVar<int>::Create(g, 0);

        StateVar<int> t2( t1 );
        StateVar<int> t3( std::move(t1) );

        StateVar<int> ref1( t2 );
        State<int> ref2( t3 );

        EXPECT_TRUE(ref1 == ref2);
    }

    // State slot
    {
        auto t0 = StateVar<int>::Create(g, 0);

        auto t1 = StateSlot<int>::Create(g, t0);

        StateSlot<int> t2( t1 );
        StateSlot<int> t3( std::move(t1) );

        StateSlot<int> ref1( t2 );
        State<int> ref2( t3 );

        EXPECT_TRUE(ref1 == ref2);
    }

    // State link
    {
        auto t0 = StateVar<int>::Create(g, 0);
        auto s1 = StateSlot<int>::Create(g, t0);

        auto t1 = StateLink<int>::Create(g, s1);

        StateLink<int> t2( t1 );
        StateLink<int> t3( std::move(t1) );

        StateLink<int> ref1( t2 );
        State<int> ref2( t3 );

        EXPECT_TRUE(ref1 == ref2);
    }
}
// An observer sees the initial value once, then every subsequent Set.
TEST(StateTest, BasicOutput)
{
    Group g;

    auto st = StateVar<int>::Create(g);

    int output = 0;

    auto obs2 = Observer::Create([&] (const auto& v)
        {
            output += v;
        }, st);

    EXPECT_EQ(0, output);

    st.Set(1);
    EXPECT_EQ(1, output);

    st.Set(2);
    EXPECT_EQ(3, output);
}

// A slot forwards only the variable it is currently bound to; re-binding
// redirects which Sets are observed.
TEST(StateTest, Slots)
{
    Group g;

    auto st1 = StateVar<int>::Create(g);
    auto st2 = StateVar<int>::Create(g);

    auto slot = StateSlot<int>::Create(g, st1);

    int output = 0;
    int turns = 0;

    auto obs = Observer::Create([&] (const auto& v)
        {
            ++turns;
            output += v;
        }, slot);

    EXPECT_EQ(0, output);
    EXPECT_EQ(1, turns);

    // Bound to st1: only st1's Set is observed.
    slot.Set(st1);

    st1.Set(5);
    st2.Set(2);

    EXPECT_EQ(5, output);
    EXPECT_EQ(2, turns);

    output = 0;

    // Re-bound to st2: only st2's Set is observed.
    slot.Set(st2);

    st1.Set(5);
    st2.Set(2);

    EXPECT_EQ(2, output);
    EXPECT_EQ(3, turns);
}

// Several Sets inside one transaction collapse into a single observer turn
// carrying the last value.
TEST(StateTest, Transactions)
{
    Group g;

    auto st = StateVar<int>::Create(g, 1);

    int output = 0;
    int turns = 0;

    auto obs = Observer::Create([&] (const auto& v)
        {
            ++turns;
            output += v;
        }, st);

    EXPECT_EQ(1, output);

    g.DoTransaction([&]
        {
            st.Set(1);
            st.Set(2);
            st.Set(3);
            st.Set(4);
        });

    EXPECT_EQ(5, output);
    EXPECT_EQ(2, turns);
}
// A slot in one group can observe state from other groups, either through an
// explicit StateLink or implicitly by assigning the foreign state.
// NOTE(review): the fixed 1s sleeps appear to wait for asynchronous
// cross-group propagation — confirm; fixed sleeps make the test slow and
// potentially flaky.
TEST(StateTest, Links)
{
    Group g1;
    Group g2;
    Group g3;

    auto st1 = StateVar<int>::Create(g1, 1);
    auto st2 = StateVar<int>::Create(g2, 2);
    auto st3 = StateVar<int>::Create(g3, 3);

    auto slot = StateSlot<int>::Create(g1, st1);

    int output = 0;
    int turns = 0;

    auto obs = Observer::Create([&] (const auto& v)
        {
            ++turns;
            output = v;
        }, slot);

    EXPECT_EQ(1, turns);

    // Same-group source needs no link.
    st1.Set(10);

    EXPECT_EQ(10, output);
    EXPECT_EQ(2, turns);

    // Explicit link
    auto lnk2 = StateLink<int>::Create(g1, st2);
    slot.Set(lnk2);

    std::this_thread::sleep_for(std::chrono::seconds(1));

    EXPECT_EQ(2, output);
    EXPECT_EQ(3, turns);

    st2.Set(20);

    std::this_thread::sleep_for(std::chrono::seconds(1));

    EXPECT_EQ(20, output);
    EXPECT_EQ(4, turns);

    // Implicit link
    slot.Set(st3);

    std::this_thread::sleep_for(std::chrono::seconds(1));

    EXPECT_EQ(3, output);
    EXPECT_EQ(5, turns);

    st3.Set(30);

    std::this_thread::sleep_for(std::chrono::seconds(1));

    EXPECT_EQ(30, output);
    EXPECT_EQ(6, turns);

    std::this_thread::sleep_for(std::chrono::seconds(1));
}
namespace
{
    // Small arithmetic helpers used by the state-combination tests below.
    template <typename T>
    static T Sum2(T a, T b)
    {
        T total = a + b;
        return total;
    }

    template <typename T>
    static T Sum3(T a, T b, T c)
    {
        T total = a + b;
        total = total + c;
        return total;
    }
} // ~namespace
// Two equivalent ways of computing a + b + c — nested Sum2 nodes vs a single
// Sum3 node — must produce the same values and turn counts.
TEST(StateTest, StateCombination1)
{
    Group g;

    auto a = StateVar<int>::Create(g, 0);
    auto b = StateVar<int>::Create(g, 0);
    auto c = StateVar<int>::Create(g, 0);

    auto s1 = State<int>::Create(Sum2<int>, a, b);

    auto x = State<int>::Create(Sum2<int>, s1, c);
    auto y = State<int>::Create(Sum3<int>, a, b, c);

    int output1 = 0;
    int output2 = 0;

    int turns1 = 0;
    int turns2 = 0;

    auto obs1 = Observer::Create([&] (int v)
        {
            ++turns1;
            output1 = v;
        }, x);

    EXPECT_EQ(0, output1);
    EXPECT_EQ(1, turns1);

    auto obs2 = Observer::Create([&] (int v)
        {
            ++turns2;
            output2 = v;
        }, y);

    EXPECT_EQ(0, output2);
    EXPECT_EQ(1, turns2);

    a.Set(1);
    b.Set(1);
    c.Set(1);

    // Each Set triggers one turn on both observers.
    EXPECT_EQ(3, output1);
    EXPECT_EQ(4, turns1);

    EXPECT_EQ(3, output2);
    EXPECT_EQ(4, turns2);
}

// A deep diamond-shaped dependency graph updates glitch-free: the observer
// sees exactly one consistent value per Set.
TEST(StateTest, StateCombination2)
{
    Group g;

    std::vector<int> results;

    auto n1 = StateVar<int>::Create(g, 1);

    auto n2 = State<int>::Create([] (int n1)
        { return n1 + 1; }, n1);

    auto n3 = State<int>::Create([] (int n1, int n2)
        { return n2 + n1 + 1; }, n1, n2);

    auto n4 = State<int>::Create([] (int n3)
        { return n3 + 1; }, n3);

    auto n5 = State<int>::Create([] (int n1, int n3, int n4)
        { return n4 + n3 + n1 + 1; }, n1, n3, n4);

    auto n6 = State<int>::Create([] (int n5)
        { return n5 + 1; }, n5);

    auto n7 = State<int>::Create([] (int n5, int n6)
        { return n6 + n5 + 1; }, n5, n6);

    auto n8 = State<int>::Create([] (int n7)
        { return n7 + 1; }, n7);

    auto n9 = State<int>::Create([] (int n1, int n5, int n7, int n8)
        { return n8 + n7 + n5 + n1 + 1; }, n1, n5, n7, n8);

    auto n10 = State<int>::Create([] (int n9)
        { return n9 + 1; }, n9);

    auto n11 = State<int>::Create([] (int n9, int n10)
        { return n10 + n9 + 1; }, n9, n10);

    auto n12 = State<int>::Create([] (int n11)
        { return n11 + 1; }, n11);

    auto n13 = State<int>::Create([] (int n9, int n11, int n12)
        { return n12 + n11 + n9 + 1; }, n9, n11, n12);

    auto n14 = State<int>::Create([] (int n13)
        { return n13 + 1; }, n13);

    auto n15 = State<int>::Create([] (int n13, int n14)
        { return n14 + n13 + 1; }, n13, n14);

    auto n16 = State<int>::Create([] (int n15)
        { return n15 + 1; }, n15);

    auto n17 = State<int>::Create([] (int n9, int n13, int n15, int n16)
        { return n16 + n15 + n13 + n9 + 1; }, n9, n13, n15, n16);

    auto obs = Observer::Create([&] (int v) { results.push_back(v); }, n17);

    n1.Set(10); // 7732
    n1.Set(100); // 68572
    n1.Set(1000); // 676972

    // Initial value plus one result per Set.
    EXPECT_EQ(results.size(), 4);

    EXPECT_EQ(results[0], 1648);
    EXPECT_EQ(results[1], 7732);
    EXPECT_EQ(results[2], 68572);
    EXPECT_EQ(results[3], 676972);
}
// In-place Modify of a container state notifies observers once with the
// mutated value.
TEST(StateTest, Modify1)
{
    Group g;

    std::vector<int> results;

    auto var = StateVar<std::vector<int>>::Create(g, std::vector<int>{ });

    int turns = 0;

    auto obs = Observer::Create([&] (const std::vector<int>& v)
        {
            ++turns;
            results = v;
        }, var);

    var.Modify([] (std::vector<int>& v)
        {
            v.push_back(30);
            v.push_back(50);
            v.push_back(70);
        });

    EXPECT_EQ(results[0], 30);
    EXPECT_EQ(results[1], 50);
    EXPECT_EQ(results[2], 70);

    // One turn for the initial value, one for the Modify.
    EXPECT_EQ(turns, 2);
}

// Several Modify calls inside one transaction coalesce into a single turn.
TEST(StateTest, Modify2)
{
    Group g;

    std::vector<int> results;

    auto var = StateVar<std::vector<int>>::Create(g, std::vector<int>{ });

    int turns = 0;

    auto obs = Observer::Create([&] (const std::vector<int>& v)
        {
            ++turns;
            results = v;
        }, var);

    g.DoTransaction([&]
        {
            var.Modify([] (std::vector<int>& v) { v.push_back(30); });
            var.Modify([] (std::vector<int>& v) { v.push_back(50); });
            var.Modify([] (std::vector<int>& v) { v.push_back(70); });
        });

    EXPECT_EQ(results[0], 30);
    EXPECT_EQ(results[1], 50);
    EXPECT_EQ(results[2], 70);

    EXPECT_EQ(turns, 2);
}
// Set followed by Modify inside one transaction is observed as a single turn
// carrying the combined result.
TEST(StateTest, Modify3)
{
    Group g;

    std::vector<int> results;

    auto var = StateVar<std::vector<int>>::Create(g, std::vector<int>{ });

    int turns = 0;

    auto obs = Observer::Create([&] (const std::vector<int>& v)
        {
            ++turns;
            results = v;
        }, var);

    g.DoTransaction([&]
        {
            var.Set(std::vector<int>{ 30, 50 });
            var.Modify([] (std::vector<int>& v) { v.push_back(70); });
        });

    EXPECT_EQ(results[0], 30);
    EXPECT_EQ(results[1], 50);
    EXPECT_EQ(results[2], 70);

    // EXPECT_EQ (was ASSERT_EQ) for consistency with Modify1/Modify2; this is
    // the last statement, so continuing-on-failure semantics are equivalent.
    EXPECT_EQ(turns, 2);
}
| 4,613 |
3,372 | <gh_stars>1000+
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.chime.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for the Amazon Chime UpdateSipRule operation. Code is
 * machine-generated; edit the generator, not this file.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/chime-2018-05-01/UpdateSipRule" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateSipRuleRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The SIP rule ID.
     * </p>
     */
    private String sipRuleId;
    /**
     * <p>
     * The new name for the specified SIP rule.
     * </p>
     */
    private String name;
    /**
     * <p>
     * The new value specified to indicate whether the rule is disabled.
     * </p>
     */
    private Boolean disabled;
    /**
     * <p>
     * The new value of the list of target applications.
     * </p>
     */
    private java.util.List<SipRuleTargetApplication> targetApplications;

    /**
     * <p>
     * The SIP rule ID.
     * </p>
     *
     * @param sipRuleId
     *        The SIP rule ID.
     */
    public void setSipRuleId(String sipRuleId) {
        this.sipRuleId = sipRuleId;
    }

    /**
     * <p>
     * The SIP rule ID.
     * </p>
     *
     * @return The SIP rule ID.
     */
    public String getSipRuleId() {
        return this.sipRuleId;
    }

    /**
     * <p>
     * The SIP rule ID.
     * </p>
     *
     * @param sipRuleId
     *        The SIP rule ID.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSipRuleRequest withSipRuleId(String sipRuleId) {
        setSipRuleId(sipRuleId);
        return this;
    }

    /**
     * <p>
     * The new name for the specified SIP rule.
     * </p>
     *
     * @param name
     *        The new name for the specified SIP rule.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * <p>
     * The new name for the specified SIP rule.
     * </p>
     *
     * @return The new name for the specified SIP rule.
     */
    public String getName() {
        return this.name;
    }

    /**
     * <p>
     * The new name for the specified SIP rule.
     * </p>
     *
     * @param name
     *        The new name for the specified SIP rule.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSipRuleRequest withName(String name) {
        setName(name);
        return this;
    }

    /**
     * <p>
     * The new value specified to indicate whether the rule is disabled.
     * </p>
     *
     * @param disabled
     *        The new value specified to indicate whether the rule is disabled.
     */
    public void setDisabled(Boolean disabled) {
        this.disabled = disabled;
    }

    /**
     * <p>
     * The new value specified to indicate whether the rule is disabled.
     * </p>
     *
     * @return The new value specified to indicate whether the rule is disabled.
     */
    public Boolean getDisabled() {
        return this.disabled;
    }

    /**
     * <p>
     * The new value specified to indicate whether the rule is disabled.
     * </p>
     *
     * @param disabled
     *        The new value specified to indicate whether the rule is disabled.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSipRuleRequest withDisabled(Boolean disabled) {
        setDisabled(disabled);
        return this;
    }

    /**
     * <p>
     * The new value specified to indicate whether the rule is disabled.
     * </p>
     *
     * @return The new value specified to indicate whether the rule is disabled.
     */
    public Boolean isDisabled() {
        return this.disabled;
    }

    /**
     * <p>
     * The new value of the list of target applications.
     * </p>
     *
     * @return The new value of the list of target applications.
     */
    public java.util.List<SipRuleTargetApplication> getTargetApplications() {
        return targetApplications;
    }

    /**
     * <p>
     * The new value of the list of target applications.
     * </p>
     *
     * @param targetApplications
     *        The new value of the list of target applications.
     */
    public void setTargetApplications(java.util.Collection<SipRuleTargetApplication> targetApplications) {
        if (targetApplications == null) {
            this.targetApplications = null;
            return;
        }

        // Defensive copy: the request must not alias the caller's collection.
        this.targetApplications = new java.util.ArrayList<SipRuleTargetApplication>(targetApplications);
    }

    /**
     * <p>
     * The new value of the list of target applications.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTargetApplications(java.util.Collection)} or {@link #withTargetApplications(java.util.Collection)} if
     * you want to override the existing values.
     * </p>
     *
     * @param targetApplications
     *        The new value of the list of target applications.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSipRuleRequest withTargetApplications(SipRuleTargetApplication... targetApplications) {
        if (this.targetApplications == null) {
            setTargetApplications(new java.util.ArrayList<SipRuleTargetApplication>(targetApplications.length));
        }
        for (SipRuleTargetApplication ele : targetApplications) {
            this.targetApplications.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * The new value of the list of target applications.
     * </p>
     *
     * @param targetApplications
     *        The new value of the list of target applications.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdateSipRuleRequest withTargetApplications(java.util.Collection<SipRuleTargetApplication> targetApplications) {
        setTargetApplications(targetApplications);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSipRuleId() != null)
            sb.append("SipRuleId: ").append(getSipRuleId()).append(",");
        if (getName() != null)
            sb.append("Name: ").append(getName()).append(",");
        if (getDisabled() != null)
            sb.append("Disabled: ").append(getDisabled()).append(",");
        if (getTargetApplications() != null)
            sb.append("TargetApplications: ").append(getTargetApplications());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof UpdateSipRuleRequest == false)
            return false;
        UpdateSipRuleRequest other = (UpdateSipRuleRequest) obj;
        // Per field: the XOR test rejects the case where exactly one side is
        // null; the second test compares values when both are non-null.
        if (other.getSipRuleId() == null ^ this.getSipRuleId() == null)
            return false;
        if (other.getSipRuleId() != null && other.getSipRuleId().equals(this.getSipRuleId()) == false)
            return false;
        if (other.getName() == null ^ this.getName() == null)
            return false;
        if (other.getName() != null && other.getName().equals(this.getName()) == false)
            return false;
        if (other.getDisabled() == null ^ this.getDisabled() == null)
            return false;
        if (other.getDisabled() != null && other.getDisabled().equals(this.getDisabled()) == false)
            return false;
        if (other.getTargetApplications() == null ^ this.getTargetApplications() == null)
            return false;
        if (other.getTargetApplications() != null && other.getTargetApplications().equals(this.getTargetApplications()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-based accumulation over the same fields equals() checks,
        // keeping the equals/hashCode contract.
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getSipRuleId() == null) ? 0 : getSipRuleId().hashCode());
        hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
        hashCode = prime * hashCode + ((getDisabled() == null) ? 0 : getDisabled().hashCode());
        hashCode = prime * hashCode + ((getTargetApplications() == null) ? 0 : getTargetApplications().hashCode());
        return hashCode;
    }

    @Override
    public UpdateSipRuleRequest clone() {
        return (UpdateSipRuleRequest) super.clone();
    }

}
| 3,773 |
462 |
#ifndef __PTR_MANAGER_H__
#define __PTR_MANAGER_H__
// node
#include <node.h>
// nan
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#include <nan.h>
#pragma GCC diagnostic pop
// gdal
#include <gdal_priv.h>
// ogr
#include <ogrsf_frmts.h>
#include <map>
#include <list>
using namespace v8;
// Forward declaration: the layer/band bookkeeping entries below keep a
// pointer back to the dataset entry that owns them.
struct PtrManagerDatasetItem;

// Bookkeeping entry for an OGRLayer that belongs to an open dataset.
struct PtrManagerLayerItem {
  long uid;                       // id handed out by PtrManager::add()
  PtrManagerDatasetItem *parent;  // owning dataset entry
  OGRLayer *ptr;
  // NOTE(review): presumably marks layers obtained as SQL result sets, which
  // GDAL requires to be released differently -- confirm against the .cpp.
  bool is_result_set;
};

// Bookkeeping entry for a GDALRasterBand that belongs to an open dataset.
struct PtrManagerRasterBandItem {
  long uid;                       // id handed out by PtrManager::add()
  PtrManagerDatasetItem *parent;  // owning dataset entry
  GDALRasterBand *ptr;
};

// Bookkeeping entry for an open dataset, together with the layers and bands
// that were opened from it.
struct PtrManagerDatasetItem {
  long uid;
  std::list<PtrManagerLayerItem*> layers;
  std::list<PtrManagerRasterBandItem*> bands;
  GDALDataset *ptr;
#if GDAL_VERSION_MAJOR < 2
  // Pre-GDAL-2 kept vector data sources in a separate type.
  OGRDataSource *ptr_datasource;
#endif
};

namespace node_gdal {

// A class for cleaning up GDAL objects that depend on open datasets
class PtrManager {
public:
  // Each add() registers an object and returns the uid assigned to it.  The
  // parent_uid overloads attach the child to an existing dataset entry.
  long add(GDALDataset* ptr);
#if GDAL_VERSION_MAJOR < 2
  long add(OGRDataSource* ptr);
#endif
  long add(GDALRasterBand* ptr, long parent_uid);
  long add(OGRLayer* ptr, long parent_uid, bool is_result_set);
  // Disposes the object registered under uid (for datasets, likely including
  // their dependent layers/bands -- see the .cpp for the exact order).
  void dispose(long uid);
  // True while uid still refers to a registered, undisposed object.
  bool isAlive(long uid);
  PtrManager();
  ~PtrManager();

private:
  // Counter used as the source of uids -- presumably advanced by add().
  long uid;
  void dispose(PtrManagerLayerItem* item);
  void dispose(PtrManagerRasterBandItem* item);
  void dispose(PtrManagerDatasetItem* item);
  // uid -> bookkeeping entry, one map per object kind.
  std::map<long, PtrManagerLayerItem*> layers;
  std::map<long, PtrManagerRasterBandItem*> bands;
  std::map<long, PtrManagerDatasetItem*> datasets;
};

}
#endif | 586 |
14,668 | <reponame>zealoussnow/chromium
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tests for upload_test_result_artifacts."""
from __future__ import print_function
import json
import mock
import os
import random
import string
import tempfile
import unittest
import upload_test_result_artifacts
class UploadTestResultArtifactsTest(unittest.TestCase):
  """Unit tests for upload_test_result_artifacts.

  Covers get_tests / upload_artifacts / get_file_digest, plus a set of
  opt-in load tests (methods named loadTest*) that talk to the network.
  """

  def setUp(self):
    # Used for load tests: absolute paths of temp files created during a
    # test, removed again in tearDown.
    self._temp_files = []

  def tearDown(self):
    # Used for load tests
    for fname in self._temp_files:
      os.unlink(fname)

  ### These are load tests useful for seeing how long it takes to upload
  ### different kinds of test results files. They won't be run as part of
  ### presubmit testing, since they take a while and talk to the network,
  ### but the code will stay here in case anyone wants to edit the code
  ### and wants to check performance. Change the test names from 'loadTestBlah'
  ### to 'testBlah' to get them to run.
  def makeTemp(self, size):
    """Creates a temp file of |size| bytes and returns its basename."""
    fd, fname = tempfile.mkstemp()
    # mkstemp() hands back an already-open OS descriptor; wrap it so it gets
    # closed instead of leaked (previously the fd was discarded unclosed).
    with os.fdopen(fd, 'w') as f:
      f.write(random.choice(string.ascii_letters) * size)
    self._temp_files.append(fname)
    return os.path.basename(fname)

  def makeTestJson(self, num_tests, artifact_size):
    """Builds a results dict with |num_tests| tests, one artifact each."""
    return {
        'tests': {
            'suite': {
                'test%d' % i: {
                    'artifacts': {
                        'artifact': self.makeTemp(artifact_size),
                    },
                    'expected': 'PASS',
                    'actual': 'PASS',
                } for i in range(num_tests)
            }
        },
        'artifact_type_info': {
            'artifact': 'text/plain'
        }
    }

  def _loadTest(self, json_data, upload):
    """Runs upload_artifacts against /tmp with the given upload flag."""
    return upload_test_result_artifacts.upload_artifacts(
        json_data, '/tmp', upload, 'test-bucket')

  def loadTestEndToEndSimple(self):
    test_data = self.makeTestJson(1, 10)
    print(self._loadTest(test_data, False))

  def loadTestEndToEndManySmall(self):
    test_data = self.makeTestJson(1000, 10)
    self._loadTest(test_data, False)

  def loadTestEndToEndSomeBig(self):
    test_data = self.makeTestJson(100, 10000000)
    self._loadTest(test_data, False)

  def loadTestEndToEndVeryBig(self):
    test_data = self.makeTestJson(2, 1000000000)
    self._loadTest(test_data, False)

  ### End load test section.

  def testGetTestsSimple(self):
    # A flat test dict flattens to single-element tuple keys.
    self.assertEqual(upload_test_result_artifacts.get_tests({
        'foo': {
            'expected': 'PASS',
            'actual': 'PASS',
        },
    }), {
        ('foo',): {
            'actual': 'PASS',
            'expected': 'PASS',
        }
    })

  def testGetTestsNested(self):
    # Nested suites flatten to path tuples ('foo', 'bar', <leaf>).
    self.assertEqual(upload_test_result_artifacts.get_tests({
        'foo': {
            'bar': {
                'baz': {
                    'actual': 'PASS',
                    'expected': 'PASS',
                },
                'bam': {
                    'actual': 'PASS',
                    'expected': 'PASS',
                },
            },
        },
    }), {
        ('foo', 'bar', 'baz'): {
            'actual': 'PASS',
            'expected': 'PASS',
        },
        ('foo', 'bar', 'bam'): {
            'actual': 'PASS',
            'expected': 'PASS',
        }
    })

  def testGetTestsError(self):
    # Non-dict input is rejected.
    with self.assertRaises(ValueError):
      upload_test_result_artifacts.get_tests([])

  def testUploadArtifactsMissingType(self):
    """Tests that the type information is used for validation."""
    data = {
        'artifact_type_info': {
            'log': 'text/plain'
        },
        'tests': {
            'foo': {
                'actual': 'PASS',
                'expected': 'PASS',
                'artifacts': {
                    # 'screenshot' is not declared in artifact_type_info.
                    'screenshot': 'foo.png',
                }
            }
        }
    }

    with self.assertRaises(ValueError):
      upload_test_result_artifacts.upload_artifacts(
          data, '/tmp', True, 'test-bucket')

  @mock.patch('upload_test_result_artifacts.get_file_digest')
  @mock.patch('upload_test_result_artifacts.tempfile.mkdtemp')
  @mock.patch('upload_test_result_artifacts.shutil.rmtree')
  @mock.patch('upload_test_result_artifacts.shutil.copyfile')
  def testUploadArtifactsNoUpload(
      self, copy_patch, rmtree_patch, mkd_patch, digest_patch):
    """Simple test; no artifacts, so data shouldn't change."""
    mkd_patch.return_value = 'foo_dir'
    data = {
        'artifact_type_info': {
            'log': 'text/plain'
        },
        'tests': {
            'foo': {
                'actual': 'PASS',
                'expected': 'PASS',
            }
        }
    }
    self.assertEqual(upload_test_result_artifacts.upload_artifacts(
        data, '/tmp', True, 'test-bucket'), data)
    # Scratch dir is created and cleaned up even when nothing is copied.
    mkd_patch.assert_called_once_with(prefix='upload_test_artifacts')
    digest_patch.assert_not_called()
    copy_patch.assert_not_called()
    rmtree_patch.assert_called_once_with('foo_dir')

  @mock.patch('upload_test_result_artifacts.get_file_digest')
  @mock.patch('upload_test_result_artifacts.tempfile.mkdtemp')
  @mock.patch('upload_test_result_artifacts.shutil.rmtree')
  @mock.patch('upload_test_result_artifacts.shutil.copyfile')
  @mock.patch('upload_test_result_artifacts.os.path.exists')
  def testUploadArtifactsBasic(
      self, exists_patch, copy_patch, rmtree_patch, mkd_patch, digest_patch):
    """Upload a single artifact."""
    mkd_patch.return_value = 'foo_dir'
    exists_patch.return_value = False
    digest_patch.return_value = 'deadbeef'

    data = {
        'artifact_type_info': {
            'log': 'text/plain'
        },
        'tests': {
            'foo': {
                'actual': 'PASS',
                'expected': 'PASS',
                'artifacts': {
                    'log': 'foo.txt',
                }
            }
        }
    }
    # The artifact filename is replaced by its content digest, and the
    # permanent GS location is recorded at the top level.
    self.assertEqual(upload_test_result_artifacts.upload_artifacts(
        data, '/tmp', True, 'test-bucket'), {
            'artifact_type_info': {
                'log': 'text/plain'
            },
            'tests': {
                'foo': {
                    'actual': 'PASS',
                    'expected': 'PASS',
                    'artifacts': {
                        'log': 'deadbeef',
                    }
                }
            },
            'artifact_permanent_location': 'gs://chromium-test-artifacts/sha1',
        })
    mkd_patch.assert_called_once_with(prefix='upload_test_artifacts')
    digest_patch.assert_called_once_with('/tmp/foo.txt')
    copy_patch.assert_called_once_with('/tmp/foo.txt', 'foo_dir/deadbeef')
    rmtree_patch.assert_called_once_with('foo_dir')

  @mock.patch('upload_test_result_artifacts.get_file_digest')
  @mock.patch('upload_test_result_artifacts.tempfile.mkdtemp')
  @mock.patch('upload_test_result_artifacts.shutil.rmtree')
  @mock.patch('upload_test_result_artifacts.shutil.copyfile')
  @mock.patch('upload_test_result_artifacts.os.path.exists')
  def testUploadArtifactsComplex(
      self, exists_patch, copy_patch, rmtree_patch, mkd_patch, digest_patch):
    """Upload multiple artifacts."""
    mkd_patch.return_value = 'foo_dir'
    exists_patch.return_value = False
    # One digest per artifact, consumed in traversal order.
    digest_patch.side_effect = [
        'deadbeef1', 'deadbeef2', 'deadbeef3', 'deadbeef4']
    data = {
        'artifact_type_info': {
            'log': 'text/plain',
            'screenshot': 'image/png',
        },
        'tests': {
            'bar': {
                'baz': {
                    'actual': 'PASS',
                    'expected': 'PASS',
                    'artifacts': {
                        'log': 'baz.log.txt',
                        'screenshot': 'baz.png',
                    }
                }
            },
            'foo': {
                'actual': 'PASS',
                'expected': 'PASS',
                'artifacts': {
                    'log': 'foo.log.txt',
                    'screenshot': 'foo.png',
                }
            },
        }
    }
    self.assertEqual(upload_test_result_artifacts.upload_artifacts(
        data, '/tmp', True, 'test-bucket'), {
            'artifact_type_info': {
                'log': 'text/plain',
                'screenshot': 'image/png',
            },
            'tests': {
                'bar': {
                    'baz': {
                        'actual': 'PASS',
                        'expected': 'PASS',
                        'artifacts': {
                            'log': 'deadbeef1',
                            'screenshot': 'deadbeef2',
                        }
                    }
                },
                'foo': {
                    'actual': 'PASS',
                    'expected': 'PASS',
                    'artifacts': {
                        'log': 'deadbeef3',
                        'screenshot': 'deadbeef4',
                    }
                },
            },
            'artifact_permanent_location': 'gs://chromium-test-artifacts/sha1',
        })
    mkd_patch.assert_called_once_with(prefix='upload_test_artifacts')
    digest_patch.assert_has_calls([
        mock.call('/tmp/baz.log.txt'), mock.call('/tmp/baz.png'),
        mock.call('/tmp/foo.log.txt'), mock.call('/tmp/foo.png')])
    copy_patch.assert_has_calls([
        mock.call('/tmp/baz.log.txt', 'foo_dir/deadbeef1'),
        mock.call('/tmp/baz.png', 'foo_dir/deadbeef2'),
        mock.call('/tmp/foo.log.txt', 'foo_dir/deadbeef3'),
        mock.call('/tmp/foo.png', 'foo_dir/deadbeef4'),
    ])
    rmtree_patch.assert_called_once_with('foo_dir')

  def testFileDigest(self):
    fd, path = tempfile.mkstemp(prefix='file_digest_test')
    # Register for tearDown cleanup and close the descriptor from mkstemp
    # (previously both the fd and the file itself were leaked).
    self._temp_files.append(path)
    with os.fdopen(fd, 'w') as f:
      f.write('a')

    # SHA-1 of the single byte 'a'.
    self.assertEqual(
        upload_test_result_artifacts.get_file_digest(path),
        '86f7e437faa5a7fce15d1ddcb9eaeaea377667b8')
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
| 4,455 |
453 | <filename>upvote/gae/lib/bit9/api.py<gh_stars>100-1000
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for interacting with the Bit9 REST API ORM."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from upvote.gae.lib.bit9 import constants
from upvote.gae.lib.bit9 import context
from upvote.gae.lib.bit9 import exceptions
from upvote.gae.lib.bit9 import model # pylint: disable=unused-import
# Import only the externally relevant names into the module's namespace, so
# callers can write e.g. api.Context / api.NotFoundError without reaching
# into the submodules directly.
# pylint: disable=invalid-name
METHOD = constants.METHOD
VERSION = constants.VERSION

BaseContext = context.BaseContext
Context = context.Context
UnicodeToAscii = context.UnicodeToAscii

Error = exceptions.Error
NotFoundError = exceptions.NotFoundError
QueryError = exceptions.QueryError
RequestError = exceptions.RequestError
PropertyError = exceptions.PropertyError
# pylint: enable=invalid-name

# NOTE: This API is automatically generated by the build_from_docs.py script.
# Updates can be implemented there and regenerated from the docs.
# pylint: disable=missing-docstring,line-too-long

################################################################################
####################### Generated by build_from_docs.py ########################
################################################################################
class ApprovalRequest(model.Model):
  """Generated ORM mapping for the Bit9 'approvalRequest' REST resource."""
  ROUTE = 'approvalRequest'

  id = model.Int32Property('id')
  file_catalog_id = model.Int32Property('fileCatalogId', expands_to='FileCatalog')
  installer_file_catalog_id = model.Int32Property('installerFileCatalogId', expands_to='FileCatalog')
  process_file_catalog_id = model.Int32Property('processFileCatalogId', expands_to='FileCatalog')
  computer_id = model.Int32Property('computerId', expands_to='Computer')
  computer_name = model.StringProperty('computerName')
  date_created = model.DateTimeProperty('dateCreated')
  created_by = model.StringProperty('createdBy')
  created_by_user_id = model.Int32Property('createdByUserId')
  date_modified = model.DateTimeProperty('dateModified')
  modified_by = model.StringProperty('modifiedBy')
  modified_by_user_id = model.Int32Property('modifiedByUserId')
  enforcement_level = model.Int32Property('enforcementLevel')
  resolution = model.Int32Property('resolution', allow_update=True)
  request_type = model.Int32Property('requestType')
  requestor_comments = model.StringProperty('requestorComments')
  requestor_email = model.StringProperty('requestorEmail', allow_update=True)
  priority = model.Int32Property('priority')
  resolution_comments = model.StringProperty('resolutionComments', allow_update=True)
  status = model.Int32Property('status', allow_update=True)
  policy_id = model.Int32Property('policyId', expands_to='Policy')
  multiple_blocks = model.BooleanProperty('multipleBlocks')
  file_name = model.StringProperty('fileName')
  path_name = model.StringProperty('pathName')
  process = model.StringProperty('process')
  custom_rule_id = model.Int32Property('customRuleId')
class Certificate(model.Model):
  """Generated ORM mapping for the Bit9 'certificate' REST resource."""
  ROUTE = 'certificate'

  id = model.Int32Property('id')
  parent_certificate_id = model.Int32Property(
      'parentCertificateId', expands_to='Certificate')
  publisher_id = model.Int32Property('publisherId', expands_to='Publisher')
  thumbprint = model.StringProperty('thumbprint')
  thumbprint_algorithm = model.StringProperty('thumbprintAlgorithm')
  subject_name = model.StringProperty('subjectName')
  signature_algorithm = model.StringProperty('signatureAlgorithm')
  serial_number = model.StringProperty('serialNumber')
  valid_from = model.DateTimeProperty('validFrom')
  valid_to = model.DateTimeProperty('validTo')
  public_key_algorithm = model.StringProperty('publicKeyAlgorithm')
  public_key_size = model.Int32Property('publicKeySize')
  first_seen_computer_id = model.Int32Property(
      'firstSeenComputerId', expands_to='Computer')
  description = model.StringProperty('description', allow_update=True)
  source_type = model.Int32Property('sourceType')
  date_created = model.DateTimeProperty('dateCreated')
  date_modified = model.DateTimeProperty('dateModified')
  modified_by_user = model.StringProperty('modifiedByUser')
  modified_by_user_id = model.Int32Property('modifiedByUserId')
  intermediary = model.BooleanProperty('intermediary')
  valid = model.BooleanProperty('valid')
  embedded = model.BooleanProperty('embedded')
  detached = model.BooleanProperty('detached')
  signer = model.BooleanProperty('signer')
  cosigner = model.BooleanProperty('cosigner')
  certificate_state = model.Int32Property('certificateState', allow_update=True)
  certificate_effective_state = model.Int32Property('certificateEffectiveState')
  cl_version = model.Int64Property('clVersion')
class Computer(model.Model):
  """Generated ORM mapping for the Bit9 'computer' REST resource."""
  ROUTE = 'computer'

  id = model.Int32Property('id')
  name = model.StringProperty('name', allow_update=True)
  computer_tag = model.StringProperty('computerTag', allow_update=True)
  description = model.StringProperty('description', allow_update=True)
  policy_id = model.Int32Property('policyId', allow_update=True, expands_to='Policy')
  previous_policy_id = model.Int32Property('previousPolicyId', expands_to='Policy')
  policy_name = model.StringProperty('policyName')
  automatic_policy = model.BooleanProperty('automaticPolicy', allow_update=True)
  local_approval = model.BooleanProperty('localApproval', allow_update=True)
  users = model.StringProperty('users')
  ip_address = model.StringProperty('ipAddress')
  connected = model.BooleanProperty('connected')
  enforcement_level = model.Int32Property('enforcementLevel')
  disconnected_enforcement_level = model.Int32Property('disconnectedEnforcementLevel')
  cli_password = model.StringProperty('CLIPassword')
  last_register_date = model.DateTimeProperty('lastRegisterDate')
  last_poll_date = model.DateTimeProperty('lastPollDate')
  os_short_name = model.StringProperty('osShortName')
  os_name = model.StringProperty('osName')
  platform_id = model.Int32Property('platformId')
  virtualized = model.StringProperty('virtualized')
  virtual_platform = model.StringProperty('virtualPlatform')
  date_created = model.DateTimeProperty('dateCreated')
  agent_version = model.StringProperty('agentVersion')
  days_offline = model.Int32Property('daysOffline')
  uninstalled = model.BooleanProperty('uninstalled')
  deleted = model.BooleanProperty('deleted')
  processor_count = model.Int32Property('processorCount')
  processor_speed = model.DoubleProperty('processorSpeed')
  processor_model = model.StringProperty('processorModel')
  machine_model = model.StringProperty('machineModel')
  memory_size = model.Int32Property('memorySize')
  upgrade_status = model.StringProperty('upgradeStatus')
  upgrade_error = model.StringProperty('upgradeError')
  upgrade_error_time = model.DateTimeProperty('upgradeErrorTime')
  upgrade_error_count = model.Int32Property('upgradeErrorCount')
  sync_flags = model.Int32Property('syncFlags')
  refresh_flags = model.Int32Property('refreshFlags', allow_update=True)
  policy_status = model.StringProperty('policyStatus')
  policy_status_details = model.StringProperty('policyStatusDetails')
  prioritized = model.BooleanProperty('prioritized', allow_update=True)
  mac_address = model.StringProperty('macAddress')
  debug_level = model.Int32Property('debugLevel', allow_update=True)
  kernel_debug_level = model.Int32Property('kernelDebugLevel', allow_update=True)
  debug_flags = model.Int32Property('debugFlags', allow_update=True)
  debug_duration = model.Int32Property('debugDuration', allow_update=True)
  active_debug_level = model.Int32Property('activeDebugLevel')
  active_kernel_debug_level = model.Int32Property('activeKernelDebugLevel')
  active_debug_flags = model.Int32Property('activeDebugFlags')
  cc_level = model.Int32Property('ccLevel', allow_update=True)
  cc_flags = model.Int32Property('ccFlags', allow_update=True)
  supported_kernel = model.BooleanProperty('supportedKernel')
  force_upgrade = model.BooleanProperty('forceUpgrade', allow_update=True)
  has_health_check_errors = model.BooleanProperty('hasHealthCheckErrors')
  cl_version = model.Int32Property('clVersion')
  agent_memory_dumps = model.Int32Property('agentMemoryDumps')
  system_memory_dumps = model.Int32Property('systemMemoryDumps')
  initializing = model.BooleanProperty('initializing')
  is_active = model.BooleanProperty('isActive')
  tamper_protection_active = model.BooleanProperty('tamperProtectionActive')
  agent_cache_size = model.Int32Property('agentCacheSize')
  agent_queue_size = model.Int32Property('agentQueueSize')
  sync_percent = model.Int32Property('syncPercent')
  init_percent = model.Int32Property('initPercent')
  td_count = model.Int32Property('tdCount')
  template = model.BooleanProperty('template', allow_update=True)
  template_computer_id = model.Int32Property('templateComputerId', expands_to='Computer')
  template_date = model.DateTimeProperty('templateDate')
  template_clone_cleanup_mode = model.Int32Property('templateCloneCleanupMode', allow_update=True)
  template_clone_cleanup_time = model.Int32Property('templateCloneCleanupTime', allow_update=True)
  template_clone_cleanup_time_scale = model.Int32Property('templateCloneCleanupTimeScale', allow_update=True)
  template_track_mods_only = model.BooleanProperty('templateTrackModsOnly', allow_update=True)
  cb_sensor_id = model.Int32Property('cbSensorId')
  cb_sensor_version = model.StringProperty('cbSensorVersion')
  cb_sensor_flags = model.Int32Property('cbSensorFlags')
  has_duplicates = model.BooleanProperty('hasDuplicates')
  scep_status = model.Int32Property('SCEPStatus')
class Event(model.Model):
  """Generated ORM mapping for the Bit9 'event' REST resource."""
  ROUTE = 'event'

  id = model.Int64Property('id')
  timestamp = model.DateTimeProperty('timestamp')
  received_timestamp = model.DateTimeProperty('receivedTimestamp')
  description = model.StringProperty('description')
  type = model.Int32Property('type')
  subtype = model.Int32Property('subtype')
  subtype_name = model.StringProperty('subtypeName')
  ip_address = model.StringProperty('ipAddress')
  computer_id = model.Int32Property('computerId', expands_to='Computer')
  computer_name = model.StringProperty('computerName')
  policy_id = model.Int32Property('policyId', expands_to='Policy')
  policy_name = model.StringProperty('policyName')
  file_catalog_id = model.Int32Property('fileCatalogId', expands_to='FileCatalog')
  installer_file_catalog_id = model.Int32Property('installerFileCatalogId', expands_to='FileCatalog')
  process_file_catalog_id = model.Int32Property('processFileCatalogId', expands_to='FileCatalog')
  file_name = model.StringProperty('fileName')
  path_name = model.StringProperty('pathName')
  command_line = model.StringProperty('commandLine')
  process_path_name = model.StringProperty('processPathName')
  process_file_name = model.StringProperty('processFileName')
  installer_file_name = model.StringProperty('installerFileName')
  process_key = model.StringProperty('processKey')
  severity = model.Int32Property('severity')
  user_name = model.StringProperty('userName')
  rule_name = model.StringProperty('ruleName')
  ban_name = model.StringProperty('banName')
  updater_name = model.StringProperty('updaterName')
  indicator_name = model.StringProperty('indicatorName')
  param1 = model.StringProperty('param1')
  param2 = model.StringProperty('param2')
  param3 = model.StringProperty('param3')
  string_id = model.Int32Property('stringId')
class FileCatalog(model.Model):
  """Generated ORM mapping for the Bit9 'fileCatalog' REST resource."""
  ROUTE = 'fileCatalog'

  id = model.Int32Property('id')
  date_created = model.DateTimeProperty('dateCreated')
  path_name = model.StringProperty('pathName')
  file_name = model.StringProperty('fileName')
  file_extension = model.StringProperty('fileExtension')
  computer_id = model.Int32Property('computerId', expands_to='Computer')
  md5 = model.StringProperty('md5')
  sha1 = model.StringProperty('sha1')
  sha256 = model.StringProperty('sha256')
  sha256_hash_type = model.Int32Property('sha256HashType')
  file_type = model.StringProperty('fileType')
  file_size = model.Int64Property('fileSize')
  product_name = model.StringProperty('productName')
  publisher = model.StringProperty('publisher')
  company = model.StringProperty('company')
  publisher_or_company = model.StringProperty('publisherOrCompany')
  product_version = model.StringProperty('productVersion')
  installed_program_name = model.StringProperty('installedProgramName')
  reputation_available = model.BooleanProperty('reputationAvailable')
  trust = model.Int32Property('trust')
  trust_messages = model.StringProperty('trustMessages')
  threat = model.Int16Property('threat')
  category = model.StringProperty('category')
  file_state = model.Int32Property('fileState')
  publisher_state = model.Int32Property('publisherState')
  certificate_state = model.Int32Property('certificateState')
  effective_state = model.StringProperty('effectiveState')
  approved_by_reputation = model.BooleanProperty('approvedByReputation')
  reputation_enabled = model.BooleanProperty('reputationEnabled')
  prevalence = model.Int32Property('prevalence')
  file_flags = model.Int32Property('fileFlags')
  publisher_id = model.Int32Property('publisherId', expands_to='Publisher')
  certificate_id = model.Int32Property('certificateId', expands_to='Certificate')
class FileInstance(model.Model):
  """Generated ORM mapping for the Bit9 'fileInstance' REST resource."""
  ROUTE = 'fileInstance'

  id = model.Int64Property('id')
  file_catalog_id = model.Int32Property('fileCatalogId', expands_to='FileCatalog')
  file_instance_group_id = model.Int64Property('fileInstanceGroupId')
  computer_id = model.Int32Property('computerId', expands_to='Computer')
  date_created = model.DateTimeProperty('dateCreated')
  file_name = model.StringProperty('fileName')
  path_name = model.StringProperty('pathName')
  executed = model.BooleanProperty('executed')
  local_state = model.Int32Property('localState', allow_update=True)
  detailed_local_state = model.Int32Property('detailedLocalState')
  detached_publisher_id = model.Int32Property('detachedPublisherId', expands_to='Publisher')
  detached_certificate_id = model.Int32Property('detachedCertificateId', expands_to='Certificate')
class FileRule(model.Model):
    """A server-side rule attached to a file (by catalog entry or hash).

    NOTE(review): field semantics are inferred from property names; confirm
    against the server API documentation.
    """
    # API route name for this resource.
    ROUTE = 'fileRule'
    id = model.Int64Property('id')
    file_catalog_id = model.Int32Property('fileCatalogId', allow_update=True, expands_to='FileCatalog')
    name = model.StringProperty('name', allow_update=True)
    description = model.StringProperty('description', allow_update=True)
    file_state = model.Int32Property('fileState', allow_update=True)
    source_type = model.Int32Property('sourceType')
    source_id = model.Int32Property('sourceId')
    report_only = model.BooleanProperty('reportOnly', allow_update=True)
    reputation_approvals_enabled = model.BooleanProperty('reputationApprovalsEnabled', allow_update=True)
    force_installer = model.BooleanProperty('forceInstaller', allow_update=True)
    force_not_installer = model.BooleanProperty('forceNotInstaller', allow_update=True)
    policy_ids = model.StringProperty('policyIds', allow_update=True)
    hash = model.StringProperty('hash', allow_update=True)
    platform_flags = model.Int32Property('platformFlags', allow_update=True)
    # Audit-trail metadata below is read-only (no allow_update).
    date_created = model.DateTimeProperty('dateCreated')
    created_by = model.StringProperty('createdBy')
    created_by_user_id = model.Int32Property('createdByUserId')
    date_modified = model.DateTimeProperty('dateModified')
    modified_by = model.StringProperty('modifiedBy')
    modified_by_user_id = model.Int32Property('modifiedByUserId')
    cl_version = model.Int64Property('clVersion')
class Policy(model.Model):
    """A policy resource grouping enforcement settings for computers.

    NOTE(review): field semantics are inferred from property names; confirm
    against the server API documentation.
    """
    # API route name for this resource.
    ROUTE = 'policy'
    id = model.Int32Property('id')
    name = model.StringProperty('name', allow_update=True)
    description = model.StringProperty('description', allow_update=True)
    package_name = model.StringProperty('packageName')
    enforcement_level = model.Int32Property('enforcementLevel', allow_update=True)
    disconnected_enforcement_level = model.Int32Property('disconnectedEnforcementLevel', allow_update=True)
    help_desk_url = model.StringProperty('helpDeskUrl')
    image_url = model.StringProperty('imageUrl')
    # Audit-trail metadata (read-only).
    date_created = model.DateTimeProperty('dateCreated')
    created_by_user_id = model.Int32Property('createdByUserId')
    date_modified = model.DateTimeProperty('dateModified')
    modified_by_user_id = model.Int32Property('modifiedByUserId')
    read_only = model.BooleanProperty('readOnly')
    hidden = model.BooleanProperty('hidden')
    # Behaviour toggles; writable (allow_update=True).
    automatic = model.BooleanProperty('automatic', allow_update=True)
    load_agent_in_safe_mode = model.BooleanProperty('loadAgentInSafeMode', allow_update=True)
    reputation_enabled = model.BooleanProperty('reputationEnabled', allow_update=True)
    file_tracking_enabled = model.BooleanProperty('fileTrackingEnabled', allow_update=True)
    custom_logo = model.BooleanProperty('customLogo', allow_update=True)
    automatic_approvals_on_transition = model.BooleanProperty('automaticApprovalsOnTransition', allow_update=True)
    allow_agent_upgrades = model.BooleanProperty('allowAgentUpgrades', allow_update=True)
    # Aggregate counters (read-only).
    total_computers = model.Int32Property('totalComputers')
    connected_computers = model.Int32Property('connectedComputers')
    at_enforcement_computers = model.Int32Property('atEnforcementComputers')
    cl_version_max = model.Int32Property('clVersionMax')
class Publisher(model.Model):
    """A software publisher resource (signer of files/certificates).

    NOTE(review): field semantics are inferred from property names; confirm
    against the server API documentation.
    """
    # API route name for this resource.
    ROUTE = 'publisher'
    id = model.Int32Property('id')
    name = model.StringProperty('name')
    description = model.StringProperty('description', allow_update=True)
    # Audit-trail metadata (read-only).
    date_created = model.DateTimeProperty('dateCreated')
    modified_by = model.StringProperty('modifiedBy')
    modified_by_user_id = model.Int32Property('modifiedByUserId')
    date_modified = model.DateTimeProperty('dateModified')
    publisher_reputation = model.Int32Property('publisherReputation')
    publisher_state = model.Int32Property('publisherState', allow_update=True)
    policy_ids = model.StringProperty('policyIds', allow_update=True)
    reputation_approvals_enabled = model.BooleanProperty('reputationApprovalsEnabled', allow_update=True)
    source_type = model.Int32Property('sourceType')
    first_seen_computer_id = model.Int32Property('firstSeenComputerId', expands_to='Computer')
    platform_flags = model.Int32Property('platformFlags')
    # Aggregate counters (read-only).
    signed_files_count = model.Int32Property('signedFilesCount')
    signed_certificate_count = model.Int32Property('signedCertificateCount')
    hidden = model.BooleanProperty('hidden')
    cl_version = model.Int64Property('clVersion')
| 5,478 |
1,745 | <filename>Source/Scripting/bsfScript/Generated/BsScriptLimitConeRange.generated.h<gh_stars>1000+
//********************************* bs::framework - Copyright 2018-2019 <NAME> ************************************//
//*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********//
#pragma once
#include "BsScriptEnginePrerequisites.h"
#include "BsScriptObject.h"
#include "../../../Foundation/bsfCore/Physics/BsJoint.h"
#include "Math/BsRadian.h"
#include "../../../Foundation/bsfCore/Physics/BsJoint.h"
namespace bs
{
	/**
	 * Plain-layout interop mirror of LimitConeRange, used when marshalling the
	 * value between native code and the managed scripting runtime.
	 */
	struct __LimitConeRangeInterop
	{
		Radian yLimitAngle;
		Radian zLimitAngle;
		float contactDist;
		float restitution;
		Spring spring;
	};
	/**
	 * Script interop object for LimitConeRange. NOTE: this file appears to be
	 * auto-generated (see the "Generated" path/banner); manual edits are likely
	 * to be overwritten by the code generator.
	 */
	class BS_SCR_BE_EXPORT ScriptLimitConeRange : public ScriptObject<ScriptLimitConeRange>
	{
	public:
		SCRIPT_OBJ(ENGINE_ASSEMBLY, ENGINE_NS, "LimitConeRange")

		/** Boxes the interop struct into a managed object. */
		static MonoObject* box(const __LimitConeRangeInterop& value);
		/** Unboxes a managed object back into the interop struct. */
		static __LimitConeRangeInterop unbox(MonoObject* value);
		/** Converts the interop layout to the native LimitConeRange. */
		static LimitConeRange fromInterop(const __LimitConeRangeInterop& value);
		/** Converts a native LimitConeRange to the interop layout. */
		static __LimitConeRangeInterop toInterop(const LimitConeRange& value);

	private:
		ScriptLimitConeRange(MonoObject* managedInstance);
	};
| 407 |
777 | <reponame>google-ar/chromium
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/file_manager/zip_file_creator.h"
#include <vector>
#include "base/bind.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/rand_util.h"
#include "base/run_loop.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "content/public/test/test_utils.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/zlib/google/zip_reader.h"
namespace file_manager {
namespace {
// Stores |success| into |out_success| and quits the run loop so the test body
// can resume after the asynchronous zip operation finishes.
void TestCallback(bool* out_success, const base::Closure& quit, bool success) {
  *out_success = success;
  quit.Run();
}
// Test fixture providing a unique temp dir with a "files" subdirectory (the
// content to be zipped) and a path for the output archive.
class ZipFileCreatorTest : public InProcessBrowserTest {
 protected:
  void SetUpOnMainThread() override {
    ASSERT_TRUE(dir_.CreateUniqueTempDir());
    ASSERT_TRUE(base::CreateDirectory(zip_base_dir()));
  }

  // Destination path of the zip archive under test.
  base::FilePath zip_archive_path() const {
    return dir_.GetPath().AppendASCII("test.zip");
  }

  // Root directory whose contents get archived.
  base::FilePath zip_base_dir() const {
    return dir_.GetPath().AppendASCII("files");
  }

 protected:
  base::ScopedTempDir dir_;
};
} // namespace
// Zipping a path that does not exist must report failure via the callback.
IN_PROC_BROWSER_TEST_F(ZipFileCreatorTest, FailZipForAbsentFile) {
  base::RunLoop run_loop;

  // Preset to true so that the expected failure result is observable.
  bool success = true;
  std::vector<base::FilePath> paths;
  paths.push_back(base::FilePath(FILE_PATH_LITERAL("not.exist")));
  (new ZipFileCreator(
       base::Bind(&TestCallback, &success,
                  content::GetDeferredQuitTaskForRunLoop(&run_loop)),
       zip_base_dir(), paths, zip_archive_path()))
      ->Start();

  content::RunThisRunLoop(&run_loop);
  EXPECT_FALSE(success);
}
// Zips a directory plus two files and verifies that each resulting archive
// entry matches the on-disk content, including a large random-byte file.
IN_PROC_BROWSER_TEST_F(ZipFileCreatorTest, SomeFilesZip) {
  // Prepare files: a directory "foo", a small file "foo/bar", and a 100 KB
  // random file "random".
  const base::FilePath kDir1(FILE_PATH_LITERAL("foo"));
  const base::FilePath kFile1(kDir1.AppendASCII("bar"));
  const base::FilePath kFile2(FILE_PATH_LITERAL("random"));
  const int kRandomDataSize = 100000;
  const std::string kRandomData = base::RandBytesAsString(kRandomDataSize);
  base::CreateDirectory(zip_base_dir().Append(kDir1));
  base::WriteFile(zip_base_dir().Append(kFile1), "123", 3);
  base::WriteFile(zip_base_dir().Append(kFile2),
                  kRandomData.c_str(), kRandomData.size());

  bool success = false;
  base::RunLoop run_loop;
  std::vector<base::FilePath> paths;
  paths.push_back(kDir1);
  paths.push_back(kFile1);
  paths.push_back(kFile2);
  (new ZipFileCreator(
       base::Bind(&TestCallback, &success,
                  content::GetDeferredQuitTaskForRunLoop(&run_loop)),
       zip_base_dir(), paths, zip_archive_path()))
      ->Start();

  content::RunThisRunLoop(&run_loop);
  EXPECT_TRUE(success);

  // Check the archive content: exactly the three entries created above.
  zip::ZipReader reader;
  ASSERT_TRUE(reader.Open(zip_archive_path()));
  EXPECT_EQ(3, reader.num_entries());
  while (reader.HasMore()) {
    ASSERT_TRUE(reader.OpenCurrentEntryInZip());
    const zip::ZipReader::EntryInfo* entry = reader.current_entry_info();
    // ZipReader returns directory path with trailing slash.
    if (entry->file_path() == kDir1.AsEndingWithSeparator()) {
      EXPECT_TRUE(entry->is_directory());
    } else if (entry->file_path() == kFile1) {
      EXPECT_FALSE(entry->is_directory());
      EXPECT_EQ(3, entry->original_size());
    } else if (entry->file_path() == kFile2) {
      EXPECT_FALSE(entry->is_directory());
      EXPECT_EQ(kRandomDataSize, entry->original_size());

      // Extract the large entry and compare it byte-for-byte to the source.
      const base::FilePath out = dir_.GetPath().AppendASCII("archived_content");
      EXPECT_TRUE(reader.ExtractCurrentEntryToFilePath(out));
      EXPECT_TRUE(base::ContentsEqual(zip_base_dir().Append(kFile2), out));
    } else {
      // Unexpected entry name.
      ADD_FAILURE();
    }
    ASSERT_TRUE(reader.AdvanceToNextEntry());
  }
}
} // namespace file_manager
| 1,488 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Plaines-Saint-Lange","circ":"2ème circonscription","dpt":"Aube","inscrits":208,"abs":128,"votants":80,"blancs":7,"nuls":2,"exp":71,"res":[{"nuance":"REM","nom":"<NAME>","voix":39},{"nuance":"LR","nom":"<NAME>","voix":32}]} | 112 |
5,169 | {
"name": "NKJNetworkActivityIndicator",
"version": "0.1.2",
"summary": "A network activity manager that show and hide network activity.",
"description": " NKJNetworkIndicator. A network activity manager that show and hide network activity.\n\n Manage with above keys.\n * name\n * identifier\n",
"homepage": "https://github.com/nakajijapan/NKJNetworkActivityIndicator",
"license": "MIT",
"authors": {
"nakajijapan": "<EMAIL>"
},
"source": {
"git": "https://github.com/nakajijapan/NKJNetworkActivityIndicator.git",
"tag": "0.1.2"
},
"social_media_url": "https://twitter.com/nakajijapan",
"platforms": {
"ios": "7.0"
},
"requires_arc": true,
"source_files": "Pod/Classes/**/*",
"resource_bundles": {
"NKJNetworkIndicator": [
"Pod/Assets/*.png"
]
}
}
| 401 |
1,895 | <reponame>a759389099/mybatis-plus-samples<gh_stars>1000+
package com.baomidou.mybatisplus.samples.reduce.springmvc.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.KeySequence;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
@Data
@KeySequence("SEQ_DISTRICT")
@TableName("district")
/**
 * Entity mapped to the {@code district} table. Lombok {@code @Data} generates
 * getters, setters, {@code equals}, {@code hashCode} and {@code toString}.
 */
@Data
@KeySequence("SEQ_DISTRICT")
@TableName("district")
public class District {
    /**
     * Primary key. {@code IdType.INPUT} means the value is supplied on insert
     * (here backed by the {@code SEQ_DISTRICT} key sequence).
     */
    @TableId(value = "id", type = IdType.INPUT)
    private Long id;
    private String name;
    private String city;
    private String district;
}
| 228 |
5,300 | package hello.home.entity;
import com.techempower.data.annotation.*;
import com.techempower.js.legacy.*;
/**
* A fortune entity.
*/
@Entity
/**
 * A fortune-cookie message entity, ordered alphabetically by its text.
 */
@Entity
public class Fortune
  extends GhDataEntity
  implements Comparable<Fortune>
{

  private String message;

  /**
   * Default constructor.
   */
  public Fortune()
  {
    // Intentionally empty.
  }

  /**
   * Sets the message text.
   *
   * @param newMessage the message to store
   * @return this Fortune, allowing call chaining
   */
  public Fortune setMessage(String newMessage)
  {
    this.message = newMessage;
    return this;
  }

  /**
   * Gets the message text.
   */
  public String getMessage()
  {
    return message;
  }

  /**
   * Visitor factory mapping a Fortune to JSON with "id" and "message" fields.
   */
  public static final VisitorFactory<Fortune> VISITOR_FACTORY = new VisitorFactory<Fortune>()
  {
    @Override
    public Visitor visitor(Fortune fortune)
    {
      return Visitors.map(
          "id", fortune.getId(),
          "message", fortune.getMessage());
    }
  };

  /**
   * Fortunes sort alphabetically by their message text.
   */
  @Override
  public int compareTo(Fortune other)
  {
    return this.message.compareTo(other.message);
  }
}
| 420 |
434 | #include "kvm/virtio-vsock.h"
#include "kvm/virtio-pci-dev.h"
#include "kvm/kvm.h"
#include "kvm/pci.h"
#include "kvm/ioeventfd.h"
#include "kvm/guest_compat.h"
#include "kvm/virtio-pci.h"
#include "kvm/virtio.h"
#include <linux/kernel.h>
#include <linux/virtio_vsock.h>
#include <linux/vhost.h>
#define VIRTIO_VSOCK_QUEUE_SIZE 128
static LIST_HEAD(vdevs);
static int compat_id = -1;
enum {
VSOCK_VQ_RX = 0, /* for host to guest data */
VSOCK_VQ_TX = 1, /* for guest to host data */
VSOCK_VQ_EVENT = 2,
VSOCK_VQ_MAX = 3,
};
struct vsock_dev {
struct virt_queue vqs[VSOCK_VQ_MAX];
struct virtio_vsock_config config;
u32 features;
int vhost_fd;
struct virtio_device vdev;
struct list_head list;
struct kvm *kvm;
bool started;
};
/* virtio config-space accessor: exposes the guest CID configuration. */
static u8 *get_config(struct kvm *kvm, void *dev)
{
	struct vsock_dev *vdev = dev;

	return ((u8 *)(&vdev->config));
}
/* Feature bits offered to the guest: event index + indirect descriptors. */
static u32 get_host_features(struct kvm *kvm, void *dev)
{
	return 1UL << VIRTIO_RING_F_EVENT_IDX
		| 1UL << VIRTIO_RING_F_INDIRECT_DESC;
}
/* Records the feature bits the guest driver acknowledged. */
static void set_guest_features(struct kvm *kvm, void *dev, u32 features)
{
	struct vsock_dev *vdev = dev;

	vdev->features = features;
}
/*
 * True for the event virtqueue. Callers use this to skip the vhost-specific
 * setup/notification that only applies to the RX/TX queues.
 */
static bool is_event_vq(u32 vq)
{
	return vq == VSOCK_VQ_EVENT;
}
/*
 * Initializes virtqueue @vq backed by guest page @pfn and, unless it is the
 * event queue, hands the ring addresses over to the vhost-vsock driver.
 * Returns 0; ioctl failures are fatal (die_perror).
 */
static int init_vq(struct kvm *kvm, void *dev, u32 vq, u32 page_size, u32 align,
		   u32 pfn)
{
	struct vhost_vring_state state = { .index = vq };
	struct vhost_vring_addr addr;
	struct vsock_dev *vdev = dev;
	struct virt_queue *queue;
	void *p;
	int r;

	/* The guest driver is alive; drop the "driver missing" compat hint. */
	compat__remove_message(compat_id);

	queue = &vdev->vqs[vq];
	queue->pfn = pfn;
	p = virtio_get_vq(kvm, queue->pfn, page_size);

	vring_init(&queue->vring, VIRTIO_VSOCK_QUEUE_SIZE, p, align);
	virtio_init_device_vq(&vdev->vdev, queue);

	/* Without a vhost backend there is nothing more to configure. */
	if (vdev->vhost_fd == -1)
		return 0;

	/* The event queue is not handed to vhost. */
	if (is_event_vq(vq))
		return 0;

	/* Tell vhost the ring size, then reset its internal index. */
	state.num = queue->vring.num;
	r = ioctl(vdev->vhost_fd, VHOST_SET_VRING_NUM, &state);
	if (r < 0)
		die_perror("VHOST_SET_VRING_NUM failed");

	state.num = 0;
	r = ioctl(vdev->vhost_fd, VHOST_SET_VRING_BASE, &state);
	if (r < 0)
		die_perror("VHOST_SET_VRING_BASE failed");

	/* Pass the userspace addresses of the three vring areas to vhost. */
	addr = (struct vhost_vring_addr) {
		.index = vq,
		.desc_user_addr = (u64)(unsigned long)queue->vring.desc,
		.avail_user_addr = (u64)(unsigned long)queue->vring.avail,
		.used_user_addr = (u64)(unsigned long)queue->vring.used,
	};

	r = ioctl(vdev->vhost_fd, VHOST_SET_VRING_ADDR, &addr);
	if (r < 0)
		die_perror("VHOST_SET_VRING_ADDR failed");

	return 0;
}
/*
 * Gives vhost the kick eventfd for @vq so guest notifications reach the
 * kernel driver directly; skipped for the (userspace-handled) event queue.
 */
static void notify_vq_eventfd(struct kvm *kvm, void *dev, u32 vq, u32 efd)
{
	struct vsock_dev *vdev = dev;
	struct vhost_vring_file file = {
		.index	= vq,
		.fd	= efd,
	};
	int r;

	if (is_event_vq(vq))
		return;

	if (vdev->vhost_fd == -1)
		return;

	r = ioctl(vdev->vhost_fd, VHOST_SET_VRING_KICK, &file);
	if (r < 0)
		die_perror("VHOST_SET_VRING_KICK failed");
}
/*
 * Status-change handler: starts or stops the vhost-vsock backend when the
 * guest driver toggles DRIVER_OK. Idempotent for repeated status writes.
 */
static void notify_status(struct kvm *kvm, void *dev, u32 status)
{
	struct vsock_dev *vdev = dev;
	int r, start;

	/*
	 * Guard against a missing vhost backend, matching every other
	 * callback in this file; previously an early status write would
	 * have issued the ioctl on fd -1 and died with EBADF.
	 */
	if (vdev->vhost_fd == -1)
		return;

	start = !!(status & VIRTIO_CONFIG_S_DRIVER_OK);

	if (vdev->started == start)
		return;

	r = ioctl(vdev->vhost_fd, VHOST_VSOCK_SET_RUNNING, &start);
	if (r != 0)
		die("VHOST_VSOCK_SET_RUNNING failed %d", errno);

	vdev->started = start;
}
/* Queue kicks are handled in-kernel by vhost; nothing to do in userspace. */
static int notify_vq(struct kvm *kvm, void *dev, u32 vq)
{
	return 0;
}
/* Returns the virt_queue descriptor for queue index @vq. */
static struct virt_queue *get_vq(struct kvm *kvm, void *dev, u32 vq)
{
	struct vsock_dev *vdev = dev;

	return &vdev->vqs[vq];
}
/* All vsock queues have the same fixed size. */
static int get_size_vq(struct kvm *kvm, void *dev, u32 vq)
{
	return VIRTIO_VSOCK_QUEUE_SIZE;
}
/* Queue size is not negotiable; echo the requested size back unchanged. */
static int set_size_vq(struct kvm *kvm, void *dev, u32 vq, int size)
{
	return size;
}
/*
 * Wires the interrupt path for @vq: creates an eventfd, registers it as a
 * KVM irqfd for @gsi, and hands it to vhost as the call fd so the kernel
 * can inject guest interrupts directly.
 *
 * NOTE(review): the eventfd is never closed here; presumably it lives for
 * the lifetime of the device — confirm against the teardown path.
 */
static void notify_vq_gsi(struct kvm *kvm, void *dev, u32 vq, u32 gsi)
{
	struct vhost_vring_file file;
	struct vsock_dev *vdev = dev;
	struct kvm_irqfd irq;
	int r;

	if (vdev->vhost_fd == -1)
		return;

	/* The event queue stays in userspace; no vhost call fd needed. */
	if (is_event_vq(vq))
		return;

	irq = (struct kvm_irqfd) {
		.gsi	= gsi,
		.fd	= eventfd(0, 0),
	};
	file = (struct vhost_vring_file) {
		.index	= vq,
		.fd	= irq.fd,
	};

	r = ioctl(kvm->vm_fd, KVM_IRQFD, &irq);
	if (r < 0)
		die_perror("KVM_IRQFD failed");

	r = ioctl(vdev->vhost_fd, VHOST_SET_VRING_CALL, &file);
	if (r < 0)
		die_perror("VHOST_SET_VRING_CALL failed");
}
/* The device exposes RX, TX and event queues. */
static int get_vq_count(struct kvm *kvm, void *dev)
{
	return VSOCK_VQ_MAX;
}
/* Callback table wiring this device into the generic virtio layer. */
static struct virtio_ops vsock_dev_virtio_ops = {
	.get_config		= get_config,
	.get_host_features	= get_host_features,
	.set_guest_features	= set_guest_features,
	.init_vq		= init_vq,
	.get_vq			= get_vq,
	.get_size_vq		= get_size_vq,
	.set_size_vq		= set_size_vq,
	.notify_vq_eventfd	= notify_vq_eventfd,
	.notify_status		= notify_status,
	.notify_vq_gsi		= notify_vq_gsi,
	.notify_vq		= notify_vq,
	.get_vq_count		= get_vq_count,
};
static void virtio_vhost_vsock_init(struct kvm *kvm, struct vsock_dev *vdev)
{
struct kvm_mem_bank *bank;
struct vhost_memory *mem;
u64 features;
int r, i;
vdev->vhost_fd = open("/dev/vhost-vsock", O_RDWR);
if (vdev->vhost_fd < 0)
die_perror("Failed opening vhost-vsock device");
mem = calloc(1, sizeof(*mem) + sizeof(struct vhost_memory_region));
if (mem == NULL)
die("Failed allocating memory for vhost memory map");
i = 0;
list_for_each_entry(bank, &kvm->mem_banks, list) {
mem->regions[i] = (struct vhost_memory_region) {
.guest_phys_addr = bank->guest_phys_addr,
.memory_size = bank->size,
.userspace_addr = (unsigned long)bank->host_addr,
};
i++;
}
mem->nregions = i;
r = ioctl(vdev->vhost_fd, VHOST_SET_OWNER);
if (r != 0)
die_perror("VHOST_SET_OWNER failed");
r = ioctl(vdev->vhost_fd, VHOST_SET_MEM_TABLE, mem);
if (r != 0)
die_perror("VHOST_SET_MEM_TABLE failed");
r = ioctl(vdev->vhost_fd, VHOST_GET_FEATURES, &features);
if (r != 0)
die_perror("VHOST_GET_FEATURES failed");
r = ioctl(vdev->vhost_fd, VHOST_SET_FEATURES, &features);
if (r != 0)
die_perror("VHOST_SET_FEATURES failed");
r = ioctl(vdev->vhost_fd, VHOST_VSOCK_SET_GUEST_CID, &vdev->config.guest_cid);
if (r != 0)
die_perror("VHOST_VSOCK_SET_GUEST_CID failed");
vdev->vdev.use_vhost = true;
free(mem);
}
/*
 * Allocates one vsock device with guest CID @guest_cid, registers it with
 * the virtio transport and attaches the vhost-vsock kernel backend.
 * Returns 0 on success or a negative error code.
 */
static int virtio_vsock_init_one(struct kvm *kvm, u64 guest_cid)
{
	struct vsock_dev *vdev;
	int r;

	vdev = calloc(1, sizeof(struct vsock_dev));
	if (vdev == NULL)
		return -ENOMEM;

	/* vhost_fd starts at -1 so the callbacks no-op until vhost is up. */
	*vdev = (struct vsock_dev) {
		.config	= (struct virtio_vsock_config) {
			.guest_cid	= guest_cid,
		},
		.vhost_fd	= -1,
		.kvm		= kvm,
	};

	/* Added to the global list before init so cleanup can find it. */
	list_add_tail(&vdev->list, &vdevs);

	r = virtio_init(kvm, vdev, &vdev->vdev, &vsock_dev_virtio_ops,
			VIRTIO_DEFAULT_TRANS(kvm), PCI_DEVICE_ID_VIRTIO_VSOCK,
			VIRTIO_ID_VSOCK, PCI_CLASS_VSOCK);
	if (r < 0)
		return r;

	virtio_vhost_vsock_init(kvm, vdev);

	if (compat_id == -1)
		compat_id = virtio_compat_add_message("virtio-vsock", "CONFIG_VIRTIO_VSOCK");

	return 0;
}
/* Unlinks one device from the global list and frees it. */
static int virtio_vsock_exit_one(struct kvm *kvm, struct vsock_dev *vdev)
{
	list_del(&vdev->list);
	free(vdev);

	return 0;
}
/*
 * Framework entry point: creates the vsock device when a guest CID was
 * configured. Returns 0 on success (or when no device was requested), a
 * negative error code otherwise.
 */
int virtio_vsock_init(struct kvm *kvm)
{
	int r;

	if (kvm->cfg.vsock_cid == 0)
		return 0;

	r = virtio_vsock_init_one(kvm, kvm->cfg.vsock_cid);
	if (r < 0)
		goto cleanup;

	return 0;

cleanup:
	/*
	 * Tear down anything partially created, but propagate the original
	 * error. Previously this returned virtio_vsock_exit()'s
	 * unconditional 0, making failed initialization look successful.
	 */
	virtio_vsock_exit(kvm);
	return r;
}
/* Registers the init routine with the device initialization framework. */
virtio_dev_init(virtio_vsock_init);
/* Destroys every registered vsock device. Always returns 0. */
int virtio_vsock_exit(struct kvm *kvm)
{
	while (!list_empty(&vdevs)) {
		struct vsock_dev *vdev;

		vdev = list_first_entry(&vdevs, struct vsock_dev, list);
		virtio_vsock_exit_one(kvm, vdev);
	}

	return 0;
}
/* Registers the teardown routine with the device exit framework. */
virtio_dev_exit(virtio_vsock_exit);
| 3,530 |
1,687 | <filename>12-Backtracking-And-DFS/Type-3-FloodFill/1034-coloring-a-border/src/Solution2.java
import java.util.ArrayDeque;
import java.util.LinkedList;
import java.util.Queue;
/**
 * LeetCode 1034 "Coloring A Border" — breadth-first flood fill.
 *
 * <p>Starting from {@code (r0, c0)}, this visits the 4-connected component of
 * cells sharing the starting cell's color and repaints only the component's
 * border cells: cells on the grid edge, or adjacent to a cell of a different
 * color.
 */
public class Solution2 {

    private int rows;
    private int cols;
    private int[][] grid;
    // Original color of the starting cell; defines the connected component.
    private int origin;

    /** Neighbor offsets: down, right, up, left. */
    public static final int[][] DIRECTIONS = {{1, 0}, {0, 1}, {-1, 0}, {0, -1}};

    /**
     * Repaints the border of the connected component containing (r0, c0).
     *
     * @param grid  the grid, modified in place
     * @param r0    starting row
     * @param c0    starting column
     * @param color replacement color for border cells
     * @return the same grid instance, after repainting
     */
    public int[][] colorBorder(int[][] grid, int r0, int c0, int color) {
        this.rows = grid.length;
        this.cols = grid[0].length;
        this.grid = grid;
        if (grid[r0][c0] == color) {
            // Nothing to do: the component already has the target color.
            return grid;
        }
        this.origin = grid[r0][c0];

        boolean[][] visited = new boolean[rows][cols];
        visited[r0][c0] = true;
        // ArrayDeque is the idiomatic (and faster) Queue implementation.
        Queue<int[]> queue = new ArrayDeque<>();
        queue.add(new int[]{r0, c0});

        while (!queue.isEmpty()) {
            int[] cell = queue.poll();
            int x = cell[0];
            int y = cell[1];
            for (int[] direction : DIRECTIONS) {
                int newX = x + direction[0];
                int newY = y + direction[1];
                if (!inArea(newX, newY)) {
                    // Case 1: stepping off the grid — (x, y) is a border cell.
                    grid[x][y] = color;
                } else if (!visited[newX][newY]) {
                    if (grid[newX][newY] == origin) {
                        // Case 2: neighbor belongs to the component — expand.
                        queue.add(new int[]{newX, newY});
                        visited[newX][newY] = true;
                    } else {
                        // Case 3: differently-colored neighbor — border cell.
                        grid[x][y] = color;
                    }
                }
                // Already-visited neighbors need no action: they are part of
                // the component and were enqueued before any repainting, so
                // in-place color changes cannot corrupt the traversal.
            }
        }
        return grid;
    }

    /** Whether (x, y) lies inside the grid bounds. */
    private boolean inArea(int x, int y) {
        return x >= 0 && x < rows && y >= 0 && y < cols;
    }
}
1,010 | {
"profiles": {
"CommonCache.Test.MsalJava": {
"commandName": "Project",
"commandLineArgs": "--inputPath \"C:\\Users\\henrikm\\AppData\\Local\\Temp\\tmpDDB4.tmp\" "
}
}
} | 85 |
938 | //===- MipsOptionRecord.cpp - Abstraction for storing information ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "MipsOptionRecord.h"
#include "MipsABIInfo.h"
#include "MipsELFStreamer.h"
#include "MipsTargetStreamer.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/MC/MCAssembler.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSectionELF.h"
#include <cassert>
using namespace llvm;
// Emits the register-usage record collected by this object: an ODK_REGINFO
// entry in .MIPS.options for N64, or a raw .reginfo section otherwise.
void MipsRegInfoRecord::EmitMipsOptionRecord() {
  MCAssembler &MCA = Streamer->getAssembler();
  MipsTargetStreamer *MTS =
      static_cast<MipsTargetStreamer *>(Streamer->getTargetStreamer());

  Streamer->PushSection();

  // We need to distinguish between N64 and the rest because at the moment
  // we don't emit .Mips.options for other ELFs other than N64.
  // Since .reginfo has the same information as .Mips.options (ODK_REGINFO),
  // we can use the same abstraction (MipsRegInfoRecord class) to handle both.
  if (MTS->getABI().IsN64()) {
    // The EntrySize value of 1 seems strange since the records are neither
    // 1-byte long nor fixed length but it matches the value GAS emits.
    MCSectionELF *Sec =
        Context.getELFSection(".MIPS.options", ELF::SHT_MIPS_OPTIONS,
                              ELF::SHF_ALLOC | ELF::SHF_MIPS_NOSTRIP, 1, "");
    MCA.registerSection(*Sec);
    Sec->setAlignment(8);
    Streamer->SwitchSection(Sec);

    // Option-record header followed by the register masks and gp value.
    Streamer->EmitIntValue(ELF::ODK_REGINFO, 1);  // kind
    Streamer->EmitIntValue(40, 1); // size
    Streamer->EmitIntValue(0, 2);  // section
    Streamer->EmitIntValue(0, 4);  // info
    Streamer->EmitIntValue(ri_gprmask, 4);
    Streamer->EmitIntValue(0, 4); // pad
    Streamer->EmitIntValue(ri_cprmask[0], 4);
    Streamer->EmitIntValue(ri_cprmask[1], 4);
    Streamer->EmitIntValue(ri_cprmask[2], 4);
    Streamer->EmitIntValue(ri_cprmask[3], 4);
    Streamer->EmitIntValue(ri_gp_value, 8);
  } else {
    MCSectionELF *Sec = Context.getELFSection(".reginfo", ELF::SHT_MIPS_REGINFO,
                                              ELF::SHF_ALLOC, 24, "");
    MCA.registerSection(*Sec);
    Sec->setAlignment(MTS->getABI().IsN32() ? 8 : 4);
    Streamer->SwitchSection(Sec);

    Streamer->EmitIntValue(ri_gprmask, 4);
    Streamer->EmitIntValue(ri_cprmask[0], 4);
    Streamer->EmitIntValue(ri_cprmask[1], 4);
    Streamer->EmitIntValue(ri_cprmask[2], 4);
    Streamer->EmitIntValue(ri_cprmask[3], 4);
    // The 32-bit record can only hold a 32-bit gp value.
    assert((ri_gp_value & 0xffffffff) == ri_gp_value);
    Streamer->EmitIntValue(ri_gp_value, 4);
  }

  Streamer->PopSection();
}
// Marks physical register Reg (including all of its sub-registers) as used
// by setting the corresponding encoding bit in the mask for the register's
// class: the GPR mask or one of the per-coprocessor masks.
void MipsRegInfoRecord::SetPhysRegUsed(unsigned Reg,
                                       const MCRegisterInfo *MCRegInfo) {
  unsigned Value = 0;

  for (MCSubRegIterator SubRegIt(Reg, MCRegInfo, true); SubRegIt.isValid();
       ++SubRegIt) {
    unsigned CurrentSubReg = *SubRegIt;
    unsigned EncVal = MCRegInfo->getEncodingValue(CurrentSubReg);
    // Value accumulates the encoding bits across sub-registers.
    Value |= 1 << EncVal;

    if (GPR32RegClass->contains(CurrentSubReg) ||
        GPR64RegClass->contains(CurrentSubReg))
      ri_gprmask |= Value;
    else if (COP0RegClass->contains(CurrentSubReg))
      ri_cprmask[0] |= Value;
    // MIPS COP1 is the FPU.
    else if (FGR32RegClass->contains(CurrentSubReg) ||
             FGR64RegClass->contains(CurrentSubReg) ||
             AFGR64RegClass->contains(CurrentSubReg) ||
             MSA128BRegClass->contains(CurrentSubReg))
      ri_cprmask[1] |= Value;
    else if (COP2RegClass->contains(CurrentSubReg))
      ri_cprmask[2] |= Value;
    else if (COP3RegClass->contains(CurrentSubReg))
      ri_cprmask[3] |= Value;
  }
}
| 1,597 |
355 | #!/usr/bin/env python3
"""
Notes: I wanted to use this C-based graph library, but it's apt source
URLs were missing files AND the site used to log issues was failing
Google App Auth. Not a good sign?
https://graph-tool.skewed.de
igraph notes:
NOTES:
Terms with namespace: external are skipped.
Developer:
Once parsed, the graphs can be saved with Graph.write_pickle() : http://igraph.org/python/doc/igraph.Graph-class.html#write_pickle
"""
import argparse
import os
import pickle
import re
import igraph
from biocode import gff
def main():
    """Count annotated GO terms from a GFF3 file against slim target terms.

    NOTE(review): despite its help text ('one term per line'), --input_file
    is parsed as GFF3 via biocode.gff; the help string looks stale.
    NOTE(review): --slim_terms is accepted but never read below; the slim
    targets are instead the direct children of biological_process.
    """
    parser = argparse.ArgumentParser( description='Creates a slim version of a set of ontology terms.')

    # input GFF3 file whose polypeptide GO annotations will be slimmed
    parser.add_argument('-i', '--input_file', type=str, required=True, help='Input file with one term per line.  This is your file with ALL terms.' )
    parser.add_argument('-s', '--slim_terms', type=str, required=True, help='Plain text file with one term per line - your slim.' )
    parser.add_argument('-obo', '--ontology_file', type=str, required=True, help='Full obo file, providing the network of terms' )
    args = parser.parse_args()

    # Parse the OBO file and store a term lookup as well as graphs for each namespace
    terms, g = parse_obo_graph(args.ontology_file)

    # parse list of source GO terms, counting occurrences per GO ID
    source_go_terms = dict()
    assemblies, features = gff.get_gff3_features(args.input_file)
    for assembly_id in assemblies:
        for gene in assemblies[assembly_id].genes():
            for mRNA in gene.mRNAs():
                for polypeptide in mRNA.polypeptides():
                    annot = polypeptide.annotation
                    for go_annot in annot.go_annotations:
                        if go_annot.go_id in source_go_terms:
                            source_go_terms[go_annot.go_id] += 1
                        else:
                            source_go_terms[go_annot.go_id] = 1

    print("Slimming {0} unique source GO terms".format(len(source_go_terms)))

    # For testing, show all descendents of
    # GO:0008150 - biological_process
    # GO:0005575 - cellular_component
    # GO:0003674 - molecular_function
    biological_process_idx = terms['GO:0008150']['idx']

    # Slim targets are the vertices one is_a step below biological_process;
    # 'unknown' collects source terms that match no target.
    slim_targets = {'unknown': 0}
    for edge_id in g['biological_process'].incident(biological_process_idx, mode='IN'):
        #print("Found edge id {0}".format(edge_id))
        # only looking at those getting more specific
        source_idx = g['biological_process'].es[edge_id].source
        target_idx = g['biological_process'].es[edge_id].target
        slim_targets[source_idx] = 0

    # how many do we find of each target?
    for source_id in source_go_terms:
        # NOTE(review): assumes biocode stores GO IDs without the 'GO:'
        # prefix — confirm against gff.get_gff3_features output.
        source_id = "GO:{0}".format(source_id)
        matching_targets = list()
        best_path_dist = 1000

        # check this annotated GO ID against the SLIM targets; a source term
        # is assigned to the nearest target(s) by shortest is_a path length
        if source_id in terms:
            for target_idx in slim_targets:
                if target_idx != 'unknown':
                    paths = g['biological_process'].shortest_paths_dijkstra(source=terms[source_id]['idx'], target=target_idx)
                    path_dist = paths[0][0]

                    if path_dist != float('inf'):
                        if path_dist < best_path_dist:
                            best_path_dist = path_dist
                            matching_targets = [target_idx]
                        elif path_dist == best_path_dist:
                            matching_targets.append(target_idx)

            if len(matching_targets) > 0:
                for t_idx in matching_targets:
                    slim_targets[t_idx] += 1
            else:
                slim_targets['unknown'] += 1

    print("Slim counts:")
    for id in slim_targets:
        print("\t{0} - {1}".format(id, slim_targets[id]))
def parse_obo_graph(path):
    """Parse an OBO ontology file into per-namespace igraph DAGs.

    Returns a tuple ``(terms, g)`` where ``terms`` maps a GO ID to
    ``{'ns': namespace, 'idx': vertex_index}`` and ``g`` maps each of the
    three GO namespaces to a directed igraph.Graph whose edges run from a
    term to its ``is_a`` parents.

    Results are cached as pickles (``obo.graphs.*``) in the current
    directory and reused on subsequent runs.
    """
    stored_pickle_file_prefix = 'obo.graphs'
    stored_pickles_found = False

    g = {'biological_process': igraph.Graph(directed=True),
         'cellular_component': igraph.Graph(directed=True),
         'molecular_function': igraph.Graph(directed=True) }

    for ns in g:
        pickle_file_path = "{0}.{1}".format(stored_pickle_file_prefix, ns)
        if os.path.exists(pickle_file_path):
            print("Using stored ontology graph: {0}".format(pickle_file_path))
            g[ns] = igraph.Graph.Read_Pickle(fname=pickle_file_path)
            stored_pickles_found = True

    # key: GO:ID, value = {'ns': 'biological_process', 'idx': 25}
    terms = dict()

    terms_pickle_path = "{0}.terms".format(stored_pickle_file_prefix)
    if stored_pickles_found is True:
        # Bug fix: previously this printed the path of the last graph pickle
        # checked instead of the terms pickle actually being loaded.
        print("Using stored terms data structure: {0}".format(terms_pickle_path))
        with open(terms_pickle_path, 'rb') as f:
            terms = pickle.load(f)

    # key: namespace, value=int
    next_idx = {'biological_process': 0,
                'cellular_component': 0,
                'molecular_function': 0 }

    def add_vertex(id, namespace, name):
        # Commit one parsed [Term] stanza as a graph vertex (first pass).
        if namespace is None:
            raise Exception("Didn't find a namespace for term {0}".format(id))

        g[namespace].add_vertices(1)
        idx = next_idx[namespace]
        g[namespace].vs[idx]['id'] = id
        g[namespace].vs[idx]['name'] = name
        next_idx[namespace] += 1
        terms[id] = {'ns': namespace, 'idx': idx}

    def add_edges(id, namespace, is_a):
        # Commit one stanza's is_a relationships as edges (second pass).
        for is_a_id in is_a:
            # these two terms should be in the same namespace
            if terms[id]['ns'] != terms[is_a_id]['ns']:
                raise Exception("is_a relationship found with terms in different namespaces")
            g[namespace].add_edge(terms[id]['idx'], terms[is_a_id]['idx'])

    # First pass: register every GO term and its namespace.  This makes the
    # second pass far easier, since terms can be referenced which haven't
    # been seen yet.
    if stored_pickles_found is False:
        in_term = False
        id = None
        namespace = None
        name = None

        with open(path) as fh:
            for line in fh:
                line = line.rstrip()

                if line.startswith('['):
                    # A new stanza begins; commit the previous one only if it
                    # was a [Term].  Tracking the stanza type also prevents
                    # trailing [Typedef] stanzas from polluting term state,
                    # which previously corrupted the pending term's fields.
                    if in_term and id is not None:
                        add_vertex(id, namespace, name)
                    in_term = line.startswith('[Term]')
                    id = None
                    namespace = None
                    name = None
                elif line.startswith('id:'):
                    id = line.split(' ')[1]
                elif line.startswith('namespace:'):
                    namespace = line.split(' ')[1]
                elif line.startswith('name:'):
                    m = re.match('name: (.+)', line)
                    if m:
                        name = m.group(1).rstrip()
                    else:
                        raise Exception("Failed to regex this line: {0}".format(line))

        # Bug fix: no '[Term]' line follows the final stanza, so it was
        # previously dropped.  Flush it explicitly.
        if in_term and id is not None:
            add_vertex(id, namespace, name)

    # Second pass: add the is_a edges.
    if stored_pickles_found is False:
        in_term = False
        id = None
        namespace = None
        is_a = list()

        with open(path) as fh:
            for line in fh:
                line = line.rstrip()

                if line.startswith('['):
                    if in_term and id is not None:
                        add_edges(id, namespace, is_a)
                    in_term = line.startswith('[Term]')
                    id = None
                    namespace = None
                    is_a = list()
                elif line.startswith('id:'):
                    id = line.split(' ')[1]
                elif line.startswith('namespace:'):
                    namespace = line.split(' ')[1]
                elif line.startswith('is_a:'):
                    is_a.append(line.split(' ')[1])

        # Bug fix: flush the final stanza's edges as well.
        if in_term and id is not None:
            add_edges(id, namespace, is_a)

    if stored_pickles_found is False:
        for ns in g:
            pickle_file_path = "{0}.{1}".format(stored_pickle_file_prefix, ns)
            g[ns].write_pickle(fname=pickle_file_path)

        # save the terms too so we don't have to redo that parse
        with open(terms_pickle_path, 'wb') as f:
            pickle.dump(terms, f, pickle.HIGHEST_PROTOCOL)

    return terms, g
# Allow the script to be imported without side effects.
if __name__ == '__main__':
    main()
| 4,002 |
348 | <filename>docs/data/t2/064/64158.json
{"nom":"Cabidos","dpt":"Pyrénées-Atlantiques","inscrits":173,"abs":27,"votants":146,"blancs":11,"nuls":5,"exp":130,"res":[{"panneau":"1","voix":93},{"panneau":"2","voix":37}]} | 93 |
763 | <gh_stars>100-1000
package org.batfish.datamodel.acl;
import static org.batfish.datamodel.acl.AclLineMatchExprs.ESTABLISHED_TCP_FLOWS;
import static org.batfish.datamodel.acl.AclLineMatchExprs.NEW_FLOWS;
import static org.batfish.datamodel.acl.AclLineMatchExprs.NEW_TCP_FLOWS;
import static org.batfish.datamodel.acl.AclLineMatchExprs.and;
import static org.batfish.datamodel.acl.AclLineMatchExprs.matchSrcInterface;
import static org.batfish.datamodel.acl.AclLineMatchExprs.or;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.function.Supplier;
import org.batfish.datamodel.Flow;
import org.batfish.datamodel.IpProtocol;
import org.batfish.datamodel.TcpFlags;
import org.junit.Test;
/** Unit tests for the factory methods and canned expressions in {@code AclLineMatchExprs}. */
public class AclLineMatchExprsTest {

  /** Evaluates {@code expr} against {@code flow} arriving on interface "src". */
  private static boolean matches(AclLineMatchExpr expr, Flow flow) {
    return new Evaluator(flow, "src", ImmutableMap.of(), ImmutableMap.of()).visit(expr);
  }

  private static final Flow NEW_TCP_FLOW;
  private static final Flow ACK_FLOW;
  private static final Flow RST_FLOW;
  private static final Flow UDP_FLOW;

  static {
    // Fresh TCP builder per flow: src port 1, dst port 2, ingress node "n".
    Supplier<Flow.Builder> tcpBuilder =
        () ->
            Flow.builder()
                .setIngressNode("n")
                .setIpProtocol(IpProtocol.TCP)
                .setSrcPort(1)
                .setDstPort(2);
    ACK_FLOW = tcpBuilder.get().setTcpFlags(TcpFlags.builder().setAck(true).build()).build();
    RST_FLOW = tcpBuilder.get().setTcpFlags(TcpFlags.builder().setRst(true).build()).build();
    NEW_TCP_FLOW = tcpBuilder.get().build();
    UDP_FLOW =
        Flow.builder()
            .setIngressNode("n")
            .setIpProtocol(IpProtocol.UDP)
            .setSrcPort(1)
            .setDstPort(2)
            .build();
  }

  @Test
  public void testAnd() {
    // Empty conjunction is vacuously true; singleton conjunction collapses.
    assertThat(and(ImmutableList.of()), equalTo(TrueExpr.INSTANCE));

    MatchSrcInterface expr = matchSrcInterface("a");
    assertThat(and(ImmutableList.of(expr)), equalTo(expr));
  }

  @Test
  public void testOr() {
    // Empty disjunction is vacuously false; singleton disjunction collapses.
    assertThat(or(ImmutableList.of()), equalTo(FalseExpr.INSTANCE));

    MatchSrcInterface expr = matchSrcInterface("a");
    assertThat(or(ImmutableList.of(expr)), equalTo(expr));
  }

  @Test
  public void testEstablishedTcpFlows() {
    // Established = ACK or RST set; a bare TCP SYN-less flow is not established.
    assertTrue(matches(ESTABLISHED_TCP_FLOWS, ACK_FLOW));
    assertTrue(matches(ESTABLISHED_TCP_FLOWS, RST_FLOW));
    assertFalse(matches(ESTABLISHED_TCP_FLOWS, NEW_TCP_FLOW));
  }

  @Test
  public void testNewTcpFlows() {
    // New TCP excludes established flows and all non-TCP traffic.
    assertFalse(matches(NEW_TCP_FLOWS, ACK_FLOW));
    assertFalse(matches(NEW_TCP_FLOWS, RST_FLOW));
    assertFalse(matches(NEW_TCP_FLOWS, UDP_FLOW));
    assertTrue(matches(NEW_TCP_FLOWS, NEW_TCP_FLOW));
  }

  @Test
  public void testNewFlows() {
    // New flows: everything except established TCP (UDP is always new).
    assertFalse(matches(NEW_FLOWS, ACK_FLOW));
    assertFalse(matches(NEW_FLOWS, RST_FLOW));
    assertTrue(matches(NEW_FLOWS, NEW_TCP_FLOW));
    assertTrue(matches(NEW_FLOWS, UDP_FLOW));
  }
}
| 1,184 |
2,124 | <gh_stars>1000+
{"person":{"id":"34790912@N05","nsid":"34790912@N05","ispro":0,"can_buy_pro":0,"iconserver":"3729","iconfarm":4,"path_alias":null,"username":{"_content":"me.json"},"realname":{"_content":"<NAME>"},"mbox_sha1sum":{"_content":"48808a5fde9fed5261370554bdf317f891009e0d"},"location":{"_content":""},"timezone":{"label":"Canberra, Melbourne, Sydney","offset":"+10:00"},"description":{"_content":"human"},"photosurl":{"_content":"https:\/\/www.flickr.com\/photos\/34790912@N05\/"},"profileurl":{"_content":"https:\/\/www.flickr.com\/people\/34790912@N05\/"},"mobileurl":{"_content":"https:\/\/m.flickr.com\/photostream.gne?id=34785572"},"photos":{"firstdatetaken":{"_content":"2004-03-19 13:23:17"},"firstdate":{"_content":"1233238601"},"count":{"_content":152},"views":{"_content":"41"}}},"stat":"ok"} | 286 |
488 | // This is an example of a recursive macro, it is detect when it is in the input source file.
// to demonstrate the bug it has to be in a header file.
// #define always_inline __attribute__ (( always_inline )) __inline__
// #define inline always_inline
#include "test2015_159.h"
/* Forward declaration; the definition below carries the `inline` qualifier,
   which the recursive-macro note above may rewrite during preprocessing. */
static void foobar();

/* Intentionally empty static inline helper. */
static inline void foobar()
{
}
/* External entry point that invokes the static inline helper. */
void foo()
{
    foobar();
}
| 120 |
380 | /*
Copyright (C) 2006 <NAME>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/* convolution */
#ifndef CONVOLVE_H
#define CONVOLVE_H
#include <vector>
#include <algorithm>
#include <cmath>
#include "image.h"
/* convolve src with mask. dst is flipped! */
/* Convolve src with a symmetric (even) mask, mirroring at the image borders.
   The result is written transposed into dst (dst is flipped!). */
static void convolve_even(image<float> *src, image<float> *dst,
                          std::vector<float> &mask) {
  const int w = src->width();
  const int h = src->height();
  const int len = mask.size();
  for (int row = 0; row < h; row++) {
    for (int col = 0; col < w; col++) {
      /* center tap, then symmetric pairs clamped to the image bounds */
      float acc = mask[0] * imRef(src, col, row);
      for (int i = 1; i < len; i++) {
        const int left = std::max(col - i, 0);
        const int right = std::min(col + i, w - 1);
        acc += mask[i] * (imRef(src, left, row) + imRef(src, right, row));
      }
      /* note the swapped indices: the output image is transposed */
      imRef(dst, row, col) = acc;
    }
  }
}
/* convolve src with mask. dst is flipped! */
/* Convolve src with an antisymmetric (odd) mask, mirroring at the image
   borders. The result is written transposed into dst (dst is flipped!). */
static void convolve_odd(image<float> *src, image<float> *dst,
                         std::vector<float> &mask) {
  const int w = src->width();
  const int h = src->height();
  const int len = mask.size();
  for (int row = 0; row < h; row++) {
    for (int col = 0; col < w; col++) {
      /* center tap, then antisymmetric pairs (difference, not sum) */
      float acc = mask[0] * imRef(src, col, row);
      for (int i = 1; i < len; i++) {
        const int left = std::max(col - i, 0);
        const int right = std::min(col + i, w - 1);
        acc += mask[i] * (imRef(src, left, row) - imRef(src, right, row));
      }
      /* note the swapped indices: the output image is transposed */
      imRef(dst, row, col) = acc;
    }
  }
}
#endif
| 734 |
568 | <gh_stars>100-1000
//
// Copyright (c) 2009, <NAME>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef VIEWER_H
#define VIEWER_H
#include <QWidget>
#include <Inventor/Qt/viewers/SoQtExaminerViewer.h>
#include <Inventor/nodes/SoDrawStyle.h>
#include <Inventor/VRMLnodes/SoVRMLAppearance.h>
#include <Inventor/VRMLnodes/SoVRMLColor.h>
#include <Inventor/VRMLnodes/SoVRMLCoordinate.h>
#include <Inventor/VRMLnodes/SoVRMLGroup.h>
#include <Inventor/VRMLnodes/SoVRMLIndexedLineSet.h>
#include <Inventor/VRMLnodes/SoVRMLMaterial.h>
#include <Inventor/VRMLnodes/SoVRMLPointSet.h>
#include <Inventor/VRMLnodes/SoVRMLShape.h>
#include <Inventor/VRMLnodes/SoVRMLSwitch.h>
#include <Inventor/VRMLnodes/SoVRMLTransform.h>
#include <rl/plan/Model.h>
#include <rl/plan/VectorList.h>
#include <rl/plan/Viewer.h>
class SoGradientBackground;
/**
 * Qt widget embedding a Coin3D SoQtExaminerViewer and implementing the
 * rl::plan::Viewer interface.
 *
 * Planners invoke the draw*() slots to add geometry (configurations, edges,
 * paths, swept volumes, work-space frames, ...) to the VRML scene graph;
 * the reset*() slots clear a layer and the toggle*() slots show/hide it.
 */
class Viewer : public QWidget, public rl::plan::Viewer
{
    Q_OBJECT

public:
    Viewer(QWidget* parent = nullptr, Qt::WindowFlags f = Qt::WindowFlags());

    virtual ~Viewer();

    void setBackgroundColor(const QColor& color);

    // Step sizes used when drawing; presumably the sampling resolution for
    // paths (delta) and swept volumes (deltaSwept) -- TODO confirm in callers.
    rl::math::Real delta;

    rl::math::Real deltaSwept;

    // Model whose geometry is rendered by the draw slots.
    rl::plan::Model* model;

    // Root group that all scene content hangs off.
    SoVRMLGroup* sceneGroup;

    SoQtExaminerViewer* viewer;

public slots:
    void drawConfiguration(const rl::math::Vector& q);

    void drawConfigurationEdge(const rl::math::Vector& u, const rl::math::Vector& v, const bool& free = true);

    void drawConfigurationPath(const rl::plan::VectorList& path);

    void drawConfigurationVertex(const rl::math::Vector& q, const bool& free = true);

    void drawLine(const rl::math::Vector& xyz0, const rl::math::Vector& xyz1);

    void drawPoint(const rl::math::Vector& xyz);

    void drawSphere(const rl::math::Vector& center, const rl::math::Real& radius);

    void drawSweptVolume(const rl::plan::VectorList& path);

    void drawWork(const rl::math::Transform& t);

    void drawWorkEdge(const rl::math::Vector& u, const rl::math::Vector& v);

    void drawWorkPath(const rl::plan::VectorList& path);

    void drawWorkVertex(const rl::math::Vector& q);

    void reset();

    void resetEdges();

    void resetLines();

    void resetPath();

    void resetPath3();

    void resetPoints();

    void resetSpheres();

    void resetVertices();

    void saveImage(bool withAlpha);

    void saveScene();

    void showMessage(const std::string& message);

    void toggleConfigurationEdges(const bool& doOn);

    void toggleConfigurationVertices(const bool& doOn);

    void toggleLines(const bool& doOn);

    void togglePathEdges(const bool& doOn);

    void togglePathVertices(const bool& doOn);

    void togglePoints(const bool& doOn);

    void toggleSpheres(const bool& doOn);

    void toggleSweptVolume(const bool& doOn);

    void toggleWorkFrames(const bool& doOn);

protected:

private:
    // One group of VRML scene-graph nodes per visual layer. Each layer is a
    // switch (visibility) plus appearance/coordinate/geometry nodes.
    SoVRMLSwitch* background;

    SoGradientBackground* backgroundGradientBackground;

    // Configuration-space edges, split into colliding and free sub-layers.
    SoVRMLSwitch* edges;

    SoVRMLSwitch* edgesColliding;

    SoVRMLAppearance* edgesCollidingAppearance;

    SoVRMLCoordinate* edgesCollidingCoordinate;

    SoDrawStyle* edgesCollidingDrawStyle;

    SoVRMLIndexedLineSet* edgesCollidingIndexedLineSet;

    SoVRMLMaterial* edgesCollidingMaterial;

    SoVRMLShape* edgesCollidingShape;

    SoVRMLSwitch* edgesFree;

    SoVRMLAppearance* edgesFreeAppearance;

    SoVRMLCoordinate* edgesFreeCoordinate;

    SoDrawStyle* edgesFreeDrawStyle;

    SoVRMLIndexedLineSet* edgesFreeIndexedLineSet;

    SoVRMLMaterial* edgesFreeMaterial;

    SoVRMLShape* edgesFreeShape;

    // Work-space ("3") edges.
    SoVRMLSwitch* edges3;

    SoVRMLAppearance* edges3Appearance;

    SoVRMLCoordinate* edges3Coordinate;

    SoDrawStyle* edges3DrawStyle;

    SoVRMLIndexedLineSet* edges3IndexedLineSet;

    SoVRMLMaterial* edges3Material;

    SoVRMLShape* edges3Shape;

    SoVRMLIndexedLineSet* frameIndexedLineSet;

    // Free-form lines drawn via drawLine().
    SoVRMLSwitch* lines;

    SoVRMLAppearance* linesAppearance;

    SoVRMLCoordinate* linesCoordinate;

    SoDrawStyle* linesDrawStyle;

    SoVRMLIndexedLineSet* linesIndexedLineSet;

    SoVRMLMaterial* linesMaterial;

    SoVRMLShape* linesShape;

    // Configuration-space solution path (edges + vertices).
    SoVRMLSwitch* path;

    SoVRMLSwitch* pathEdges;

    SoVRMLAppearance* pathEdgesAppearance;

    SoVRMLCoordinate* pathEdgesCoordinate;

    SoDrawStyle* pathEdgesDrawStyle;

    SoVRMLIndexedLineSet* pathEdgesIndexedLineSet;

    SoVRMLMaterial* pathEdgesMaterial;

    SoVRMLShape* pathEdgesShape;

    SoVRMLSwitch* pathVertices;

    SoVRMLAppearance* pathVerticesAppearance;

    SoVRMLCoordinate* pathVerticesCoordinate;

    SoDrawStyle* pathVerticesDrawStyle;

    SoVRMLPointSet* pathVerticesPointSet;

    SoVRMLMaterial* pathVerticesMaterial;

    SoVRMLShape* pathVerticesShape;

    // Work-space ("3") path.
    SoVRMLSwitch* path3;

    SoVRMLAppearance* path3Appearance;

    SoVRMLCoordinate* path3Coordinate;

    SoDrawStyle* path3DrawStyle;

    SoVRMLIndexedLineSet* path3IndexedLineSet;

    SoVRMLMaterial* path3Material;

    SoVRMLShape* path3Shape;

    // Points drawn via drawPoint().
    SoVRMLSwitch* points;

    SoVRMLAppearance* pointsAppearance;

    SoVRMLCoordinate* pointsCoordinate;

    SoDrawStyle* pointsDrawStyle;

    SoVRMLPointSet* pointsPointSet;

    SoVRMLMaterial* pointsMaterial;

    SoVRMLShape* pointsShape;

    SoVRMLSwitch* root;

    SoVRMLSwitch* scene;

    SoDrawStyle* sceneDrawStyle;

    // Spheres drawn via drawSphere().
    SoVRMLSwitch* spheres;

    SoVRMLAppearance* spheresAppearance;

    SoDrawStyle* spheresDrawStyle;

    SoVRMLGroup* spheresGroup;

    SoVRMLMaterial* spheresMaterial;

    // Swept-volume layer built by drawSweptVolume().
    SoVRMLSwitch* swept;

    SoVRMLGroup* sweptGroup;

    // Roadmap vertices, split into colliding and free sub-layers.
    SoVRMLSwitch* vertices;

    SoVRMLSwitch* verticesColliding;

    SoVRMLAppearance* verticesCollidingAppearance;

    SoVRMLColor* verticesCollidingColor;

    SoVRMLCoordinate* verticesCollidingCoordinate;

    SoDrawStyle* verticesCollidingDrawStyle;

    SoVRMLPointSet* verticesCollidingPointSet;

    SoVRMLMaterial* verticesCollidingMaterial;

    SoVRMLShape* verticesCollidingShape;

    SoVRMLSwitch* verticesFree;

    SoVRMLAppearance* verticesFreeAppearance;

    SoVRMLColor* verticesFreeColor;

    SoVRMLCoordinate* verticesFreeCoordinate;

    SoDrawStyle* verticesFreeDrawStyle;

    SoVRMLPointSet* verticesFreePointSet;

    SoVRMLMaterial* verticesFreeMaterial;

    SoVRMLShape* verticesFreeShape;

    // Work-space frames drawn via drawWork().
    SoVRMLSwitch* work;

    SoDrawStyle* workDrawStyle;

    SoVRMLTransform* workTransform;
};
#endif // VIEWER_H
| 2,664 |
876 | #pragma once
#include "rapidcheck/Seq.h"
namespace rc {
namespace shrink {

/// Tries to shrink the given container by removing successively smaller
/// consecutive chunks of it.
///
/// `Container` must support the following:
/// - `RandomAccessIterator begin(Container)`
/// - `RandomAccessIterator end(Container)`
/// - `Container::insert(It, It, It)`
/// - `Container::reserve(std::size_t)`
///
/// @param elements The collection to shrink.
/// @return A lazy sequence of progressively less-shrunk copies of `elements`.
template <typename Container>
Seq<Container> removeChunks(Container elements);

/// Tries to shrink each element of the given container using the given
/// callable to create sequences of shrinks for that element.
///
/// `Container` must support `begin(Container)` and `end(Container)` which must
/// return random access iterators.
///
/// @param elements  The collection whose elements to shrink.
/// @param shrink    A callable which returns a `Seq<T>` given an element
///                  to shrink.
template <typename Container, typename Shrink>
Seq<Container> eachElement(Container elements, Shrink shrink);

/// Shrinks an integral value towards another integral value.
///
/// @param value  The value to shrink.
/// @param target The integer to shrink towards.
template <typename T>
Seq<T> towards(T value, T target);

/// Shrinks an arbitrary integral value.
///
/// @param value The value to shrink.
template <typename T>
Seq<T> integral(T value);

/// Shrinks an arbitrary real value.
///
/// @param value The value to shrink.
template <typename T>
Seq<T> real(T value);

/// Shrinks an arbitrary boolean value.
inline Seq<bool> boolean(bool value);

/// Shrinks a text character.
///
/// @param value The character to shrink.
template <typename T>
Seq<T> character(T value);

} // namespace shrink
} // namespace rc
#include "Shrink.hpp"
| 492 |
333 | <reponame>alipay/alipay-sdk-java-all<filename>src/main/java/com/alipay/api/domain/AlipayAssetVoucherTemplateInfoQuerybudgetModel.java
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 券模板预算查询
*
* @author auto create
* @since 1.0, 2021-02-01 11:19:11
*/
/**
 * Request model for querying the budget of voucher templates (券模板预算查询).
 *
 * <p>Plain data holder; fields are mapped onto API parameters via
 * {@link ApiField} annotations.
 *
 * @author auto create
 * @since 1.0, 2021-02-01 11:19:11
 */
public class AlipayAssetVoucherTemplateInfoQuerybudgetModel extends AlipayObject {

    private static final long serialVersionUID = 5179155475615675211L;

    /**
     * Whether to query real-time data: {@code true} for real-time,
     * {@code false} for non-real-time data (5-minute delay).
     * (是否查询实时数据,true:查询实时数据,false:查询非实时数据(5分钟延迟))
     */
    @ApiField("is_real_time_data")
    private Boolean isRealTimeData;

    /**
     * Budget type; only {@code MONEY} (monetary budget) or {@code AMOUNT}
     * (quantity budget) are valid.
     * (预算类型,取值范围只能是MONEY、AMOUNT)
     */
    @ApiField("mode")
    private String mode;

    /**
     * Voucher template ID list, formatted as a JSON array.
     * (券模板ID列表,请使用json数组格式)
     */
    @ApiField("template_id_list")
    private String templateIdList;

    public Boolean getIsRealTimeData() {
        return isRealTimeData;
    }

    public void setIsRealTimeData(Boolean isRealTimeData) {
        this.isRealTimeData = isRealTimeData;
    }

    public String getMode() {
        return mode;
    }

    public void setMode(String mode) {
        this.mode = mode;
    }

    public String getTemplateIdList() {
        return templateIdList;
    }

    public void setTemplateIdList(String templateIdList) {
        this.templateIdList = templateIdList;
    }
}
| 682 |
1,729 | /************************************************************************
* file name : round_progress_widget.h
* ----------------- :
* creation time : 2018/05/17
* author : <NAME>
* email : <EMAIL>
* ----------------- :
* description : The file contains declaration of RoundProgressWidget.
* ----------------- :
* license : Lightweight profiler library for c++
* : Copyright(C) 2016-2019 <NAME>, <NAME>
* :
* : Licensed under either of
* : * MIT license (LICENSE.MIT or http://opensource.org/licenses/MIT)
* : * Apache License, Version 2.0, (LICENSE.APACHE or http://www.apache.org/licenses/LICENSE-2.0)
* : at your option.
* :
* : The MIT License
* :
* : Permission is hereby granted, free of charge, to any person obtaining a copy
* : of this software and associated documentation files (the "Software"), to deal
* : in the Software without restriction, including without limitation the rights
* : to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* : of the Software, and to permit persons to whom the Software is furnished
* : to do so, subject to the following conditions:
* :
* : The above copyright notice and this permission notice shall be included in all
* : copies or substantial portions of the Software.
* :
* : THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* : INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
* : PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* : LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* : TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
* : USE OR OTHER DEALINGS IN THE SOFTWARE.
* :
* : The Apache License, Version 2.0 (the "License")
* :
* : You may not use this file except in compliance with the License.
* : You may obtain a copy of the License at
* :
* : http://www.apache.org/licenses/LICENSE-2.0
* :
* : Unless required by applicable law or agreed to in writing, software
* : distributed under the License is distributed on an "AS IS" BASIS,
* : WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* : See the License for the specific language governing permissions and
* : limitations under the License.
************************************************************************/
#ifndef ROUND_PROGRESS_WIDGET_H
#define ROUND_PROGRESS_WIDGET_H
#include <stdint.h>
#include <QColor>
#include <QDialog>
#include <QTimer>
#include <QWidget>
/**
 * Circular progress indicator widget with an optional button rendered in its
 * center (Cross or Stop), which emits buttonClicked() with the configured
 * dialog role when pressed.
 *
 * Two rendering styles are supported: Percent (progress value driven) and
 * Infinite (timer-driven animation via m_animationTimer/onTimeout).
 * Colors, widths and styles are exposed as Qt properties so they can be
 * set from style sheets.
 */
class RoundProgressIndicator : public QWidget
{
    Q_OBJECT

public:
    enum ButtonStyle { NoButton = 0, Cross, Stop };
    enum Style { Percent = 0, Infinite };

private:
    using Parent = QWidget;
    using This = RoundProgressIndicator;

    QTimer m_animationTimer;              // periodic tick; see onTimeout()
    QString m_text;
    QColor m_background;
    QColor m_color;
    QColor m_buttonColor;
    qreal m_buttonSize;
    Style m_style;
    ButtonStyle m_buttonStyle;
    QDialog::DialogCode m_buttonRole;     // role reported by buttonClicked()
    int m_angle;                          // current rotation for Infinite style
    int m_indicatorWidth;
    int m_crossWidth;
    int8_t m_value;                       // progress value for Percent style
    bool m_pressed;                       // mouse currently pressed on the button

public:
    Q_PROPERTY(QColor color READ color WRITE setColor);
    Q_PROPERTY(QColor background READ background WRITE setBackground);
    Q_PROPERTY(QColor buttonColor READ buttonColor WRITE setButtonColor);
    Q_PROPERTY(qreal buttonSize READ buttonSize WRITE setButtonSize);
    Q_PROPERTY(int indicatorWidth READ indicatorWidth WRITE setIndicatorWidth);
    Q_PROPERTY(int crossWidth READ crossWidth WRITE setCrossWidth);
    Q_PROPERTY(QString buttonStyle READ buttonStyleStr WRITE setButtonStyle);
    Q_PROPERTY(QString buttonRole READ buttonRoleStr WRITE setButtonRole);
    Q_PROPERTY(QString style READ styleStr WRITE setStyle);

    explicit RoundProgressIndicator(QWidget* parent = nullptr);
    ~RoundProgressIndicator() override;

    int value() const;
    void setValue(int value);
    void reset();

    QColor background() const;
    QColor color() const;
    QColor buttonColor() const;
    qreal buttonSize() const;
    int indicatorWidth() const;
    int crossWidth() const;

    // Button style accessors; the *Str variants exist for the QString-typed
    // Q_PROPERTY declarations above.
    ButtonStyle buttonStyle() const;
    QString buttonStyleStr() const;
    void setButtonStyle(ButtonStyle style);
    void setButtonStyle(QString style);

    QDialog::DialogCode buttonRole() const;
    QString buttonRoleStr() const;
    void setButtonRole(QDialog::DialogCode role);
    void setButtonRole(QString role);

    Style style() const;
    QString styleStr() const;
    void setStyle(Style style);
    void setStyle(QString style);

signals:
    void buttonClicked(int role);
    void sizeChanged();

public slots:
    void setBackground(QColor color);
    void setBackground(QString color);
    void setColor(QColor color);
    void setColor(QString color);
    void setButtonColor(QColor color);
    void setButtonColor(QString color);
    void setButtonSize(qreal size);
    void setIndicatorWidth(int width);
    void setCrossWidth(int width);

private slots:
    void onTimeout();

protected:
    void showEvent(QShowEvent* event) override;
    void paintEvent(QPaintEvent* event) override;
    void enterEvent(QEvent* event) override;
    void leaveEvent(QEvent* event) override;
    void mousePressEvent(QMouseEvent* event) override;
    void mouseReleaseEvent(QMouseEvent* event) override;
    void mouseMoveEvent(QMouseEvent* event) override;

private:
    void updateSize();
    void paintCrossButton(QPainter& painter, QRect& r);
    void paintStopButton(QPainter& painter, QRect& r);

}; // end of class RoundProgressIndicator.
using RoundProgressButtonStyle = RoundProgressIndicator::ButtonStyle;
using RoundProgressStyle = RoundProgressIndicator::Style;
/**
 * Composite widget pairing a RoundProgressIndicator with a title label that
 * can be placed above (Top) or below (Bottom) the indicator.
 *
 * Forwards value/style/button configuration to the embedded indicator and
 * re-emits its button click as finished(role).
 */
class RoundProgressWidget : public QWidget
{
    Q_OBJECT

    using Parent = QWidget;
    using This = RoundProgressWidget;

public:
    Q_PROPERTY(bool topTitlePosition READ isTopTitlePosition WRITE setTopTitlePosition NOTIFY titlePositionChanged);

    enum TitlePosition : int8_t
    {
        Top = 0,
        Bottom,
    };

private:
    class QLabel* m_title;                // title text label
    QWidget* m_indicatorWrapper;          // layout wrapper around the indicator
    RoundProgressIndicator* m_indicator;
    TitlePosition m_titlePosition;

public:
    explicit RoundProgressWidget(QWidget* parent = nullptr);
    explicit RoundProgressWidget(const QString& title, QWidget* parent = nullptr);
    ~RoundProgressWidget() override;

    void setTitle(const QString& title);
    int value() const;
    TitlePosition titlePosition() const;
    bool isTopTitlePosition() const;

    RoundProgressButtonStyle buttonStyle() const;
    void setButtonStyle(RoundProgressButtonStyle style);
    QDialog::DialogCode buttonRole() const;
    void setButtonRole(QDialog::DialogCode role);
    RoundProgressStyle style() const;
    void setStyle(RoundProgressStyle style);

public slots:
    void setValue(int value);
    void reset();
    void setTitlePosition(TitlePosition pos);
    void setTopTitlePosition(bool isTop);

signals:
    void valueChanged(int value);
    void finished(int role);
    void titlePositionChanged();

}; // end of class RoundProgressWidget.
/**
 * Frameless-style dialog hosting a RoundProgressWidget, with configurable
 * rounded-corner background (painted in paintEvent).
 *
 * The embedded progress widget's finished(role) signal is handled in
 * onFinished() to close the dialog with the appropriate dialog code.
 */
class RoundProgressDialog : public QDialog
{
    Q_OBJECT

    using Parent = QDialog;
    using This = RoundProgressDialog;

    RoundProgressWidget* m_progress;
    QColor m_background;       // dialog background fill color
    int m_borderRadius;        // rounded-corner radius in pixels

public:
    Q_PROPERTY(QColor background READ background WRITE setBackground);
    Q_PROPERTY(int borderRadius READ borderRadius WRITE setBorderRadius);

    explicit RoundProgressDialog(const QString& title, QWidget* parent = nullptr);

    RoundProgressDialog(
        const QString& title,
        RoundProgressIndicator::ButtonStyle button,
        QDialog::DialogCode buttonRole,
        QWidget* parent = nullptr
    );

    ~RoundProgressDialog() override;

    QColor background() const;
    int borderRadius() const;

    RoundProgressButtonStyle buttonStyle() const;
    void setButtonStyle(RoundProgressButtonStyle style);
    QDialog::DialogCode buttonRole() const;
    void setButtonRole(QDialog::DialogCode role);
    RoundProgressStyle style() const;
    void setStyle(RoundProgressStyle style);
    void setTitle(const QString& title);

protected:
    void showEvent(QShowEvent* event) override;
    void paintEvent(QPaintEvent* event) override;

public slots:
    void setBackground(QColor color);
    void setBackground(QString color);
    void setBorderRadius(int radius);
    void setValue(int value);

signals:
    void valueChanged(int value);

private slots:
    void onFinished(int role);

}; // end of RoundProgressDialog.
#endif // ROUND_PROGRESS_WIDGET_H
| 3,761 |
903 | <gh_stars>100-1000
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.smithy.cli;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.logging.ConsoleHandler;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
/**
* Package-private utilities for configuring CLI logging.
*/
final class LoggingUtil {
private static final SimpleDateFormat FORMAT = new SimpleDateFormat("HH:mm:ss.SSS");
private LoggingUtil() {}
static void configureLogging(boolean debug, Level level) {
Logger rootLogger = Logger.getLogger("");
removeConsoleHandler(rootLogger);
addCliHandler(debug, level, rootLogger);
}
private static void addCliHandler(boolean debug, Level level, Logger rootLogger) {
if (level != Level.OFF) {
Handler handler = debug
// Debug ignores the given logging level and just logs everything.
? new CliLogHandler(new DebugFormatter())
: new CliLogHandler(new BasicFormatter());
handler.setLevel(level);
rootLogger.addHandler(handler);
}
rootLogger.setLevel(level);
for (Handler h : rootLogger.getHandlers()) {
h.setLevel(level);
}
}
private static void removeConsoleHandler(Logger rootLogger) {
for (Handler handler : rootLogger.getHandlers()) {
if (handler instanceof ConsoleHandler) {
// Remove any console log handlers.
rootLogger.removeHandler(handler);
}
}
}
private static final class BasicFormatter extends SimpleFormatter {
@Override
public synchronized String format(LogRecord r) {
return FORMAT.format(new Date(r.getMillis()))
+ " " + r.getLevel().getLocalizedName() + " - "
+ r.getMessage();
}
}
private static final class DebugFormatter extends SimpleFormatter {
@Override
public synchronized String format(LogRecord r) {
return FORMAT.format(new Date(r.getMillis()))
+ " [" + Thread.currentThread().getName() + "] "
+ r.getLevel().getLocalizedName() + " "
+ r.getLoggerName() + " - "
+ r.getMessage();
}
}
/**
* Logs messages to the CLI's redirect stderr.
*/
private static final class CliLogHandler extends Handler {
private final Formatter formatter;
CliLogHandler(Formatter formatter) {
this.formatter = formatter;
}
@Override
public void publish(LogRecord record) {
if (isLoggable(record)) {
Cli.stderr(formatter.format(record));
}
}
@Override
public void flush() {
}
@Override
public void close() {
}
}
}
| 1,466 |
834 | #using <System.Windows.Forms.dll>
#using <System.Drawing.dll>
#using <System.dll>
#using <System.Data.dll>
using namespace System;
using namespace System::Data;
using namespace System::ComponentModel;
using namespace System::Windows::Forms;
// Sample form hosting a RichTextBox; demonstrates RichTextBox::Find
// (the documentation sample is delimited by the Snippet1 markers).
public ref class Form1: public Form
{
protected:
    RichTextBox^ richTextBox1;

    // <Snippet1>
public:
    // Returns true when 'text' is non-empty and occurs in richTextBox1.
    bool FindMyText( String^ text )
    {
        // Initialize the return value to false by default.
        bool returnValue = false;

        // Ensure a search string has been specified.
        if ( text->Length > 0 )
        {
            // Obtain the location of the search string in richTextBox1.
            int indexToText = richTextBox1->Find( text );

            // Determine whether the text was found in richTextBox1.
            if ( indexToText >= 0 )
            {
                returnValue = true;
            }
        }

        return returnValue;
    }
    // </Snippet1>
};
| 358 |
988 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.j2ee.deployment.impl;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Set;
import java.util.jar.JarOutputStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import javax.enterprise.deploy.shared.CommandType;
import javax.enterprise.deploy.spi.Target;
import org.netbeans.modules.j2ee.deployment.devmodules.api.J2eeApplication;
import org.netbeans.modules.j2ee.deployment.devmodules.api.J2eeModule;
import org.netbeans.modules.j2ee.deployment.execution.ModuleConfigurationProvider;
import org.netbeans.modules.j2ee.deployment.impl.projects.DeploymentTarget;
import org.netbeans.modules.j2ee.deployment.plugins.api.ServerProgress;
import org.netbeans.modules.j2ee.deployment.plugins.spi.IncrementalDeployment;
import org.openide.filesystems.FileLock;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
/**
*
* @author nn136682
*/
public class InitialServerFileDistributor extends ServerProgress {
private static final Logger LOGGER = Logger.getLogger(InitialServerFileDistributor.class.getName());
private final ServerString serverString;
private final DeploymentTarget dtarget;
private final IncrementalDeployment incDeployment;
private final Target target;
boolean inPlace = false;
    /**
     * Creates a distributor for the given deployment target and server target.
     *
     * @param dtarget the module/project being deployed
     * @param target  the server target to deploy to
     */
    public InitialServerFileDistributor(DeploymentTarget dtarget, Target target) {
        super(dtarget.getServer().getServerInstance());
        this.serverString = dtarget.getServer();
        this.dtarget = dtarget;
        this.target = target;
        incDeployment = serverString.getServerInstance().getIncrementalDeployment();
    }
    /**
     * Performs the initial distribution of the module (and, for an EAR, each of
     * its child modules) into the directory supplied by the server plugin.
     *
     * @return the deployment directory, the module's own content directory when
     *         the plugin requests in-place deployment, or {@code null} on
     *         failure (progress/failure is reported via ServerProgress events)
     */
    public File distribute() {
        ModuleConfigurationProvider deployment = dtarget.getModuleConfigurationProvider();
        J2eeModule source = dtarget.getModule();
        String name = dtarget.getDeploymentName();
        File dir = incDeployment.getDirectoryForNewApplication(name, target, deployment.getModuleConfiguration());
        try {
            // A null directory means the plugin deploys in place, straight from
            // the module's content directory.
            if (dir == null) {
                inPlace = true;
                if (dtarget.getModule().getContentDirectory() != null) {
                    dir = FileUtil.toFile(dtarget.getModule().getContentDirectory());
                }
                if (dir == null) {
                    String msg = NbBundle.getMessage(InitialServerFileDistributor.class, "MSG_InPlaceNoSupport");
                    setStatusDistributeFailed(msg);
                    return null;
                } else {
                    setStatusDistributeCompleted(NbBundle.getMessage(InitialServerFileDistributor.class, "MSG_InPlaceDeployment", dir)); //NOI18N
                    return dir;
                }
            }
            setStatusDistributeRunning(NbBundle.getMessage(
                    InitialServerFileDistributor.class, "MSG_RunningInitialDeploy", dtarget.getDeploymentName(), dir));
            _distribute(source.getArchiveContents(), dir, collectChildModuleNames(source));
            // For an EAR, distribute each child module into its own subdirectory.
            if (source instanceof J2eeApplication) {
                J2eeModule[] childModules = ((J2eeApplication) source).getModules();
                for (int i = 0; i < childModules.length; i++) {
                    String uri = childModules[i].getUrl();
                    J2eeModule childModule = deployment.getJ2eeModule(uri);
                    File subdir = incDeployment.getDirectoryForNewModule(dir, uri, childModule, deployment.getModuleConfiguration());
                    _distribute(childModules[i].getArchiveContents(), subdir, null);
                }
            }
            setStatusDistributeCompleted(NbBundle.getMessage(
                    InitialServerFileDistributor.class, "MSG_DoneInitialDistribute", dtarget.getDeploymentName()));
            return dir;
        } catch (Exception e) {
            LOGGER.log(Level.INFO, null, e);
            setStatusDistributeFailed(e.getMessage());
            // Remove partially distributed files; skipped for in-place deployment
            // since the content directory belongs to the project.
            if (!inPlace && !cleanup(dir)) {
                setStatusDistributeFailed("Failed to cleanup the data after unsucesful distribution");
            }
        }
        return null;
    }
    /**
     * Collects the file names of an application's child modules so that .jar and
     * .war files under the application root with the same name as one of the
     * deployed modules can be skipped. Those are typically jars corresponding to
     * an already existing exploded directory and we don't want to deploy them
     * --> see also #199096 and #222924 for more details.
     *
     * @param source the deployed module; only {@link J2eeApplication}s yield names
     * @return the simple (last path segment) names of all child modules
     */
    private Set<String> collectChildModuleNames(J2eeModule source) {
        final Set<String> childModuleNames = new HashSet<String>();
        if (source instanceof J2eeApplication) {
            for (J2eeModule module : ((J2eeApplication) source).getModules()) {
                // We have to use getUrl() --> it's the only method that takes the
                // maven ear plugin fileNameMapping attribute into account
                String moduleURL = module.getUrl();
                if (moduleURL != null) {
                    moduleURL = moduleURL.substring(moduleURL.lastIndexOf("/") + 1); // NOI18N
                    childModuleNames.add(moduleURL);
                }
            }
        }
        return childModuleNames;
    }
private boolean cleanup(File f) {
String [] chNames = f.list();
boolean deleted = true;
if (chNames != null) {
for (int i = 0; i < chNames.length; i++) {
File ch = new File(f.getAbsolutePath(), chNames[i]);
if (ch.isDirectory()) {
deleted = deleted && cleanup(ch);
} else {
deleted = deleted && ch.delete();
}
}
}
deleted = deleted && f.delete();
return deleted;
}
    /**
     * Copies the given archive entries into {@code dir}, first clearing any
     * content left there from a previous deployment. Data entries whose relative
     * path appears in {@code childModuleNames} are skipped (archives shadowing
     * exploded child modules; see {@code collectChildModuleNames}).
     *
     * @throws RuntimeException wrapping any failure, after reporting it via
     *         {@code setStatusDistributeFailed}
     */
    private void _distribute(Iterator<J2eeModule.RootedEntry> rootedEntries, File dir, Set<String> childModuleNames) {
        // NOTE(review): lock is never assigned a non-null value in this method,
        // so the release in the finally block is dead code.
        FileLock lock = null;
        try {
            // this is just safeguard - should not happen anymore
            // used to happen in EAR when folder had a same name as jar
            // and jar was copied to exploded dir
            if (dir.exists() && dir.isFile()) {
                dir.delete();
            }
            // mkdirs()/toFileObject is not not tolerated any more.
            FileObject destRoot = FileUtil.createFolder(dir);
            // Remove leftovers from any previous deployment.
            FileObject[] garbages = destRoot.getChildren();
            for (int i = 0; i < garbages.length; i++) {
                try {
                    garbages[i].delete();
                } catch (java.io.IOException ioe) {
                    LOGGER.log(Level.FINER, null, ioe);
                    if (Utilities.isWindows()) {
                        // On Windows a locked jar/zip cannot be deleted; fall back
                        // to zeroOutArchive, which presumably empties the archive
                        // in place -- its definition is below this method.
                        String ext = garbages[i].getExt().toLowerCase(Locale.ENGLISH);
                        if ("jar".equals(ext) || "zip".equals(ext)) {
                            zeroOutArchive(garbages[i]);
                        } else {
                            throw ioe;
                        }
                    } else {
                        throw ioe;
                    }
                }
            }
            // Copy every entry; create folders for directory entries.
            while (rootedEntries.hasNext()) {
                J2eeModule.RootedEntry entry = rootedEntries.next();
                String relativePath = entry.getRelativePath();
                FileObject sourceFO = entry.getFileObject();
                // Skip archives that shadow a deployed child module.
                if (childModuleNames != null && childModuleNames.contains(relativePath) && sourceFO.isData()) {
                    continue;
                }
                FileObject dest = ServerFileDistributor.findOrCreateParentFolder(destRoot, relativePath);
                if (sourceFO.isData()) {
                    copyFile(sourceFO, dir, relativePath);
                } else if (dest != null && sourceFO.isFolder()) {
                    FileUtil.createFolder(dest, new File(relativePath).getName());
                }
            }
        } catch (Exception e) {
            LOGGER.log(Level.FINER, null, e);
            String msg = NbBundle.getMessage(InitialServerFileDistributor.class, "MSG_IncrementalDeployFailed", e);
            setStatusDistributeFailed(msg);
            throw new RuntimeException(e);
        } finally {
            if (lock != null) {
                try {
                    lock.releaseLock();
                } catch (Exception ex) {
                }
            }
        }
    }
//ServerProgress methods
// Thin wrappers that publish DISTRIBUTE-phase progress events to listeners.

// Reports that distribution is in progress.
private void setStatusDistributeRunning(String message) {
    notify(createRunningProgressEvent(CommandType.DISTRIBUTE, message));
}

// Reports that distribution failed.
private void setStatusDistributeFailed(String message) {
    notify(createFailedProgressEvent(CommandType.DISTRIBUTE, message));
}

// Reports that distribution completed successfully.
private void setStatusDistributeCompleted(String message) {
    notify(createCompletedProgressEvent(CommandType.DISTRIBUTE, message));
}
// Make this method speedie quick... since folks can have large
// projects, but expect the IDE to be as fast or faster that zip or jar
//
/**
 * Copies a single archive entry to {@code directory/relativePath}.
 * <p>
 * Uses a zero-copy {@link FileChannel#transferTo} loop when the source is a
 * readable file on disk, falling back to a stream copy otherwise.
 * Zero-length jar/zip sources are skipped entirely — an empty archive is
 * never valid.
 * <p>
 * Improvement: the five identical close-and-log blocks were collapsed into
 * the {@link #closeQuietly} helper; close order (out, in, is, fis, os) and
 * logging behavior are unchanged.
 *
 * @param sourceObject source entry to copy
 * @param directory target root directory
 * @param relativePath destination path relative to {@code directory}
 * @throws IOException if the copy itself fails (close failures are logged)
 */
private void copyFile(FileObject sourceObject, File directory, String relativePath) throws IOException {
    String ext = sourceObject.getExt();
    if (sourceObject.getSize() == 0 && ("zip".equals(ext) || "jar".equals(ext))) { // NOI18N
        // a zero length jar or zip file is NEVER ok...
        return;
    }
    File destFile = new File(directory, relativePath);
    FileOutputStream os = new FileOutputStream(destFile);
    FileInputStream fis = null;
    InputStream is = null;
    FileChannel in = null;
    FileChannel out = null;
    try {
        File sourceFile = FileUtil.toFile(sourceObject);
        if (null != sourceFile && sourceFile.canRead()) {
            // we are coming from a readable file -- channel transfer
            fis = new FileInputStream(sourceFile);
            in = fis.getChannel();
            out = os.getChannel();
            long fileSize = sourceFile.length();
            long bufSize = Math.min(65536, fileSize);
            long offset = 0;
            do {
                offset += in.transferTo(offset, bufSize, out);
            } while (offset < fileSize);
        } else {
            is = sourceObject.getInputStream();
            FileUtil.copy(is, os);
        }
    } finally {
        closeQuietly(out);
        closeQuietly(in);
        closeQuietly(is);
        closeQuietly(fis);
        closeQuietly(os);
    }
}

/**
 * Closes the given resource, logging (but otherwise ignoring) any
 * {@link IOException}; {@code null} is tolerated.
 */
private static void closeQuietly(java.io.Closeable closeable) {
    if (null != closeable) {
        try {
            closeable.close();
        } catch (IOException ioe) {
            LOGGER.log(Level.INFO, null, ioe);
        }
    }
}
/**
 * Overwrites a jar/zip that could not be deleted with a minimal valid
 * archive containing only an empty manifest, so stale classes can no longer
 * be loaded from it (Windows file-locking workaround used by
 * {@code _distribute}).
 * <p>
 * Improvement: try-with-resources replaces the nested try/finally; the jar
 * stream is still closed before the underlying file stream, and a write
 * failure is no longer masked by a close failure.
 *
 * @param garbage the archive to truncate in place
 * @throws IOException if the archive cannot be rewritten
 */
private void zeroOutArchive(FileObject garbage) throws IOException {
    try (OutputStream fileToOverwrite = garbage.getOutputStream();
            JarOutputStream jos = new JarOutputStream(fileToOverwrite)) {
        jos.putNextEntry(new ZipEntry("META-INF/MANIFEST.MF")); // NOI18N

        // UTF-8 guaranteed on any platform
        jos.write("Manifest-Version: 1.0\n".getBytes(StandardCharsets.UTF_8)); // NOI18N
    }
}
}
| 6,009 |
1,887 | <gh_stars>1000+
package org.springframework.test.ioc;
import org.junit.Test;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader;
import org.springframework.test.bean.Car;
import org.springframework.test.bean.Person;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Verifies that bean definitions declared in spring.xml are loaded and wired
 * correctly by the XML bean definition reader.
 *
 * @author derekyi
 * @date 2020/11/26
 */
public class XmlFileDefineBeanTest {

    @Test
    public void testXmlFile() throws Exception {
        DefaultListableBeanFactory factory = new DefaultListableBeanFactory();
        XmlBeanDefinitionReader reader = new XmlBeanDefinitionReader(factory);
        reader.loadBeanDefinitions("classpath:spring.xml");

        // the person bean and its nested car reference
        Person person = (Person) factory.getBean("person");
        System.out.println(person);
        assertThat(person.getName()).isEqualTo("derek");
        assertThat(person.getCar().getBrand()).isEqualTo("porsche");

        // the standalone car bean
        Car car = (Car) factory.getBean("car");
        System.out.println(car);
        assertThat(car.getBrand()).isEqualTo("porsche");
    }
}
| 357 |
1,738 | /*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
// Description : Backend part of geometry cache rendering
#include "StdAfx.h"
#if defined(USE_GEOM_CACHES)
#include "RendElement.h"
#include "CREGeomCache.h"
#include "I3DEngine.h"
#include "../Renderer.h"
#include "../Common/PostProcess/PostEffects.h"
// Double-buffered (one slot per fill/process thread id) lists of geom caches
// whose render meshes still need an update pass.
StaticInstance<CREGeomCache::UpdateList> CREGeomCache::sm_updateList[2];

// Initializes the element with a "no pending update" state for both thread
// slots and registers it as a transforming geom-cache render element.
CREGeomCache::CREGeomCache()
    : m_geomCacheVertexFormat(eVF_P3F_C4B_T2F)
{
    m_bUpdateFrame[0] = false;
    m_bUpdateFrame[1] = false;
    m_transformUpdateState[0] = 0;
    m_transformUpdateState[1] = 0;

    mfSetType(eDATA_GeomCache);
    mfUpdateFlags(FCEF_TRANSFORM);
}

// Unlinks this element from both per-thread update lists; both mutexes are
// held so neither thread can be iterating the lists while we erase.
CREGeomCache::~CREGeomCache()
{
    CryAutoLock<CryCriticalSection> lock1(sm_updateList[0]->m_mutex);
    CryAutoLock<CryCriticalSection> lock2(sm_updateList[1]->m_mutex);

    stl::find_and_erase(sm_updateList[0]->m_geoms, this);
    stl::find_and_erase(sm_updateList[1]->m_geoms, this);
}
// Resets the element and (re)builds the per-mesh bookkeeping: one entry per
// mesh in the render buffer and in both fill buffers, all initially sharing
// the same render mesh pointer.
void CREGeomCache::InitializeRenderElement(const uint numMeshes, _smart_ptr<IRenderMesh>* pMeshes, uint16 materialId)
{
    m_bUpdateFrame[0] = false;
    m_bUpdateFrame[1] = false;
    m_meshFillData[0].clear();
    m_meshFillData[1].clear();
    m_meshRenderData.clear();

    m_meshFillData[0].reserve(numMeshes);
    m_meshFillData[1].reserve(numMeshes);
    m_meshRenderData.reserve(numMeshes);

    for (uint i = 0; i < numMeshes; ++i)
    {
        SMeshRenderData meshRenderData;
        meshRenderData.m_pRenderMesh = pMeshes[i];
        m_meshRenderData.push_back(meshRenderData);
        m_meshFillData[0].push_back(meshRenderData);
        m_meshFillData[1].push_back(meshRenderData);
    }

    m_materialId = materialId;
}
// Prepares the render pipeline state for this element: optionally flushes on
// overflow, then points the pipeline at this element with an empty batch.
void CREGeomCache::mfPrepare(bool bCheckOverflow)
{
    FUNCTION_PROFILER_RENDER_FLAT
    CRenderer* const pRenderer = gRenDev;

    if (bCheckOverflow)
    {
        pRenderer->FX_CheckOverflow(0, 0, this);
    }

    pRenderer->m_RP.m_CurVFormat = GetVertexFormat();
    pRenderer->m_RP.m_pRE = this;
    pRenderer->m_RP.m_FirstVertex = 0;
    pRenderer->m_RP.m_FirstIndex = 0;
    pRenderer->m_RP.m_RendNumIndices = 0;
    pRenderer->m_RP.m_RendNumVerts = 0;
}

// Registers the object for motion blur and, when close enough to the camera,
// flags it for per-vertex velocity motion blur.
void CREGeomCache::SetupMotionBlur(CRenderObject* pRenderObject, const SRenderingPassInfo& passInfo)
{
    CMotionBlur::SetupObject(pRenderObject, passInfo);

    if (pRenderObject->m_fDistance < CRenderer::CV_r_MotionBlurMaxViewDist)
    {
        pRenderObject->m_ObjFlags |= FOB_VERTEX_VELOCITY | FOB_MOTION_BLUR;
    }
}
// Publishes the latest fill-thread data to the render buffer and ensures the
// render meshes' GPU streams are up to date. Returns false if any mesh failed
// to update or lacks a general vertex stream.
bool CREGeomCache::Update(const int flags, const bool bTessellation)
{
    FUNCTION_PROFILER_RENDER_FLAT

    // Wait until render node update has finished
    const int threadId = gRenDev->m_RP.m_nProcessThreadID;
    while (m_transformUpdateState[threadId])
    {
        CrySleep(0);
    }

    // Check if update was successful and if so copy data to render buffer
    if (m_bUpdateFrame[threadId])
    {
        m_meshRenderData = m_meshFillData[threadId];
    }

    const uint numMeshes = m_meshFillData[threadId].size();
    bool bRet = true;

    for (uint nMesh = 0; nMesh < numMeshes; ++nMesh)
    {
        SMeshRenderData& meshData = m_meshFillData[threadId][nMesh];

        CRenderMesh* const pRenderMesh = static_cast<CRenderMesh*>(meshData.m_pRenderMesh.get());
        if (pRenderMesh && pRenderMesh->m_Modified[threadId].linked())
        {
            // Sync the async render mesh update. This waits for the fill thread started from main thread if it's still running.
            // We need to do this manually, because geom caches don't use CREMesh.
            pRenderMesh->SyncAsyncUpdate(threadId);

            CRenderMesh* pVertexContainer = pRenderMesh->_GetVertexContainer();

            bool bSucceed = pRenderMesh->RT_CheckUpdate(pVertexContainer, flags | VSM_MASK, bTessellation);
            if (bSucceed)
            {
                // mark as clean only after a successful stream update
                pRenderMesh->m_Modified[threadId].erase();
            }

            if (!bSucceed || !pVertexContainer->_HasVBStream(VSF_GENERAL))
            {
                bRet = false;
            }
        }
    }

    return bRet;
}
// Drains the current thread's pending-update list, updating (and erasing)
// each registered geom cache in turn.
void CREGeomCache::UpdateModified()
{
    FUNCTION_PROFILER_RENDER_FLAT
    const int threadId = gRenDev->m_RP.m_nProcessThreadID;
    AZ_Assert(threadId >= 0 && threadId <= 2, "Container is not expecting this index");
    CryAutoLock<CryCriticalSection> lock(sm_updateList[threadId]->m_mutex);

    // erase-as-we-go iteration: each processed element is removed before
    // moving to the next
    for (auto iter = sm_updateList[threadId]->m_geoms.begin();
         iter != sm_updateList[threadId]->m_geoms.end(); iter = sm_updateList[threadId]->m_geoms.erase(iter))
    {
        CREGeomCache* pRenderElement = *iter;
        pRenderElement->Update(0, false);
    }
}

// Render-element entry point: performs the update, then removes this element
// from the pending list and clears its dirty flag.
bool CREGeomCache::mfUpdate(int Flags, bool bTessellation)
{
    const bool bRet = Update(Flags, bTessellation);

    const int threadId = gRenDev->m_RP.m_nProcessThreadID;
    CryAutoLock<CryCriticalSection> lock(sm_updateList[threadId]->m_mutex);
    stl::find_and_erase(sm_updateList[threadId]->m_geoms, this);

    m_Flags &= ~FCEF_DIRTY;
    return bRet;
}
// Called from the main thread to announce an asynchronous transform update:
// clears the fill-thread's "frame updated" flag, enqueues this element, and
// returns the counter the updater must decrement when done (Update() spins on
// it).
volatile int* CREGeomCache::SetAsyncUpdateState(int& threadId)
{
    FUNCTION_PROFILER_RENDER_FLAT
    ASSERT_IS_MAIN_THREAD(gRenDev->m_pRT);
    threadId = gRenDev->m_RP.m_nFillThreadID;

    m_bUpdateFrame[threadId] = false;

    CryAutoLock<CryCriticalSection> lock(sm_updateList[threadId]->m_mutex);
    stl::push_back_unique(sm_updateList[threadId]->m_geoms, this);

    CryInterlockedIncrement(&m_transformUpdateState[threadId]);
    return &m_transformUpdateState[threadId];
}

// Returns the fill buffer for the current fill thread (fill/main side only).
DynArray<CREGeomCache::SMeshRenderData>* CREGeomCache::GetMeshFillDataPtr()
{
    FUNCTION_PROFILER_RENDER_FLAT
    assert(gEnv->IsEditor() || !gRenDev->m_pRT->IsRenderThread(true));
    const int threadId = gRenDev->m_RP.m_nFillThreadID;
    return &m_meshFillData[threadId];
}

// Returns the render-side buffer (fill/main side only).
DynArray<CREGeomCache::SMeshRenderData>* CREGeomCache::GetRenderDataPtr()
{
    FUNCTION_PROFILER_RENDER_FLAT
    assert(gEnv->IsEditor() || !gRenDev->m_pRT->IsRenderThread(true));
    return &m_meshRenderData;
}
// Marks the given thread's fill buffer as ready for display. Breaking into
// the debugger here means the flag was already set, i.e. SetAsyncUpdateState
// was not called before this.
void CREGeomCache::DisplayFilledBuffer(const int threadId)
{
    if (m_bUpdateFrame[threadId])
    {
        // You need to call SetAsyncUpdateState before DisplayFilledBuffer
        __debugbreak();
    }
    m_bUpdateFrame[threadId] = true;
}

// Vertex format used by all geom cache meshes (set in the constructor).
AZ::Vertex::Format CREGeomCache::GetVertexFormat() const
{
    return m_geomCacheVertexFormat;
}

// Fills a zeroed SGeometryInfo with this element's static properties; counts
// and offsets are left at zero.
bool CREGeomCache::GetGeometryInfo(SGeometryInfo &streams)
{
    ZeroStruct(streams);

    streams.vertexFormat = GetVertexFormat();

    streams.nFirstIndex = 0;
    streams.nFirstVertex = 0;
    streams.nNumIndices = 0;
    streams.primitiveType = eptTriangleList;

    streams.streamMask = 0;

    return true;
}
#endif | 3,002 |
446 | <gh_stars>100-1000
from datetime import date
from unittest import mock
from freezegun import freeze_time
from core.utils import NOMINATIM_URL, get_coordinates_for_city, next_deadline
@mock.patch('requests.get')
def test_get_coordinates_for_city(mock_get):
    """A successful Nominatim lookup is formatted as 'lat, lon'."""
    mock_get.return_value.json.return_value = [{'lat': '1.23', 'lon': '4.56'}]

    coordinates = get_coordinates_for_city('London', 'UK')

    expected_params = {'format': 'json', 'q': 'London, UK'}
    mock_get.assert_called_once_with(NOMINATIM_URL, params=expected_params)
    assert coordinates == '1.23, 4.56'
@mock.patch('requests.get')
def test_returns_none_when_no_results(mock_get):
    """An empty result list from Nominatim yields None."""
    mock_get.return_value.json.return_value = []

    coordinates = get_coordinates_for_city('PretendTown', 'UK')

    expected_params = {'format': 'json', 'q': 'PretendTown, UK'}
    mock_get.assert_called_once_with(NOMINATIM_URL, params=expected_params)
    assert coordinates is None
@mock.patch('requests.get')
def test_returns_none_when_invalid_results(mock_get):
    """A result entry without lat/lon keys yields None."""
    mock_get.return_value.json.return_value = [{}]

    coordinates = get_coordinates_for_city('PretendTown', 'UK')

    expected_params = {'format': 'json', 'q': 'PretendTown, UK'}
    mock_get.assert_called_once_with(NOMINATIM_URL, params=expected_params)
    assert coordinates is None
@freeze_time('2016-10-10')
def test_a_week_before_deadline():
    """Several days out, the upcoming deadline is reported."""
    assert next_deadline() == date(2016, 10, 16)
@freeze_time('2016-10-15')
def test_day_before_deadline():
    """The day before the deadline still reports that deadline."""
    assert next_deadline() == date(2016, 10, 16)
@freeze_time('2016-10-16')
def test_on_deadline_day():
    """On the deadline day itself the same deadline is still reported.

    Renamed from ``test_day_before_deadline``: the duplicate name (flagged
    by ``noqa: F811``) shadowed the earlier definition, so pytest silently
    collected only one of the three identically-named tests.
    """
    assert next_deadline() == date(2016, 10, 16)
@freeze_time('2016-10-17')
def test_day_after_deadline():
    """The day after a deadline rolls over to the next one.

    Renamed from ``test_day_before_deadline`` (duplicate definition flagged
    by ``noqa: F811``) so the test is actually collected instead of being
    shadowed.
    """
    assert next_deadline() == date(2016, 10, 30)
| 902 |
1,562 | /* This file was generated by upbc (the upb compiler) from the input
* file:
*
* envoy/api/v2/core/health_check.proto
*
* Do not edit -- your changes will be discarded when the file is
* regenerated. */
#include <stddef.h>
#if COCOAPODS==1
#include "third_party/upb/upb/msg.h"
#else
#include "upb/msg.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/envoy/api/v2/core/health_check.upb.h"
#else
#include "envoy/api/v2/core/health_check.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/envoy/api/v2/core/base.upb.h"
#else
#include "envoy/api/v2/core/base.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/envoy/type/http.upb.h"
#else
#include "envoy/type/http.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/envoy/type/matcher/string.upb.h"
#else
#include "envoy/type/matcher/string.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/envoy/type/range.upb.h"
#else
#include "envoy/type/range.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/google/protobuf/any.upb.h"
#else
#include "google/protobuf/any.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/google/protobuf/duration.upb.h"
#else
#include "google/protobuf/duration.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/google/protobuf/struct.upb.h"
#else
#include "google/protobuf/struct.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/google/protobuf/wrappers.upb.h"
#else
#include "google/protobuf/wrappers.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/envoy/annotations/deprecation.upb.h"
#else
#include "envoy/annotations/deprecation.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/udpa/annotations/migrate.upb.h"
#else
#include "udpa/annotations/migrate.upb.h"
#endif
#if COCOAPODS==1
#include "src/core/ext/upb-generated/validate/validate.upb.h"
#else
#include "validate/validate.upb.h"
#endif
#if COCOAPODS==1
#include "third_party/upb/upb/port_def.inc"
#else
#include "upb/port_def.inc"
#endif
/*
 * upbc-generated message layout tables for
 * envoy/api/v2/core/health_check.proto. These are pure data wiring each
 * message's submessage table and field table into the upb runtime.
 * Do not hand-edit -- changes are discarded when the file is regenerated
 * (see the file header).
 *
 * NOTE(review): each field entry appears to be {field number, offset,
 * hasbit/oneof-case offset, submessage index, descriptor type, label};
 * confirm against the upb_msglayout_field definition in upb/msg.h before
 * relying on this. Submessage arrays declared larger than their initializer
 * lists have their remaining slots zero-filled per C static initialization
 * rules.
 */

/* HealthCheck */
static const upb_msglayout *const envoy_api_v2_core_HealthCheck_submsgs[17] = {
  &envoy_api_v2_core_HealthCheck_CustomHealthCheck_msginit,
  &envoy_api_v2_core_HealthCheck_GrpcHealthCheck_msginit,
  &envoy_api_v2_core_HealthCheck_HttpHealthCheck_msginit,
  &envoy_api_v2_core_HealthCheck_TcpHealthCheck_msginit,
  &envoy_api_v2_core_HealthCheck_TlsOptions_msginit,
  &google_protobuf_BoolValue_msginit,
  &google_protobuf_Duration_msginit,
  &google_protobuf_UInt32Value_msginit,
};

static const upb_msglayout_field envoy_api_v2_core_HealthCheck__fields[20] = {
  {1, UPB_SIZE(16, 24), 0, 6, 11, 1},
  {2, UPB_SIZE(20, 32), 0, 6, 11, 1},
  {3, UPB_SIZE(24, 40), 0, 6, 11, 1},
  {4, UPB_SIZE(28, 48), 0, 7, 11, 1},
  {5, UPB_SIZE(32, 56), 0, 7, 11, 1},
  {6, UPB_SIZE(36, 64), 0, 7, 11, 1},
  {7, UPB_SIZE(40, 72), 0, 5, 11, 1},
  {8, UPB_SIZE(68, 128), UPB_SIZE(-73, -137), 2, 11, 1},
  {9, UPB_SIZE(68, 128), UPB_SIZE(-73, -137), 3, 11, 1},
  {11, UPB_SIZE(68, 128), UPB_SIZE(-73, -137), 1, 11, 1},
  {12, UPB_SIZE(44, 80), 0, 6, 11, 1},
  {13, UPB_SIZE(68, 128), UPB_SIZE(-73, -137), 0, 11, 1},
  {14, UPB_SIZE(48, 88), 0, 6, 11, 1},
  {15, UPB_SIZE(52, 96), 0, 6, 11, 1},
  {16, UPB_SIZE(56, 104), 0, 6, 11, 1},
  {17, UPB_SIZE(8, 8), 0, 0, 9, 1},
  {18, UPB_SIZE(0, 0), 0, 0, 13, 1},
  {19, UPB_SIZE(4, 4), 0, 0, 8, 1},
  {20, UPB_SIZE(60, 112), 0, 6, 11, 1},
  {21, UPB_SIZE(64, 120), 0, 4, 11, 1},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_msginit = {
  &envoy_api_v2_core_HealthCheck_submsgs[0],
  &envoy_api_v2_core_HealthCheck__fields[0],
  UPB_SIZE(80, 144), 20, false,
};

/* HealthCheck.Payload (oneof: text / binary) */
static const upb_msglayout_field envoy_api_v2_core_HealthCheck_Payload__fields[2] = {
  {1, UPB_SIZE(0, 0), UPB_SIZE(-9, -17), 0, 9, 1},
  {2, UPB_SIZE(0, 0), UPB_SIZE(-9, -17), 0, 12, 1},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_Payload_msginit = {
  NULL,
  &envoy_api_v2_core_HealthCheck_Payload__fields[0],
  UPB_SIZE(16, 32), 2, false,
};

/* HealthCheck.HttpHealthCheck */
static const upb_msglayout *const envoy_api_v2_core_HealthCheck_HttpHealthCheck_submsgs[5] = {
  &envoy_api_v2_core_HeaderValueOption_msginit,
  &envoy_api_v2_core_HealthCheck_Payload_msginit,
  &envoy_type_Int64Range_msginit,
  &envoy_type_matcher_StringMatcher_msginit,
};

static const upb_msglayout_field envoy_api_v2_core_HealthCheck_HttpHealthCheck__fields[11] = {
  {1, UPB_SIZE(12, 16), 0, 0, 9, 1},
  {2, UPB_SIZE(20, 32), 0, 0, 9, 1},
  {3, UPB_SIZE(36, 64), 0, 1, 11, 1},
  {4, UPB_SIZE(40, 72), 0, 1, 11, 1},
  {5, UPB_SIZE(28, 48), 0, 0, 9, 1},
  {6, UPB_SIZE(48, 88), 0, 0, 11, 3},
  {7, UPB_SIZE(8, 8), 0, 0, 8, 1},
  {8, UPB_SIZE(52, 96), 0, 0, 9, 3},
  {9, UPB_SIZE(56, 104), 0, 2, 11, 3},
  {10, UPB_SIZE(0, 0), 0, 0, 14, 1},
  {11, UPB_SIZE(44, 80), 0, 3, 11, 1},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_HttpHealthCheck_msginit = {
  &envoy_api_v2_core_HealthCheck_HttpHealthCheck_submsgs[0],
  &envoy_api_v2_core_HealthCheck_HttpHealthCheck__fields[0],
  UPB_SIZE(64, 112), 11, false,
};

/* HealthCheck.TcpHealthCheck */
static const upb_msglayout *const envoy_api_v2_core_HealthCheck_TcpHealthCheck_submsgs[2] = {
  &envoy_api_v2_core_HealthCheck_Payload_msginit,
};

static const upb_msglayout_field envoy_api_v2_core_HealthCheck_TcpHealthCheck__fields[2] = {
  {1, UPB_SIZE(0, 0), 0, 0, 11, 1},
  {2, UPB_SIZE(4, 8), 0, 0, 11, 3},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_TcpHealthCheck_msginit = {
  &envoy_api_v2_core_HealthCheck_TcpHealthCheck_submsgs[0],
  &envoy_api_v2_core_HealthCheck_TcpHealthCheck__fields[0],
  UPB_SIZE(8, 16), 2, false,
};

/* HealthCheck.RedisHealthCheck */
static const upb_msglayout_field envoy_api_v2_core_HealthCheck_RedisHealthCheck__fields[1] = {
  {1, UPB_SIZE(0, 0), 0, 0, 9, 1},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_RedisHealthCheck_msginit = {
  NULL,
  &envoy_api_v2_core_HealthCheck_RedisHealthCheck__fields[0],
  UPB_SIZE(8, 16), 1, false,
};

/* HealthCheck.GrpcHealthCheck */
static const upb_msglayout_field envoy_api_v2_core_HealthCheck_GrpcHealthCheck__fields[2] = {
  {1, UPB_SIZE(0, 0), 0, 0, 9, 1},
  {2, UPB_SIZE(8, 16), 0, 0, 9, 1},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_GrpcHealthCheck_msginit = {
  NULL,
  &envoy_api_v2_core_HealthCheck_GrpcHealthCheck__fields[0],
  UPB_SIZE(16, 32), 2, false,
};

/* HealthCheck.CustomHealthCheck (oneof: config / typed_config) */
static const upb_msglayout *const envoy_api_v2_core_HealthCheck_CustomHealthCheck_submsgs[2] = {
  &google_protobuf_Any_msginit,
  &google_protobuf_Struct_msginit,
};

static const upb_msglayout_field envoy_api_v2_core_HealthCheck_CustomHealthCheck__fields[3] = {
  {1, UPB_SIZE(0, 0), 0, 0, 9, 1},
  {2, UPB_SIZE(8, 16), UPB_SIZE(-13, -25), 1, 11, 1},
  {3, UPB_SIZE(8, 16), UPB_SIZE(-13, -25), 0, 11, 1},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_CustomHealthCheck_msginit = {
  &envoy_api_v2_core_HealthCheck_CustomHealthCheck_submsgs[0],
  &envoy_api_v2_core_HealthCheck_CustomHealthCheck__fields[0],
  UPB_SIZE(16, 32), 3, false,
};

/* HealthCheck.TlsOptions */
static const upb_msglayout_field envoy_api_v2_core_HealthCheck_TlsOptions__fields[1] = {
  {1, UPB_SIZE(0, 0), 0, 0, 9, 3},
};

const upb_msglayout envoy_api_v2_core_HealthCheck_TlsOptions_msginit = {
  NULL,
  &envoy_api_v2_core_HealthCheck_TlsOptions__fields[0],
  UPB_SIZE(4, 8), 1, false,
};
#if COCOAPODS==1
#include "third_party/upb/upb/port_undef.inc"
#else
#include "upb/port_undef.inc"
#endif
| 3,485 |
5,169 | <reponame>Gantios/Specs
{
"name": "Fengmap_iOS_SDK",
"version": "2.2.2",
"summary": "蜂鸟视图地图端 iOS SDK",
"homepage": "https://www.fengmap.com",
"license": "MIT",
"authors": {
"fengmap-devs": "<EMAIL>"
},
"platforms": {
"ios": "8.0"
},
"source": {
"git": "https://github.com/fengmap-devs/Fengmap_iOS_SDK.git",
"tag": "2.2.2"
},
"resources": "FMBundle.bundle",
"frameworks": [
"OpenGLES",
"CoreGraphics",
"GLKit"
],
"libraries": [
"z",
"c++"
],
"xcconfig": {
"OTHER_LDFLAGS": "-ObjC",
"ENABLE_BITCODE": "YES"
},
"vendored_libraries": "Fengmap_iOS_SDK/libFMMapKit.a",
"public_header_files": "Fengmap_iOS_SDK/include/FMMapKit/*.h",
"source_files": "Fengmap_iOS_SDK/include/FMMapKit/*.h"
}
| 395 |
2,042 | <reponame>oguzturker8sdfep/imranvisualpath1<gh_stars>1000+
package com.yc.music.inter;
/**
 * Generic single-method callback invoked when an event of type {@code T}
 * occurs.
 * <p>
 * Annotated {@link FunctionalInterface} so the compiler enforces the
 * single-abstract-method contract and the interface can be implemented
 * with a lambda or method reference.
 *
 * @param <T> type of the event payload
 */
@FunctionalInterface
public interface EventCallback<T> {

    /**
     * Handles the delivered event.
     *
     * @param t the event payload
     */
    void onEvent(T t);
}
| 61 |
2,542 | <gh_stars>1000+
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace std;
using namespace Common;
using namespace Store;
StringLiteral const TraceComponent("Defragmenter");

// Binds the defragmenter to its owning EseLocalStore; no ESE session or
// database is acquired until Start()/DoStart() runs.
EseDefragmenter::EseDefragmenter(__in EseLocalStore & eseStore)
    : ComponentRoot()
    , eseStore_(eseStore)
    , settings_(eseStore.Settings)
    , traceId_(eseStore.TraceId)
    , session_()
    , database_()
    , lock_()
    , isCanceled_(false)
    , defragTimer_()
{
}

// Factory: constructs the defragmenter and immediately starts it.
shared_ptr<EseDefragmenter> EseDefragmenter::CreateAndStart(__in EseLocalStore & eseStore)
{
    auto defragmenter = shared_ptr<EseDefragmenter>(new EseDefragmenter(eseStore));

    defragmenter->Start();

    return defragmenter;
}

EseDefragmenter::~EseDefragmenter()
{
}
// Pins the owning EseLocalStore via its pending-transaction count for the
// lifetime of the defragmenter, then kicks off initialization.
void EseDefragmenter::Start()
{
    // Use pending transaction counts to prevent EseLocalStore destruction.
    // Another option would be to convert EseLocalStore into a ComponentRoot,
    // but the transaction count model already exists.
    //
    this->IncrementLocalStoreRefCount();

    this->DoStart();
}

// Cancels the timer and marks the defragmenter canceled under the write
// lock, then releases the ref-count taken in Start().
void EseDefragmenter::Cancel()
{
    {
        AcquireWriteLock lock(lock_);

        if (defragTimer_)
        {
            defragTimer_->Cancel();
        }

        isCanceled_ = true;

        WriteInfo(
            TraceComponent,
            "{0} defragmenter canceled",
            this->TraceId);
    }

    // EseLocalStore is free to destruct at this point
    // once any pending callback completes
    //
    this->DecrementLocalStoreRefCount();
}
// Initializes the ESE session/database and, when auto-defragmentation is
// enabled and the database id is usable, schedules the first defrag pass.
// Retries itself on session initialization failure.
void EseDefragmenter::DoStart()
{
    TimeSpan maxDefragFrequency;
    {
        AcquireReadLock lock(lock_);

        if (isCanceled_)
        {
            return;
        }

        maxDefragFrequency = settings_.MaxDefragFrequency;

        auto error = this->InitializeSession_ReadLock();
        if (!error.IsSuccess())
        {
            WriteInfo(
                TraceComponent,
                "{0} initialize defragmentation session+database failed: error={1}",
                this->TraceId,
                error);
        }

        // Do this after initializing ESE session and database so that explicit
        // calls to StartDefragmentation() are still supported.
        //
        if (maxDefragFrequency <= TimeSpan::Zero)
        {
            WriteInfo(
                TraceComponent,
                "{0} auto defragmentation is disabled: MaxDefragFrequency={1}",
                this->TraceId,
                maxDefragFrequency);

            return;
        }

        if (!this->IsJetDbidInSupportedRange())
        {
            WriteInfo(
                TraceComponent,
                "{0} Disabling defragmentation: JET_DBID=[{1}] is > 255",
                this->TraceId,
                database_->DatabaseId);

            return;
        }
    }

    // session_ remains null when InitializeSession_ReadLock failed above
    if (!session_)
    {
        this->ScheduleStartRetry();

        return;
    }

    defragTimer_.reset();

    // First defrag triggers immediately upon opening, which
    // will be more or less no-op by ESE if the DB is already
    // defragmented.
    //
    this->ScheduleDefragmentation(TimeSpan::Zero);
}
// Leverages EseLocalStore logic that waits for pending transaction count
// in dtor to keep EseLocalStore alive when there are outstanding callbacks
//
// RAII guard: construction increments the store's transaction ref-count,
// destruction decrements it. Captured (as shared_ptr) by timer lambdas so
// the store outlives any scheduled callback.
class EseDefragmenter::LocalStoreRoot
{
    DENY_COPY(LocalStoreRoot);
public:
    static shared_ptr<LocalStoreRoot> Create(__in EseDefragmenter & owner)
    {
        return shared_ptr<LocalStoreRoot>(new LocalStoreRoot(owner));
    }

    ~LocalStoreRoot()
    {
        owner_.DecrementLocalStoreRefCount();
    }

private:
    LocalStoreRoot(__in EseDefragmenter & owner) : owner_(owner)
    {
        owner_.IncrementLocalStoreRefCount();
    }

private:
    EseDefragmenter & owner_;
};
// Schedules a retry of DoStart after the configured delay; the captured
// LocalStoreRoot keeps the store alive until the callback has run.
void EseDefragmenter::ScheduleStartRetry()
{
    auto root = LocalStoreRoot::Create(*this);
    this->ScheduleTimerCallback(
        [this, root](TimerSPtr const &) { this->DoStart(); },
        TimeSpan::FromSeconds(StoreConfig::GetConfig().DefragRetryDelayInSeconds),
        L"DoStart");
}

// Schedules the next defragmentation pass after the given delay.
void EseDefragmenter::ScheduleDefragmentation(TimeSpan const targetDelay)
{
    auto root = LocalStoreRoot::Create(*this);
    this->ScheduleTimerCallback(
        [this, root](TimerSPtr const &) { this->DefragmentationCallback(); },
        targetDelay,
        L"DefragmentationCallback");
}

// Schedules a defragmentation retry using the configured retry delay.
void EseDefragmenter::ScheduleDefragmentationRetry()
{
    this->ScheduleDefragmentation(TimeSpan::FromSeconds(StoreConfig::GetConfig().DefragRetryDelayInSeconds));
}
// Timer callback: estimates the database size and starts a defrag pass when
// the size crosses the configured threshold. Reschedules itself at the
// normal frequency on success, or at the retry delay on failure.
void EseDefragmenter::DefragmentationCallback()
{
    ErrorCode error;
    TimeSpan maxDefragFrequency = TimeSpan::Zero;
    {
        AcquireReadLock lock(lock_);

        if (isCanceled_)
        {
            return;
        }

        maxDefragFrequency = settings_.MaxDefragFrequency;

        size_t dbSizeBytes = 0;
        error = eseStore_.EstimateDbSizeBytes(dbSizeBytes);

        if (!error.IsSuccess())
        {
            WriteInfo(
                TraceComponent,
                "{0} failed to estimate DB size: error={1}",
                this->TraceId,
                error);
        }
        else
        {
            auto defragThresholdMB = settings_.DefragThresholdInMB;

            // only defragment databases at least defragThresholdMB large
            if (dbSizeBytes / (1024 * 1024) < static_cast<size_t>(defragThresholdMB))
            {
                WriteInfo(
                    TraceComponent,
                    "{0} skipping defragmentation: {1} bytes < threshold={2} MB",
                    this->TraceId,
                    dbSizeBytes,
                    defragThresholdMB);
            }
            else
            {
                WriteInfo(
                    TraceComponent,
                    "{0} starting defragmentation: {1} bytes >= threshold={2} MB",
                    this->TraceId,
                    dbSizeBytes,
                    defragThresholdMB);

                error = this->StartDefragmentation();
            }
        }
    } // isCanceled_ lock

    if (error.IsSuccess())
    {
        this->ScheduleDefragmentation(maxDefragFrequency);
    }
    else
    {
        this->ScheduleDefragmentationRetry();
    }
}
// Automatic defragmentation (triggered by a scan) can also be
// enabled by specifying JET_bitRetrieveHintTableScanForward during table
// creation, but it seems to be less aggressive and does not result
// in as much an improvement as an explicit defragment call using
// JET_bitDefragmentBTree. The latter comes very close to the performance
// achieved by a full offline compaction.
//
// Kicks off an online B-tree defragmentation of the store's table via
// JetDefragment2. Returns Success without doing anything when the JET_DBID
// is outside the range the API accepts.
ErrorCode EseDefragmenter::StartDefragmentation()
{
    auto sessionId = session_->SessionId;
    auto dbId = database_->DatabaseId;

    if (!this->IsJetDbidInSupportedRange())
    {
        WriteInfo(
            TraceComponent,
            "{0} skipping defragmentation: JET_DBID=[{1}] is > 255",
            this->TraceId,
            dbId);

        return ErrorCodeValue::Success;
    }

    auto jetError = CALL_ESE_NOTHROW(
        JetDefragment2(
            sessionId,
            dbId,
            Constants::TableName->c_str(),
            NULL, // unlimited max # of passes
            NULL, // unlimited max # of seconds
            //
            // Defragmentation callback does not seem to get called (possibly only supported by newer ESE versions).
            // Leave callback here for potential debugging, but do not rely on it.
            //
            DefragmentationCompletedCallback,
            JET_bitDefragmentBTree));

    if (JET_errSuccess != jetError)
    {
        return eseStore_.JetToErrorCode(jetError);
    }

    WriteInfo(
        TraceComponent,
        "{0} started database defragmentation: session={1} db={2}",
        this->TraceId,
        sessionId,
        dbId);

    return ErrorCodeValue::Success;
}
// Stops a running online defragmentation pass (JET_bitDefragmentBatchStop);
// treats "not running" as success.
ErrorCode EseDefragmenter::StopDefragmentation()
{
    unsigned long passes = 0;
    unsigned long elapsed = 0;

    auto sessionId = session_->SessionId;
    auto dbId = database_->DatabaseId;

    auto jetError = CALL_ESE_NOTHROW(
        JetDefragment2(
            sessionId,
            dbId,
            Constants::TableName->c_str(),
            &passes,
            &elapsed,
            NULL,
            JET_bitDefragmentBatchStop));

    if (JET_wrnDefragNotRunning == jetError)
    {
        WriteInfo(
            TraceComponent,
            "{0} defragmentation already stopped: db={1}",
            this->TraceId,
            dbId);

        jetError = JET_errSuccess;
    }
    else
    {
        WriteInfo(
            TraceComponent,
            "{0} database defragmentation stopped: error={1} passes={2} elapsed={3} db={4}",
            this->TraceId,
            jetError,
            passes,
            elapsed,
            dbId);
    }

    return eseStore_.JetToErrorCode(jetError);
}
// Pins the owning store by bumping its pending-transaction count (the store
// waits for this count in its destructor).
void EseDefragmenter::IncrementLocalStoreRefCount()
{
    eseStore_.OnConstructTransaction();
}

// Releases the pin taken by IncrementLocalStoreRefCount.
void EseDefragmenter::DecrementLocalStoreRefCount()
{
    eseStore_.OnDestructTransaction();
}
// Creates a dedicated ESE session and attaches the store's database file to
// it; on success commits both into session_/database_. Caller holds lock_
// (read).
ErrorCode EseDefragmenter::InitializeSession_ReadLock()
{
    auto tempSession = EseSession::CreateSPtr(eseStore_.instance_);
    auto jetError = tempSession->Initialize();

    if (JET_errSuccess != jetError)
    {
        WriteInfo(
            TraceComponent,
            "{0} create defragmentation session failed: error={1}",
            this->TraceId,
            jetError);

        return eseStore_.JetToErrorCode(jetError);
    }

    auto tempDatabase = EseDatabase::CreateSPtr(tempSession);
    jetError = tempDatabase->InitializeOpen(Path::Combine(eseStore_.Directory, eseStore_.FileName));

    if (JET_errSuccess != jetError)
    {
        WriteInfo(
            TraceComponent,
            "{0} attach defragmentation database failed: error={1}",
            this->TraceId,
            jetError);

        return eseStore_.JetToErrorCode(jetError);
    }

    session_ = move(tempSession);
    database_ = move(tempDatabase);

    return eseStore_.JetToErrorCode(jetError);
}

// JetDefragment2 only accepts database ids that fit in a byte.
bool EseDefragmenter::IsJetDbidInSupportedRange()
{
    return (database_->DatabaseId <= 255);
}
// Lazily creates the shared timer (no-op when canceled) and arms it to fire
// the given callback after targetDelay.
void EseDefragmenter::ScheduleTimerCallback(
    Timer::TimerCallback const & timerCallback,
    TimeSpan const targetDelay,
    wstring const & debugTag)
{
    {
        AcquireReadLock lock(lock_);

        if (isCanceled_)
        {
            return;
        }

        if (!defragTimer_)
        {
            defragTimer_ = Timer::Create(TimerTagDefault, timerCallback);
        }
    }

    WriteInfo(
        TraceComponent,
        "{0} scheduling {1} in {2}",
        this->TraceId,
        debugTag,
        targetDelay);

    defragTimer_->Change(targetDelay);
}

// ESE progress callback passed to JetDefragment2; only traces the event
// (see the note in StartDefragmentation: it may never be invoked).
JET_ERR EseDefragmenter::DefragmentationCompletedCallback(
    JET_SESID sessionId,
    JET_DBID dbId,
    JET_TABLEID tableId,
    JET_CBTYP cbType,
    void *,
    void *,
    void *,
    JET_API_PTR)
{
    Trace.WriteInfo(
        TraceComponent,
        "DefragmentationCompletedCallback: type={0} session={1} db={2} table={3}",
        cbType,
        sessionId,
        dbId,
        tableId);

    return JET_errSuccess;
}
| 5,076 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.datafactory.models;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
/** Resource collection API of PrivateEndpointConnectionOperations. */
public interface PrivateEndpointConnectionOperations {
/**
* Gets a private endpoint connection.
*
* @param resourceGroupName The resource group name.
* @param factoryName The factory name.
* @param privateEndpointConnectionName The private endpoint connection name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a private endpoint connection.
*/
PrivateEndpointConnectionResource get(
String resourceGroupName, String factoryName, String privateEndpointConnectionName);
    /**
     * Gets a private endpoint connection.
     *
     * @param resourceGroupName The resource group name.
     * @param factoryName The factory name.
     * @param privateEndpointConnectionName The private endpoint connection name.
     * @param ifNoneMatch ETag of the private endpoint connection entity. Should only be specified for get. If the ETag
     *     matches the existing entity tag, or if * was provided, then no content will be returned.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a private endpoint connection, along with the raw HTTP {@link Response}.
     */
    Response<PrivateEndpointConnectionResource> getWithResponse(
        String resourceGroupName,
        String factoryName,
        String privateEndpointConnectionName,
        String ifNoneMatch,
        Context context);
    /**
     * Deletes a private endpoint connection.
     *
     * @param resourceGroupName The resource group name.
     * @param factoryName The factory name.
     * @param privateEndpointConnectionName The private endpoint connection name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    void delete(String resourceGroupName, String factoryName, String privateEndpointConnectionName);
    /**
     * Deletes a private endpoint connection.
     *
     * @param resourceGroupName The resource group name.
     * @param factoryName The factory name.
     * @param privateEndpointConnectionName The private endpoint connection name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} for the delete operation, carrying the status code and headers.
     */
    Response<Void> deleteWithResponse(
        String resourceGroupName, String factoryName, String privateEndpointConnectionName, Context context);
    /**
     * Gets a private endpoint connection.
     *
     * @param id the resource ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a private endpoint connection.
     */
    PrivateEndpointConnectionResource getById(String id);
    /**
     * Gets a private endpoint connection.
     *
     * @param id the resource ID.
     * @param ifNoneMatch ETag of the private endpoint connection entity. Should only be specified for get. If the ETag
     *     matches the existing entity tag, or if * was provided, then no content will be returned.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a private endpoint connection, along with the raw HTTP {@link Response}.
     */
    Response<PrivateEndpointConnectionResource> getByIdWithResponse(String id, String ifNoneMatch, Context context);
    /**
     * Deletes a private endpoint connection.
     *
     * @param id the resource ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    void deleteById(String id);
    /**
     * Deletes a private endpoint connection.
     *
     * @param id the resource ID.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link Response} for the delete operation, carrying the status code and headers.
     */
    Response<Void> deleteByIdWithResponse(String id, Context context);
    /**
     * Begins definition for a new PrivateEndpointConnectionResource resource.
     *
     * @param name resource name.
     * @return the first stage of the new PrivateEndpointConnectionResource definition.
     */
    PrivateEndpointConnectionResource.DefinitionStages.Blank define(String name);
}
| 1,803 |
480 | <gh_stars>100-1000
/*
Main initializer for GEOS wrapper
*/
#include "preface.h"
#ifdef RGEO_GEOS_SUPPORTED
#include <ruby.h>
#include <geos_c.h>
#include "errors.h"
#include "factory.h"
#include "geometry.h"
#include "point.h"
#include "line_string.h"
#include "polygon.h"
#include "geometry_collection.h"
#include "analysis.h"
#endif
RGEO_BEGIN_C
/*
 * Ruby extension entry point. When GEOS support was compiled in, boots the
 * factory first (it owns the shared globals) and then registers each
 * geometry submodule; otherwise this is a no-op.
 */
void Init_geos_c_impl()
{
#ifdef RGEO_GEOS_SUPPORTED
  RGeo_Globals* globals = rgeo_init_geos_factory();
  rgeo_init_geos_geometry(globals);
  rgeo_init_geos_point(globals);
  rgeo_init_geos_line_string(globals);
  rgeo_init_geos_polygon(globals);
  rgeo_init_geos_geometry_collection(globals);
  rgeo_init_geos_analysis(globals);
  rgeo_init_geos_errors();
#endif
}
RGEO_END_C
| 345 |
1,062 | //
// Generated by class-dump 3.5b1 (64 bit) (Debug version compiled Dec 3 2019 19:59:57).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import "NSObject-Protocol.h"
@class NSImmediateActionGestureRecognizer;
// Optional callbacks an animation controller may implement to track the
// lifecycle of an immediate-action (force-touch style) gesture animation.
// All methods receive the recognizer driving the animation.
@protocol NSImmediateActionAnimationController <NSObject>
@optional
// Animation was dismissed without completing.
- (void)recognizerDidDismissAnimation:(NSImmediateActionGestureRecognizer *)arg1;
// Animation ran to completion.
- (void)recognizerDidCompleteAnimation:(NSImmediateActionGestureRecognizer *)arg1;
// Animation was cancelled mid-flight.
- (void)recognizerDidCancelAnimation:(NSImmediateActionGestureRecognizer *)arg1;
// Progress update while the animation is running.
- (void)recognizerDidUpdateAnimation:(NSImmediateActionGestureRecognizer *)arg1;
// Called immediately before the animation starts.
- (void)recognizerWillBeginAnimation:(NSImmediateActionGestureRecognizer *)arg1;
@end
| 232 |
561 | ##########################################################################
#
# Copyright (c) 2020, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of <NAME> nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from Qt import QtCore
from ._PlugTableModel import _PlugTableModel
class __PlugTableProxyModel( QtCore.QAbstractProxyModel ) :
	"""Base proxy exposing a rectangular window of a _PlugTableModel.

	The window starts at ( startRow, startColumn ) in the source model; a
	rowCount/columnCount of None/0 means "extend to the end of the source".
	Subclasses simply choose the window geometry.
	"""
	def __init__( self, startRow = 0, startColumn = 0, rowCount = None, columnCount = 0, parent = None ) :
		QtCore.QAbstractProxyModel.__init__( self, parent = parent )
		# Origin and extent of the exposed window within the source model.
		self.__startRow = startRow
		self.__startColumn = startColumn
		self.__rowCount = rowCount
		self.__columnCount = columnCount
	def index( self, row, column, parent = QtCore.QModelIndex() ) :
		# Flat (table) model : only the invalid root index has children.
		if parent.isValid() :
			return QtCore.QModelIndex()
		return self.createIndex( row, column )
	def parent( self, index ) :
		# No hierarchy, so everything parents to the invalid root index.
		return QtCore.QModelIndex()
	def setSourceModel( self, model ) :
		assert( isinstance( model, _PlugTableModel ) )
		# _PlugTableModel only emits modelReset and [header]DataChanged signals (for now)
		# so we can avoid the headache of remapping the plethora of row/column signals.
		oldModel = self.sourceModel()
		if oldModel :
			oldModel.disconnect( self )
		if model :
			model.dataChanged.connect( self.__dataChanged )
			model.headerDataChanged.connect( self.__headerDataChanged )
			model.modelReset.connect( self.modelReset )
		self.beginResetModel()
		QtCore.QAbstractProxyModel.setSourceModel( self, model )
		self.endResetModel()
	def columnCount( self, parent = QtCore.QModelIndex() ) :
		if parent.isValid() :
			return 0
		# Fixed width if configured, otherwise track the source model.
		if self.__columnCount :
			return self.__columnCount
		else :
			return self.sourceModel().columnCount() - self.__startColumn
	def rowCount( self, parent = QtCore.QModelIndex() ) :
		if parent.isValid() :
			return 0
		# Fixed height if configured, otherwise track the source model.
		if self.__rowCount :
			return self.__rowCount
		else :
			return self.sourceModel().rowCount() - self.__startRow
	def mapFromSource( self, sourceIndex ) :
		if not sourceIndex.isValid() :
			return QtCore.QModelIndex()
		row = sourceIndex.row() - self.__startRow
		column = sourceIndex.column() - self.__startColumn
		# Source indices outside our window map to the invalid index.
		if row < 0 or row >= self.rowCount() :
			return QtCore.QModelIndex()
		if column < 0 or column >= self.columnCount() :
			return QtCore.QModelIndex()
		return self.index( row, column )
	def mapToSource( self, proxyIndex ) :
		if not proxyIndex.isValid():
			return QtCore.QModelIndex()
		row = proxyIndex.row() + self.__startRow
		column = proxyIndex.column() + self.__startColumn
		return self.sourceModel().index( row, column )
	# Convenience accessors that delegate to the source model, translating
	# indices through the window mapping where necessary.
	def rowsPlug( self ) :
		return self.sourceModel().rowsPlug()
	def plugForIndex( self, index ) :
		return self.sourceModel().plugForIndex( self.mapToSource( index ) )
	def valuePlugForIndex( self, index ) :
		return self.sourceModel().valuePlugForIndex( self.mapToSource( index ) )
	def indexForPlug( self, plug ) :
		return self.mapFromSource( self.sourceModel().indexForPlug( plug ) )
	def presentsCheckstate( self, index ) :
		return self.sourceModel().presentsCheckstate( self.mapToSource( index ) )
	def __dataChanged( self, topLeft, bottomRight, roles ) :
		# Early out if the changed range doesn't intersect the remapped model
		if bottomRight.row() < self.__startRow or bottomRight.column() < self.__startColumn :
			return
		if topLeft.row() - self.__startRow >= self.rowCount() \
			or topLeft.column() - self.__startColumn >= self.columnCount() :
			return
		# Clamp to presented range
		proxyTopLeft = self.mapFromSource( topLeft )
		if not proxyTopLeft.isValid() :
			proxyTopLeft = self.index( 0, 0 )
		proxyBottomRight = self.mapFromSource( bottomRight )
		if not proxyBottomRight.isValid() :
			proxyBottomRight = self.index( self.rowCount() - 1, self.columnCount() - 1 )
		self.dataChanged.emit( proxyTopLeft, proxyBottomRight, roles )
	def __headerDataChanged( self, orientation, first, last ) :
		if orientation == QtCore.Qt.Vertical :
			limit = self.rowCount() - 1
			offset = self.__startRow
		else :
			limit = self.columnCount() - 1
			offset = self.__startColumn
		# Don't propagate if the changed range is outside of our remapping
		if first - offset > limit or last < offset :
			return
		first = max( first - offset, 0 )
		last = min( last - offset, limit )
		self.headerDataChanged.emit( orientation, first, last )
class RowNamesProxyModel( __PlugTableProxyModel ) :
	"""Window exposing only the name/enabled columns (the first two)
	for every non-default row of the source model."""
	def __init__( self, parent = None ) :
		super( RowNamesProxyModel, self ).__init__( columnCount = 2, startRow = 1, parent = parent )
class DefaultsProxyModel( __PlugTableProxyModel ) :
	"""Window exposing only the defaults row (the first row) of the
	source model, skipping the two name/enabled columns."""
	def __init__( self, parent = None ) :
		super( DefaultsProxyModel, self ).__init__( rowCount = 1, startColumn = 2, parent = parent )
class CellsProxyModel( __PlugTableProxyModel ) :
	"""Window exposing the cell values : every non-default row,
	skipping the two name/enabled columns."""
	def __init__( self, parent = None ) :
		super( CellsProxyModel, self ).__init__( startColumn = 2, startRow = 1, parent = parent )
| 2,078 |
60,910 | //Given a circular linked list, implement an algorithm which returns
//the node at the beginning of the loop
public class FindBeginning {
LinkedListNode findBeginning(LinkedListNode head) {
LinkedListNode slow = head;
LinkedListNode fast = head;
/* find meeting point. This will be LOOP_SIZE - k
* steps int othe linked list */
while(fast != null && fast.next != null) {
slow = slow.next;
fast = fast.next.next;
if(fast == slow) {
break;
}
}
/* error checking - no meeting point, and therefore no loop */
if(fast == null || fast.next == null) {
return null;
}
/* move slow to head. Keep fast at meeting point. Each are k
* steps from the loop start. If they move at the same pace,
* they must meet at the loop start */
slow = head;
while(slow != fast) {
slow = slow.next;
fast = fast.next;
}
/* both now point to the start of the loop */
return fast;
}
} | 317 |
370 | import numpy as np
from fastdtw import fastdtw
from nnmnkwii.baseline.gmm import MLPG
from nnmnkwii.preprocessing import trim_zeros_frames
from numpy.linalg import norm
from sklearn.mixture import GaussianMixture
class DTWAligner(object):
    """Align feature matrices using fastdtw_.

    .. _fastdtw: https://github.com/slaypni/fastdtw

    Attributes:
        dist (function): Distance function. Default is :func:`numpy.linalg.norm`.
        radius (int): Radius parameter in fastdtw_.
        verbose (int): Verbose flag. Default is 0.

    Examples:
        >>> from nnmnkwii.util import example_file_data_sources_for_duration_model
        >>> from nnmnkwii.datasets import FileSourceDataset
        >>> from nnmnkwii.preprocessing.alignment import DTWAligner
        >>> _, X = example_file_data_sources_for_duration_model()
        >>> X = FileSourceDataset(X).asarray()
        >>> X.shape
        (3, 40, 5)
        >>> Y = X.copy()
        >>> X_aligned, Y_aligned = DTWAligner().transform((X, Y))
        >>> X_aligned.shape
        (3, 40, 5)
        >>> Y_aligned.shape
        (3, 40, 5)
    """

    def __init__(self, dist=lambda x, y: norm(x - y), radius=1, verbose=0):
        self.verbose = verbose
        self.dist = dist
        self.radius = radius

    def transform(self, XY):
        """Align a pair of zero-padded 3D feature arrays frame by frame.

        Args:
            XY (tuple): Pair of arrays ``(X, Y)``, each of shape
                ``(num_utterances, num_frames, num_features)``. Trailing
                all-zero frames are treated as padding.

        Returns:
            tuple: ``(X_aligned, Y_aligned)``, zero-padded to a common length.
        """
        X, Y = XY
        assert X.ndim == 3 and Y.ndim == 3
        longer_features = X if X.shape[1] > Y.shape[1] else Y
        X_aligned = np.zeros_like(longer_features)
        Y_aligned = np.zeros_like(longer_features)
        for idx, (x, y) in enumerate(zip(X, Y)):
            x, y = trim_zeros_frames(x), trim_zeros_frames(y)
            dist, path = fastdtw(x, y, radius=self.radius, dist=self.dist)
            # Normalize the DTW cost by the combined path length.
            dist /= len(x) + len(y)
            pathx = list(map(lambda l: l[0], path))
            pathy = list(map(lambda l: l[1], path))
            x, y = x[pathx], y[pathy]
            # The warped sequences may exceed the preallocated frame axis;
            # grow both output buffers together so they keep the same shape.
            max_len = max(len(x), len(y))
            if max_len > X_aligned.shape[1] or max_len > Y_aligned.shape[1]:
                # BUGFIX: the second operand used to be the *boolean*
                # ``max_len > Y_aligned.shape[1]`` instead of the difference,
                # so the pad size was effectively derived from X_aligned only.
                pad_size = max(
                    max_len - X_aligned.shape[1], max_len - Y_aligned.shape[1]
                )
                X_aligned = np.pad(
                    X_aligned,
                    [(0, 0), (0, pad_size), (0, 0)],
                    mode="constant",
                    constant_values=0,
                )
                Y_aligned = np.pad(
                    Y_aligned,
                    [(0, 0), (0, pad_size), (0, 0)],
                    mode="constant",
                    constant_values=0,
                )
            X_aligned[idx][: len(x)] = x
            Y_aligned[idx][: len(y)] = y
            if self.verbose > 0:
                print("{}, distance: {}".format(idx, dist))
        return X_aligned, Y_aligned
class IterativeDTWAligner(object):
    """Align feature matrices iteratively using GMM-based feature conversion.

    .. _fastdtw: https://github.com/slaypni/fastdtw

    Attributes:
        n_iter (int): Number of iterations.
        dist (function): Distance function
        radius (int): Radius parameter in fastdtw_.
        verbose (int): Verbose flag. Default is 0.
        max_iter_gmm (int): Maximum iteration to train GMM.
        n_components_gmm (int): Number of mixture components in GMM.

    Examples:
        >>> from nnmnkwii.util import example_file_data_sources_for_duration_model
        >>> from nnmnkwii.datasets import FileSourceDataset
        >>> from nnmnkwii.preprocessing.alignment import IterativeDTWAligner
        >>> _, X = example_file_data_sources_for_duration_model()
        >>> X = FileSourceDataset(X).asarray()
        >>> X.shape
        (3, 40, 5)
        >>> Y = X.copy()
        >>> X_aligned, Y_aligned = IterativeDTWAligner(n_iter=1).transform((X, Y))
        >>> X_aligned.shape
        (3, 40, 5)
        >>> Y_aligned.shape
        (3, 40, 5)
    """

    def __init__(
        self,
        n_iter=3,
        dist=lambda x, y: norm(x - y),
        radius=1,
        max_iter_gmm=100,
        n_components_gmm=16,
        verbose=0,
    ):
        self.n_iter = n_iter
        self.dist = dist
        self.radius = radius
        self.max_iter_gmm = max_iter_gmm
        self.n_components_gmm = n_components_gmm
        self.verbose = verbose

    def transform(self, XY):
        """Iteratively align ``(X, Y)``: DTW-align, fit a joint GMM, convert
        X towards Y with MLPG, and repeat ``n_iter`` times.

        Args:
            XY (tuple): Pair of arrays ``(X, Y)``, each of shape
                ``(num_utterances, num_frames, num_features)``.

        Returns:
            tuple: ``(X_aligned, Y_aligned)`` where ``X_aligned`` contains the
            original (unconverted) X frames reordered by the final DTW paths.
        """
        X, Y = XY
        assert X.ndim == 3 and Y.ndim == 3
        longer_features = X if X.shape[1] > Y.shape[1] else Y
        Xc = X.copy()  # this will be updated iteratively
        X_aligned = np.zeros_like(longer_features)
        Y_aligned = np.zeros_like(longer_features)
        # BUGFIX: ``np.object`` was removed in NumPy 1.24; the builtin
        # ``object`` is the documented replacement.
        refined_paths = np.empty(len(X), dtype=object)
        # BUGFIX: the outer loop previously reused ``idx`` as its variable,
        # shadowing the inner enumeration index; use a throwaway name instead.
        for _ in range(self.n_iter):
            for idx, (x, y) in enumerate(zip(Xc, Y)):
                x, y = trim_zeros_frames(x), trim_zeros_frames(y)
                dist, path = fastdtw(x, y, radius=self.radius, dist=self.dist)
                dist /= len(x) + len(y)
                pathx = list(map(lambda l: l[0], path))
                pathy = list(map(lambda l: l[1], path))
                refined_paths[idx] = pathx
                x, y = x[pathx], y[pathy]
                max_len = max(len(x), len(y))
                if max_len > X_aligned.shape[1] or max_len > Y_aligned.shape[1]:
                    # BUGFIX: the second operand used to be the *boolean*
                    # ``max_len > Y_aligned.shape[1]`` instead of the difference.
                    pad_size = max(
                        max_len - X_aligned.shape[1], max_len - Y_aligned.shape[1]
                    )
                    X_aligned = np.pad(
                        X_aligned,
                        [(0, 0), (0, pad_size), (0, 0)],
                        mode="constant",
                        constant_values=0,
                    )
                    Y_aligned = np.pad(
                        Y_aligned,
                        [(0, 0), (0, pad_size), (0, 0)],
                        mode="constant",
                        constant_values=0,
                    )
                X_aligned[idx][: len(x)] = x
                Y_aligned[idx][: len(y)] = y
                if self.verbose > 0:
                    print("{}, distance: {}".format(idx, dist))
            # Fit a joint GMM on stacked (X, Y) frames and convert Xc towards Y.
            gmm = GaussianMixture(
                n_components=self.n_components_gmm,
                covariance_type="full",
                max_iter=self.max_iter_gmm,
            )
            stacked = np.concatenate((X_aligned, Y_aligned), axis=-1).reshape(
                -1, X.shape[-1] * 2
            )
            gmm.fit(stacked)
            windows = [(0, 0, np.array([1.0]))]  # no delta
            paramgen = MLPG(gmm, windows=windows)
            for idx in range(len(Xc)):
                x = trim_zeros_frames(Xc[idx])
                Xc[idx][: len(x)] = paramgen.transform(x)
        # Finally we can get aligned X
        for idx in range(len(X_aligned)):
            x = X[idx][refined_paths[idx]]
            X_aligned[idx][: len(x)] = x
        return X_aligned, Y_aligned
| 3,713 |
428 | <reponame>cping/LGame
/**
* Copyright 2008 - 2015 The Loon Game Engine Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email:<EMAIL>
* @version 0.5
*/
package loon;
import loon.canvas.LColor;
import loon.events.GameKey;
import loon.events.GameTouch;
import loon.events.InputMake;
import loon.events.KeyMake;
import loon.events.MouseMake;
import loon.events.SysInputFactory;
import loon.events.SysInputFactoryImpl;
import loon.events.TouchMake;
import loon.events.Updateable;
import loon.geom.Vector2f;
import loon.opengl.GLEx;
import loon.opengl.ShaderSource;
import loon.utils.HelperUtils;
import loon.utils.ListMap;
import loon.utils.MathUtils;
import loon.utils.ObjectBundle;
import loon.utils.Resolution;
import loon.utils.StringUtils;
import loon.utils.TArray;
import loon.utils.processes.GameProcessType;
import loon.utils.processes.RealtimeProcess;
import loon.utils.processes.RealtimeProcessManager;
import loon.utils.reply.Port;
import loon.utils.timer.LTimerContext;
public class LProcess implements LRelease {
	// Pending callbacks drained on resume()/load()/unload() respectively.
	protected TArray<Updateable> resumes;
	protected TArray<Updateable> loads;
	protected TArray<Updateable> unloads;
	// Optional on-screen emulator gamepad and its listener.
	protected EmulatorListener emulatorListener;
	private EmulatorButtons _emulatorButtons;
	// Named registry of screens managed by this process.
	private final ListMap<CharSequence, Screen> _screenMap;
	// True once a screen instance has been installed.
	private boolean _isInstance;
	private int _curId;
	// True while a screen transition animation is in progress.
	private boolean _waitTransition;
	private boolean _running;
	private Screen _currentScreen, _loadingScreen;
	private LTransition _transition;
	private SysInputFactory _currentInput;
	private final ObjectBundle _bundle;
	private final LGame _game;
	/**
	 * Creates the screen process for the given game and applies the
	 * initial settings (scale, input factory, empty screen registry).
	 *
	 * @param game owning game instance
	 */
	public LProcess(LGame game) {
		this._game = game;
		this._bundle = new ObjectBundle();
		this._screenMap = new ListMap<CharSequence, Screen>();
		this.initSetting();
	}
	/**
	 * (Re)applies display scaling, installs the default input factory and
	 * clears all pending callbacks and registered screens.
	 *
	 * @return this process, for chaining
	 */
	public LProcess initSetting() {
		if (_game != null) {
			LSetting setting = _game.setting;
			setting.updateScale();
			setInputFactory(null);
			clearProcess();
		}
		return this;
	}
public LProcess setInputFactory(SysInputFactory factory) {
if (factory == null) {
this._currentInput = new SysInputFactoryImpl();
} else {
this._currentInput = factory;
}
InputMake input = _game.input();
if (input != null) {
if (input.mouseEvents.hasConnections()) {
input.mouseEvents.clearConnections();
}
if (input.touchEvents.hasConnections()) {
input.touchEvents.clearConnections();
}
if (input.keyboardEvents.hasConnections()) {
input.keyboardEvents.clearConnections();
}
if (input != null) {
if (!_game.setting.emulateTouch && !_game.isMobile() && !_game.input().hasTouch()) {
input.mouseEvents.connect(new MouseMake.ButtonSlot() {
@Override
public void onEmit(MouseMake.ButtonEvent event) {
_currentInput.callMouse(event);
}
});
} else {
input.touchEvents.connect(new Port<TouchMake.Event[]>() {
@Override
public void onEmit(TouchMake.Event[] events) {
_currentInput.callTouch(events);
}
});
}
input.keyboardEvents.connect(new KeyMake.KeyPort() {
@Override
public void onEmit(KeyMake.KeyEvent e) {
_currentInput.callKey(e);
}
});
}
}
return this;
}
	// Returns the currently installed input factory.
	public SysInputFactory getSysInputFactory() {
		return this._currentInput;
	}
	// Delegates the global shader source to LSystem.
	public LProcess setShaderSource(ShaderSource src) {
		LSystem.setShaderSource(src);
		return this;
	}
	public ShaderSource getShaderSource() {
		return LSystem.getShaderSource();
	}
	/**
	 * Drains the given list and runs every queued Updateable once.
	 *
	 * The list is snapshotted and cleared under its own lock so callbacks may
	 * safely enqueue new entries while this runs; the class-level lock
	 * serializes whole drain passes. Each callback failure is logged and does
	 * not stop the remaining callbacks.
	 */
	private final static void callUpdateable(final TArray<Updateable> list) {
		synchronized (LProcess.class) {
			TArray<Updateable> loadCache;
			synchronized (list) {
				loadCache = new TArray<Updateable>(list);
				list.clear();
			}
			for (int i = 0, size = loadCache.size; i < size; i++) {
				Updateable r = loadCache.get(i);
				if (r == null) {
					continue;
				}
				synchronized (r) {
					try {
						r.action(null);
					} catch (Throwable cause) {
						LSystem.error("Updateable dispatch failure", cause);
					}
				}
			}
			loadCache = null;
		}
	}
	public final SysInputFactory getCurrentSysInput() {
		return _currentInput;
	}
	// Queue a callback to run on the next resume() pass.
	public boolean addResume(Updateable u) {
		synchronized (resumes) {
			return resumes.add(u);
		}
	}
	public boolean removeResume(Updateable u) {
		synchronized (resumes) {
			return resumes.remove(u);
		}
	}
	// --- Load start ---//
	// Callbacks queued here are drained once by load().
	public boolean addLoad(Updateable u) {
		synchronized (loads) {
			return loads.add(u);
		}
	}
	public boolean containsLoad(Updateable u) {
		synchronized (loads) {
			return loads.contains(u);
		}
	}
	public boolean removeLoad(Updateable u) {
		synchronized (loads) {
			return loads.remove(u);
		}
	}
	public void removeAllLoad() {
		synchronized (loads) {
			loads.clear();
		}
	}
	// Runs and clears all queued load callbacks (only while a screen is active).
	public void load() {
		if (_isInstance) {
			final int count = loads.size;
			if (count > 0) {
				callUpdateable(loads);
			}
		}
	}
	// --- Load end ---//
	// --- UnLoad start ---//
	// Callbacks queued here are drained once by unload().
	public boolean addUnLoad(Updateable u) {
		synchronized (unloads) {
			return unloads.add(u);
		}
	}
	public boolean containsUnLoad(Updateable u) {
		synchronized (unloads) {
			return unloads.contains(u);
		}
	}
	public boolean removeUnLoad(Updateable u) {
		synchronized (unloads) {
			return unloads.remove(u);
		}
	}
	public void removeAllUnLoad() {
		synchronized (unloads) {
			unloads.clear();
		}
	}
	// Runs and clears all queued unload callbacks (only while a screen is active).
	public void unload() {
		if (_isInstance) {
			final int count = unloads.size;
			if (count > 0) {
				callUpdateable(unloads);
			}
		}
	}
	// --- UnLoad end ---//
	/**
	 * Installs a new screen: chooses a transition, tears down the previous
	 * screen, and schedules the new screen's onLoad lifecycle on the realtime
	 * process manager.
	 *
	 * @param screen the screen to install (must not be null)
	 * @param put    when true, also register the screen in the screen map
	 */
	private void setScreen(final Screen screen, boolean put) {
		// A loading screen that has finished loading blocks further switches here.
		if (_loadingScreen != null && _loadingScreen.isOnLoadComplete()) {
			return;
		}
		try {
			synchronized (this) {
				if (screen == null) {
					this._isInstance = false;
					throw new LSysException("Cannot create a [Screen] instance !");
				}
				if (!_game.displayImpl.showLogo) {
					if (_currentScreen != null) {
						setTransition(screen.onTransition());
					} else {
						// * To avoid a monotonous opening, when no transition has been set
						// * Loon gives the very first Screen one randomly chosen transition.
						// * To opt out, or to pick your own, override Screen.onTransition():
						// * return LTransition.newEmpty() to disable, or return a
						// * configured/custom LTransition instance to customize.
						LTransition _transition = screen.onTransition();
						if (_transition == null) {
							int rad = MathUtils.random(0, 12);
							switch (rad) {
							case 0:
								_transition = LTransition.newFadeIn();
								break;
							case 1:
								_transition = LTransition.newArc();
								break;
							case 2:
								_transition = LTransition.newSplitRandom(LColor.black);
								break;
							case 3:
								_transition = LTransition.newCrossRandom(LColor.black);
								break;
							case 4:
								_transition = LTransition.newFadeOvalIn(LColor.black);
								break;
							case 5:
								_transition = LTransition.newPixelWind(LColor.white);
								break;
							case 6:
								_transition = LTransition.newPixelDarkOut(LColor.black);
								break;
							case 7:
								_transition = LTransition.newPixelThunder(LColor.black);
								break;
							case 8:
								_transition = LTransition.newFadeDotIn(LColor.black);
								break;
							case 9:
								_transition = LTransition.newFadeTileIn(LColor.black);
								break;
							case 10:
								_transition = LTransition.newFadeSpiralIn(LColor.black);
								break;
							case 11:
								_transition = LTransition.newFadeSwipeIn(LColor.black);
								break;
							case 12:
								_transition = LTransition.newFadeBoardIn(LColor.black);
								break;
							}
						}
						setTransition(_transition);
					}
				}
				_game.displayImpl.clearLog();
				screen.setOnLoadState(false);
				// First screen is installed directly; later screens replace the old one.
				if (_currentScreen == null) {
					_currentScreen = screen;
				} else {
					killScreen(screen);
				}
				this._isInstance = true;
				if (screen instanceof EmulatorListener) {
					setEmulatorListener((EmulatorListener) screen);
				} else {
					setEmulatorListener(null);
				}
				screen.onCreate(LSystem.viewSize.getWidth(), LSystem.viewSize.getHeight());
				// Defer the load lifecycle to a one-shot realtime process so it runs
				// on the game loop (after any logo display has finished).
				RealtimeProcess process = new RealtimeProcess() {
					@Override
					public void run(LTimerContext time) {
						if (_game != null && !_game.displayImpl.showLogo) {
							try {
								startTransition();
								screen.setClose(false);
								screen.resetOrder();
								screen.resetSize();
								screen.onLoad();
								screen.onLoaded();
								screen.setOnLoadState(true);
								screen.resume();
								endTransition();
							} catch (Throwable cause) {
								LSystem.error("Screen onLoad dispatch failed: " + screen, cause);
							} finally {
								kill();
							}
						}
					}
				};
				process.setProcessType(GameProcessType.Initialize);
				process.setDelay(0);
				RealtimeProcessManager.get().addProcess(process);
				if (put) {
					addScreen(screen);
				}
				_loadingScreen = null;
			}
		} catch (Throwable cause) {
			LSystem.error("Update Screen failed: " + screen, cause);
		}
	}
private void killScreen(Screen screen) {
try {
synchronized (_currentScreen) {
if (_currentScreen != null) {
_currentScreen.destroy();
}
if (screen == _currentScreen) {
screen.pause();
}
screen.destroy();
_currentScreen = screen;
}
} catch (Throwable cause) {
LSystem.error("Destroy screen failure", cause);
}
}
	// Starts the process, installing any pending loading screen first.
	public void start() {
		if (!_running) {
			if (_loadingScreen != null) {
				setScreen(_loadingScreen);
			}
			_running = true;
		}
	}
	// Propagates a viewport resize to the active screen and resets input state.
	public void resize(int w, int h) {
		if (_isInstance) {
			_currentInput.reset();
			_currentScreen.resetSize(w, h);
		}
	}
	// Drains queued resume callbacks, resets input and resumes the screen.
	public void resume() {
		if (_isInstance) {
			final int count = resumes.size;
			if (count > 0) {
				callUpdateable(resumes);
			}
			_currentInput.reset();
			_currentScreen.resume();
		}
	}
	// Resets input and pauses the active screen.
	public void pause() {
		if (_isInstance) {
			_currentInput.reset();
			_currentScreen.pause();
		}
	}
	public void resetTouch() {
		_currentInput.resetSysTouch();
	}
	// (Re)creates or clears all callback queues and destroys registered screens.
	public void clearProcess() {
		if (resumes == null) {
			resumes = new TArray<Updateable>();
		} else {
			resumes.clear();
		}
		if (loads == null) {
			loads = new TArray<Updateable>();
		} else {
			loads.clear();
		}
		if (unloads == null) {
			unloads = new TArray<Updateable>();
		} else {
			unloads.clear();
		}
		clearScreens();
	}
public boolean next() {
if (_isInstance) {
if (_currentScreen.next() && !LSystem.PAUSED) {
return true;
}
}
return false;
}
	/**
	 * Per-frame update: while a transition is active its animation is driven;
	 * otherwise the tick is forwarded to the active screen.
	 */
	public void runTimer(LTimerContext context) {
		if (_isInstance) {
			if (_waitTransition) {
				if (_transition != null) {
					switch (_transition.code) {
					default:
						// Non-code-1 transitions run only until the screen has loaded.
						if (!_currentScreen.isOnLoadComplete()) {
							_transition.update(context.timeSinceLastUpdate);
						}
						break;
					case 1:
						// Code-1 transitions run to their own completion.
						if (!_transition.completed()) {
							_transition.update(context.timeSinceLastUpdate);
						} else {
							endTransition();
						}
						break;
					}
				}
			} else {
				_currentScreen.runTimer(context);
				return;
			}
		}
	}
	/**
	 * Per-frame render: draws the transition overlay while one is active
	 * (optionally with the game UI underneath), otherwise the screen itself.
	 */
	public void draw(GLEx g) {
		if (_isInstance) {
			if (_waitTransition) {
				if (_transition != null) {
					if (_transition.isDisplayGameUI) {
						_currentScreen.createUI(g);
					}
					switch (_transition.code) {
					default:
						if (!_currentScreen.isOnLoadComplete()) {
							_transition.draw(g);
						}
						break;
					case 1:
						if (!_transition.completed()) {
							_transition.draw(g);
						}
						break;
					}
				}
			} else {
				_currentScreen.createUI(g);
				return;
			}
		}
	}
	// Pre-pass hook, skipped while a transition is running.
	public void drawFrist(GLEx g) {
		if (_isInstance && !_waitTransition) {
			_currentScreen.drawFrist(g);
		}
	}
	// Post-pass hook, skipped while a transition is running.
	public void drawLast(GLEx g) {
		if (_isInstance && !_waitTransition) {
			_currentScreen.drawLast(g);
		}
	}
	// Renders the on-screen emulator buttons, if enabled.
	public void drawEmulator(GLEx gl) {
		if (_emulatorButtons != null) {
			_emulatorButtons.draw(gl);
		}
	}
	// The getters below delegate to the active screen, returning a neutral
	// default when no screen is installed.
	public LColor getBackgroundColor() {
		if (_isInstance) {
			return _currentScreen.getBackgroundColor();
		}
		return null;
	}
	public float getScaleX() {
		if (_isInstance) {
			return _currentScreen.getScaleX();
		}
		return 1f;
	}
	public float getScaleY() {
		if (_isInstance) {
			return _currentScreen.getScaleY();
		}
		return 1f;
	}
	public boolean isFlipX() {
		if (_isInstance) {
			return _currentScreen.isFlipX();
		}
		return false;
	}
	public boolean isFlipY() {
		if (_isInstance) {
			return _currentScreen.isFlipY();
		}
		return false;
	}
	public float getRotation() {
		if (_isInstance) {
			return _currentScreen.getRotation();
		}
		return 0;
	}
	public LTexture getBackground() {
		// NOTE(review): unlike the getters above this uses "||", so an active
		// instance with a null _currentScreen would NPE - presumably
		// _isInstance implies _currentScreen != null; confirm intent.
		if (_isInstance || _currentScreen != null) {
			return _currentScreen.getBackground();
		}
		return null;
	}
	public int getRepaintMode() {
		if (_isInstance) {
			return _currentScreen.getRepaintMode();
		}
		return Screen.SCREEN_NOT_REPAINT;
	}
	/**
	 * Sets the emulator button listener. Passing a non-null listener lazily
	 * creates the on-screen emulator buttons; passing null removes them.
	 *
	 * @param emulator the listener to drive the emulator buttons, or null
	 */
	public void setEmulatorListener(EmulatorListener emulator) {
		this.emulatorListener = emulator;
		if (emulatorListener != null) {
			if (_emulatorButtons == null) {
				_emulatorButtons = new EmulatorButtons(emulatorListener, LSystem.viewSize.getWidth(),
						LSystem.viewSize.getHeight());
			} else {
				_emulatorButtons.setEmulatorListener(emulator);
			}
		} else {
			_emulatorButtons = null;
		}
	}
	/**
	 * Returns the current emulator listener (may be null).
	 *
	 * @return the emulator listener, or null when disabled
	 */
	public EmulatorListener getEmulatorListener() {
		return emulatorListener;
	}
	/**
	 * Returns the on-screen emulator buttons (may be null when disabled).
	 *
	 * @return the emulator buttons, or null
	 */
	public EmulatorButtons getEmulatorButtons() {
		return _emulatorButtons;
	}
	// Assigns an id to the active screen (no-op when none is installed).
	public void setScreenID(int _curId) {
		if (_isInstance) {
			_currentScreen.setID(_curId);
		}
	}
public int getScreenID() {
return _isInstance ? -1 : _currentScreen.getID();
}
	// Id of this process itself (distinct from the active screen's id).
	public void setID(int i) {
		this._curId = i;
	}
	public int getID() {
		return _curId;
	}
	// Installs the transition to play on the next screen switch.
	public final void setTransition(LTransition t) {
		this._transition = t;
	}
	public final boolean isTransitioning() {
		return _waitTransition;
	}
	public boolean isTransitionCompleted() {
		return !_waitTransition;
	}
	public final LTransition getTransition() {
		return this._transition;
	}
	// Marks the transition as running and locks the screen against input.
	private final void startTransition() {
		if (_transition != null) {
			_waitTransition = true;
			if (_isInstance) {
				_currentScreen.setLock(true);
			}
		}
	}
	// Finishes the transition (code-1 transitions only end once completed)
	// and unlocks the screen.
	private final void endTransition() {
		if (_transition != null) {
			switch (_transition.code) {
			default:
				_waitTransition = false;
				_transition.close();
				break;
			case 1:
				if (_transition.completed()) {
					_waitTransition = false;
					_transition.close();
				}
				break;
			}
			if (_isInstance) {
				_currentScreen.setLock(false);
			}
		} else {
			_waitTransition = false;
		}
	}
	// Delegation getters with neutral defaults when no screen is installed.
	public LColor getColor() {
		if (_isInstance) {
			return _currentScreen.getColor();
		}
		return LColor.white;
	}
	public float getX() {
		if (_isInstance) {
			return _currentScreen.getX();
		}
		return 0;
	}
	public float getY() {
		if (_isInstance) {
			return _currentScreen.getY();
		}
		return 0;
	}
	// Scratch vector reused by convertXY to avoid per-call allocation
	// (NOTE: not thread-safe; the returned instance is shared).
	private final Vector2f _pointLocaltion = new Vector2f();
	/**
	 * Converts window coordinates into screen-local coordinates, undoing the
	 * global display scale and the active screen's scale/rotation/flip.
	 *
	 * @param x window x
	 * @param y window y
	 * @return a shared, mutable Vector2f holding the converted point
	 */
	public Vector2f convertXY(float x, float y) {
		// Remove the screen offset and global display scaling first.
		float newX = ((x - getX()) / (LSystem.getScaleWidth()));
		float newY = ((y - getY()) / (LSystem.getScaleHeight()));
		if (_isInstance && _currentScreen.isTxUpdate()) {
			// Centering offsets introduced by the screen's own scale.
			float oldW = getWidth();
			float oldH = getHeight();
			float newW = getWidth() * getScaleX();
			float newH = getHeight() * getScaleY();
			float offX = oldW / 2f - newW / 2f;
			float offY = oldH / 2f - newH / 2f;
			float posX = (newX - offX);
			float posY = (newY - offY);
			final int r = (int) getRotation();
			// Axis-aligned rotations get exact inverse mappings.
			switch (r) {
			case -90:
				offX = oldH / 2f - newW / 2f;
				offY = oldW / 2f - newH / 2f;
				posX = (newX - offY);
				posY = (newY - offX);
				_pointLocaltion.set(posX / getScaleX(), posY / getScaleY()).rotateSelf(-90);
				_pointLocaltion.set(-(_pointLocaltion.x - getWidth()), MathUtils.abs(_pointLocaltion.y));
				break;
			case 0:
			case 360:
				_pointLocaltion.set(posX / getScaleX(), posY / getScaleY());
				break;
			case 90:
				offX = oldH / 2f - newW / 2f;
				offY = oldW / 2f - newH / 2f;
				posX = (newX - offY);
				posY = (newY - offX);
				_pointLocaltion.set(posX / getScaleX(), posY / getScaleY()).rotateSelf(90);
				_pointLocaltion.set(-_pointLocaltion.x, MathUtils.abs(_pointLocaltion.y - getHeight()));
				break;
			case -180:
			case 180:
				_pointLocaltion.set(posX / getScaleX(), posY / getScaleY()).rotateSelf(getRotation())
						.addSelf(getWidth(), getHeight());
				break;
			default: // in principle, touch points at non-axis-aligned angles are not handled exactly
				float rad = MathUtils.toRadians(getRotation());
				float sin = MathUtils.sin(rad);
				float cos = MathUtils.cos(rad);
				float dx = offX / getScaleX();
				float dy = offY / getScaleY();
				float dx2 = cos * dx - sin * dy;
				float dy2 = sin * dx + cos * dy;
				_pointLocaltion.x = getWidth() - (newX - dx2);
				_pointLocaltion.y = getHeight() - (newY - dy2);
				break;
			}
		} else {
			_pointLocaltion.set(newX, newY);
		}
		// Mirror across the screen centre when the screen is flipped.
		if (isFlipX() || isFlipY()) {
			HelperUtils.local2Global(isFlipX(), isFlipY(), getWidth() / 2, getHeight() / 2, _pointLocaltion.x,
					_pointLocaltion.y, _pointLocaltion);
			return _pointLocaltion;
		}
		return _pointLocaltion;
	}
public Screen getScreen() {
    // The screen currently driving input and rendering (may be null).
    return _currentScreen;
}
public LProcess clearScreens() {
    // Destroy every registered screen, then drop all registrations; chainable.
    for (Screen registered : _screenMap) {
        if (registered == null) {
            continue;
        }
        registered.destroy();
    }
    _screenMap.clear();
    return this;
}
public LProcess addScreen(CharSequence name, Screen screen) {
    // Register a screen under the given key; an empty name maps to the
    // generic UNKNOWN key. Existing registrations are never overwritten.
    if (screen == null) {
        throw new LSysException("Cannot create a Screen instance !");
    }
    final CharSequence key = StringUtils.isEmpty(name) ? LSystem.UNKNOWN : name;
    if (!_screenMap.containsKey(key)) {
        _screenMap.put(key, screen);
    }
    return this;
}
public LProcess addScreen(final Screen screen) {
    // Register a screen under its own name unless it is already registered.
    if (screen == null) {
        throw new LSysException("Cannot create a Screen instance !");
    }
    if (!_screenMap.containsValue(screen)) {
        addScreen(screen.getName(), screen);
    }
    return this;
}
public boolean containsScreen(CharSequence name) {
    // True when a screen has been registered under this key.
    return _screenMap.containsKey(name);
}
public boolean containsScreenValue(Screen screen) {
    // True when this exact screen instance is registered.
    return _screenMap.containsValue(screen);
}
public Screen getScreen(CharSequence name) {
    // Look up a registered screen by key; null when absent.
    return _screenMap.get(name);
}
public Screen runScreenClassName(CharSequence name) {
    // Activate the first registered screen whose getName() matches; null when
    // nothing matched.
    for (Screen candidate : _screenMap) {
        if (candidate == null) {
            continue;
        }
        if (name.equals(candidate.getName())) {
            setScreen(candidate, false);
            return candidate;
        }
    }
    return null;
}
public Screen runScreenName(CharSequence name) {
    // Activate the first registered screen whose getScreenName() matches;
    // null when nothing matched.
    for (Screen candidate : _screenMap) {
        if (candidate == null) {
            continue;
        }
        if (name.equals(candidate.getScreenName())) {
            setScreen(candidate, false);
            return candidate;
        }
    }
    return null;
}
public Screen runScreen(CharSequence name) {
    // Look the screen up by its registration key and make it current.
    final Screen found = getScreen(name);
    if (found == null) {
        return null;
    }
    setScreen(found, false);
    return found;
}
public void runPopScreen() {
    // Remove the screen on top of the registration stack and switch to it,
    // unless it is already the active one.
    if (_screenMap.size > 0) {
        final Screen top = _screenMap.pop();
        if (top != _currentScreen) {
            setScreen(top, false);
        }
    }
}
public void runPeekScreen() {
    // Alias: "peek" activates the most recently registered screen.
    runLastScreen();
}
public void runFirstScreen() {
    // Jump to the earliest registered screen when it is not already current.
    if (_screenMap.size > 0) {
        final Screen first = _screenMap.first();
        if (first != _currentScreen) {
            setScreen(first, false);
        }
    }
}
public void runLastScreen() {
    // Jump to the most recently registered screen when it is not current.
    if (_screenMap.size > 0) {
        final Screen last = _screenMap.last();
        if (last != _currentScreen) {
            setScreen(last, false);
        }
    }
}
public void runPreviousScreen() {
    // Step back to the screen registered immediately before the current one.
    final int count = _screenMap.size;
    for (int idx = 0; idx < count; idx++) {
        if (_currentScreen == _screenMap.getValueAt(idx) && idx > 0) {
            setScreen(_screenMap.getValueAt(idx - 1), false);
            return;
        }
    }
}
public void runNextScreen() {
    // Step forward to the screen registered immediately after the current one.
    final int count = _screenMap.size;
    for (int idx = 0; idx < count; idx++) {
        if (_currentScreen == _screenMap.getValueAt(idx) && idx + 1 < count) {
            setScreen(_screenMap.getValueAt(idx + 1), false);
            return;
        }
    }
}
/**
 * Activates the screen stored at the given position. Out-of-range indexes
 * and requests for the already-active screen are ignored.
 *
 * Fixes: the original fetched the entry as a raw {@code Object} and then
 * called {@code getValueAt(index)} a second time to activate it; the typed
 * local removes the redundant lookup.
 *
 * @param index zero-based position in the registration order
 */
public void runIndexScreen(int index) {
    if (index > -1 && index < _screenMap.size) {
        final Screen target = _screenMap.getValueAt(index);
        if (_currentScreen != target) {
            setScreen(target, false);
        }
    }
}
/**
 * Returns true when this exact screen instance is registered.
 *
 * Fixes: the original threw "Cannot create a Screen instance !" for a null
 * argument - a message copy-pasted from the addScreen() factories that does
 * not describe this containment check.
 *
 * @param screen screen to look for; must not be null
 * @throws LSysException when {@code screen} is null
 */
public boolean containsScreen(final Screen screen) {
    if (screen == null) {
        throw new LSysException("Screen must not be null !");
    }
    return _screenMap.containsValue(screen);
}
public TArray<Screen> getScreens() {
    // Snapshot array of every registered screen.
    return _screenMap.valuesToArray();
}
public int getScreenCount() {
    // Number of registered screens.
    return _screenMap.size;
}
/**
 * Makes the given screen the pending or current screen. When the engine
 * logo is still showing, the switch is deferred until it finishes.
 *
 * Fixes: a null argument previously crashed with a raw NullPointerException
 * at {@code screen.handler}; it now fails fast with the same LSysException
 * the other screen mutators throw.
 *
 * @param screen screen to activate; must not be null
 * @throws LSysException when {@code screen} is null
 */
public void setScreen(final Screen screen) {
    if (screen == null) {
        throw new LSysException("Cannot create a Screen instance !");
    }
    // A screen that was never bound to a handler gets default order/size.
    if (screen.handler == null) {
        screen.resetOrder();
        screen.resetSize();
    }
    if (_game.setting.isLogo && _game.displayImpl.showLogo) {
        // Logo still on display: remember the screen and switch later.
        _loadingScreen = screen;
    } else {
        setScreen(screen, true);
    }
}
public int getHeight() {
    // Height of the active screen, or 0 when no screen is bound.
    return _isInstance ? _currentScreen.getHeight() : 0;
}
public int getWidth() {
    // Width of the active screen, or 0 when no screen is bound.
    return _isInstance ? _currentScreen.getWidth() : 0;
}
public void setCurrentScreen(final Screen screen) {
    // Replace the current screen, destroying the previous one by default.
    setCurrentScreen(screen, true);
}
/**
 * Immediately replaces the current screen, bypassing transitions.
 *
 * The {@code _isInstance} flag is cleared for the duration of the swap so
 * that concurrent input/render callbacks do not touch a half-initialized
 * screen, and set again once the new screen is fully wired up.
 *
 * @param screen the new screen; a null argument is silently ignored
 * @param closed when true, the previous screen is destroyed
 */
public void setCurrentScreen(final Screen screen, boolean closed) {
    if (screen != null) {
        this._isInstance = false;
        if (closed && _currentScreen != null) {
            _currentScreen.destroy();
        }
        this._currentScreen = screen;
        _currentScreen.setLock(false);
        _currentScreen.setLocation(0, 0);
        _currentScreen.setClose(false);
        _currentScreen.setOnLoadState(true);
        if (screen.getBackground() != null) {
            // A background image requires texture-based repainting.
            _currentScreen.setRepaintMode(Screen.SCREEN_TEXTURE_REPAINT);
        }
        this._isInstance = true;
        // Screens that implement EmulatorListener take over the on-screen
        // emulator buttons; otherwise any previous listener is removed.
        if (screen instanceof EmulatorListener) {
            setEmulatorListener((EmulatorListener) screen);
        } else {
            setEmulatorListener(null);
        }
        addScreen(screen);
    }
}
public void keyDown(GameKey e) {
    // Forward key-press events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.keyPressed(e);
}
public void keyUp(GameKey e) {
    // Forward key-release events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.keyReleased(e);
}
public void keyTyped(GameKey e) {
    // Forward key-typed events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.keyTyped(e);
}
public void mousePressed(GameTouch e) {
    // Forward touch-down events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.mousePressed(e);
}
public void mouseReleased(GameTouch e) {
    // Forward touch-up events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.mouseReleased(e);
}
public void mouseMoved(GameTouch e) {
    // Forward hover/move events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.mouseMoved(e);
}
public void mouseDragged(GameTouch e) {
    // Forward drag events to the active screen, if any.
    if (!_isInstance) {
        return;
    }
    _currentScreen.mouseDragged(e);
}
public LProcess addBundle(String key, Object val) {
    // Store a value in the shared data bundle; chainable.
    _bundle.put(key, val);
    return this;
}
public LProcess removeBundle(String key) {
    // Remove a value from the shared data bundle; chainable.
    _bundle.remove(key);
    return this;
}
public ObjectBundle getBundle() {
    // Shared data bundle for passing values between screens.
    return _bundle;
}
public Screen getCurrentScreen() {
    // Currently active screen (may be null before the first switch).
    return _currentScreen;
}
public Resolution getOriginResolution() {
    // Prefer the configured design resolution; fall back to the live view size.
    if (_game == null || _game.setting == null) {
        return new Resolution(LSystem.viewSize.getWidth(), LSystem.viewSize.getHeight());
    }
    return new Resolution(_game.setting.width, _game.setting.height);
}
public Resolution getDisplayResolution() {
    // Prefer the configured zoomed resolution; fall back to the live view size.
    if (_game == null || _game.setting == null) {
        return new Resolution(LSystem.viewSize.getWidth(), LSystem.viewSize.getHeight());
    }
    return new Resolution(_game.setting.width_zoom, _game.setting.height_zoom);
}
public String getOriginResolutionMode() {
    // Named mode string of the design resolution.
    final Resolution origin = getOriginResolution();
    return origin.matchMode();
}
public String getDisplayResolutionMode() {
    // Named mode string of the zoomed display resolution.
    final Resolution display = getDisplayResolution();
    return display.matchMode();
}
public LGame getGame() {
    // Owning game instance.
    return _game;
}
/**
 * Shuts the process down: stops the running screen, ends any pending
 * transition, clears the load/unload/resume callback queues and destroys
 * the current screen. Safe to call more than once - the second call finds
 * {@code _isInstance} already false and does nothing further.
 */
@Override
public void close() {
    _running = false;
    if (_isInstance) {
        // Stop the screen before tearing anything down.
        _currentScreen.stop();
    }
    endTransition();
    if (_isInstance) {
        // Mark the process as having no live screen before destroying it so
        // callbacks fired during teardown see a consistent state.
        _isInstance = false;
        if (loads != null) {
            loads.clear();
        }
        if (unloads != null) {
            unloads.clear();
        }
        if (resumes != null) {
            resumes.clear();
        }
        if (_currentScreen != null) {
            _currentScreen.destroy();
            _currentScreen = null;
        }
    }
}
}
| 10,343 |
7,892 | /*
Copyright 2012-2016 <NAME> <http://drobilla.net>
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/**
@defgroup patch Patch
Messages for accessing and manipulating properties, see
<http://lv2plug.in/ns/ext/patch> for details.
Note the patch extension is purely data, this header merely defines URIs for
convenience.
@{
*/
#ifndef LV2_PATCH_H
#define LV2_PATCH_H
#define LV2_PATCH_URI "http://lv2plug.in/ns/ext/patch" ///< http://lv2plug.in/ns/ext/patch
#define LV2_PATCH_PREFIX LV2_PATCH_URI "#" ///< http://lv2plug.in/ns/ext/patch#
#define LV2_PATCH__Ack LV2_PATCH_PREFIX "Ack" ///< http://lv2plug.in/ns/ext/patch#Ack
#define LV2_PATCH__Delete LV2_PATCH_PREFIX "Delete" ///< http://lv2plug.in/ns/ext/patch#Delete
#define LV2_PATCH__Copy LV2_PATCH_PREFIX "Copy" ///< http://lv2plug.in/ns/ext/patch#Copy
#define LV2_PATCH__Error LV2_PATCH_PREFIX "Error" ///< http://lv2plug.in/ns/ext/patch#Error
#define LV2_PATCH__Get LV2_PATCH_PREFIX "Get" ///< http://lv2plug.in/ns/ext/patch#Get
#define LV2_PATCH__Message LV2_PATCH_PREFIX "Message" ///< http://lv2plug.in/ns/ext/patch#Message
#define LV2_PATCH__Move LV2_PATCH_PREFIX "Move" ///< http://lv2plug.in/ns/ext/patch#Move
#define LV2_PATCH__Patch LV2_PATCH_PREFIX "Patch" ///< http://lv2plug.in/ns/ext/patch#Patch
#define LV2_PATCH__Post LV2_PATCH_PREFIX "Post" ///< http://lv2plug.in/ns/ext/patch#Post
#define LV2_PATCH__Put LV2_PATCH_PREFIX "Put" ///< http://lv2plug.in/ns/ext/patch#Put
#define LV2_PATCH__Request LV2_PATCH_PREFIX "Request" ///< http://lv2plug.in/ns/ext/patch#Request
#define LV2_PATCH__Response LV2_PATCH_PREFIX "Response" ///< http://lv2plug.in/ns/ext/patch#Response
#define LV2_PATCH__Set LV2_PATCH_PREFIX "Set" ///< http://lv2plug.in/ns/ext/patch#Set
#define LV2_PATCH__accept LV2_PATCH_PREFIX "accept" ///< http://lv2plug.in/ns/ext/patch#accept
#define LV2_PATCH__add LV2_PATCH_PREFIX "add" ///< http://lv2plug.in/ns/ext/patch#add
#define LV2_PATCH__body LV2_PATCH_PREFIX "body" ///< http://lv2plug.in/ns/ext/patch#body
#define LV2_PATCH__context LV2_PATCH_PREFIX "context" ///< http://lv2plug.in/ns/ext/patch#context
#define LV2_PATCH__destination LV2_PATCH_PREFIX "destination" ///< http://lv2plug.in/ns/ext/patch#destination
#define LV2_PATCH__property LV2_PATCH_PREFIX "property" ///< http://lv2plug.in/ns/ext/patch#property
#define LV2_PATCH__readable LV2_PATCH_PREFIX "readable" ///< http://lv2plug.in/ns/ext/patch#readable
#define LV2_PATCH__remove LV2_PATCH_PREFIX "remove" ///< http://lv2plug.in/ns/ext/patch#remove
#define LV2_PATCH__request LV2_PATCH_PREFIX "request" ///< http://lv2plug.in/ns/ext/patch#request
#define LV2_PATCH__subject LV2_PATCH_PREFIX "subject" ///< http://lv2plug.in/ns/ext/patch#subject
#define LV2_PATCH__sequenceNumber LV2_PATCH_PREFIX "sequenceNumber" ///< http://lv2plug.in/ns/ext/patch#sequenceNumber
#define LV2_PATCH__value LV2_PATCH_PREFIX "value" ///< http://lv2plug.in/ns/ext/patch#value
#define LV2_PATCH__wildcard LV2_PATCH_PREFIX "wildcard" ///< http://lv2plug.in/ns/ext/patch#wildcard
#define LV2_PATCH__writable LV2_PATCH_PREFIX "writable" ///< http://lv2plug.in/ns/ext/patch#writable
#endif /* LV2_PATCH_H */
/**
@}
*/
| 1,962 |
323 | /*
* Copyright (c) 2014 <NAME> on behalf of AirPair.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.ultimate.camera.fragments;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import com.ultimate.camera.R;
import com.ultimate.camera.activities.MainActivity;
/**
* Example of loading an image into an image view using the image picker.
*
* Created by <NAME> (on behalf of AirPair.com) on 3/4/14.
*/
/**
 * Example of loading an image into an image view using the image picker.
 *
 * Created by <NAME> (on behalf of AirPair.com) on 3/4/14.
 */
public class SimpleAndroidImagePickerFragment extends BaseFragment implements Button.OnClickListener {

    // Request code identifying our image-picker activity result.
    private static final int IMAGE_PICKER_SELECT = 999;

    // Image view that displays the picked photo.
    private ImageView mSelectedImage;

    // Button that launches the system image picker.
    private Button mPickPhotoButton;

    /**
     * Default empty constructor (required for fragment re-instantiation).
     */
    public SimpleAndroidImagePickerFragment() {
        super();
    }

    /**
     * Static factory method.
     *
     * @param sectionNumber navigation section this fragment belongs to
     * @return a configured fragment instance
     */
    public static SimpleAndroidImagePickerFragment newInstance(int sectionNumber) {
        SimpleAndroidImagePickerFragment fragment = new SimpleAndroidImagePickerFragment();
        Bundle args = new Bundle();
        args.putInt(ARG_SECTION_NUMBER, sectionNumber);
        fragment.setArguments(args);
        return fragment;
    }

    /**
     * Inflates the layout and wires up the picker button.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_photo_picker, container, false);
        mSelectedImage = (ImageView) view.findViewById(R.id.imageViewFullSized);
        mPickPhotoButton = (Button) view.findViewById(R.id.button);
        // Get notified on button clicks.
        mPickPhotoButton.setOnClickListener(this);
        return view;
    }

    @Override
    public void onClick(View view) {
        // Open the system gallery picker; the result arrives in onActivityResult().
        Intent i = new Intent(Intent.ACTION_PICK, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
        startActivityForResult(i, IMAGE_PICKER_SELECT);
    }

    /**
     * Photo selection result: decode the picked image and display it.
     * A null bitmap (unresolvable URI) leaves the previous image in place.
     */
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == IMAGE_PICKER_SELECT && resultCode == Activity.RESULT_OK) {
            MainActivity activity = (MainActivity) getActivity();
            Bitmap bitmap = getBitmapFromCameraData(data, activity);
            if (bitmap != null) {
                mSelectedImage.setImageBitmap(bitmap);
            }
        }
    }

    /**
     * Scale the photo down and fit it to our image view.
     *
     * "Drastically increases performance" to set images using this technique.
     * Read more: http://developer.android.com/training/camera/photobasics.html
     *
     * Fixes: the original divided by the view dimensions without a zero guard
     * (ArithmeticException for an unmeasured view) and could compute an
     * inSampleSize of 0 for photos smaller than the view.
     */
    private void setFullImageFromFilePath(String imagePath) {
        // Get the dimensions of the View
        int targetW = mSelectedImage.getWidth();
        int targetH = mSelectedImage.getHeight();

        // Get the dimensions of the bitmap without decoding the pixels.
        BitmapFactory.Options bmOptions = new BitmapFactory.Options();
        bmOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(imagePath, bmOptions);
        int photoW = bmOptions.outWidth;
        int photoH = bmOptions.outHeight;

        // Determine how much to scale down the image; inSampleSize must be
        // at least 1, and the view may not have been measured yet.
        int scaleFactor = 1;
        if (targetW > 0 && targetH > 0) {
            scaleFactor = Math.max(1, Math.min(photoW / targetW, photoH / targetH));
        }

        // Decode the image file into a Bitmap sized to fill the View
        bmOptions.inJustDecodeBounds = false;
        bmOptions.inSampleSize = scaleFactor;
        bmOptions.inPurgeable = true;
        Bitmap bitmap = BitmapFactory.decodeFile(imagePath, bmOptions);
        mSelectedImage.setImageBitmap(bitmap);
    }

    /**
     * Use for decoding camera response data.
     *
     * Fixes: the original leaked the cursor when getString()/decode threw,
     * and crashed when the provider returned a null cursor, an empty cursor,
     * or no DATA column.
     *
     * @param data    picker result intent carrying the image URI
     * @param context context used to resolve the URI
     * @return the decoded bitmap, or null when the URI cannot be resolved
     */
    public static Bitmap getBitmapFromCameraData(Intent data, Context context) {
        Uri selectedImage = data.getData();
        String[] filePathColumn = { MediaStore.Images.Media.DATA };
        Cursor cursor = context.getContentResolver().query(selectedImage, filePathColumn, null, null, null);
        if (cursor == null) {
            // Provider returned nothing for this URI.
            return null;
        }
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
            if (columnIndex < 0) {
                return null;
            }
            String picturePath = cursor.getString(columnIndex);
            return BitmapFactory.decodeFile(picturePath);
        } finally {
            // Always release the cursor, even when decoding throws.
            cursor.close();
        }
    }
}
| 2,084 |
3,384 | <reponame>teemobean/XVim<filename>XcodeClasses/Xcode7.0/Developer/Platforms/iPhoneOS.platform/Developer/Library/PrivateFrameworks/GPUToolsMobileKit.h
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#pragma mark -
//
// File: /Applications/Xcode-7GM.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/Library/PrivateFrameworks/GPUToolsMobileKit.framework/Versions/A/GPUToolsMobileKit
// UUID: 70693A07-6438-3FD2-B62E-39966DE8B0A3
//
// Arch: x86_64
// Current version: 18017.0.0
// Compatibility version: 1.0.0
// Source version: 18017.0.0.0.0
// Minimum Mac OS X version: 10.10.0
// SDK version: 10.11.0
//
// Objective-C Garbage Collection: Unsupported
//
// Reverse-engineered (class-dump) declaration of DYMobileAppIconCache.
// NOTE(review): no implementation is visible here - the comments below are
// inferred from identifier names only; confirm against the binary.
@interface DYMobileAppIconCache : NSObject
{
    struct dispatch_queue_s *_fetchQueue;               // presumably serializes icon fetches - confirm
    struct dispatch_queue_s *_accessQueue;              // presumably guards map access - confirm
    NSMutableDictionary *_identifierToMasterIconMap;    // app identifier -> full-size icon
    NSMutableDictionary *_identifierToSmallIconMap;     // app identifier -> small icon
}

+ (BOOL)accessInstanceVariablesDirectly;
+ (id)smallDefaultMobileAppIcon;
+ (id)defaultMobileAppIcon;
+ (id)sharedMobileAppIconCache;
- (void).cxx_destruct;
- (id)smallIconForIdentifier:(id)arg1;
- (id)iconForIdentifier:(id)arg1;
- (id)cacheIconsForIdentifiers:(id)arg1 device:(id)arg2 force:(BOOL)arg3;
- (void)dealloc;
- (id)init;
@end
| 593 |
2,151 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/metrics/ui/screen_info_metrics_provider.h"
#include "build/build_config.h"
#include "third_party/metrics_proto/system_profile.pb.h"
#include "ui/display/display.h"
#include "ui/display/screen.h"
#if defined(OS_WIN)
#include <windows.h>
#endif
namespace metrics {
#if defined(OS_WIN)
namespace {
// Accumulator for the maximum DPI observed across all connected monitors,
// filled in by GetMonitorDPICallback during monitor enumeration.
struct ScreenDPIInformation {
  double max_dpi_x;  // Highest horizontal DPI seen so far.
  double max_dpi_y;  // Highest vertical DPI seen so far.
};
// Called once for each connected monitor.
// Called once for each connected monitor. |dwData| is the
// ScreenDPIInformation* supplied by the EnumDisplayMonitors caller; the
// callback folds this monitor's DPI into the running maximum.
BOOL CALLBACK GetMonitorDPICallback(HMONITOR, HDC hdc, LPRECT, LPARAM dwData) {
  const double kMillimetersPerInch = 25.4;
  ScreenDPIInformation* screen_info =
      reinterpret_cast<ScreenDPIInformation*>(dwData);
  // Size of screen, in mm.
  DWORD size_x = GetDeviceCaps(hdc, HORZSIZE);
  DWORD size_y = GetDeviceCaps(hdc, VERTSIZE);
  // DPI = pixels / (mm / 25.4). Guard against drivers that report a 0 size.
  double dpi_x = (size_x > 0) ?
      GetDeviceCaps(hdc, HORZRES) / (size_x / kMillimetersPerInch) : 0;
  double dpi_y = (size_y > 0) ?
      GetDeviceCaps(hdc, VERTRES) / (size_y / kMillimetersPerInch) : 0;
  screen_info->max_dpi_x = std::max(dpi_x, screen_info->max_dpi_x);
  screen_info->max_dpi_y = std::max(dpi_y, screen_info->max_dpi_y);
  // TRUE continues the enumeration over the remaining monitors.
  return TRUE;
}
// Records the maximum per-monitor DPI into the hardware proto.
//
// Fixes: the DC was obtained with GetDC(nullptr) but released with
// ReleaseDC(GetDesktopWindow(), ...). Per the Win32 contract ReleaseDC must
// be passed the same window handle that was used to obtain the DC.
void WriteScreenDPIInformationProto(SystemProfileProto::Hardware* hardware) {
  HDC desktop_dc = GetDC(nullptr);
  if (desktop_dc) {
    ScreenDPIInformation si = {0, 0};
    if (EnumDisplayMonitors(desktop_dc, nullptr, GetMonitorDPICallback,
                            reinterpret_cast<LPARAM>(&si))) {
      hardware->set_max_dpi_x(si.max_dpi_x);
      hardware->set_max_dpi_y(si.max_dpi_y);
    }
    ReleaseDC(nullptr, desktop_dc);
  }
}
} // namespace
#endif // defined(OS_WIN)
// Out-of-line defaulted special members (declared in the header).
ScreenInfoMetricsProvider::ScreenInfoMetricsProvider() = default;

ScreenInfoMetricsProvider::~ScreenInfoMetricsProvider() = default;
void ScreenInfoMetricsProvider::ProvideSystemProfileMetrics(
    SystemProfileProto* system_profile_proto) {
  // Record primary-display geometry plus the number of attached screens.
  SystemProfileProto::Hardware* hardware =
      system_profile_proto->mutable_hardware();
  const gfx::Size size = GetScreenSize();
  hardware->set_primary_screen_width(size.width());
  hardware->set_primary_screen_height(size.height());
  hardware->set_primary_screen_scale_factor(GetScreenDeviceScaleFactor());
  hardware->set_screen_count(GetScreenCount());
#if defined(OS_WIN)
  // Per-monitor DPI is only available through the Win32 API.
  WriteScreenDPIInformationProto(hardware);
#endif
}
gfx::Size ScreenInfoMetricsProvider::GetScreenSize() const {
  // Pixel size of the primary display.
  const display::Display primary =
      display::Screen::GetScreen()->GetPrimaryDisplay();
  return primary.GetSizeInPixel();
}
float ScreenInfoMetricsProvider::GetScreenDeviceScaleFactor() const {
return display::Screen::GetScreen()
->GetPrimaryDisplay()
.device_scale_factor();
}
int ScreenInfoMetricsProvider::GetScreenCount() const {
  // Number of displays currently attached.
  return display::Screen::GetScreen()->GetNumDisplays();
}
} // namespace metrics
| 1,074 |
1,673 | <filename>src/ca65/segment.c
/*****************************************************************************/
/* */
/* segment.c */
/* */
/* Segments for the ca65 macroassembler */
/* */
/* */
/* */
/* (C) 1998-2011, <NAME> */
/* Roemerstrasse 52 */
/* D-70794 Filderstadt */
/* EMail: <EMAIL> */
/* */
/* */
/* This software is provided 'as-is', without any expressed or implied */
/* warranty. In no event will the authors be held liable for any damages */
/* arising from the use of this software. */
/* */
/* Permission is granted to anyone to use this software for any purpose, */
/* including commercial applications, and to alter it and redistribute it */
/* freely, subject to the following restrictions: */
/* */
/* 1. The origin of this software must not be misrepresented; you must not */
/* claim that you wrote the original software. If you use this software */
/* in a product, an acknowledgment in the product documentation would be */
/* appreciated but is not required. */
/* 2. Altered source versions must be plainly marked as such, and must not */
/* be misrepresented as being the original software. */
/* 3. This notice may not be removed or altered from any source */
/* distribution. */
/* */
/*****************************************************************************/
#include <string.h>
#include <errno.h>
/* common */
#include "addrsize.h"
#include "alignment.h"
#include "coll.h"
#include "mmodel.h"
#include "segdefs.h"
#include "segnames.h"
#include "xmalloc.h"
/* cc65 */
#include "error.h"
#include "fragment.h"
#include "global.h"
#include "lineinfo.h"
#include "listing.h"
#include "objcode.h"
#include "objfile.h"
#include "segment.h"
#include "span.h"
#include "spool.h"
#include "studyexpr.h"
#include "symtab.h"
/*****************************************************************************/
/* Data */
/*****************************************************************************/
/* If OrgPerSeg is false, all segments share the RelocMode flag and a PC
** used when in absolute mode. OrgPerSeg may be set by .feature org_per_seg
*/
static int RelocMode = 1;
static unsigned long AbsPC = 0; /* PC if in absolute mode */
/* Definitions for predefined segments */
SegDef NullSegDef = STATIC_SEGDEF_INITIALIZER (SEGNAME_NULL, ADDR_SIZE_ABS);
SegDef ZeropageSegDef = STATIC_SEGDEF_INITIALIZER (SEGNAME_ZEROPAGE, ADDR_SIZE_ZP);
SegDef DataSegDef = STATIC_SEGDEF_INITIALIZER (SEGNAME_DATA, ADDR_SIZE_ABS);
SegDef BssSegDef = STATIC_SEGDEF_INITIALIZER (SEGNAME_BSS, ADDR_SIZE_ABS);
SegDef RODataSegDef = STATIC_SEGDEF_INITIALIZER (SEGNAME_RODATA, ADDR_SIZE_ABS);
SegDef CodeSegDef = STATIC_SEGDEF_INITIALIZER (SEGNAME_CODE, ADDR_SIZE_ABS);
/* Collection containing all segments */
Collection SegmentList = STATIC_COLLECTION_INITIALIZER;
/* Currently active segment */
Segment* ActiveSeg;
/*****************************************************************************/
/* Code */
/*****************************************************************************/
static Segment* NewSegFromDef (SegDef* Def)
/* Create a new segment from a segment definition. Used only internally, no
** checks.
*/
{
    /* Allocate and populate the segment structure */
    Segment* S = xmalloc (sizeof (*S));
    S->Root      = 0;
    S->Last      = 0;
    S->FragCount = 0;
    S->Num       = CollCount (&SegmentList);
    S->Flags     = SEG_FLAG_NONE;
    S->Align     = 1;
    S->RelocMode = 1;
    S->PC        = 0;
    S->AbsPC     = 0;
    S->Def       = Def;

    /* Register it in the global list; its list index is its number */
    CollAppend (&SegmentList, S);

    return S;
}
static Segment* NewSegment (const char* Name, unsigned char AddrSize)
/* Create a new segment, insert it into the global list and return it */
{
    /* The object file format limits us to 256 segments */
    if (CollCount (&SegmentList) >= 256) {
        Fatal ("Too many segments");
    }

    /* Reject names that are not valid segment identifiers */
    if (!ValidSegName (Name)) {
        Error ("Illegal segment name: '%s'", Name);
    }

    /* Build a definition and wrap it into a fresh segment */
    return NewSegFromDef (NewSegDef (Name, AddrSize));
}
Fragment* GenFragment (unsigned char Type, unsigned short Len)
/* Generate a new fragment, add it to the current segment and return it. */
{
    /* Create the new fragment */
    Fragment* F = NewFragment (Type, Len);

    /* Append it to the singly linked fragment list of the active segment */
    if (ActiveSeg->Root) {
        ActiveSeg->Last->Next = F;
        ActiveSeg->Last = F;
    } else {
        ActiveSeg->Root = ActiveSeg->Last = F;
    }
    ++ActiveSeg->FragCount;

    /* Attach this fragment to the current listing line, so the listing can
    ** show the bytes generated for that source line.
    */
    if (LineCur) {
        if (LineCur->FragList == 0) {
            LineCur->FragList = F;
        } else {
            LineCur->FragLast->LineList = F;
        }
        LineCur->FragLast = F;
    }

    /* Advance the program counter by the fragment size. The absolute PC is
    ** only tracked while in absolute (.org) mode; which flag/PC pair applies
    ** depends on whether org is switched per segment or globally.
    */
    ActiveSeg->PC += F->Len;
    if (OrgPerSeg) {
        /* Relocatable mode is switched per segment */
        if (!ActiveSeg->RelocMode) {
            ActiveSeg->AbsPC += F->Len;
        }
    } else {
        /* Relocatable mode is switched globally */
        if (!RelocMode) {
            AbsPC += F->Len;
        }
    }

    /* Return the fragment */
    return F;
}
void UseSeg (const SegDef* D)
/* Use the segment with the given name */
{
    unsigned I;
    unsigned Count = CollCount (&SegmentList);

    /* Search for an existing segment with this name */
    for (I = 0; I < Count; ++I) {
        Segment* Seg = CollAtUnchecked (&SegmentList, I);
        if (strcmp (Seg->Def->Name, D->Name) != 0) {
            continue;
        }
        /* Found it. An explicitly requested address size must match. */
        if (D->AddrSize != ADDR_SIZE_DEFAULT &&
            Seg->Def->AddrSize != D->AddrSize) {
            Error ("Segment attribute mismatch");
            /* Use the new attribute to avoid errors */
            Seg->Def->AddrSize = D->AddrSize;
        }
        ActiveSeg = Seg;
        return;
    }

    /* Unknown name: create a segment with the requested or default size */
    if (D->AddrSize == ADDR_SIZE_DEFAULT) {
        ActiveSeg = NewSegment (D->Name, ADDR_SIZE_ABS);
    } else {
        ActiveSeg = NewSegment (D->Name, D->AddrSize);
    }
}
unsigned long GetPC (void)
/* Get the program counter of the current segment */
{
    if (OrgPerSeg) {
        /* Per-segment org: each segment tracks its own mode and absolute PC */
        return ActiveSeg->RelocMode? ActiveSeg->PC : ActiveSeg->AbsPC;
    }
    /* Global org: one shared mode flag and absolute PC for all segments */
    return RelocMode? ActiveSeg->PC : AbsPC;
}
void EnterAbsoluteMode (unsigned long PC)
/* Enter absolute (non relocatable mode). Depending on the OrgPerSeg flag,
** this will either switch the mode globally or for the current segment.
*/
{
    if (OrgPerSeg) {
        /* Only the active segment leaves relocatable mode */
        ActiveSeg->RelocMode = 0;
        ActiveSeg->AbsPC = PC;
        return;
    }
    /* The whole assembly leaves relocatable mode */
    RelocMode = 0;
    AbsPC = PC;
}
int GetRelocMode (void)
/* Return true if we're currently in relocatable mode */
{
    /* The flag lives either in the active segment or in the global state */
    return OrgPerSeg? ActiveSeg->RelocMode : RelocMode;
}
void EnterRelocMode (void)
/* Enter relocatable mode. Depending on the OrgPerSeg flag, this will either
** switch the mode globally or for the current segment.
*/
{
    if (OrgPerSeg) {
        /* Only the active segment returns to relocatable mode */
        ActiveSeg->RelocMode = 1;
        return;
    }
    /* The whole assembly returns to relocatable mode */
    RelocMode = 1;
}
void SegAlign (unsigned long Alignment, int FillVal)
/* Align the PC segment to Alignment. If FillVal is -1, emit fill fragments
** (the actual fill value will be determined by the linker), otherwise use
** the given value.
*/
{
    unsigned char Data [4];
    unsigned long CombinedAlignment;
    unsigned long Count;

    /* The segment must have the combined alignment of all separate alignments
    ** in the source. Calculate this alignment and check it for sanity.
    */
    CombinedAlignment = LeastCommonMultiple (ActiveSeg->Align, Alignment);
    if (CombinedAlignment > MAX_ALIGNMENT) {
        Error ("Combined alignment for active segment is %lu which exceeds %lu",
               CombinedAlignment, MAX_ALIGNMENT);

        /* Avoid creating large fills for an object file that is thrown away
        ** later.
        */
        Count = 1;

    } else {
        /* Remember the new combined alignment for the segment */
        ActiveSeg->Align = CombinedAlignment;

        /* Output a warning for larger alignments if not suppressed */
        if (CombinedAlignment >= LARGE_ALIGNMENT && !LargeAlignment) {
            Warning (0, "Combined alignment is suspiciously large (%lu)",
                     CombinedAlignment);
        }

        /* Calculate the number of fill bytes */
        Count = AlignCount (ActiveSeg->PC, Alignment);

    }

    /* Emit the data or a fill fragment */
    if (FillVal != -1) {
        /* User defined fill value: emit the bytes in small buffered chunks */
        memset (Data, FillVal, sizeof (Data));
        while (Count) {
            if (Count > sizeof (Data)) {
                EmitData (Data, sizeof (Data));
                Count -= sizeof (Data);
            } else {
                EmitData (Data, Count);
                Count = 0;
            }
        }
    } else {
        /* Linker defined fill value: a single fill fragment suffices */
        EmitFill (Count);
    }
}
unsigned char GetSegAddrSize (unsigned SegNum)
/* Return the address size of the segment with the given number */
{
/* Is there such a segment? */
if (SegNum >= CollCount (&SegmentList)) {
FAIL ("Invalid segment number");
}
/* Return the address size */
return ((Segment*) CollAtUnchecked (&SegmentList, SegNum))->Def->AddrSize;
}
void SegDone (void)
/* Check the segments for range and other errors. Do cleanup. */
{
    /* Upper bounds for unsigned values of 1..4 bytes */
    static const unsigned long U_Hi[4] = {
        0x000000FFUL, 0x0000FFFFUL, 0x00FFFFFFUL, 0xFFFFFFFFUL
    };
    /* Upper bounds for signed values of 1..4 bytes (lower bound is ~Hi) */
    static const long S_Hi[4] = {
        0x0000007FL, 0x00007FFFL, 0x007FFFFFL, 0x7FFFFFFFL
    };

    unsigned I;
    /* Walk over all fragments of all segments */
    for (I = 0; I < CollCount (&SegmentList); ++I) {
        Segment* S = CollAtUnchecked (&SegmentList, I);
        Fragment* F = S->Root;
        while (F) {
            if (F->Type == FRAG_EXPR || F->Type == FRAG_SEXPR) {

                /* We have an expression, study it */
                ExprDesc ED;
                ED_Init (&ED);
                StudyExpr (F->V.Expr, &ED);

                /* Check if the expression is constant */
                if (ED_IsConst (&ED)) {

                    unsigned J;

                    /* The expression is constant. Check for range errors. */
                    CHECK (F->Len <= 4);
                    if (F->Type == FRAG_SEXPR) {
                        /* Signed fragment: value must fit into F->Len bytes */
                        long Hi = S_Hi[F->Len-1];
                        long Lo = ~Hi;
                        if (ED.Val > Hi || ED.Val < Lo) {
                            LIError (&F->LI,
                                     "Range error (%ld not in [%ld..%ld])",
                                     ED.Val, Lo, Hi);
                        }
                    } else {
                        /* Unsigned fragment */
                        if (((unsigned long)ED.Val) > U_Hi[F->Len-1]) {
                            LIError (&F->LI,
                                     "Range error (%lu not in [0..%lu])",
                                     (unsigned long)ED.Val, U_Hi[F->Len-1]);
                        }
                    }

                    /* We don't need the expression tree any longer */
                    FreeExpr (F->V.Expr);

                    /* Convert the fragment into a literal fragment, storing
                    ** the constant value little endian one byte at a time.
                    */
                    for (J = 0; J < F->Len; ++J) {
                        F->V.Data[J] = ED.Val & 0xFF;
                        ED.Val >>= 8;
                    }
                    F->Type = FRAG_LITERAL;

                } else if (RelaxChecks == 0) {

                    /* We cannot evaluate the expression now, leave the job for
                    ** the linker. However, we can check if the address size
                    ** matches the fragment size. Mismatches are errors in
                    ** most situations.
                    */
                    if ((F->Len == 1 && ED.AddrSize > ADDR_SIZE_ZP) ||
                        (F->Len == 2 && ED.AddrSize > ADDR_SIZE_ABS) ||
                        (F->Len == 3 && ED.AddrSize > ADDR_SIZE_FAR)) {
                        LIError (&F->LI, "Range error");
                    }
                }

                /* Release memory allocated for the expression descriptor */
                ED_Done (&ED);
            }
            F = F->Next;
        }
    }
}
void SegDump (void)
/* Dump the contents of all segments (debug helper writing to stdout).
**
** Fixes: the inner byte-counter was declared 'unsigned I' and shadowed the
** outer segment-loop counter I; it is renamed to J for clarity.
*/
{
    unsigned I;
    unsigned X = 0;

    printf ("\n");
    for (I = 0; I < CollCount (&SegmentList); ++I) {
        Segment* S = CollAtUnchecked (&SegmentList, I);
        unsigned J;
        Fragment* F;
        int State = -1;         /* -1 = need a fresh line, 0 = in literal run */
        printf ("New segment: %s", S->Def->Name);
        F = S->Root;
        while (F) {
            if (F->Type == FRAG_LITERAL) {
                if (State != 0) {
                    printf ("\n Literal:");
                    X = 15;
                    State = 0;
                }
                for (J = 0; J < F->Len; ++J) {
                    printf (" %02X", F->V.Data [J]);
                    X += 3;
                }
            } else if (F->Type == FRAG_EXPR || F->Type == FRAG_SEXPR) {
                State = 1;
                printf ("\n Expression (%u): ", F->Len);
                DumpExpr (F->V.Expr, SymResolve);
            } else if (F->Type == FRAG_FILL) {
                State = 1;
                printf ("\n Fill bytes (%u)", F->Len);
            } else {
                Internal ("Unknown fragment type: %u", F->Type);
            }
            if (X > 65) {
                /* Output line is full: break before the next literal run */
                State = -1;
            }
            F = F->Next;
        }
        printf ("\n End PC = $%04X\n", (unsigned)(S->PC & 0xFFFF));
    }
    printf ("\n");
}
void SegInit (void)
/* Initialize segments */
{
    /* Create the predefined segments. The creation order is significant:
    ** each segment's number is its index in SegmentList at creation time.
    ** The code segment becomes the active one.
    */
    ActiveSeg = NewSegFromDef (&CodeSegDef);
    NewSegFromDef (&RODataSegDef);
    NewSegFromDef (&BssSegDef);
    NewSegFromDef (&DataSegDef);
    NewSegFromDef (&ZeropageSegDef);
    NewSegFromDef (&NullSegDef);
}
void SetSegmentSizes (void)
/* Set the default segment sizes according to the memory model */
{
    /* Initialize segment sizes. The segment definitions do already contain
    ** the correct values for the default case (near), so we must only change
    ** things that should be different.
    */
    switch (MemoryModel) {

        case MMODEL_NEAR:
            /* Defaults already match the near model */
            break;

        case MMODEL_FAR:
            /* Far model: only code lives in a far segment */
            CodeSegDef.AddrSize = ADDR_SIZE_FAR;
            break;

        case MMODEL_HUGE:
            /* Huge model: code and all data segments are far */
            CodeSegDef.AddrSize = ADDR_SIZE_FAR;
            DataSegDef.AddrSize = ADDR_SIZE_FAR;
            BssSegDef.AddrSize = ADDR_SIZE_FAR;
            RODataSegDef.AddrSize = ADDR_SIZE_FAR;
            break;

        default:
            Internal ("Invalid memory model: %d", MemoryModel);

    }
}
static void WriteOneSeg (Segment* Seg)
/* Write one segment to the object file. The segment record is written with a
** leading 32 bit size field that is back-patched once the actual size of the
** record is known.
*/
{
    Fragment* Frag;
    unsigned long DataSize;
    unsigned long EndPos;

    /* Remember the file position, then write a dummy for the size of the
    ** following data
    */
    unsigned long SizePos = ObjGetFilePos ();
    ObjWrite32 (0);

    /* Write the segment header data */
    ObjWriteVar (GetStringId (Seg->Def->Name));     /* Name of the segment */
    ObjWriteVar (Seg->Flags);                       /* Segment flags */
    ObjWriteVar (Seg->PC);                          /* Size */
    ObjWriteVar (Seg->Align);                       /* Segment alignment */
    ObjWrite8 (Seg->Def->AddrSize);                 /* Address size of the segment */
    ObjWriteVar (Seg->FragCount);                   /* Number of fragments */

    /* Now walk through the fragment list for this segment and write the
    ** fragments.
    */
    Frag = Seg->Root;
    while (Frag) {

        /* Write data depending on the type */
        switch (Frag->Type) {

            case FRAG_LITERAL:
                /* Raw bytes: type tag, length, then the data itself */
                ObjWrite8 (FRAG_LITERAL);
                ObjWriteVar (Frag->Len);
                ObjWriteData (Frag->V.Data, Frag->Len);
                break;

            case FRAG_EXPR:
                /* Expression fragment: the type tag encodes the result width
                ** in bytes (1-4), followed by the serialized expression.
                */
                switch (Frag->Len) {
                    case 1: ObjWrite8 (FRAG_EXPR8);  break;
                    case 2: ObjWrite8 (FRAG_EXPR16); break;
                    case 3: ObjWrite8 (FRAG_EXPR24); break;
                    case 4: ObjWrite8 (FRAG_EXPR32); break;
                    default: Internal ("Invalid fragment size: %u", Frag->Len);
                }
                WriteExpr (Frag->V.Expr);
                break;

            case FRAG_SEXPR:
                /* Signed expression fragment: same layout as FRAG_EXPR but
                ** using the signed type tags.
                */
                switch (Frag->Len) {
                    case 1: ObjWrite8 (FRAG_SEXPR8);  break;
                    case 2: ObjWrite8 (FRAG_SEXPR16); break;
                    case 3: ObjWrite8 (FRAG_SEXPR24); break;
                    case 4: ObjWrite8 (FRAG_SEXPR32); break;
                    default: Internal ("Invalid fragment size: %u", Frag->Len);
                }
                WriteExpr (Frag->V.Expr);
                break;

            case FRAG_FILL:
                /* Fill fragment: only type tag and length are needed */
                ObjWrite8 (FRAG_FILL);
                ObjWriteVar (Frag->Len);
                break;

            default:
                Internal ("Invalid fragment type: %u", Frag->Type);
        }

        /* Write the line infos for this fragment */
        WriteLineInfo (&Frag->LI);

        /* Next fragment */
        Frag = Frag->Next;
    }

    /* Calculate the size of the data, seek back and write it */
    EndPos = ObjGetFilePos ();          /* Remember where we are */
    DataSize = EndPos - SizePos - 4;    /* Don't count size itself */
    ObjSetFilePos (SizePos);            /* Seek back to the size */
    ObjWrite32 (DataSize);              /* Write the size */
    ObjSetFilePos (EndPos);             /* Seek back to the end */
}
void WriteSegments (void)
/* Write the segment data to the object file */
{
    unsigned Count = CollCount (&SegmentList);
    unsigned Index;

    /* Tell the object file module that the segment list starts here */
    ObjStartSegments ();

    /* The list is preceded by the number of segments */
    ObjWriteVar (Count);

    /* Emit every segment in creation order */
    for (Index = 0; Index < Count; ++Index) {
        WriteOneSeg (CollAtUnchecked (&SegmentList, Index));
    }

    /* Done writing segments */
    ObjEndSegments ();
}
| 9,877 |
591 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: CarlifeMusicInitProto.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "CarlifeMusicInitProto.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/wire_format.h>
// @@protoc_insertion_point(includes)
namespace com {
namespace baidu {
namespace carlife {
namespace protobuf {
namespace {
const ::google::protobuf::Descriptor* CarlifeMusicInit_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
CarlifeMusicInit_reflection_ = NULL;
} // namespace
// Resolves the Descriptor and GeneratedMessageReflection objects for this
// file. Invoked exactly once via GoogleOnceInit before any reflection access.
void protobuf_AssignDesc_CarlifeMusicInitProto_2eproto() {
  // Make sure the serialized file descriptor is registered first.
  protobuf_AddDesc_CarlifeMusicInitProto_2eproto();
  const ::google::protobuf::FileDescriptor* file =
    ::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
      "CarlifeMusicInitProto.proto");
  GOOGLE_CHECK(file != NULL);
  CarlifeMusicInit_descriptor_ = file->message_type(0);
  // Byte offsets of each field inside CarlifeMusicInit; used by the
  // reflection implementation for generic field access.
  static const int CarlifeMusicInit_offsets_[3] = {
    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CarlifeMusicInit, samplerate_),
    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CarlifeMusicInit, channelconfig_),
    GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CarlifeMusicInit, sampleformat_),
  };
  CarlifeMusicInit_reflection_ =
    new ::google::protobuf::internal::GeneratedMessageReflection(
      CarlifeMusicInit_descriptor_,
      CarlifeMusicInit::default_instance_,
      CarlifeMusicInit_offsets_,
      GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CarlifeMusicInit, _has_bits_[0]),
      GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(CarlifeMusicInit, _unknown_fields_),
      -1,  // No extension set in this message.
      ::google::protobuf::DescriptorPool::generated_pool(),
      ::google::protobuf::MessageFactory::generated_factory(),
      sizeof(CarlifeMusicInit));
}
namespace {
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);
inline void protobuf_AssignDescriptorsOnce() {
::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,
&protobuf_AssignDesc_CarlifeMusicInitProto_2eproto);
}
void protobuf_RegisterTypes(const ::std::string&) {
protobuf_AssignDescriptorsOnce();
::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
CarlifeMusicInit_descriptor_, &CarlifeMusicInit::default_instance());
}
} // namespace
void protobuf_ShutdownFile_CarlifeMusicInitProto_2eproto() {
delete CarlifeMusicInit::default_instance_;
delete CarlifeMusicInit_reflection_;
}
// Registers this file's descriptor with the generated descriptor pool and
// creates the default instance. Safe to call repeatedly; only the first call
// has any effect.
void protobuf_AddDesc_CarlifeMusicInitProto_2eproto() {
  static bool already_here = false;
  if (already_here) return;
  already_here = true;
  GOOGLE_PROTOBUF_VERIFY_VERSION;

  // The serialized FileDescriptorProto (142 bytes) for
  // CarlifeMusicInitProto.proto.
  ::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
    "\n\033CarlifeMusicInitProto.proto\022\032com.baidu"
    ".carlife.protobuf\"S\n\020CarlifeMusicInit\022\022\n"
    "\nsampleRate\030\001 \002(\005\022\025\n\rchannelConfig\030\002 \002(\005"
    "\022\024\n\014sampleFormat\030\003 \002(\005", 142);
  // Defer full descriptor/reflection assignment until first reflective use.
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
    "CarlifeMusicInitProto.proto", &protobuf_RegisterTypes);
  CarlifeMusicInit::default_instance_ = new CarlifeMusicInit();
  CarlifeMusicInit::default_instance_->InitAsDefaultInstance();
  // Free the heap-allocated singletons at library shutdown.
  ::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_CarlifeMusicInitProto_2eproto);
}
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer_CarlifeMusicInitProto_2eproto {
StaticDescriptorInitializer_CarlifeMusicInitProto_2eproto() {
protobuf_AddDesc_CarlifeMusicInitProto_2eproto();
}
} static_descriptor_initializer_CarlifeMusicInitProto_2eproto_;
// ===================================================================
#ifndef _MSC_VER
const int CarlifeMusicInit::kSampleRateFieldNumber;
const int CarlifeMusicInit::kChannelConfigFieldNumber;
const int CarlifeMusicInit::kSampleFormatFieldNumber;
#endif // !_MSC_VER
CarlifeMusicInit::CarlifeMusicInit()
: ::google::protobuf::Message() {
SharedCtor();
}
void CarlifeMusicInit::InitAsDefaultInstance() {
}
CarlifeMusicInit::CarlifeMusicInit(const CarlifeMusicInit& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
}
void CarlifeMusicInit::SharedCtor() {
_cached_size_ = 0;
samplerate_ = 0;
channelconfig_ = 0;
sampleformat_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
CarlifeMusicInit::~CarlifeMusicInit() {
SharedDtor();
}
void CarlifeMusicInit::SharedDtor() {
if (this != default_instance_) {
}
}
void CarlifeMusicInit::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* CarlifeMusicInit::descriptor() {
protobuf_AssignDescriptorsOnce();
return CarlifeMusicInit_descriptor_;
}
const CarlifeMusicInit& CarlifeMusicInit::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_CarlifeMusicInitProto_2eproto();
return *default_instance_;
}
CarlifeMusicInit* CarlifeMusicInit::default_instance_ = NULL;
CarlifeMusicInit* CarlifeMusicInit::New() const {
return new CarlifeMusicInit;
}
// Resets all fields to their defaults, clears the has-bits, and drops any
// unknown fields collected during parsing.
void CarlifeMusicInit::Clear() {
  // Skip the field resets when none of the low has-bits are set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    samplerate_ = 0;
    channelconfig_ = 0;
    sampleformat_ = 0;
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Parses fields from |input| and merges them into this message. Returns
// false on a malformed stream. Fields with unexpected wire types and unknown
// field numbers are preserved in the unknown field set rather than rejected.
bool CarlifeMusicInit::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
// Abort parsing and report failure as soon as any read fails.
#define DO_(EXPRESSION) if (!(EXPRESSION)) return false
  ::google::protobuf::uint32 tag;
  while ((tag = input->ReadTag()) != 0) {
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // required int32 sampleRate = 1;
      case 1: {
        if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) {
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                input, &samplerate_)));
          set_has_samplerate();
        } else {
          // Unexpected wire type: treat as an unknown field.
          goto handle_uninterpreted;
        }
        // Fast path: fields usually appear in declaration order, so peek for
        // the next expected tag (field 2, varint => tag 16).
        if (input->ExpectTag(16)) goto parse_channelConfig;
        break;
      }
      // required int32 channelConfig = 2;
      case 2: {
        if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) {
         parse_channelConfig:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                input, &channelconfig_)));
          set_has_channelconfig();
        } else {
          goto handle_uninterpreted;
        }
        // Peek for field 3 (varint => tag 24).
        if (input->ExpectTag(24)) goto parse_sampleFormat;
        break;
      }
      // required int32 sampleFormat = 3;
      case 3: {
        if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_VARINT) {
         parse_sampleFormat:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                input, &sampleformat_)));
          set_has_sampleformat();
        } else {
          goto handle_uninterpreted;
        }
        // Last declared field: succeed immediately if the stream ends here.
        if (input->ExpectAtEnd()) return true;
        break;
      }
      default: {
       handle_uninterpreted:
        // An end-group tag terminates the message when parsed as a group.
        if (::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          return true;
        }
        // Preserve the field in the unknown field set.
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
  return true;
#undef DO_
}
void CarlifeMusicInit::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// required int32 sampleRate = 1;
if (has_samplerate()) {
::google::protobuf::internal::WireFormatLite::WriteInt32(1, this->samplerate(), output);
}
// required int32 channelConfig = 2;
if (has_channelconfig()) {
::google::protobuf::internal::WireFormatLite::WriteInt32(2, this->channelconfig(), output);
}
// required int32 sampleFormat = 3;
if (has_sampleformat()) {
::google::protobuf::internal::WireFormatLite::WriteInt32(3, this->sampleformat(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
}
::google::protobuf::uint8* CarlifeMusicInit::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// required int32 sampleRate = 1;
if (has_samplerate()) {
target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(1, this->samplerate(), target);
}
// required int32 channelConfig = 2;
if (has_channelconfig()) {
target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(2, this->channelconfig(), target);
}
// required int32 sampleFormat = 3;
if (has_sampleformat()) {
target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(3, this->sampleformat(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
return target;
}
// Computes the serialized size of the message in bytes and caches it in
// _cached_size_ for later use by SerializeWithCachedSizes().
int CarlifeMusicInit::ByteSize() const {
  int total_size = 0;

  // Only inspect the fields when at least one of the low has-bits is set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // required int32 sampleRate = 1;
    if (has_samplerate()) {
      // 1 byte for the tag plus the varint-encoded value.
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int32Size(
          this->samplerate());
    }
    // required int32 channelConfig = 2;
    if (has_channelconfig()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int32Size(
          this->channelconfig());
    }
    // required int32 sampleFormat = 3;
    if (has_sampleformat()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int32Size(
          this->sampleformat());
    }
  }
  // Unknown fields seen during parsing are re-serialized, so count them too.
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  // _cached_size_ is mutable; the macros guard the benign concurrent write.
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void CarlifeMusicInit::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const CarlifeMusicInit* source =
::google::protobuf::internal::dynamic_cast_if_available<const CarlifeMusicInit*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void CarlifeMusicInit::MergeFrom(const CarlifeMusicInit& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_samplerate()) {
set_samplerate(from.samplerate());
}
if (from.has_channelconfig()) {
set_channelconfig(from.channelconfig());
}
if (from.has_sampleformat()) {
set_sampleformat(from.sampleformat());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void CarlifeMusicInit::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void CarlifeMusicInit::CopyFrom(const CarlifeMusicInit& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
// A message is initialized only when all three required fields (sampleRate,
// channelConfig, sampleFormat) are set; their has-bits occupy the low three
// bits of _has_bits_[0], hence the 0x00000007 mask.
bool CarlifeMusicInit::IsInitialized() const {
  if ((_has_bits_[0] & 0x00000007) != 0x00000007) return false;
  return true;
}
void CarlifeMusicInit::Swap(CarlifeMusicInit* other) {
if (other != this) {
std::swap(samplerate_, other->samplerate_);
std::swap(channelconfig_, other->channelconfig_);
std::swap(sampleformat_, other->sampleformat_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata CarlifeMusicInit::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = CarlifeMusicInit_descriptor_;
metadata.reflection = CarlifeMusicInit_reflection_;
return metadata;
}
// @@protoc_insertion_point(namespace_scope)
} // namespace protobuf
} // namespace carlife
} // namespace baidu
} // namespace com
// @@protoc_insertion_point(global_scope)
| 5,083 |
14,570 | package io.swagger.client.api;
import io.swagger.client.ApiClient;
import io.swagger.client.EncodingUtils;
import io.swagger.client.model.Client;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import feign.*;
/**
 * Feign client for the {@code PATCH /fake_classname_test} operation.
 *
 * <p>Both methods invoke the same endpoint; the second variant additionally
 * accepts the query parameters collected into a single map. Appears to be
 * generated from an OpenAPI definition (swagger codegen style) — regenerate
 * rather than hand-edit if the API changes.
 */
public interface FakeClassnameTags123Api extends ApiClient.Api {

  /**
   * To test class name in snake case
   * To test class name in snake case
   * @param body client model (required)
   * @return Client
   */
  @RequestLine("PATCH /fake_classname_test?")
  @Headers({
    "Content-Type: application/json",
    "Accept: application/json",
  })
  Client testClassname(Client body);

  /**
   * To test class name in snake case
   * To test class name in snake case
   * Note, this is equivalent to the other <code>testClassname</code> method,
   * but with the query parameters collected into a single Map parameter. This
   * is convenient for services with optional query parameters, especially when
   * used with the {@link TestClassnameQueryParams} class that allows for
   * building up this map in a fluent style.
   * @param body client model (required)
   * @param queryParams Map of query parameters as name-value pairs
   *   <p>The following elements may be specified in the query map:</p>
   *   <ul>
   *   </ul>
   * @return Client
   */
  @RequestLine("PATCH /fake_classname_test?")
  @Headers({
    "Content-Type: application/json",
    "Accept: application/json",
  })
  Client testClassname(Client body, @QueryMap(encoded=true) Map<String, Object> queryParams);

  /**
   * A convenience class for generating query parameters for the
   * <code>testClassname</code> method in a fluent style.
   */
  public static class TestClassnameQueryParams extends HashMap<String, Object> {
  }
}
| 558 |
945 | <filename>tests/algorithms/algorithm_settings.py
# Copyright 2022 MosaicML Composer authors
# SPDX-License-Identifier: Apache-2.0
"""This file provides the canonical settings (dataset, model, algorithms, arguments)
for each algorithm to be tested. This can be used throughout the codebase for
functional tests, serialization tests, etc.
Each algorithm is keyed based on its name in the algorithm registry.
"""
from typing import Any, Dict, Optional, Type
import pytest
from torch.utils.data import Dataset
import composer
from composer import Algorithm
from composer.algorithms import (AGC, EMA, SAM, SWA, Alibi, AugMix, BlurPool, ChannelsLast, ColOut, CutMix, CutOut,
Factorize, GhostBatchNorm, LabelSmoothing, LayerFreezing, MixUp, NoOpModel,
ProgressiveResizing, RandAugment, SelectiveBackprop, SeqLengthWarmup, SqueezeExcite,
StochasticDepth)
from composer.models import ComposerResNet
from composer.models.base import ComposerModel
from tests import common
simple_vision_settings = {
'model': common.SimpleConvModel,
'dataset': common.RandomImageDataset,
'kwargs': {},
}
simple_vision_pil_settings = {
'model': common.SimpleConvModel,
'dataset': (common.RandomImageDataset, {
'is_PIL': True
}),
'kwargs': {},
}
simple_resnet_settings = {
'model': (ComposerResNet, {
'model_name': 'resnet18',
'num_classes': 2
}),
'dataset': (common.RandomImageDataset, {
'shape': (3, 224, 224),
}),
'kwargs': {},
}
_settings: Dict[Type[Algorithm], Optional[Dict[str, Any]]] = {
AGC: simple_vision_settings,
Alibi: None, # NLP settings needed
AugMix: simple_vision_settings,
BlurPool: {
'model': common.SimpleConvModel,
'dataset': common.RandomImageDataset,
'kwargs': {
'min_channels': 0,
},
},
ChannelsLast: simple_vision_settings,
ColOut: simple_vision_settings,
CutMix: {
'model': common.SimpleConvModel,
'dataset': common.RandomImageDataset,
'kwargs': {
'num_classes': 2
}
},
CutOut: simple_vision_settings,
EMA: {
'model': common.SimpleConvModel,
'dataset': common.RandomImageDataset,
'kwargs': {
'half_life': "1ba",
},
},
Factorize: simple_resnet_settings,
GhostBatchNorm: {
'model': (ComposerResNet, {
'model_name': 'resnet18',
'num_classes': 2
}),
'dataset': (common.RandomImageDataset, {
'shape': (3, 224, 224)
}),
'kwargs': {
'ghost_batch_size': 2,
}
},
LabelSmoothing: simple_vision_settings,
LayerFreezing: simple_vision_settings,
MixUp: simple_vision_settings,
ProgressiveResizing: simple_vision_settings,
RandAugment: simple_vision_settings,
NoOpModel: simple_vision_settings,
SAM: simple_vision_settings,
SelectiveBackprop: simple_vision_settings,
SeqLengthWarmup: None, # NLP settings needed
SqueezeExcite: simple_resnet_settings,
StochasticDepth: {
'model': (ComposerResNet, {
'model_name': 'resnet50',
'num_classes': 2
}),
'dataset': (common.RandomImageDataset, {
'shape': (3, 224, 224),
}),
'kwargs': {
'stochastic_method': 'block',
'target_layer_name': 'ResNetBottleneck',
'drop_rate': 0.2,
'drop_distribution': 'linear',
'drop_warmup': "0.0dur",
'use_same_gpu_seed': False,
}
},
SWA: {
'model': common.SimpleConvModel,
'dataset': common.RandomImageDataset,
'kwargs': {
'swa_start': "0.2dur",
'swa_end': "0.97dur",
'update_interval': '1ep',
'schedule_swa_lr': True,
}
},
}
def _get_alg_settings(alg_cls: Type[Algorithm]) -> Dict[str, Any]:
    """Return the settings entry for ``alg_cls``.

    Args:
        alg_cls: The algorithm class to look up.

    Returns:
        The settings dictionary (keys ``'model'``, ``'dataset'``, ``'kwargs'``).

    Raises:
        ValueError: If ``alg_cls`` is missing from ``_settings`` or its entry
            is ``None`` (settings not yet defined for that algorithm).
    """
    # .get() collapses the "missing key" and "explicitly None" cases, both of
    # which mean the algorithm has no usable settings. The previous message
    # ("not in the settings dictionary") was misleading when the key existed
    # but mapped to None.
    settings = _settings.get(alg_cls)
    if settings is None:
        raise ValueError(f"Algorithm {alg_cls.__name__} has no settings defined in the settings dictionary.")
    return settings
def get_alg_kwargs(alg_cls: Type[Algorithm]) -> Dict[str, Any]:
    """Return the keyword arguments used to construct ``alg_cls`` in tests."""
    settings = _get_alg_settings(alg_cls)
    return settings['kwargs']
def get_alg_model(alg_cls: Type[Algorithm]) -> ComposerModel:
    """Construct and return the canonical test model for ``alg_cls``.

    The ``'model'`` setting is either a model class or a
    ``(model_class, constructor_kwargs)`` tuple.
    """
    spec = _get_alg_settings(alg_cls)['model']
    if not isinstance(spec, tuple):
        spec = (spec, {})
    model_cls, model_kwargs = spec
    return model_cls(**model_kwargs)
def get_alg_dataset(alg_cls: Type[Algorithm]) -> Dataset:
    """Construct and return the canonical test dataset for ``alg_cls``.

    The ``'dataset'`` setting is either a dataset class or a
    ``(dataset_class, constructor_kwargs)`` tuple.
    """
    spec = _get_alg_settings(alg_cls)['dataset']
    if not isinstance(spec, tuple):
        spec = (spec, {})
    dataset_cls, dataset_kwargs = spec
    return dataset_cls(**dataset_kwargs)
def get_algs_with_marks():
    """Return pytest params for every registered algorithm, with markers applied.

    Reads the algorithm registry and applies markers as appropriate (e.g.
    XFAIL for algorithms missing settings).

    E.g. ``@pytest.mark.parametrize("alg_class", get_algs_with_marks())``

    Returns:
        A list of ``pytest.param`` objects, one per registered algorithm.
    """
    ans = []
    for alg_cls in common.get_module_subclasses(composer.algorithms, Algorithm):
        marks = []
        # Use .get() so an algorithm that was never added to ``_settings`` is
        # reported as xfail below instead of aborting test collection with a
        # KeyError (``_settings[alg_cls]`` raised for unknown subclasses).
        settings = _settings.get(alg_cls)
        if alg_cls in (CutMix, MixUp, LabelSmoothing):
            # see: https://github.com/mosaicml/composer/issues/362
            pytest.importorskip("torch", minversion="1.10", reason="Pytorch 1.10 required.")
        if alg_cls == SWA:
            # TODO(matthew): Fix
            marks.append(
                pytest.mark.filterwarnings(
                    r'ignore:Detected call of `lr_scheduler.step\(\)` before `optimizer.step\(\)`:UserWarning'))
        if alg_cls == MixUp:
            # TODO(Landen): Fix
            marks.append(
                pytest.mark.filterwarnings(r"ignore:Some targets have less than 1 total probability:UserWarning"))
        if settings is None:
            marks.append(pytest.mark.xfail(reason=f"Algorithm {alg_cls.__name__} is missing settings."))
        ans.append(pytest.param(alg_cls, marks=marks, id=alg_cls.__name__))
    return ans
| 2,860 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/platform/web_back_forward_cache_loader_helper.h"
#include "third_party/blink/renderer/core/loader/resource/script_resource.h"
#include "third_party/blink/renderer/platform/exported/wrapped_resource_response.h"
#include "third_party/blink/renderer/platform/loader/fetch/cached_metadata.h"
#include "third_party/blink/renderer/platform/loader/fetch/resource_fetcher.h"
#include "third_party/blink/renderer/platform/loader/fetch/resource_loader.h"
#include "third_party/blink/renderer/platform/loader/fetch/url_loader/cached_metadata_handler.h"
#include "third_party/blink/renderer/platform/loader/testing/mock_fetch_context.h"
#include "third_party/blink/renderer/platform/loader/testing/test_resource_fetcher_properties.h"
#include "third_party/blink/renderer/platform/scheduler/test/fake_task_runner.h"
#include "third_party/blink/renderer/platform/testing/code_cache_loader_mock.h"
#include "third_party/blink/renderer/platform/testing/mock_context_lifecycle_notifier.h"
#include "third_party/blink/renderer/platform/testing/noop_web_url_loader.h"
#include "third_party/blink/renderer/platform/testing/testing_platform_support_with_mock_scheduler.h"
#include "third_party/blink/renderer/platform/weborigin/scheme_registry.h"
namespace blink {
namespace {
// Fixture for exercising ResourceLoader's code-cache handling. It starts a
// ScriptResource fetch through mock loader factories so individual tests can
// control when the code-cache response and the network response arrive, in
// either order.
class ResourceLoaderCodeCacheTest : public testing::Test {
 protected:
  static scoped_refptr<base::SingleThreadTaskRunner> CreateTaskRunner() {
    return base::MakeRefCounted<scheduler::FakeTaskRunner>();
  }

  // Builds a ResourceFetcher whose loads go through |loader_factory| and
  // whose lifecycle is tied to a mock context notifier.
  ResourceFetcher* MakeResourceFetcher(
      TestResourceFetcherProperties* properties,
      FetchContext* context,
      ResourceFetcher::LoaderFactory* loader_factory) {
    return MakeGarbageCollected<ResourceFetcher>(ResourceFetcherInit(
        properties->MakeDetachable(), context, CreateTaskRunner(),
        CreateTaskRunner(), loader_factory,
        MakeGarbageCollected<MockContextLifecycleNotifier>(),
        nullptr /* back_forward_cache_loader_helper */));
  }

  // LoaderFactory producing a no-op URL loader and a mock code-cache loader
  // driven by |controller_|, which lets tests delay and then inject the
  // code-cache response.
  class CodeCacheTestLoaderFactory : public ResourceFetcher::LoaderFactory {
   public:
    explicit CodeCacheTestLoaderFactory(
        scoped_refptr<CodeCacheLoaderMock::Controller> controller)
        : controller_(std::move(controller)) {}

    std::unique_ptr<WebURLLoader> CreateURLLoader(
        const ResourceRequest& request,
        const ResourceLoaderOptions& options,
        scoped_refptr<base::SingleThreadTaskRunner> freezable_task_runner,
        scoped_refptr<base::SingleThreadTaskRunner> unfreezable_task_runner,
        WebBackForwardCacheLoaderHelper back_forward_cache_loader_helper)
        override {
      return std::make_unique<NoopWebURLLoader>(
          std::move(freezable_task_runner));
    }
    std::unique_ptr<WebCodeCacheLoader> CreateCodeCacheLoader() override {
      return std::make_unique<CodeCacheLoaderMock>(controller_);
    }

   private:
    scoped_refptr<CodeCacheLoaderMock::Controller> controller_;
  };

  // Shared setup: registers the "codecachewithhashing" scheme, starts a
  // script fetch for |url_string| (defaulting to a codecachewithhashing URL)
  // with the code-cache response delayed, and prepares a 200 response object
  // that tests deliver via |loader_|.
  void CommonSetup(const char* url_string = nullptr) {
    SchemeRegistry::RegisterURLSchemeAsCodeCacheWithHashing(
        "codecachewithhashing");
    auto* properties = MakeGarbageCollected<TestResourceFetcherProperties>();
    FetchContext* context = MakeGarbageCollected<MockFetchContext>();
    controller_ = base::MakeRefCounted<CodeCacheLoaderMock::Controller>();
    controller_->DelayResponse();
    auto* loader_factory =
        MakeGarbageCollected<CodeCacheTestLoaderFactory>(controller_);
    auto* fetcher = MakeResourceFetcher(properties, context, loader_factory);
    KURL url(url_string ? url_string
                        : "codecachewithhashing://www.example.com/");
    ResourceRequest request(url);
    request.SetRequestContext(mojom::blink::RequestContextType::SCRIPT);
    FetchParameters params = FetchParameters::CreateForTest(std::move(request));
    resource_ = ScriptResource::Fetch(params, fetcher, nullptr,
                                      ScriptResource::kNoStreaming);
    loader_ = resource_->Loader();
    response_ = ResourceResponse(url);
    response_.SetHttpStatusCode(200);
  }

  static const size_t kSha256Bytes = 256 / 8;

  // Builds a serialized cached-metadata blob: |outer_type| tag, then the
  // SHA-256 hash of |source_text| (left zero-filled when no source text is
  // given), then |inner_type| tag, |data_type_id|, and finally |data|.
  std::vector<uint8_t> MakeSerializedCodeCacheData(
      base::span<uint8_t> data,
      absl::optional<String> source_text = {},
      uint32_t data_type_id = 0,
      CachedMetadataHandler::CachedMetadataType outer_type =
          CachedMetadataHandler::kSingleEntryWithHash,
      CachedMetadataHandler::CachedMetadataType inner_type =
          CachedMetadataHandler::kSingleEntry) {
    const size_t kCachedMetadataTypeSize = sizeof(uint32_t);
    const size_t kSerializedDataSize = kCachedMetadataTypeSize + kSha256Bytes +
                                       kCachedMetaDataStart + data.size();
    std::vector<uint8_t> serialized_data(kSerializedDataSize);
    *reinterpret_cast<uint32_t*>(&serialized_data[0]) = outer_type;
    if (source_text.has_value()) {
      DigestValue hash;
      CHECK(ComputeDigest(kHashAlgorithmSha256,
                          static_cast<const char*>(source_text->Bytes()),
                          source_text->CharactersSizeInBytes(), hash));
      CHECK_EQ(hash.size(), kSha256Bytes);
      memcpy(&serialized_data[kCachedMetadataTypeSize], hash.data(),
             kSha256Bytes);
    }
    *reinterpret_cast<uint32_t*>(
        &serialized_data[kCachedMetadataTypeSize + kSha256Bytes]) = inner_type;
    *reinterpret_cast<uint32_t*>(
        &serialized_data[kCachedMetadataTypeSize + kSha256Bytes +
                         kCacheDataTypeStart]) = data_type_id;
    memcpy(&serialized_data[kCachedMetadataTypeSize + kSha256Bytes +
                            kCachedMetaDataStart],
           data.data(), data.size());
    return serialized_data;
  }

  ScopedTestingPlatformSupport<TestingPlatformSupportWithMockScheduler>
      platform_;

  // State initialized by CommonSetup().
  Persistent<ScriptResource> resource_;
  Persistent<ResourceLoader> loader_;
  ResourceResponse response_;
  scoped_refptr<CodeCacheLoaderMock::Controller> controller_;
};
// Network response first, then an empty code-cache response: the resource
// must end up with no cached code.
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheEmptyResponseFirst) {
  CommonSetup();

  loader_->DidReceiveResponse(WrappedResourceResponse(response_));

  // Nothing has changed yet because the code cache hasn't yet responded.
  EXPECT_FALSE(resource_->CodeCacheSize());

  // An empty code cache response means no data was found.
  controller_->Respond(base::Time(), mojo_base::BigBuffer());

  // No code cache data was present.
  EXPECT_FALSE(resource_->CodeCacheSize());
}
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheEmptyResponseSecond) {
CommonSetup();
// An empty code cache response means no data was found.
controller_->Respond(base::Time(), mojo_base::BigBuffer());
// Nothing has changed yet because the content response hasn't arrived yet.
EXPECT_FALSE(resource_->CodeCacheSize());
loader_->DidReceiveResponse(WrappedResourceResponse(response_));
// No code cache data was present.
EXPECT_FALSE(resource_->CodeCacheSize());
}
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheFullResponseFirst) {
CommonSetup();
loader_->DidReceiveResponse(WrappedResourceResponse(response_));
// Nothing has changed yet because the code cache hasn't yet responded.
EXPECT_FALSE(resource_->CodeCacheSize());
std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
controller_->Respond(
base::Time(),
mojo_base::BigBuffer(MakeSerializedCodeCacheData(cache_data)));
// Code cache data was present.
EXPECT_EQ(resource_->CodeCacheSize(),
cache_data.size() + kCachedMetaDataStart);
}
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheFullResponseSecond) {
CommonSetup();
std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
controller_->Respond(
base::Time(),
mojo_base::BigBuffer(MakeSerializedCodeCacheData(cache_data)));
// Nothing has changed yet because the content response hasn't arrived yet.
EXPECT_FALSE(resource_->CodeCacheSize());
loader_->DidReceiveResponse(WrappedResourceResponse(response_));
// Code cache data was present.
EXPECT_EQ(resource_->CodeCacheSize(),
cache_data.size() + kCachedMetaDataStart);
}
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheFullHttpsScheme) {
CommonSetup("https://www.example.com/");
std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
controller_->Respond(
base::Time(),
mojo_base::BigBuffer(MakeSerializedCodeCacheData(cache_data)));
// Nothing has changed yet because the content response hasn't arrived yet.
EXPECT_FALSE(resource_->CodeCacheSize());
loader_->DidReceiveResponse(WrappedResourceResponse(response_));
// Since the URL was https, and the response times were not set, the cached
// metadata should not be set.
EXPECT_FALSE(resource_->CodeCacheSize());
}
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheInvalidOuterType) {
CommonSetup();
std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
controller_->Respond(
base::Time(),
mojo_base::BigBuffer(MakeSerializedCodeCacheData(
cache_data, {}, 0, CachedMetadataHandler::kSingleEntry)));
// Nothing has changed yet because the content response hasn't arrived yet.
EXPECT_FALSE(resource_->CodeCacheSize());
loader_->DidReceiveResponse(WrappedResourceResponse(response_));
// The serialized metadata was rejected due to an invalid outer type.
EXPECT_FALSE(resource_->CodeCacheSize());
}
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheInvalidInnerType) {
CommonSetup();
std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
controller_->Respond(
base::Time(),
mojo_base::BigBuffer(MakeSerializedCodeCacheData(
cache_data, {}, 0, CachedMetadataHandler::kSingleEntryWithHash,
CachedMetadataHandler::kSourceKeyedMap)));
// Nothing has changed yet because the content response hasn't arrived yet.
EXPECT_FALSE(resource_->CodeCacheSize());
loader_->DidReceiveResponse(WrappedResourceResponse(response_));
// The serialized metadata was rejected due to an invalid inner type.
EXPECT_FALSE(resource_->CodeCacheSize());
}
// Happy path for hash-checked WebUI code caching: the cached metadata is
// accepted, and after Check() succeeds with the matching source text it can
// be read back via GetCachedMetadata().
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheHashCheckSuccess) {
  CommonSetup();
  std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
  String source_text("alert('hello world');");
  controller_->Respond(
      base::Time(), mojo_base::BigBuffer(
                        MakeSerializedCodeCacheData(cache_data, source_text)));
  // Nothing has changed yet because the content response hasn't arrived yet.
  EXPECT_FALSE(resource_->CodeCacheSize());
  loader_->DidReceiveResponse(WrappedResourceResponse(response_));
  // Code cache data was present.
  EXPECT_EQ(resource_->CodeCacheSize(),
            cache_data.size() + kCachedMetaDataStart);
  // Make sure the following steps don't try to do anything too fancy.
  resource_->CacheHandler()->DisableSendToPlatformForTesting();
  // Successful check.
  resource_->CacheHandler()->Check(nullptr, ParkableString(source_text.Impl()));
  // Now the metadata can be accessed.
  scoped_refptr<CachedMetadata> cached_metadata =
      resource_->CacheHandler()->GetCachedMetadata(0);
  EXPECT_EQ(cached_metadata->size(), cache_data.size());
  EXPECT_EQ(*(cached_metadata->Data() + 2), cache_data[2]);
  // But trying to load the metadata with the wrong data_type_id fails.
  EXPECT_FALSE(resource_->CacheHandler()->GetCachedMetadata(4));
}
// Mismatching source text must invalidate hash-checked cached metadata:
// after Check() is called with different source, the cache is cleared.
TEST_F(ResourceLoaderCodeCacheTest, WebUICodeCacheHashCheckFailure) {
  CommonSetup();
  std::vector<uint8_t> cache_data{2, 3, 4, 5, 6};
  String source_text("alert('hello world');");
  controller_->Respond(
      base::Time(), mojo_base::BigBuffer(
                        MakeSerializedCodeCacheData(cache_data, source_text)));
  // Nothing has changed yet because the content response hasn't arrived yet.
  EXPECT_FALSE(resource_->CodeCacheSize());
  loader_->DidReceiveResponse(WrappedResourceResponse(response_));
  // Code cache data was present.
  EXPECT_EQ(resource_->CodeCacheSize(),
            cache_data.size() + kCachedMetaDataStart);
  // Make sure the following steps don't try to do anything too fancy.
  resource_->CacheHandler()->DisableSendToPlatformForTesting();
  // Failed check: source text is different.
  String source_text_2("alert('improved program');");
  resource_->CacheHandler()->Check(nullptr,
                                   ParkableString(source_text_2.Impl()));
  // The metadata has been cleared.
  EXPECT_FALSE(resource_->CodeCacheSize());
  EXPECT_FALSE(resource_->CacheHandler()->GetCachedMetadata(0));
}
} // namespace
} // namespace blink
| 4,497 |
567 | package com.kennycason.kumo.nlp.tokenizer.core;
import com.kennycason.kumo.nlp.tokenizer.api.WordTokenizer;
import java.util.Collections;
import java.util.List;
/**
* Disables tokenization
*
* @author @wolfposd
*
*/
public class NoTokenizer implements WordTokenizer {
public List<String> tokenize(final String sentence) {
return Collections.singletonList(sentence);
}
}
| 143 |
1,444 |
package mage.cards.c;
import java.util.UUID;
import mage.MageInt;
import mage.abilities.keyword.MountainwalkAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
/**
*
* @author Loki
*/
public final class CliffThreader extends CardImpl {
    // Builds the card: a {1}{W} Kor Scout creature, 2/1, with mountainwalk.
    public CliffThreader (UUID ownerId, CardSetInfo setInfo) {
        super(ownerId,setInfo,new CardType[]{CardType.CREATURE},"{1}{W}");
        // Creature subtypes.
        this.subtype.add(SubType.KOR);
        this.subtype.add(SubType.SCOUT);
        // Power / toughness: 2/1.
        this.power = new MageInt(2);
        this.toughness = new MageInt(1);
        // Grants the mountainwalk evasion ability.
        this.addAbility(new MountainwalkAbility());
    }
    // Copy constructor used by copy().
    public CliffThreader (final CliffThreader card) {
        super(card);
    }
    @Override
    public CliffThreader copy() {
        // Returns a copy of this card via the copy constructor.
        return new CliffThreader(this);
    }
}
| 334 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _SVTOOLS_CENUMITM_HXX
#define _SVTOOLS_CENUMITM_HXX
#include "svl/svldllapi.h"
#include <tools/debug.hxx>
#include <svl/poolitem.hxx>
//============================================================================
DBG_NAMEEX(SfxEnumItemInterface)
// Abstract base for pool items holding an enumeration value. Concrete
// subclasses define the value set via GetValueCount()/GetValueByPos() and
// store the actual value (GetEnumValue()/SetEnumValue()).
class SVL_DLLPUBLIC SfxEnumItemInterface: public SfxPoolItem
{
protected:
    SfxEnumItemInterface(sal_uInt16 which): SfxPoolItem(which) {}
    SfxEnumItemInterface(const SfxEnumItemInterface & rItem):
        SfxPoolItem(rItem) {}
public:
    TYPEINFO();
    // Equality with another pool item; defined out of line.
    virtual int operator ==(const SfxPoolItem & rItem) const;
    virtual SfxItemPresentation GetPresentation(SfxItemPresentation,
                                                SfxMapUnit, SfxMapUnit,
                                                XubString & rText,
                                                const IntlWrapper * = 0)
        const;
    // UNO Any conversion of the item's value (out-of-line implementations).
    virtual sal_Bool QueryValue(com::sun::star::uno::Any & rVal, sal_uInt8 = 0) const;
    virtual sal_Bool PutValue(const com::sun::star::uno::Any & rVal, sal_uInt8 = 0);
    // Number of values in the enumeration; must be supplied by subclasses.
    virtual sal_uInt16 GetValueCount() const = 0;
    virtual XubString GetValueTextByPos(sal_uInt16 nPos) const;
    virtual sal_uInt16 GetValueByPos(sal_uInt16 nPos) const;
    /// Return the position of some value within this enumeration.
    ///
    /// @descr This method is implemented using GetValueCount() and
    /// GetValueByPos(). Derived classes may replace this with a more
    /// efficient implementation.
    ///
    /// @param nValue Some value.
    ///
    /// @return The position of nValue within this enumeration, or USHRT_MAX
    /// if not included.
    virtual sal_uInt16 GetPosByValue(sal_uInt16 nValue) const;
    virtual sal_Bool IsEnabled(sal_uInt16 nValue) const;
    // Current value accessors; storage is left to subclasses.
    virtual sal_uInt16 GetEnumValue() const = 0;
    virtual void SetEnumValue(sal_uInt16 nValue) = 0;
    // Optional boolean view of the enum (HasBoolValue() gates the other two).
    virtual int HasBoolValue() const;
    virtual sal_Bool GetBoolValue() const;
    virtual void SetBoolValue(sal_Bool bValue);
};
//============================================================================
DBG_NAMEEX(CntEnumItem)
// Concrete enum pool item that stores its value directly in m_nValue.
class SVL_DLLPUBLIC CntEnumItem: public SfxEnumItemInterface
{
    sal_uInt16 m_nValue;
protected:
    CntEnumItem(sal_uInt16 which = 0, sal_uInt16 nTheValue = 0):
        SfxEnumItemInterface(which), m_nValue(nTheValue) {}
    // Deserializing constructor; reads the value from rStream (out of line).
    CntEnumItem(sal_uInt16 which, SvStream & rStream);
    CntEnumItem(const CntEnumItem & rItem):
        SfxEnumItemInterface(rItem), m_nValue(rItem.m_nValue) {}
public:
    TYPEINFO();
    virtual SvStream & Store(SvStream & rStream, sal_uInt16) const;
    virtual sal_uInt16 GetEnumValue() const;
    virtual void SetEnumValue(sal_uInt16 nTheValue);
    sal_uInt16 GetValue() const { return m_nValue; }
    inline void SetValue(sal_uInt16 nTheValue);
};
// Direct setter. Must only be called on unpooled (unshared) items — see the
// assertion; mutating a pooled item would affect every holder of it.
inline void CntEnumItem::SetValue(sal_uInt16 nTheValue)
{
    DBG_ASSERT(GetRefCount() == 0, "CntEnumItem::SetValue(): Pooled item");
    m_nValue = nTheValue;
}
//============================================================================
DBG_NAMEEX(CntBoolItem)
// Pool item holding a single boolean value.
class SVL_DLLPUBLIC CntBoolItem: public SfxPoolItem
{
    sal_Bool m_bValue;
public:
    TYPEINFO();
    CntBoolItem(sal_uInt16 which = 0, sal_Bool bTheValue = sal_False):
        SfxPoolItem(which), m_bValue(bTheValue) {}
    // Deserializing constructor; reads the value from rStream (out of line).
    CntBoolItem(sal_uInt16 nWhich, SvStream & rStream);
    CntBoolItem(const CntBoolItem & rItem):
        SfxPoolItem(rItem), m_bValue(rItem.m_bValue) {}
    virtual int operator ==(const SfxPoolItem & rItem) const;
    using SfxPoolItem::Compare;
    virtual int Compare(const SfxPoolItem & rWith) const;
    virtual SfxItemPresentation GetPresentation(SfxItemPresentation,
                                                SfxMapUnit, SfxMapUnit,
                                                UniString & rText,
                                                const IntlWrapper * = 0)
        const;
    // UNO Any conversion of the boolean value.
    virtual sal_Bool QueryValue(com::sun::star::uno::Any& rVal, sal_uInt8 = 0) const;
    virtual sal_Bool PutValue(const com::sun::star::uno::Any& rVal, sal_uInt8 = 0);
    // Factory/serialization/cloning overrides required of pool items.
    virtual SfxPoolItem * Create(SvStream & rStream, sal_uInt16) const;
    virtual SvStream & Store(SvStream & rStream, sal_uInt16) const;
    virtual SfxPoolItem * Clone(SfxItemPool * = 0) const;
    virtual sal_uInt16 GetValueCount() const;
    virtual UniString GetValueTextByVal(sal_Bool bTheValue) const;
    sal_Bool GetValue() const { return m_bValue; }
    void SetValue(sal_Bool bTheValue) { m_bValue = bTheValue; }
};
#endif // _SVTOOLS_CENUMITM_HXX
| 1,822 |
697 | // Copyright (C) 2012-2019 The VPaint Developers.
// See the COPYRIGHT file at the top-level directory of this distribution
// and at https://github.com/dalboris/vpaint/blob/master/COPYRIGHT
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef INSTANTHOLE_H
#define INSTANTHOLE_H
#include "ProperPath.h"
#include "ProperCycle.h"
class QTextStream;
////////////// Forward declare global serialization operators /////////////////
namespace VectorAnimationComplex { class CycleHelper; }
QTextStream & operator<<(QTextStream &, const VectorAnimationComplex::CycleHelper &);
QTextStream & operator>>(QTextStream &, VectorAnimationComplex::CycleHelper &);
///////////////////////////////////////////////////////////////////////////////
namespace VectorAnimationComplex
{
// Describes a "hole" in the vector animation complex: either a single
// (Steiner) key vertex, or a collection of internal loops plus external
// paths derived from a set of key edges.
class CycleHelper
{
public:
    // Invalid hole
    CycleHelper();

    // Create a hole that is a single vertex
    CycleHelper(KeyVertex * vertex);

    // create a hole that is not a single vertex
    CycleHelper(const KeyEdgeSet & edgeSet);

    // type of the hole
    bool isValid() const;
    bool isSingleVertex() const;

    // all methods below assume that the hole is valid

    // Time
    Time time() const;

    // In case of single vertex (can be call a Steiner vertex)
    KeyVertex * vertex() const;

    // In case of not a single vertex
    int nLoops() const;
    const ProperCycle & loop(int i) const;
    int nPaths() const;
    const ProperPath & path(int i) const;

    // serialization and copy
    void remapPointers(VAC * newVAC);
    friend QTextStream & ::operator<<(QTextStream & out, const CycleHelper & hole);
    friend QTextStream & ::operator>>(QTextStream & in, CycleHelper & hole);
    void convertTempIdsToPointers(VAC * vac);

    // The set of cells this helper points to
    KeyCellSet cells() const;

    // Replace pointed edges
    void replaceEdges(KeyEdge * oldEdge, const KeyEdgeList & newEdges);

private:
    // Single vertex (==null if the hole is not a single vertex)
    KeyVertex * singleVertex_;
    // Temporary id read during deserialization, resolved to a pointer by
    // convertTempIdsToPointers() — presumably; confirm against the .cpp.
    int tempId_;

    // Internal holes
    QList<ProperCycle> loops_;

    // External cracks
    QList<ProperPath> paths_;
};
} // end namespace VectorAnimationComplex
#endif // INSTANTHOLE_H
| 841 |
488 |
// Class template with a static data member. A<T>::v is only declared here;
// no out-of-line definition is visible in this file.
template <typename T>
struct A {
  static T v;
};
// Reads A<T>::v into a local, forcing an implicit instantiation of the
// static member for each T used. Presumably a compiler/template test case
// — TODO confirm intent before modifying.
template <typename T>
void foo() {
  auto v = A<T>::v;
}
// Instantiates foo (and hence A<T>::v) for float and double.
void bar() {
  foo<float>();
  foo<double>();
}
| 71 |
3,012 | <reponame>mefff/edk2
/** @file
This file declares Sec Platform Information PPI.
This service is the primary handoff state into the PEI Foundation.
The Security (SEC) component creates the early, transitory memory
environment and also encapsulates knowledge of at least the
location of the Boot Firmware Volume (BFV).
Copyright (c) 2020, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
@par Revision Reference:
This PPI is introduced in PI Version 1.0.
**/
#ifndef __REPUBLISH_SEC_PPI_H__
#define __REPUBLISH_SEC_PPI_H__
#include <Pi/PiPeiCis.h>
// GUID {27a71b1e-73ee-43d6-ace3-521a2dc5d092} used to locate this PPI.
#define REPUBLISH_SEC_PPI_PPI_GUID \
  { \
    0x27a71b1e, 0x73ee, 0x43d6, { 0xac, 0xe3, 0x52, 0x1a, 0x2d, 0xc5, 0xd0, 0x92 } \
  }
// Forward declaration so the function typedef below can reference the PPI.
typedef struct _REPUBLISH_SEC_PPI_PPI REPUBLISH_SEC_PPI_PPI;
/**
  This interface re-installs PPIs installed in SecCore from a post-memory PEIM.
  This is to allow a platform that may not support relocation of SecCore to update the PPI instance to a post-memory
  copy from a PEIM that has been shadowed to permanent memory.
  @retval EFI_SUCCESS    The SecCore PPIs were re-installed successfully.
  @retval Others         An error occurred re-installing the SecCore PPIs.
**/
typedef
EFI_STATUS
(EFIAPI *REPUBLISH_SEC_PPI_REPUBLISH_SEC_PPIS)(
  VOID
  );
///
/// Republish SEC PPIs
///
struct _REPUBLISH_SEC_PPI_PPI {
  REPUBLISH_SEC_PPI_REPUBLISH_SEC_PPIS RepublishSecPpis;
};
// GUID instance, defined in the platform's AutoGen code.
extern EFI_GUID gRepublishSecPpiPpiGuid;
| 629 |
777 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_FAVICON_CHROME_FALLBACK_ICON_CLIENT_H_
#define CHROME_BROWSER_FAVICON_CHROME_FALLBACK_ICON_CLIENT_H_
#include <string>
#include <vector>
#include "base/macros.h"
#include "components/favicon/core/fallback_icon_client.h"
// ChromeFallbackIconClient implements the FallbackIconClient interface.
// ChromeFallbackIconClient implements the FallbackIconClient interface.
// Supplies the list of font names used to render fallback icon glyphs.
class ChromeFallbackIconClient : public favicon::FallbackIconClient {
 public:
  ChromeFallbackIconClient();
  ~ChromeFallbackIconClient() override;

  // FallbackIconClient implementation:
  // Returns the fonts stored in font_list_ (populated in the constructor).
  const std::vector<std::string>& GetFontNameList() const override;

 private:
  std::vector<std::string> font_list_;

  DISALLOW_COPY_AND_ASSIGN(ChromeFallbackIconClient);
};
#endif // CHROME_BROWSER_FAVICON_CHROME_FALLBACK_ICON_CLIENT_H_
| 311 |
3,013 | # WaveDB / To-do App
# A multi-user To-do list application using WaveDB for data management.
# This example is very similar to the todo.py example, except that this
# example uses WaveDB instead of an in-memory list.
# ---
from dataclasses import dataclass

from h2o_wave import main, app, Q, ui, connect, WaveDB, expando_to_dict
@dataclass
class TodoItem:
    """A single to-do entry as stored in the 'todo' table.

    Replaces the hand-written __init__ boilerplate with an idiomatic
    dataclass; construction is unchanged (positional or keyword).
    """
    id: int       # Primary key from the 'todo' table.
    label: str    # User-visible description of the task.
    done: bool    # Truthy when completed (stored as 0/1 in SQL).
async def setup_db() -> WaveDB:
    """Open the 'todo' WaveDB database and ensure its schema exists.

    Returns:
        The connected WaveDB handle.

    Raises:
        RuntimeError: If the table-creation statement fails.
    """
    db = connect()['todo']
    _, err = await db.exec_atomic(
        """
        create table if not exists todo (
            id integer primary key,
            user text not null,
            label text not null,
            done integer not null default 0
        )
        """
    )
    if err:
        raise RuntimeError(f'Failed setting up database: {err}')
    return db
@app('/demo')
async def serve(q: Q):
    """Request handler: routes to the add-form, insert, or list views."""
    # Lazily initialise the app-wide database handle on first request.
    if q.app.db is None:
        q.app.db = await setup_db()

    if q.args.new_todo: # Display an input form.
        await new_todo(q)
    elif q.args.add_todo: # Add an item.
        await add_todo(q)
    else: # Show all items.
        await show_todos(q)
async def show_todos(q: Q):
    """Render the current user's to-do list, applying checkbox toggles first.

    Raises:
        RuntimeError: If a database update or query fails.
    """
    # Get items for this user.
    db: WaveDB = q.app.db

    # Check if we have any updates, i.e. the user has checked/unchecked any item.
    updates = []
    for key, done in expando_to_dict(q.args).items():
        # We've named each todo item `todo_{id}' (e.g. todo_42, todo_43, and so on)
        # So identify the todo items from their 'todo_' prefix, then extract the ids from the names.
        if key.startswith('todo_'):
            _, id = key.split('_', 1)
            updates.append(('update todo set done=? where id=?', 1 if done else 0, int(id)))

    # If we have updates, update our database.
    if len(updates):
        _, err = await db.exec_many(*updates)
        if err:
            raise RuntimeError(f'Failed updating todos: {err}')

    # Fetch latest todos for our user
    rows, err = await db.exec('select id, label, done from todo where user=?', q.auth.subject)
    if err:
        raise RuntimeError(f'Failed fetching todos: {err}')
    todos = [TodoItem(id, label, done) for id, label, done in rows]

    # Create done/not-done checkboxes.
    done = [ui.checkbox(name=f'todo_{todo.id}', label=todo.label, value=True, trigger=True) for todo in todos if
            todo.done]
    not_done = [ui.checkbox(name=f'todo_{todo.id}', label=todo.label, trigger=True) for todo in todos if not todo.done]

    # Display list
    q.page['form'] = ui.form_card(box='1 1 4 10', items=[
        ui.text_l('To Do'),
        ui.button(name='new_todo', label='Add To Do...', primary=True),
        *not_done,
        *([ui.separator('Done')] if len(done) else []),
        *done,
    ])
    await q.page.save()
async def add_todo(q: Q):
    """Insert a new to-do for the current user, then re-render the list.

    Raises:
        RuntimeError: If the insert fails.
    """
    # Insert a new item
    db: WaveDB = q.app.db
    _, err = await db.exec('insert into todo (user, label) values (? , ?)', q.auth.subject, q.args.label or 'Untitled')
    if err:
        raise RuntimeError(f'Failed inserting todo: {err}')
    # Go back to our list.
    await show_todos(q)
async def new_todo(q: Q):
    """Show the form for entering a new to-do item."""
    # Display an input form
    q.page['form'] = ui.form_card(box='1 1 4 10', items=[
        ui.text_l('Add To Do'),
        ui.textbox(name='label', label='What needs to be done?', multiline=True),
        ui.buttons([
            ui.button(name='add_todo', label='Add', primary=True),
            ui.button(name='show_todos', label='Back'),
        ]),
    ])
    await q.page.save()
| 1,573 |
348 | <reponame>arcturus2/constellation
/*
* Copyright 2010-2021 Australian Signals Directorate
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package au.gov.asd.tac.constellation.plugins.arrangements.hde;
import au.gov.asd.tac.constellation.graph.GraphElementType;
import au.gov.asd.tac.constellation.graph.GraphWriteMethods;
import au.gov.asd.tac.constellation.graph.schema.visual.concept.VisualConcept;
import au.gov.asd.tac.constellation.plugins.arrangements.Arranger;
import java.security.SecureRandom;
import java.util.Arrays;
/**
* Arrange by high dimension embedding.
* <p>
* <NAME> and <NAME>, "Graph Drawing by High-Dimensional<br>
* Embedding", proceedings of Graph Drawing 2002, Volume<br>
* 2528 of Lecture Notes in Computer Science, pp. 207-219,<br>
* Springer Verlag, 2002.
* <p>
* TODO: scale x,y,z after arrangement so the graph isn't cramped.
*
* @author algol
*/
public class HighDimensionEmbeddingArranger implements Arranger {
    /**
     * Dimensionality of embedding space.
     */
    public static final int M = 50;

    // Scale graph up so it isn't too cramped.
    private static final float SCALE = 10;

    // Number of principal components to extract (x, y and optionally z).
    private final int dimensions;

    // These should all be final, but can't be set until arrange().
    private GraphWriteMethods wg;
    private int[] centres;
    private double[] mean;
    // NOTE(review): 'known' is only ever assigned false — appears vestigial.
    private boolean[] known;
    private boolean[] pivot;
    private int[] distance;

    // Coordinates of each node relative to pivot.
    private double[][] X;

    // Debug switch: stop after computing the covariance matrix.
    private static final boolean PART_ONLY = false;

    private final SecureRandom random = new SecureRandom();

    public HighDimensionEmbeddingArranger(final int dimensions) {
        this.dimensions = dimensions;
    }

    /**
     * Allocate per-vertex working storage for the graph about to be arranged.
     *
     * @param wg The graph to arrange.
     */
    private void set(final GraphWriteMethods wg) {
        this.wg = wg;

        final int vxCapacity = wg.getVertexCapacity();
        centres = new int[M];
        mean = new double[M];
        known = new boolean[vxCapacity];
        pivot = new boolean[vxCapacity];
        distance = new int[vxCapacity];
        X = new double[M][vxCapacity];
    }

    @Override
    public void arrange(final GraphWriteMethods wg) {
        set(wg);

        final int vxCount = wg.getVertexCount();
        for (int position = 0; position < vxCount; position++) {
            final int vxId = wg.getVertex(position);
            distance[vxId] = Integer.MAX_VALUE;
            pivot[vxId] = false;
            known[vxId] = false;
        }

        // The first pivot is arbitrary: use the first vertex.
        centres[0] = wg.getVertex(0);
        pivot[wg.getVertex(0)] = true;
        int currx = 0;

        // Find coordinates of nodes relative to first pivot.
        positionFrom(0, X[currx]);

        // For the remaining pivot points...
        for (int m = 1; m < M; m++) {
            // Find which point to use as pivot by searching
            // distances for a non-pivot point at the furthest
            // distance from that of any other pivot.
            int pdist = 0;
            int pnode = wg.getVertex(0);
            for (int npos = 0; npos < vxCount; npos++) {
                final int n = wg.getVertex(npos);

                // Choose node if its distance is at least that of the current best candidate.
                // and it is not already a pivot.
                if (distance[n] >= pdist && !pivot[n]) {
                    pnode = n;
                    pdist = distance[n];
                }

                // Clear known flag for next round.
                known[n] = false;
            }

            // Mark the chosen node as a pivot.
            centres[m] = pnode;
            pivot[pnode] = true;

            // View the graph relative to this new pivot.
            currx++;
            positionFrom(m, X[currx]);
        }

        // Centre the coordinates by subtracting the mean.
        currx = 0;
        for (int a = 0; a < M; a++) {
            mean[a] /= vxCount;
            for (int vpos = 0; vpos < vxCount; vpos++) {
                final int v = wg.getVertex(vpos);
                X[currx][v] -= mean[a];
            }

            currx++;
        }

        // Compute the covariance matrix.
        // S = (X*X^T)/n.
        final double[][] S = new double[M][M];
        for (int r = 0; r < M; r++) {
            for (int c = 0; c < M; c++) {
                S[r][c] = 0;
                for (int vpos = 0; vpos < vxCount; vpos++) {
                    final int v = wg.getVertex(vpos);
                    S[r][c] += X[r][v] * X[c][v];
                }

                // Not necessary because multiplication by a constant does not change the eigenvectors?
                // But we said we're computing the covariance matrix, so do it.
                S[r][c] /= vxCount;
            }
        }

        if (PART_ONLY) {
            return;
        }

        final double[] ui = new double[M];
        final double[] uihat = new double[M];

        // Compute the first C eigenvectors of S by power iteration,
        // orthogonalising each candidate against the eigenvectors found so far.
        final double[][] U = new double[dimensions][M];
        for (int u = 0; u < dimensions; u++) {
            // Initialise uihat to a normalised random vector.
            double norm = 0;
            for (int r = 0; r < M; r++) {
                uihat[r] = random.nextDouble();
                norm += uihat[r] * uihat[r];
            }

            norm = Math.sqrt(norm);
            for (int s = 0; s < M; s++) {
                uihat[s] /= norm;
            }

            final double ε = 0.001;
            double dot = 1000;
            double prevDot;
            int counter = 100;
            do {
                prevDot = dot;
                System.arraycopy(uihat, 0, ui, 0, M);

                // Orthogonalise ui against previous eigenvectors.
                for (int j = 0; j < u; j++) {
                    // Compute dot product ui*U[j].
                    dot = 0;
                    for (int m = 0; m < M; m++) {
                        dot += ui[m] * U[j][m];
                    }

                    // Subtract ui = ui - (transpose(ui).uj)uj.
                    norm = 0;
                    for (int m = 0; m < M; m++) {
                        ui[m] -= dot * U[j][m];
                        norm += ui[m] * ui[m];
                    }

                    // BUG FIX: 'norm' holds the squared length here; take the square
                    // root before dividing (as the other normalisations in this method
                    // do), otherwise ui is scaled by 1/|ui|^2 and the convergence dot
                    // product below is distorted.
                    norm = Math.sqrt(norm);
                    for (int m = 0; m < M; m++) {
                        ui[m] /= norm;
                    }
                }

                // uihat = S*ui.
                for (int i = 0; i < M; i++) {
                    uihat[i] = 0;
                    for (int j = 0; j < M; j++) {
                        uihat[i] += S[i][j] * ui[j];
                    }
                }

                // Normalise uihat and compute uihat(normed) * ui.
                norm = 0;
                for (int r = 0; r < M; r++) {
                    norm += uihat[r] * uihat[r];
                }

                norm = Math.sqrt(norm);
                dot = 0;
                for (int r = 0; r < M; r++) {
                    uihat[r] /= norm;
                    dot += uihat[r] * ui[r];
                }

                // Iterate until the direction stops changing (dot ~ 1), progress
                // stalls, or the iteration cap is hit.
                // BUG FIX: the guard previously ended with '--counter == 0', which is
                // false on the first pass (100 -> 99), so the power iteration always
                // stopped after a single step regardless of convergence. '--counter > 0'
                // makes 'counter' the intended iteration limit.
            } while (dot < (1 - ε) && Math.abs(dot - prevDot) > ε && --counter > 0);

            System.arraycopy(uihat, 0, U[u], 0, M);
        }

        // Now compute actual coordinates.
        // Initialise coordinates in case C<3.
        final double[] pos = new double[3];
        Arrays.fill(pos, 0);

        final int xId = wg.getAttribute(GraphElementType.VERTEX, VisualConcept.VertexAttribute.X.getName());
        final int yId = wg.getAttribute(GraphElementType.VERTEX, VisualConcept.VertexAttribute.Y.getName());
        final int zId = wg.getAttribute(GraphElementType.VERTEX, VisualConcept.VertexAttribute.Z.getName());

        // pos = X * U^T.
        for (int npos = 0; npos < vxCount; npos++) {
            final int n = wg.getVertex(npos);
            for (int c = 0; c < dimensions; c++) {
                pos[c] = 0;
                for (int m = 0; m < M; m++) {
                    pos[c] += X[m][n] * U[c][m];
                }
            }

            wg.setFloatValue(xId, n, SCALE * (float) pos[0]);
            wg.setFloatValue(yId, n, SCALE * (float) pos[1]);
            wg.setFloatValue(zId, n, SCALE * (float) pos[2]);
        }
    }

    /**
     * Breadth-first traversal recording each vertex's graph distance from the
     * given pivot into coord, while accumulating the per-axis mean and each
     * vertex's minimum distance to any pivot chosen so far (used to pick the
     * next pivot).
     *
     * @param axis The index in the centres array of the vertex id to use as an
     * axis.
     * @param coord Output: BFS distance of each vertex from the pivot (-1 for
     * vertices unreachable from it).
     */
    private void positionFrom(final int axis, final double[] coord) {
        Arrays.fill(coord, -1);

        int node = centres[axis];
        coord[node] = 0;
        distance[node] = 0;
        mean[axis] = 0;

        // Queue for use in breadth-first traversal.
        final int[] queue = new int[wg.getVertexCount()];
        int head = 0;
        int tail = 0;
        queue[head++] = node;

        while (head != tail) {
            node = queue[tail++];

            final int neighbours = wg.getVertexNeighbourCount(node);
            for (int npos = 0; npos < neighbours; npos++) {
                final int neighbour = wg.getVertexNeighbour(node, npos);
                if (coord[neighbour] < 0) {
                    coord[neighbour] = coord[node] + 1;
                    mean[axis] += coord[neighbour];
                    // Track the minimum distance to any pivot.
                    if (distance[neighbour] > coord[neighbour]) {
                        distance[neighbour] = (int) coord[neighbour];
                    }

                    queue[head++] = neighbour;
                }
            }
        }
    }

    @Override
    public void setMaintainMean(final boolean b) {
        // Required for Arranger, intentionally left blank
    }
}
| 5,048 |
332 | <gh_stars>100-1000
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.xd.jdbc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.util.Map;
import org.junit.Test;
// Unit tests for JdbcMessagePayloadTransformer: verifies that JSON payloads
// are flattened into column/value maps and that the SQL column-list and
// values-list fragments are generated correctly.
public class JdbcMessagePayloadTransformerTests {
    private final JdbcMessagePayloadTransformer transformer = new JdbcMessagePayloadTransformer();
    // All JSON fields appear in the result map, even ones not listed in columnNames.
    @Test
    public void testTransformWithColumns() throws Exception {
        String payload = "{\"id\": 123, \"name\":\"Sven\", \"age\":22}";
        transformer.setColumnNames("name, age");
        Map<String, Object> results = transformer.transformPayload(payload);
        assertEquals(3, results.size());
        assertEquals(123, results.get("id"));
        assertEquals("Sven", results.get("name"));
        assertEquals(22, results.get("age"));
        assertEquals("name, age", transformer.getColumns());
        assertEquals(":payload[name], :payload[age]", transformer.getValues());
    }
    // The special column name "payload" maps the whole raw payload string.
    @Test
    public void testTransformPayload() throws Exception {
        String payload = "{\"id\": 123, \"name\":\"Sven\", \"age\":22}";
        transformer.setColumnNames("payload");
        Map<String, Object> results = transformer.transformPayload(payload);
        assertEquals(1, results.size());
        assertEquals(payload, results.get("payload"));
        assertEquals("payload", transformer.getColumns());
        assertEquals(":payload[payload]", transformer.getValues());
    }
    // Without configured columns both fragments are empty strings.
    @Test
    public void testNoColumns() throws Exception {
        assertEquals("", transformer.getColumns());
        assertEquals("", transformer.getValues());
    }
    // Whitespace after commas in the column list is normalised.
    @Test
    public void testColumnsWithSpace() throws Exception {
        transformer.setColumnNames("name, age");
        assertEquals("name, age", transformer.getColumns());
        assertEquals(":payload[name], :payload[age]", transformer.getValues());
    }
    @Test
    public void testColumnsNoSpace() throws Exception {
        transformer.setColumnNames("name,age");
        assertEquals("name, age", transformer.getColumns());
        assertEquals(":payload[name], :payload[age]", transformer.getValues());
    }
    // snake_case column names are matched against camelCase JSON fields, and
    // both spellings are exposed in the result map.
    @Test
    public void testTransformWithColumnsUsingUnderscore() throws Exception {
        String payload = "{\"id\": 123, \"userName\":\"Sven\", \"lastName\":\"Jansson\", \"theUserAge\":22}";
        transformer.setColumnNames("user_name, lastName, the_user_age");
        Map<String, Object> results = transformer.transformPayload(payload);
        assertEquals(6, results.size());
        assertEquals(123, results.get("id"));
        assertEquals("Sven", results.get("user_name"));
        assertEquals("Sven", results.get("userName"));
        assertEquals("Jansson", results.get("lastName"));
        assertNull(results.get("last_name"));
        assertEquals(22, results.get("the_user_age"));
        assertEquals(22, results.get("theUserAge"));
        assertEquals("user_name, lastName, the_user_age", transformer.getColumns());
        assertEquals(":payload[user_name], :payload[lastName], :payload[the_user_age]", transformer.getValues());
    }
}
| 1,100 |
1,283 | package com.pengrad.telegrambot.request;
import com.pengrad.telegrambot.model.request.InlineKeyboardMarkup;
import com.pengrad.telegrambot.response.BaseResponse;
import com.pengrad.telegrambot.response.SendResponse;
/**
* <NAME>
* 11 October 2017
*/
public class EditMessageLiveLocation extends BaseRequest<EditMessageLiveLocation, BaseResponse> {

    /**
     * Edits a live-location message sent by the bot to a chat.
     * NOTE(review): registers SendResponse.class here although the class is
     * parameterised with BaseResponse — confirm this matches how BaseRequest
     * deserialises responses for chat messages.
     */
    public EditMessageLiveLocation(Object chatId, int messageId, float latitude, float longitude) {
        super(SendResponse.class);
        add("chat_id", chatId).add("message_id", messageId).add("latitude", latitude).add("longitude", longitude);
    }

    /** Edits a live-location message sent via an inline query. */
    public EditMessageLiveLocation(String inlineMessageId, float latitude, float longitude) {
        super(BaseResponse.class);
        add("inline_message_id", inlineMessageId).add("latitude", latitude).add("longitude", longitude);
    }

    // Optional parameter; units/semantics defined by the Telegram Bot API —
    // see its editMessageLiveLocation documentation.
    public EditMessageLiveLocation horizontalAccuracy(float horizontalAccuracy) {
        return add("horizontal_accuracy", horizontalAccuracy);
    }

    public EditMessageLiveLocation heading(int heading) {
        return add("heading", heading);
    }

    public EditMessageLiveLocation proximityAlertRadius(int proximityAlertRadius) {
        return add("proximity_alert_radius", proximityAlertRadius);
    }

    // Attaches an inline keyboard to the edited message.
    public EditMessageLiveLocation replyMarkup(InlineKeyboardMarkup replyMarkup) {
        return add("reply_markup", replyMarkup);
    }
}
| 453 |
369 | <reponame>JiazeWang/PVN3D
#!/usr/bin/env python3
import os
import cv2
import tqdm
import torch
import os.path
import numpy as np
from common import Config
import pickle as pkl
from lib.utils.basic_utils import Basic_Utils
import scipy.io as scio
import scipy.misc
from datasets.ycb.ycb_dataset import YCB_Dataset
# Module-level YCB configuration and shared helper utilities.
config = Config(dataset_name='ycb')
bs_utils = Basic_Utils(config)
# 'file_system' sharing passes tensors between processes via files instead of
# file descriptors, avoiding descriptor exhaustion with many workers.
torch.multiprocessing.set_sharing_strategy('file_system')
def worker_init_fn(worker_id):
    """Give each DataLoader worker its own deterministic numpy seed.

    The seed is the first word of the parent process's current MT19937 key
    offset by the worker id, so runs are reproducible while workers draw
    distinct random streams.
    """
    base_seed = np.random.get_state()[1][0]
    np.random.seed(base_seed + worker_id)
def main():
    """Preprocess the YCB test split once and cache it to disk as a pickle.

    Iterates the test DataLoader, splits each mini-batch back into
    per-sample item lists, and dumps them all to
    config.preprocessed_testset_pth. Does nothing if that file already exists.
    """
    if os.path.exists(config.preprocessed_testset_pth):
        return
    test_ds = YCB_Dataset('test')
    test_loader = torch.utils.data.DataLoader(
        test_ds, batch_size=config.test_mini_batch_size, shuffle=False,
        num_workers=40, worker_init_fn=worker_init_fn
    )
    data_lst = []
    for i, data in tqdm.tqdm(
        enumerate(test_loader), leave=False, desc='Preprocessing valtestset'
    ):
        bs, _, _, _ = data[0].shape
        # Un-batch: collect the ibs-th sample from every tensor in the batch.
        for ibs in range(bs):
            i_data = [item[ibs] for item in data]
            # Diagnostic: samples are expected to carry at least 11 items.
            if len(i_data) < 11:
                print(len(i_data))
            data_lst.append(i_data)
    pkl.dump(data_lst, open(config.preprocessed_testset_pth, 'wb'))
if __name__ == "__main__":
    # Run the one-off preprocessing when executed as a script.
    main()
# vim: ts=4 sw=4 sts=4 expandtab
| 596 |
488 | <reponame>maurizioabba/rose
#ifndef FIXUP_DEFINING_AND_NONDEFINING_DECLARATIONS_H
#define FIXUP_DEFINING_AND_NONDEFINING_DECLARATIONS_H
// DQ (6/27/2005):
/*! \brief Fixup all SgDeclarationStatement references to defining and non-defining declarations.
\implementation Some declarations are there own defining declarations.
*/
void fixupAstDefiningAndNondefiningDeclarations ( SgNode* node );
// Memory-pool traversal that checks/fixes definingDeclaration and
// firstNondefiningDeclaration pointers on every SgDeclarationStatement
// it visits; invoked via fixupAstDefiningAndNondefiningDeclarations().
class FixupAstDefiningAndNondefiningDeclarations
// : public AstSimpleProcessing
   : public ROSE_VisitTraversal
   {
// This class uses a traversal to test the values of the definingDeclaration and
// firstNondefiningDeclaration pointers in each SgDeclarationStatement. See code for
// details, since both of these pointers are not always set.
public:
virtual ~FixupAstDefiningAndNondefiningDeclarations() {};
// Per-node callback of the traversal; implementation is out of line.
void visit ( SgNode* node );
};
// endif for FIXUP_DEFINING_AND_NONDEFINING_DECLARATIONS_H
#endif
| 314 |
574 | /*
* Copyright (C) 2012 Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.swift.codec.recursion;
import com.facebook.swift.codec.ThriftField;
import com.facebook.swift.codec.ThriftIdlAnnotation;
import com.facebook.swift.codec.ThriftStruct;
import java.util.Objects;
import static com.facebook.swift.codec.ThriftField.Requiredness;
import static com.facebook.swift.codec.ThriftField.RECURSIVE_REFERENCE_ANNOTATION_NAME;
@ThriftStruct
public class WithIdlRecursiveAnnotation
{
    /**
     * Optional self-referential child; the IDL annotation marks the field as
     * an allowed recursive reference for the Thrift codec.
     */
    @ThriftField(
            value = 1,
            requiredness = Requiredness.OPTIONAL,
            idlAnnotations = { @ThriftIdlAnnotation(key = RECURSIVE_REFERENCE_ANNOTATION_NAME, value = "true") })
    public WithIdlRecursiveAnnotation child;

    @ThriftField(2)
    public String data;

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final WithIdlRecursiveAnnotation that = (WithIdlRecursiveAnnotation) obj;
        return Objects.equals(data, that.data) &&
                Objects.equals(child, that.child);
    }

    /**
     * BUG FIX: equals() was overridden without hashCode(), violating the
     * Object contract (equal instances must have equal hash codes) and
     * breaking use in hash-based collections. Uses the same fields as
     * equals(), recursing through child just as equals() does.
     */
    @Override
    public int hashCode()
    {
        return Objects.hash(data, child);
    }
}
| 626 |
1,467 | <reponame>Deluxe123123/aws-sdk-java-v2
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.services.s3.internal.resource;
import java.util.Optional;
import software.amazon.awssdk.annotations.SdkInternalApi;
import software.amazon.awssdk.utils.Validate;
/**
* An {@link S3Resource} that represents an S3 object.
*/
@SdkInternalApi
public final class S3ObjectResource implements S3Resource {
    private static final S3ResourceType S3_RESOURCE_TYPE = S3ResourceType.OBJECT;

    // The containing resource: either a bucket or an access point; enforced in
    // validateParentS3Resource().
    private final S3Resource parentS3Resource;
    private final String key;

    private S3ObjectResource(Builder b) {
        this.parentS3Resource = validateParentS3Resource(b.parentS3Resource);
        this.key = Validate.paramNotBlank(b.key, "key");
    }

    /**
     * Get a new builder for this class.
     * @return A newly initialized instance of a builder.
     */
    public static Builder builder() {
        return new Builder();
    }

    /**
     * Gets the resource type for this S3 object.
     * @return This will always return "object".
     */
    @Override
    public String type() {
        return S3_RESOURCE_TYPE.toString();
    }

    /**
     * Gets the AWS partition name associated with the S3 object (e.g.: 'aws').
     * Delegates to the parent resource.
     * @return the name of the partition.
     */
    @Override
    public Optional<String> partition() {
        return parentS3Resource.partition();
    }

    /**
     * Gets the AWS region name associated with the S3 object (e.g.: 'us-east-1').
     * Delegates to the parent resource.
     * @return the name of the region or null if the region has not been specified (e.g. the resource is in the
     * global namespace).
     */
    @Override
    public Optional<String> region() {
        return parentS3Resource.region();
    }

    /**
     * Gets the AWS account ID associated with the S3 object if it has been specified.
     * Delegates to the parent resource.
     * @return the optional AWS account ID or empty if the account ID has not been specified.
     */
    @Override
    public Optional<String> accountId() {
        return parentS3Resource.accountId();
    }

    /**
     * Gets the key of the S3 object.
     * @return the key of the S3 object.
     */
    public String key() {
        return this.key;
    }

    /**
     * Gets the resource this object is contained in (a bucket or an access point).
     * @return the parent resource; never empty for an object resource.
     */
    @Override
    public Optional<S3Resource> parentS3Resource() {
        return Optional.of(parentS3Resource);
    }

    // Equality is based on the parent resource and the object key.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        S3ObjectResource that = (S3ObjectResource) o;

        if (parentS3Resource != null ? !parentS3Resource.equals(that.parentS3Resource) : that.parentS3Resource != null) {
            return false;
        }
        return key != null ? key.equals(that.key) : that.key == null;
    }

    @Override
    public int hashCode() {
        int result = parentS3Resource != null ? parentS3Resource.hashCode() : 0;
        result = 31 * result + (key != null ? key.hashCode() : 0);
        return result;
    }

    // Rejects any parent that is not a bucket or an access point, and any null
    // parent; returns the validated parent for assignment.
    private S3Resource validateParentS3Resource(S3Resource parentS3Resource) {
        Validate.paramNotNull(parentS3Resource, "parentS3Resource");
        if (!S3ResourceType.ACCESS_POINT.toString().equals(parentS3Resource.type())
            && !S3ResourceType.BUCKET.toString().equals(parentS3Resource.type())) {
            throw new IllegalArgumentException("Invalid 'parentS3Resource' type. An S3 object resource must be " +
                                               "associated with either a bucket or access-point parent resource.");
        }
        return parentS3Resource;
    }

    /**
     * A builder for {@link S3ObjectResource} objects.
     */
    public static final class Builder {
        private S3Resource parentS3Resource;
        private String key;

        private Builder() {
        }

        /**
         * The key of the S3 object.
         */
        public Builder key(String key) {
            this.key = key;
            return this;
        }

        /**
         * The S3 resource this object is associated with (contained within). Only {@link S3BucketResource} and
         * {@link S3AccessPointResource} are valid parent resource types.
         */
        public Builder parentS3Resource(S3Resource parentS3Resource) {
            this.parentS3Resource = parentS3Resource;
            return this;
        }

        /**
         * Builds an instance of {@link S3ObjectResource}.
         */
        public S3ObjectResource build() {
            return new S3ObjectResource(this);
        }
    }
}
| 1,966 |
453 | #include "headers/log1pf4.h"
#include "headers/dom_chkf_less_than.h"
/*
 * Scalar wrapper around the SIMD _log1pf4 implementation: promotes the single
 * float into a vector, evaluates the vector routine, and extracts element 0.
 */
static __inline float _log1pf(float x)
{
	float res;
	vector float vx;
	vector float vc = { -1.0, -1.0, -1.0, -1.0 };

	vx = spu_promote(x, 0);
	res = spu_extract(_log1pf4(vx), 0);
#ifndef _IEEE_LIBM
	/* log1p is undefined for x <= -1; in non-IEEE mode run the domain check
	   against -1 — presumably this reports a domain error (errno/matherr);
	   confirm against dom_chkf_less_than's implementation. */
	dom_chkf_less_than(vx, vc);
#endif
	return res;
}
| 164 |
347 | from unittest import TestCase
import filters as f
from filters.test import BaseFilterTestCase
from iota import TryteString
from iota.adapter import MockAdapter, async_return
from iota.crypto.types import Digest
from iota.filters import Trytes
from iota.multisig import MultisigIota, AsyncMultisigIota
from iota.multisig.commands import CreateMultisigAddressCommand
from iota.multisig.types import MultisigAddress
from test import patch, MagicMock, async_test
class CreateMultisigAddressCommandTestCase(TestCase):
  """
  Tests for :py:class:`CreateMultisigAddressCommand`: that the sync and
  async API facades dispatch to the command, and the happy-path result.
  """
  def setUp(self):
    super(CreateMultisigAddressCommandTestCase, self).setUp()

    self.adapter = MockAdapter()
    self.command = CreateMultisigAddressCommand(self.adapter)

    # Define some tryte sequences that we can reuse between tests.
    self.digest_1 =\
      Digest(
        trytes =
          b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
          b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
          b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',

        key_index = 0,
      )

    self.digest_2 =\
      Digest(
        trytes =
          b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
          b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
          b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',

        key_index = 0,
      )

  def test_wireup(self):
    """
    Verify that the command is wired up correctly. (sync)

    The API method indeed calls the appropiate command.
    """
    with patch('iota.multisig.commands.create_multisig_address.CreateMultisigAddressCommand.__call__',
              MagicMock(return_value=async_return('You found me!'))
              ) as mocked_command:

      api = MultisigIota(self.adapter)

      # Don't need to call with proper args here.
      response = api.create_multisig_address('digests')

      self.assertTrue(mocked_command.called)

      self.assertEqual(
        response,
        'You found me!'
      )

  @async_test
  async def test_wireup_async(self):
    """
    Verify that the command is wired up correctly. (async)

    The API method indeed calls the appropiate command.
    """
    with patch('iota.multisig.commands.create_multisig_address.CreateMultisigAddressCommand.__call__',
              MagicMock(return_value=async_return('You found me!'))
              ) as mocked_command:

      api = AsyncMultisigIota(self.adapter)

      # Don't need to call with proper args here.
      response = await api.create_multisig_address('digests')

      self.assertTrue(mocked_command.called)

      self.assertEqual(
        response,
        'You found me!'
      )

  @async_test
  async def test_happy_path(self):
    """
    Generating a multisig address.
    """
    # The expected address is deterministic for these two digests.
    result = await self.command(digests=[self.digest_1, self.digest_2])

    self.assertDictEqual(
      result,

      {
        'address':
          MultisigAddress(
            trytes =
              b'ZYKDKGXTMGINTQLUMVNBBI9XCEI9BWYF9YOPCBFT'
              b'UUJZWM9YIWHNYZEWOPEVRVLKZCPRKLCQD9BR9FVLC',

            digests = [self.digest_1, self.digest_2],
          ),
      },
    )
class CreateMultisigAddressRequestFilterTestCase(BaseFilterTestCase):
  """
  Tests for the request filter of ``CreateMultisigAddressCommand``:
  valid requests pass (with type coercion), invalid requests are rejected
  with the expected per-field filter error codes.
  """
  filter_type = CreateMultisigAddressCommand(MockAdapter()).get_request_filter
  skip_value_check = True

  def setUp(self):
    super(CreateMultisigAddressRequestFilterTestCase, self).setUp()

    # Define some tryte sequences that we can reuse between tests.
    self.digest_1 =\
      Digest(
        trytes =
          b'FWNEPVJNGUKTSHSBDO9AORBCVWWLVXC9KAMKYYNKPYNJDKSAUURI9ELKOEEYPKVTYP'
          b'CKOCJQESYFEMINIFKX9PDDGRBEEHYYXCJW9LHGWFZGHKCPVDBGMGQKIPCNKNITGMZT'
          b'DIWVUB9PCHCOPHMIWKSUKRHZOJPMAY',

        key_index = 0,
      )

    self.digest_2 =\
      Digest(
        trytes =
          b'PAIRLDJQY9XAUSKIGCTHRJHZVARBEY9NNHYJ9UI9HWWZXFSDWEZEGDCWNVVYSYDV9O'
          b'HTR9NGGZURISWTNECFTCMEWQQFJ9VKLFPDTYJYXC99OLGRH9OSFJLMEOGHFDHZYEAF'
          b'IMIZTJRBQUVCR9U9ZWTMUXTUEOUBLC',

        key_index = 0,
      )

  def test_pass_happy_path(self):
    """
    Request is valid.
    """
    request = {
      'digests': [self.digest_1, self.digest_2],
    }

    filter_ = self._filter(request)

    self.assertFilterPasses(filter_)
    self.assertDictEqual(filter_.cleaned_data, request)

  def test_pass_compatible_types(self):
    """
    Request contains values that can be converted to the expected
    types.
    """
    filter_ = self._filter({
      # ``digests`` may contain any values that can be converted into
      # :py:class:`Digest` objects.
      'digests': [bytes(self.digest_1), TryteString(self.digest_2)],
    })

    self.assertFilterPasses(filter_)
    self.assertDictEqual(
      filter_.cleaned_data,

      {
        'digests': [self.digest_1, self.digest_2],
      },
    )

  def test_fail_empty(self):
    """
    Request is empty.
    """
    self.assertFilterErrors(
      {},

      {
        'digests': [f.FilterMapper.CODE_MISSING_KEY],
      },
    )

  def test_fail_unexpected_parameters(self):
    """
    Request contains unexpected parameters.
    """
    self.assertFilterErrors(
      {
        'digests': [self.digest_1, self.digest_2],

        # Oh, and I suppose that's completely inconspicuous.
        'foo': 'bar',
      },

      {
        'foo': [f.FilterMapper.CODE_EXTRA_KEY],
      },
    )

  def test_fail_digests_null(self):
    """
    ``digests`` is null.
    """
    self.assertFilterErrors(
      {
        'digests': None,
      },

      {
        'digests': [f.Required.CODE_EMPTY],
      },
    )

  def test_fail_digests_wrong_type(self):
    """
    ``digests`` is not an array.
    """
    self.assertFilterErrors(
      {
        'digests': self.digest_1,
      },

      {
        'digests': [f.Array.CODE_WRONG_TYPE],
      },
    )

  def test_fail_digests_empty(self):
    """
    ``digests`` is an array, but it's empty.
    """
    self.assertFilterErrors(
      {
        'digests': [],
      },

      {
        'digests': [f.Required.CODE_EMPTY],
      },
    )

  def test_fail_digests_contents_invalid(self):
    """
    ``digests`` is an array, but it contains invalid values.
    """
    self.assertFilterErrors(
      {
        'digests': [
          b'',
          True,
          None,
          b'not valid trytes',

          # This is actually valid; I just added it to make sure the
          # filter isn't cheating!
          TryteString(self.digest_1),

          2130706433,
        ],
      },

      {
        'digests.0': [f.Required.CODE_EMPTY],
        'digests.1': [f.Type.CODE_WRONG_TYPE],
        'digests.2': [f.Required.CODE_EMPTY],
        'digests.3': [Trytes.CODE_NOT_TRYTES],
        'digests.5': [f.Type.CODE_WRONG_TYPE],
      },
    )
| 3,269 |
8,285 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2009, <NAME>'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A clone of 'sensors' utility on Linux printing hardware temperatures,
fans speed and battery info.
$ python3 scripts/sensors.py
asus
Temperatures:
asus 57.0°C (high=None°C, critical=None°C)
Fans:
cpu_fan 3500 RPM
acpitz
Temperatures:
acpitz 57.0°C (high=108.0°C, critical=108.0°C)
coretemp
Temperatures:
Physical id 0 61.0°C (high=87.0°C, critical=105.0°C)
Core 0 61.0°C (high=87.0°C, critical=105.0°C)
Core 1 59.0°C (high=87.0°C, critical=105.0°C)
Battery:
charge: 84.95%
status: charging
plugged in: yes
"""
from __future__ import print_function
import psutil
def secs2hours(secs):
    """Format a duration given in seconds as an H:MM:SS string."""
    hours, rem = divmod(secs, 3600)
    minutes, seconds = divmod(rem, 60)
    return "%d:%02d:%02d" % (hours, minutes, seconds)
def main():
    """Print hardware temperatures, fan speeds and battery status (psutil)."""
    # Each sensors_* API may be missing on some platforms; fall back to empty.
    temps = psutil.sensors_temperatures() if hasattr(psutil, "sensors_temperatures") else {}
    fans = psutil.sensors_fans() if hasattr(psutil, "sensors_fans") else {}
    battery = psutil.sensors_battery() if hasattr(psutil, "sensors_battery") else None

    if not any((temps, fans, battery)):
        print("can't read any temperature, fans or battery info")
        return

    # One section per sensor chip; a chip may report temps, fans, or both.
    for name in set(temps) | set(fans):
        print(name)
        chip_temps = temps.get(name)
        if chip_temps is not None:
            print(" Temperatures:")
            for entry in chip_temps:
                print(" %-20s %s°C (high=%s°C, critical=%s°C)" % (
                    entry.label or name, entry.current, entry.high,
                    entry.critical))
        chip_fans = fans.get(name)
        if chip_fans is not None:
            print(" Fans:")
            for entry in chip_fans:
                print(" %-20s %s RPM" % (
                    entry.label or name, entry.current))

    if battery:
        print("Battery:")
        print(" charge: %s%%" % round(battery.percent, 2))
        if battery.power_plugged:
            status = "charging" if battery.percent < 100 else "fully charged"
            print(" status: %s" % status)
            print(" plugged in: yes")
        else:
            print(" left: %s" % secs2hours(battery.secsleft))
            print(" status: %s" % "discharging")
            print(" plugged in: no")
| 1,365 |
1,475 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.backup;
import static org.apache.geode.internal.cache.backup.AbstractBackupWriterConfig.TIMESTAMP;
import static org.apache.geode.internal.cache.backup.AbstractBackupWriterConfig.TYPE;
import static org.apache.geode.internal.cache.backup.BackupWriterFactory.FILE_SYSTEM;
import static org.apache.geode.internal.cache.backup.FileSystemBackupWriterConfig.BASELINE_DIR;
import static org.apache.geode.internal.cache.backup.FileSystemBackupWriterConfig.TARGET_DIR;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;
import org.junit.Test;
/**
 * Unit tests for {@link BackupWriterFactory}: type-name lookup and creation of
 * {@code FileSystem} backup writers.
 */
public class BackupWriterFactoryTest {

  @Test
  public void returnsCorrectFactoryForName() {
    assertThat(BackupWriterFactory.getFactoryForType("FileSystem")).isEqualTo(FILE_SYSTEM);
  }

  @Test
  public void throwsExceptionWhenFactoryForInvalidNameGiven() {
    assertThatThrownBy(() -> BackupWriterFactory.getFactoryForType("badName"))
        .isInstanceOf(IllegalArgumentException.class);
  }

  @Test
  public void getType() {
    assertThat(FILE_SYSTEM.getType()).isEqualTo("FileSystem");
  }

  @Test
  public void returnsCorrectWriterType() {
    assertThat(FILE_SYSTEM.createWriter(fileSystemWriterProperties(), "memberId"))
        .isInstanceOf(FileSystemBackupWriter.class);
  }

  @Test
  public void returnedWriterHasAbsolutePathToBaselineDirectory() {
    BackupWriter writer = FILE_SYSTEM.createWriter(fileSystemWriterProperties(), "memberId");

    Path absoluteBaseLineDirectory = Paths.get("baselineDir").toAbsolutePath();
    assertThat(writer.getBaselineDirectory()).isAbsolute().isEqualTo(absoluteBaseLineDirectory);
  }

  /**
   * Minimal properties needed to create a FileSystem backup writer; extracted
   * to remove the duplicated setup that previously appeared in two tests.
   */
  private Properties fileSystemWriterProperties() {
    Properties properties = new Properties();
    properties.setProperty(TYPE, FILE_SYSTEM.getType());
    properties.setProperty(TIMESTAMP, "yyyy-MM-dd-HH-mm-ss");
    properties.setProperty(TARGET_DIR, "targetDir");
    properties.setProperty(BASELINE_DIR, "baselineDir");
    return properties;
  }
}
| 937 |
1,350 | <filename>sdk/postgresqlflexibleserver/azure-resourcemanager-postgresqlflexibleserver/src/main/java/module-info.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Module descriptor for the Azure PostgreSQL Flexible Server resource manager SDK.
module com.azure.resourcemanager.postgresqlflexibleserver {
    // Consumers need the core management types on their module path as well.
    requires transitive com.azure.core.management;

    exports com.azure.resourcemanager.postgresqlflexibleserver;
    exports com.azure.resourcemanager.postgresqlflexibleserver.fluent;
    exports com.azure.resourcemanager.postgresqlflexibleserver.fluent.models;
    exports com.azure.resourcemanager.postgresqlflexibleserver.models;

    // Opened (not exported) so azure-core and Jackson can access the model
    // classes reflectively for serialization/deserialization.
    opens com.azure.resourcemanager.postgresqlflexibleserver.fluent.models to
        com.azure.core,
        com.fasterxml.jackson.databind;
    opens com.azure.resourcemanager.postgresqlflexibleserver.models to
        com.azure.core,
        com.fasterxml.jackson.databind;
}
| 327 |
843 | <reponame>shashwatsingh/addons-server
# Generated by Django 2.2.13 on 2020-07-17 12:33
from django.db import migrations
from olympia.constants.scanners import CUSTOMS
def backfill_model_version(apps, schema_editor):
    """Data migration: backfill ScannerResult.model_version.

    Targets customs results that are bound to a version, have a score, and no
    model_version yet, and stamps them with the model version in use at the
    time ('20200227').
    """
    ScannerResult = apps.get_model('scanners', 'ScannerResult')
    # Set a default model version for all customs results that are bound to a
    # version and have a score (and no model version already).
    ScannerResult.objects.exclude(version=None).exclude(score=None).filter(
        scanner=CUSTOMS
    ).filter(model_version=None).update(model_version='20200227')
class Migration(migrations.Migration):
    # Data-only migration; no reverse operation is provided, so it cannot be
    # unapplied automatically.
    dependencies = [('scanners', '0036_scannerresult_model_version')]
    operations = [migrations.RunPython(backfill_model_version)]
| 256 |
386 | /**
* 定时任务模块,提供类Crontab表达式的定时任务,实现参考了Cron4j,同时可以支持秒级别的定时任务定义和年的定义(同时兼容Crontab、Cron4j、Quartz表达式)
*
* @author <NAME>
* @version 6.3.2
* @since JDK 1.8+
*/
package org.aoju.bus.cron; | 189 |
935 | <filename>spring-cloud-dataflow-server-core/src/main/java/org/springframework/cloud/dataflow/server/db/migration/AbstractBaselineCallback.java
/*
* Copyright 2019-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.server.db.migration;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.flywaydb.core.api.callback.Context;
import org.flywaydb.core.api.callback.Event;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.cloud.dataflow.common.flyway.AbstractCallback;
import org.springframework.cloud.dataflow.common.flyway.SqlCommand;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCallback;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
/**
* Base implementation for baselining schema setup.
*
* @author <NAME>
*
*/
/**
 * Base implementation for baselining schema setup.
 *
 * <p>Before Flyway baselines the schema, this callback inspects the database:
 * when no prior Data Flow schema is detected (via the {@code APP_REGISTRATION}
 * table) the full initial setup migration is emitted; otherwise the existing
 * schema is altered in place to match the baseline structure.
 *
 * @author <NAME>
 *
 */
public abstract class AbstractBaselineCallback extends AbstractCallback {

	private static final Logger logger = LoggerFactory.getLogger(AbstractBaselineCallback.class);

	private final AbstractInitialSetupMigration initialSetupMigration;

	/**
	 * Instantiates a new abstract baseline callback.
	 *
	 * @param initialSetupMigration the initial setup migration
	 */
	public AbstractBaselineCallback(AbstractInitialSetupMigration initialSetupMigration) {
		super(Event.BEFORE_BASELINE);
		this.initialSetupMigration = initialSetupMigration;
	}

	@Override
	public List<SqlCommand> getCommands(Event event, Context context) {
		List<SqlCommand> commands = new ArrayList<>();
		List<SqlCommand> defaultCommands = super.getCommands(event, context);
		if (defaultCommands != null) {
			commands.addAll(defaultCommands);
		}
		// APP_REGISTRATION existing means an old (pre-Flyway) schema is present.
		boolean migrateToInitial = !doTableExists(context, "APP_REGISTRATION");
		if (migrateToInitial) {
			logger.info("Did not detect prior Data Flow schema, doing baseline.");
			commands.addAll(initialSetupMigration.getCommands());
		}
		else {
			logger.info("Detected old Data Flow schema, doing baseline.");
			commands.addAll(dropIndexes());
			commands.addAll(changeAppRegistrationTable());
			commands.addAll(changeUriRegistryTable());
			commands.addAll(changeStreamDefinitionsTable());
			commands.addAll(changeTaskDefinitionsTable());
			commands.addAll(changeAuditRecordsTable());
			commands.addAll(createTaskLockTable());
			commands.addAll(createTaskDeploymentTable());
			commands.addAll(createIndexes());
		}
		return commands;
	}

	/**
	 * Returns whether the named table exists in the target database.
	 *
	 * <p>The previous implementation executed {@code select 1 from ?} with the
	 * table name bound as a JDBC parameter; identifiers cannot be bound as
	 * parameters, so the statement always failed and this method always
	 * returned {@code false}. The check now uses {@link DatabaseMetaData},
	 * probing the name as given plus upper- and lower-case variants because
	 * databases differ in how they store unquoted identifiers.
	 *
	 * @param context the flyway callback context providing the connection
	 * @param name the table name to look for
	 * @return {@code true} if the table exists, {@code false} otherwise or on error
	 */
	protected boolean doTableExists(Context context, String name) {
		try {
			DatabaseMetaData metaData = context.getConnection().getMetaData();
			String[] candidates = { name, name.toUpperCase(Locale.ROOT), name.toLowerCase(Locale.ROOT) };
			for (String candidate : candidates) {
				try (ResultSet tables = metaData.getTables(null, null, candidate, null)) {
					if (tables.next()) {
						return true;
					}
				}
			}
		}
		catch (Exception e) {
			// Treat any metadata failure as "table not found"; baseline will
			// then fall back to the initial setup migration.
			logger.debug("Unable to determine whether table {} exists", name, e);
		}
		return false;
	}

	/**
	 * Drop indexes.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> dropIndexes();

	/**
	 * Change app registration table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> changeAppRegistrationTable();

	/**
	 * Change uri registry table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> changeUriRegistryTable();

	/**
	 * Change stream definitions table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> changeStreamDefinitionsTable();

	/**
	 * Change task definitions table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> changeTaskDefinitionsTable();

	/**
	 * Change audit records table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> changeAuditRecordsTable();

	/**
	 * Creates the indexes.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> createIndexes();

	/**
	 * Creates the task lock table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> createTaskLockTable();

	/**
	 * Create the task deployment table.
	 *
	 * @return the list of sql commands
	 */
	public abstract List<SqlCommand> createTaskDeploymentTable();
}
| 1,440 |
864 | <filename>scoops-sample/src/main/java/com/ftinc/themeenginetest/adapters/SwitchColorAdapter.java<gh_stars>100-1000
package com.ftinc.themeenginetest.adapters;
import android.os.Build;
import android.support.annotation.ColorInt;
import android.widget.Switch;
import com.ftinc.scoop.adapters.ColorAdapter;
import com.ftinc.scoop.util.AttrUtils;
import com.ftinc.scoop.util.Utils;
import com.ftinc.themeenginetest.R;
/**
 * Created by r0adkll on 6/26/16.
 *
 * {@link ColorAdapter} that tints a {@link Switch}'s thumb and track with the
 * supplied color. The tint-list APIs used here exist only on API 23 (M) and
 * above, so on older platforms {@link #applyColor} is a no-op and
 * {@link #getColor} returns 0.
 */
public class SwitchColorAdapter implements ColorAdapter<Switch> {
    @Override
    public void applyColor(Switch view, @ColorInt int color) {
        // Disabled-state colors: the thumb falls back to the theme's normal
        // switch thumb color, the track to a fixed grey.
        int disabledColor = AttrUtils.getColorAttr(view.getContext(), R.attr.colorSwitchThumbNormal);
        int trackDisabledTint = view.getContext().getResources().getColor(R.color.grey_600);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            view.setThumbTintList(Utils.colorToStateList(color, disabledColor));
            view.setTrackTintList(Utils.colorToStateList(color, trackDisabledTint));
        }
    }

    @Override
    public int getColor(Switch view) {
        // Report the default (enabled) thumb tint; 0 when no tint is set or
        // the platform predates tint lists.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && view.getThumbTintList() != null) {
            return view.getThumbTintList().getDefaultColor();
        }
        return 0;
    }
}
| 511 |
14,668 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/feed/core/v2/request_throttler.h"
#include <memory>
#include "base/test/task_environment.h"
#include "components/feed/core/common/pref_names.h"
#include "components/feed/core/v2/config.h"
#include "components/prefs/testing_pref_service.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace feed {
namespace {
const int kMaximumQueryRequestsPerDay = 20;
const int kMaximumUploadActionsRequestsPerDay = 10;
// Fixture that installs a feed::Config with known per-day request quotas,
// registers the feed profile prefs, and advances the mock clock so every test
// starts at 12:01AM local time (just after the start of a quota day).
class FeedRequestThrottlerTest : public testing::Test {
 public:
  FeedRequestThrottlerTest() {
    feed::Config config;
    config.max_action_upload_requests_per_day =
        kMaximumUploadActionsRequestsPerDay;
    config.max_feed_query_requests_per_day = kMaximumQueryRequestsPerDay;
    SetFeedConfigForTesting(config);
    RegisterProfilePrefs(test_prefs_.registry());

    // Set the clock to 12:01AM.
    base::Time twelveO_One =
        (base::Time::Now() + base::Days(1)).LocalMidnight() + base::Minutes(1);
    task_environment_.AdvanceClock(twelveO_One - base::Time::Now());
  }

 protected:
  TestingPrefServiceSimple test_prefs_;
  // MOCK_TIME allows tests to advance virtual time without real waiting.
  base::test::TaskEnvironment task_environment_{
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  RequestThrottler throttler_{&test_prefs_};
};
// The full daily FeedQuery quota can be consumed back-to-back; the request
// after the limit is rejected.
TEST_F(FeedRequestThrottlerTest, RequestQuotaAllAtOnce) {
  for (int i = 0; i < kMaximumQueryRequestsPerDay; ++i) {
    EXPECT_TRUE(throttler_.RequestQuota(NetworkRequestType::kFeedQuery));
  }
  EXPECT_FALSE(throttler_.RequestQuota(NetworkRequestType::kFeedQuery));
}
// Quota resets on calendar-day boundaries, not on a rolling 24h window from
// the first request.
TEST_F(FeedRequestThrottlerTest, QuotaIsPerDay) {
  for (int i = 0; i < kMaximumUploadActionsRequestsPerDay; ++i) {
    EXPECT_TRUE(throttler_.RequestQuota(NetworkRequestType::kUploadActions));
  }
  // Because we started at 12:01AM, we need to advance 24 hours before making
  // another successful request.
  task_environment_.FastForwardBy(base::Hours(23));
  EXPECT_FALSE(throttler_.RequestQuota(NetworkRequestType::kUploadActions));
  task_environment_.FastForwardBy(base::Hours(1));
  EXPECT_TRUE(throttler_.RequestQuota(NetworkRequestType::kUploadActions));
}
} // namespace
} // namespace feed
| 788 |
463 | //
// disasm.h
//
#ifndef rv_disasm_h
#define rv_disasm_h
namespace riscv {
// Decoded instruction extended with the pc it was decoded at and its raw
// encoding, as needed for disassembly output.
struct disasm : decode
{
	addr_t pc;
	inst_t inst;

	disasm() : decode(), pc(0), inst(0) {}
};

// How the immediates of a recognized instruction pair combine into an address.
enum rva {
	rva_none,
	rva_abs,    // op1 imm + op2 imm form an absolute address
	rva_pcrel   // address is relative to op1's pc
};

// A two-instruction idiom (e.g. auipc+addi) whose immediates combine to a
// single address, and how that address is interpreted.
struct rvx {
	rv_op op1;
	rv_op op2;
	rva addr;
};

// instruction pair constraints
// Terminated by the rv_op_illegal/rva_none sentinel entry.
const rvx rvx_constraints[] = {
	{ rv_op_lui,     rv_op_addi,    rva_abs   },
	{ rv_op_auipc,   rv_op_addi,    rva_pcrel },
	{ rv_op_auipc,   rv_op_jalr,    rva_pcrel },
	{ rv_op_auipc,   rv_op_ld,      rva_pcrel },
	{ rv_op_auipc,   rv_op_lb,      rva_pcrel },
	{ rv_op_auipc,   rv_op_lh,      rva_pcrel },
	{ rv_op_auipc,   rv_op_lw,      rva_pcrel },
	{ rv_op_auipc,   rv_op_lbu,     rva_pcrel },
	{ rv_op_auipc,   rv_op_lhu,     rva_pcrel },
	{ rv_op_auipc,   rv_op_lwu,     rva_pcrel },
	{ rv_op_auipc,   rv_op_flw,     rva_pcrel },
	{ rv_op_auipc,   rv_op_fld,     rva_pcrel },
	{ rv_op_auipc,   rv_op_sd,      rva_pcrel },
	{ rv_op_auipc,   rv_op_sb,      rva_pcrel },
	{ rv_op_auipc,   rv_op_sh,      rva_pcrel },
	{ rv_op_auipc,   rv_op_sw,      rva_pcrel },
	{ rv_op_auipc,   rv_op_fsw,     rva_pcrel },
	{ rv_op_auipc,   rv_op_fsd,     rva_pcrel },
	{ rv_op_illegal, rv_op_illegal, rva_none  },
};

// instruction buffer length
const size_t rvx_instruction_buffer_len = 16;
// decode pc relative address
//
// Branch (sb) and jump (uj) encodings carry an immediate that is an offset
// from the instruction's own pc, so the target address can be computed
// directly. pc_bias translates a biased pc (e.g. buffer-relative) back to
// the real address. Other codecs are handled via decode_pairs.
template <typename T>
bool decode_pcrel(T &dec, addr_t &addr, addr_t pc, addr_t pc_bias)
{
	switch (dec.codec) {
		case rv_codec_uj:
		case rv_codec_sb:
			addr = pc - pc_bias + dec.imm;
			return true;
		default:
			return false;
	}
	return false; // unreachable; retained for compilers that do not treat the switch as exhaustive
}
// decode address using instruction pair constraints
//
// Scans backwards through the decode history for the first instruction (op1)
// of a known idiom (see rvx_constraints) whose destination register feeds
// this instruction's rs1, then combines the two immediates into an absolute
// or pc-relative address. The scan for a given constraint stops early when
// rs1 was redefined by a non-matching instruction.
template <typename T>
bool decode_pairs(T &dec, addr_t &addr, std::deque<T> &dec_hist, addr_t pc_bias)
{
	const rvx* rvxi = rvx_constraints;
	while(rvxi->addr != rva_none) {
		if (rvxi->op2 == dec.op) {
			for (auto li = dec_hist.rbegin(); li != dec_hist.rend(); li++) {
				if (rvxi->op1 != li->op && dec.rs1 == li->rd) break; // break: another primitive encountered
				if (rvxi->op1 != li->op || dec.rs1 != li->rd) continue; // continue: not the right pair
				switch (rvxi->addr) {
					case rva_abs:
						addr = li->imm + dec.imm;
						return true;
					case rva_pcrel:
						addr = li->pc - pc_bias + li->imm + dec.imm;
						return true;
					case rva_none:
					default:
						continue;
				}
				break;
			}
		}
		rvxi++;
	}
	return false;
}
// decode address for loads and stores from the global pointer
//
// When rs1 is the gp register and the opcode is addi or a load/store, the
// effective address is gp + imm. Returns false when gp is 0 (unknown).
//
// NOTE(review): the function name is misspelled ("deocde"); it is kept as-is
// because renaming it would break external users of this header.
template <typename T>
bool deocde_gprel(T &dec, addr_t &addr, addr_t gp)
{
	if (!gp || dec.rs1 != rv_ireg_gp) return false;
	switch (dec.op) {
		case rv_op_addi:
		case rv_op_lb:
		case rv_op_lh:
		case rv_op_lw:
		case rv_op_ld:
		case rv_op_lbu:
		case rv_op_lhu:
		case rv_op_lwu:
		case rv_op_flw:
		case rv_op_fld:
		case rv_op_sb:
		case rv_op_sh:
		case rv_op_sw:
		case rv_op_sd:
		case rv_op_fsw:
		case rv_op_fsd:
			addr = intptr_t(gp + dec.imm);
			return true;
		default:
			break;
	}
	return false;
}
// Callback resolving an address to a symbol name; when 'nearest' is true the
// closest preceding symbol may be returned instead of an exact match.
typedef std::function<const char*(addr_t, bool nearest)> symbol_name_fn;
// Callback mapping a token type name to a colorization string.
typedef std::function<const char*(const char *type)> symbol_colorize_fn;

// Default no-op implementations of the callbacks above.
const char* null_symbol_lookup(addr_t, bool nearest);
const char* null_symbol_colorize(const char *type);
// Render a decoded instruction as assembler text, driven by the per-opcode
// format string in rv_inst_format: each format character appends one token
// (mnemonic, register, immediate, csr name, rounding mode, fence flags, ...).
template <typename T>
std::string disasm_inst_simple(T &dec)
{
	std::string args;
	const char *fmt = rv_inst_format[dec.op];
	while (*fmt) {
		switch (*fmt) {
			case 'O': args += rv_inst_name_sym[dec.op]; break;   // mnemonic
			case '(': args += "("; break;
			case ',': args += ", "; break;
			case ')': args += ")"; break;
			case '0': args += rv_ireg_name_sym[dec.rd]; break;   // integer rd
			case '1': args += rv_ireg_name_sym[dec.rs1]; break;  // integer rs1
			case '2': args += rv_ireg_name_sym[dec.rs2]; break;  // integer rs2
			case '3': args += rv_freg_name_sym[dec.rd]; break;   // float rd
			case '4': args += rv_freg_name_sym[dec.rs1]; break;  // float rs1
			case '5': args += rv_freg_name_sym[dec.rs2]; break;  // float rs2
			case '6': args += rv_freg_name_sym[dec.rs3]; break;  // float rs3
			case '7': args += format_string("%d", dec.rs1); break; // raw rs1 field
			case 'i': args += format_string("%d", dec.imm); break; // immediate
			case 'o': args += format_string("pc %c %td",           // pc-relative offset
				intptr_t(dec.imm) < 0 ? '-' : '+',
				intptr_t(dec.imm) < 0 ? -intptr_t(dec.imm) : intptr_t(dec.imm)); break;
			case 'c': {
				// csr by name when known, otherwise as a hex number
				const char * csr_name = rv_csr_name_sym[dec.imm & 0xfff];
				if (csr_name) args += format_string("%s", csr_name);
				else args += format_string("0x%03x", dec.imm & 0xfff);
				break;
			}
			case 'r':
				// floating point rounding mode
				switch(dec.rm) {
					case rv_rm_rne: args += "rne"; break;
					case rv_rm_rtz: args += "rtz"; break;
					case rv_rm_rdn: args += "rdn"; break;
					case rv_rm_rup: args += "rup"; break;
					case rv_rm_rmm: args += "rmm"; break;
					case rv_rm_dyn: args += "dyn"; break;
					default: args += "inv"; break;
				}
				break;
			case 'p':
				// fence predecessor flags
				if (dec.pred & rv_fence_i) args += "i";
				if (dec.pred & rv_fence_o) args += "o";
				if (dec.pred & rv_fence_r) args += "r";
				if (dec.pred & rv_fence_w) args += "w";
				break;
			case 's':
				// fence successor flags
				if (dec.succ & rv_fence_i) args += "i";
				if (dec.succ & rv_fence_o) args += "o";
				if (dec.succ & rv_fence_r) args += "r";
				if (dec.succ & rv_fence_w) args += "w";
				break;
			case '\t': while (args.length() < 12) args += " "; break; // pad mnemonic column
			case 'A': if (dec.aq) args += ".aq"; break;  // atomic acquire suffix
			case 'R': if (dec.rl) args += ".rl"; break;  // atomic release suffix
			default:
				break;
		}
		fmt++;
	}
	return args;
}
// Print a disassembled instruction (defined out-of-line), resolving symbol
// names and colors through the optional callbacks; defaults are no-ops.
void disasm_inst_print(disasm &dec, std::deque<disasm> &dec_hist,
	addr_t pc, addr_t pc_bias, addr_t gp,
	symbol_name_fn symlookup = null_symbol_lookup,
	symbol_colorize_fn colorize = null_symbol_colorize);
}
#endif
| 3,057 |
540 | <reponame>rdjdejong/attention-learn-to-route
import sys
import random
import time
from . import oph
#fitness will take a set s and a set of weights and return a tuple containing the fitness and the best path
def fitness( chrom, s, start_point, end_point, tmax ):
    """Score a chromosome.

    chrom is a list of per-point weight offsets (one entry per point of s);
    each point's base weight (element 4) is augmented with its offset, a
    reachable ("ellipse") subset is built via oph.ell_sub, and a path is
    constructed from it. Returns a tuple of (sum of the real rewards
    (element 2) of the path's interior points, path).

    Uses the module-level ``debug`` flag for diagnostic printing.
    """
    augs = []
    for i in range( len( s ) ):
        augs.append( ( s[ i ][0],
                       s[ i ][1],
                       s[ i ][2],
                       s[ i ][3],
                       s[ i ][4] + chrom[ i ] ) )
    if debug:
        print ('fitness---------------------------------')
        print ('augs:')
        print (augs)
    #best = oph.ellinit_replacement( augs, start_point, end_point, tmax )
    ellset = oph.ell_sub( tmax, start_point, end_point, augs )
    #best = oph.initialize( ellset, start_point, end_point, tmax )[0]
    best = oph.init_replacement( ellset, start_point, end_point, tmax )[0]
    if debug:
        print ('best:')
        print (best)
        print ('best real reward:')
        print ([ x[3] for x in best ])
        print (len( s ))
        print ([ s[ x[3] - 2 ] for x in best[ 1:len( best ) - 1 ] ])
        print ([ s[ x[3] - 2 ][2] for x in best[ 1:len( best ) - 1 ] ])
        print (( sum( [ s[ x[3] - 2 ][2] for x in best[ 1:len( best ) - 1 ] ] ), best ))
    return ( sum( [ s[ x[3] - 2 ][2] for x in best[ 1:len( best ) - 1 ] ] ), best )
def crossover(c1, c2):
    """Single-point crossover: splice a prefix of one parent onto the
    suffix of the other; the cut point and prefix parent are random."""
    assert len(c1) == len(c2)
    cut = random.randrange(len(c1))
    head, tail = (c1, c2) if random.randrange(2) else (c2, c1)
    return head[:cut] + tail[cut:]
def mutate(chrom, mchance, msigma):
    """Gaussian point mutation: each gene is perturbed by N(0, msigma)
    with probability 1/mchance; returns a new list."""
    mutated = []
    for gene in chrom:
        if random.randrange(mchance) == 0:
            gene = gene + random.gauss(0, msigma)
        mutated.append(gene)
    return mutated
def run_alg_f( f, tmax, N ):
    """Read N points from file object f (one point per line, whitespace
    separated floats; the file's first line is skipped) and run the GA
    with travel budget tmax."""
    random.seed()
    cpoints = []
    an_unused_value = f.readline() # ignore first line of file
    for i in range( N ):
        cpoints.append( tuple( [ float( x ) for x in f.readline().split() ] ) )
    if debug:
        print ('N: ', N)
    return run_alg(cpoints, tmax)
def run_alg(points, tmax, return_sol=False, verbose=True):
    """Genetic algorithm for the orienteering problem.

    points -- sequence of points; the first two are the start and end
              points, the rest are candidate control points.
    tmax   -- travel budget (maximum allowed path length).
    return_sol -- when True, also return the best path found.
    verbose -- print progress and diagnostics.

    Returns (best_fitness, elapsed_seconds), or with return_sol
    (best_fitness, best_path, elapsed_seconds).
    """
    # Tag each point with its original index and a zero augmentation weight.
    cpoints = [tuple(p) + (i, 0) for i, p in enumerate(points)]
    start_point = cpoints.pop(0)
    end_point = cpoints.pop(0)
    assert oph.distance(start_point, end_point) < tmax
    # GA parameters.
    popsize = 10     # chromosomes per generation
    genlimit = 10    # number of generations
    kt = 5           # tournament size
    isigma = 10      # sigma of initial random gene weights
    msigma = 7       # sigma of mutation noise
    mchance = 2      # each gene mutates with probability 1/mchance
    elitismn = 2     # number of elites preserved across generations
    if debug:
        print ('data set size:', len( cpoints ) + 2)
        print ('tmax: ', tmax)
        print ('parameters:')
        print ('generations: ', genlimit)
        print ('population size: ', popsize)
        print ('ktournament size:', kt)
        print ('mutation chance: ', mchance)
        print (str( elitismn ) + '-elitism')
    # time.clock() was removed in Python 3.8; perf_counter() is its
    # recommended replacement for measuring elapsed time.
    start_time = time.perf_counter()
    # Generate the initial random population of (fitness, chromosome) tuples,
    # kept with the top elitismn entries sorted at the tail.
    pop = []
    for i in range(popsize + elitismn):
        chrom = []
        for j in range(len(cpoints)):
            chrom.append(random.gauss(0, isigma))
        chrom = (fitness(chrom, cpoints, start_point, end_point, tmax)[0], chrom)
        # NOTE(review): 'j' is reused here from the inner loop above (it is
        # len(cpoints) - 1 at this point), which looks unintended — the
        # insertion below was presumably meant to start from j = 0. Preserved
        # as-is to keep behavior identical; confirm against the algorithm's
        # intent before changing.
        while i - j > 0 and j < elitismn and chrom > pop[i - 1 - j]:
            j += 1
        pop.insert(i - j, chrom)

    bestfit = 0
    for i in range(genlimit):
        nextgen = []
        for j in range(popsize):
            # Select parents in k tournaments (two fittest of a random sample).
            parents = sorted(random.sample(pop, kt))[kt - 2:]  # optimize later
            # Crossover and mutate.
            offspring = mutate(crossover(parents[0][1], parents[1][1]), mchance, msigma)
            offspring = (fitness(offspring, cpoints, start_point, end_point, tmax)[0], offspring)
            if offspring[0] > bestfit:
                bestfit = offspring[0]
                if verbose:
                    print (bestfit)
            # If the offspring beats the weakest elite, insert it into the
            # sorted elite tail and demote the displaced chromosome.
            if elitismn > 0 and offspring > pop[popsize]:
                l = 0
                while l < elitismn and offspring > pop[popsize + l]:
                    l += 1
                pop.insert(popsize + l, offspring)
                nextgen.append(pop.pop(popsize))
            else:
                nextgen.append(offspring)
        pop = nextgen + pop[popsize:]

    bestchrom = sorted(pop)[popsize + elitismn - 1]
    end_time = time.perf_counter()
    if verbose:
        print ('time:')
        print (end_time - start_time)
        print ('best fitness:')
        print (bestchrom[0])
        print ('best path:')
    best_path = fitness(bestchrom[1], cpoints, start_point, end_point, tmax)[1]
    if verbose:
        print ([ x[3] for x in best_path ])
        print ('their stuff:')
    # Reference solutions for comparison with the GA result.
    stuff = oph.initialize( oph.ell_sub( tmax, start_point, end_point, cpoints )
                          , start_point, end_point, tmax )[0]
    if verbose:
        print ('fitness:', sum( [ x[2] for x in stuff ] ))
        print ('my stuff:')
    stuff2 = oph.ellinit_replacement( cpoints, start_point, end_point, tmax )
    if verbose:
        print ('fitness:', sum( [ x[2] for x in stuff2 ] ))
        print ('checking correctness...')
    # Recompute the best path's total length and verify it respects tmax.
    total_distance = ( oph.distance( start_point, cpoints[ best_path[ 1 ][3] - 2 ] ) +
                       oph.distance( end_point, cpoints[ best_path[ len( best_path ) - 2 ][3] - 2 ] ) )
    for i in range(1, len(best_path) - 3):
        total_distance += oph.distance( cpoints[ best_path[ i ][3] - 2 ],
                                        cpoints[ best_path[ i + 1 ][3] - 2 ] )
    if verbose:
        print ('OK' if total_distance <= tmax else 'not OK')
        print ('tmax: ', tmax)
        print ('total distance:', total_distance)
    if return_sol:
        return (bestchrom[0], best_path, end_time - start_time)
    return (bestchrom[0], end_time - start_time)
if __name__ == '__main__':
    # A literal 'd' anywhere on the command line enables debug output.
    debug = True if 'd' in sys.argv else False
    # FIX: the CLI arguments are (data file, tmax, N), which is the
    # signature of run_alg_f; run_alg itself expects an already-parsed
    # point list, and its third parameter is return_sol, not N.
    run_alg_f( open( sys.argv[1] ), int( sys.argv[2] ), int( sys.argv[3] ) )
else:
    debug = False
| 2,811 |
627 | //
// FeedModel.h
// DynamicHeightCellLayoutDemo
//
// Created by August on 15/5/19.
// Copyright (c) 2015年 August. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
// Value object holding the displayable pieces of one feed item.
@interface FeedModel : NSObject
// Title text (copied to guard against mutable-string callers).
@property (nonatomic, copy) NSString *title;
// Main body text of the item.
@property (nonatomic, copy) NSString *content;
// Image for the item.  NOTE(review): presumably optional (nil when the
// entry has no picture) -- confirm against the cell layout code.
@property (nonatomic, strong) UIImage *image;
@end
| 133 |
422 | <gh_stars>100-1000
from sqlobject import *
from sqlobject.tests.dbtest import *
class EmptyClass(SQLObject):
    """Table with no user-declared columns (only the implicit ``id``)."""
    pass
def test_empty():
    """Rows of a column-less table are distinct, selectable, deletable."""
    if not supports('emptyTable'):
        return
    setupClass(EmptyClass)
    first, second = EmptyClass(), EmptyClass()
    # Two freshly inserted rows must be different objects with distinct ids.
    assert first != second
    assert first.id != second.id
    # Both rows show up in a full-table select.
    assert first in list(EmptyClass.select())
    assert second in list(EmptyClass.select())
    # Deleting one row leaves exactly the other behind.
    first.destroySelf()
    assert list(EmptyClass.select()) == [second]
| 179 |
4,372 | <reponame>Trydamere/shardingsphere
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.shadow.yaml.swapper.table;
import org.apache.shardingsphere.shadow.api.config.table.ShadowTableConfiguration;
import org.apache.shardingsphere.shadow.yaml.config.table.YamlShadowTableConfiguration;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/**
 * Tests for the shadow-table YAML swapper, covering both directions of the
 * {@code ShadowTableConfiguration} &lt;-&gt; {@code YamlShadowTableConfiguration}
 * mapping (data source names and shadow algorithm names must round-trip).
 */
public final class ShadowTableConfigurationYamlSwapperTest {
    
    @Test
    public void assertSwapToYamlConfiguration() {
        ShadowTableConfiguration config = new ShadowTableConfiguration(
                Collections.singletonList("shadow-data-source"), Arrays.asList("t_order", "t_user"));
        YamlShadowTableConfiguration actual = new ShadowTableConfigurationYamlSwapper().swapToYamlConfiguration(config);
        // Both collections must be carried over unchanged into the YAML form.
        assertThat(actual.getShadowAlgorithmNames(), is(config.getShadowAlgorithmNames()));
        assertThat(actual.getDataSourceNames(), is(config.getDataSourceNames()));
    }
    
    @Test
    public void assertSwapToObject() {
        YamlShadowTableConfiguration yamlConfig = new YamlShadowTableConfiguration();
        yamlConfig.setDataSourceNames(Collections.singletonList("shadow-data-source"));
        yamlConfig.setShadowAlgorithmNames(Arrays.asList("user-id-match-algorithm", "note-algorithm"));
        ShadowTableConfiguration actual = new ShadowTableConfigurationYamlSwapper().swapToObject(yamlConfig);
        // Both collections must be carried over unchanged into the config object.
        assertThat(actual.getDataSourceNames(), is(yamlConfig.getDataSourceNames()));
        assertThat(actual.getShadowAlgorithmNames(), is(yamlConfig.getShadowAlgorithmNames()));
    }
}
| 769 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.