max_stars_count (int64, 301 to 224k) | text (string, lengths 6 to 1.05M) | token_count (int64, 3 to 727k)
---|---|---|
552 | // THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
// PARTICULAR PURPOSE.
//
// Copyright (c) Microsoft Corporation. All rights reserved
// Copied from IdealPropertyHandler sample http://archive.msdn.microsoft.com/shellintegration and modified
#pragma once
#include <windows.h>
// production code should use an installer technology like MSI to register its handlers
// rather than this class.
// this class is used for demonstration purposes; it encapsulates the different types
// of handler registrations, schematizes those by providing methods whose parameters
// map to the supported extension schema, and makes it easy to create self-registering
// .exes and .dlls
class CRegisterExtension
{
public:
CRegisterExtension(REFCLSID clsid = CLSID_NULL, HKEY hkeyRoot = HKEY_CURRENT_USER);
~CRegisterExtension();
void SetHandlerCLSID(REFCLSID clsid);
HRESULT RegisterInProcServer(PCWSTR pszFriendlyName, PCWSTR pszThreadingModel) const;
HRESULT RegisterInProcServerAttribute(PCWSTR pszAttribute, DWORD dwValue) const;
// remove a COM object registration
HRESULT UnRegisterObject() const;
HRESULT RegisterContextMenuHandler(PCWSTR pszProgID, PCWSTR pszDescription) const;
    // these should probably be private, but they are useful
HRESULT RegSetKeyValuePrintf(HKEY hkey, PCWSTR pszKeyFormatString, PCWSTR pszValueName, PCWSTR pszValue, ...) const;
HRESULT RegSetKeyValuePrintf(HKEY hkey, PCWSTR pszKeyFormatString, PCWSTR pszValueName, DWORD dwValue, ...) const;
HRESULT RegDeleteKeyPrintf(HKEY hkey, PCWSTR pszKeyFormatString, ...) const;
PCWSTR GetCLSIDString() const { return _szCLSID; };
    bool HasClassID() const { return _clsid != CLSID_NULL; };
private:
HRESULT _EnsureModule() const;
bool _IsBaseClassProgID(PCWSTR pszProgID) const;
HRESULT _EnsureBaseProgIDVerbIsNone(PCWSTR pszProgID) const;
void _UpdateAssocChanged(HRESULT hr, PCWSTR pszKeyFormatString) const;
CLSID _clsid;
HKEY _hkeyRoot;
WCHAR _szCLSID[39];
WCHAR _szModule[MAX_PATH];
bool _fAssocChanged;
};
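// Hedged usage sketch (illustration only, not part of the original sample): a typical
// self-registration flow from DllRegisterServer. CLSID_SampleContextMenuHandler and the
// "txtfile" ProgID are assumptions chosen for the example.
//
//   CRegisterExtension re(CLSID_SampleContextMenuHandler, HKEY_CURRENT_USER);
//   HRESULT hr = re.RegisterInProcServer(L"Sample Context Menu Handler", L"Apartment");
//   if (SUCCEEDED(hr))
//   {
//       hr = re.RegisterContextMenuHandler(L"txtfile", L"Sample Context Menu Handler");
//   }
//   // on DllUnregisterServer: re.UnRegisterObject();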
| 822 |
931 | <filename>Numbers/Basic_Programs/palindrome_num.c
/* Palindrome or not using iterative approach */
#include <stdio.h>
int main()
{
int n, reverse = 0, remainder, number;
printf("Enter an integer: ");
scanf("%d", &n);
number = n;
while( n!=0 )
{
remainder = n%10;
reverse = reverse*10 + remainder;
n /= 10;
}
if (number == reverse)
printf("\n%d is a palindrome\n", number);
else
printf("\n%d is not a palindrome\n", number);
return 0;
}
/* Input- Enter an integer: 1234321
Output- 1234321 is a palindrome
*/ | 198 |
11,356 | <reponame>Bpowers4/turicreate
// Copyright 2002 The Trustees of Indiana University.
// Use, modification and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Boost.MultiArray Library
// Authors: <NAME>
// <NAME>
// <NAME>
// See http://www.boost.org/libs/multi_array for documentation.
#include <iostream>
#include "boost/multi_array.hpp"
#include "boost/array.hpp"
#include "boost/cstdlib.hpp"
template <typename Array>
void print(std::ostream& os, const Array& A)
{
typename Array::const_iterator i;
os << "[";
for (i = A.begin(); i != A.end(); ++i) {
print(os, *i);
if (boost::next(i) != A.end())
os << ',';
}
os << "]";
}
void print(std::ostream& os, const double& x)
{
os << x;
}
int main()
{
typedef boost::multi_array<double, 2> array;
double values[] = {
0, 1, 2,
3, 4, 5
};
const int values_size=6;
array A(boost::extents[2][3]);
A.assign(values,values+values_size);
print(std::cout, A);
return boost::exit_success;
}
// The output is:
// [[0,1,2],[3,4,5]]
| 475 |
631 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.common.utils;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import java.util.zip.ZipException;
/*
* Factory to provide various compression strategies.
*/
public enum CompressionStrategies implements CompressionStrategy {
GZIP(new CompressionStrategy() {
@Override
public void compress(File inFile, File outFile) throws IOException {
try (FileInputStream fis = new FileInputStream(inFile);
FileOutputStream fos = new FileOutputStream(outFile);
GZIPOutputStream gzipOS = new GZIPOutputStream(fos)) {
byte[] buffer = new byte[1024];
int len;
while ((len = fis.read(buffer)) != -1) {
gzipOS.write(buffer, 0, len);
}
}
}
@Override
public void decompress(File inFile, File outFile) throws IOException {
try (FileInputStream fis = new FileInputStream(inFile);
GZIPInputStream gis = new GZIPInputStream(fis);
FileOutputStream fos = new FileOutputStream(outFile)) {
byte[] buffer = new byte[1024];
int len;
while ((len = gis.read(buffer)) != -1) {
fos.write(buffer, 0, len);
}
}
}
@Override
public boolean test(File gzipFile) {
try (FileInputStream fis = new FileInputStream(gzipFile);
GZIPInputStream gis = new GZIPInputStream(fis)) {
byte[] buffer = new byte[1024];
// this will throw an exception on malformed file
gis.read(buffer);
} catch (ZipException | EOFException e) {
return false;
} catch (IOException e) {
throw new IllegalStateException("Error occurred while attempting to validate gzip file", e);
}
return true;
}
});
private CompressionStrategy strategy;
CompressionStrategies(CompressionStrategy strategy) {
this.strategy = strategy;
}
@Override
public void compress(File inFile, File outFile) throws IOException {
strategy.compress(inFile, outFile);
}
@Override
public void decompress(File inFile, File outFile) throws IOException {
strategy.decompress(inFile, outFile);
}
@Override
public boolean test(File gzipFile) {
return strategy.test(gzipFile);
}
}
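// Hedged usage sketch (not part of the original file): round-trip a file through the GZIP
// strategy; the file names are assumptions for illustration and IOException must be handled.
//
//   File raw = new File("data.json");
//   File packed = new File("data.json.gz");
//   File restored = new File("data-restored.json");
//   CompressionStrategies.GZIP.compress(raw, packed);
//   if (CompressionStrategies.GZIP.test(packed)) {
//       CompressionStrategies.GZIP.decompress(packed, restored);
//   }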
| 1,100 |
6,224 | /*
* Copyright (c) 2019 Foundries.io
*
* SPDX-License-Identifier: Apache-2.0
*/
/*
* Source material for IPSO Accelerometer object (3313):
* http://www.openmobilealliance.org/tech/profiles/lwm2m/3313.xml
*/
#define LOG_MODULE_NAME net_ipso_accel
#define LOG_LEVEL CONFIG_LWM2M_LOG_LEVEL
#include <logging/log.h>
LOG_MODULE_REGISTER(LOG_MODULE_NAME);
#include <stdint.h>
#include <init.h>
#include "lwm2m_object.h"
#include "lwm2m_engine.h"
#include "lwm2m_resource_ids.h"
#define ACCEL_VERSION_MAJOR 1
#if defined(CONFIG_LWM2M_IPSO_ACCELEROMETER_VERSION_1_1)
#define ACCEL_VERSION_MINOR 1
#define ACCEL_MAX_ID 11
#else
#define ACCEL_VERSION_MINOR 0
#define ACCEL_MAX_ID 6
#endif /* defined(CONFIG_LWM2M_IPSO_ACCELEROMETER_VERSION_1_1) */
#define MAX_INSTANCE_COUNT CONFIG_LWM2M_IPSO_ACCELEROMETER_INSTANCE_COUNT
/*
* Calculate resource instances as follows:
* start with ACCEL_MAX_ID
*/
#define RESOURCE_INSTANCE_COUNT (ACCEL_MAX_ID)
/* resource state */
struct ipso_accel_data {
double x_value;
double y_value;
double z_value;
double min_range;
double max_range;
};
static struct ipso_accel_data accel_data[MAX_INSTANCE_COUNT];
static struct lwm2m_engine_obj accel;
static struct lwm2m_engine_obj_field fields[] = {
OBJ_FIELD_DATA(X_VALUE_RID, R, FLOAT),
OBJ_FIELD_DATA(Y_VALUE_RID, R_OPT, FLOAT),
OBJ_FIELD_DATA(Z_VALUE_RID, R_OPT, FLOAT),
OBJ_FIELD_DATA(SENSOR_UNITS_RID, R_OPT, STRING),
OBJ_FIELD_DATA(MIN_RANGE_VALUE_RID, R_OPT, FLOAT),
OBJ_FIELD_DATA(MAX_RANGE_VALUE_RID, R_OPT, FLOAT),
#if defined(CONFIG_LWM2M_IPSO_ACCELEROMETER_VERSION_1_1)
OBJ_FIELD_DATA(APPLICATION_TYPE_RID, RW_OPT, STRING),
OBJ_FIELD_DATA(TIMESTAMP_RID, R_OPT, TIME),
OBJ_FIELD_DATA(FRACTIONAL_TIMESTAMP_RID, R_OPT, FLOAT),
OBJ_FIELD_DATA(MEASUREMENT_QUALITY_INDICATOR_RID, R_OPT, U8),
OBJ_FIELD_DATA(MEASUREMENT_QUALITY_LEVEL_RID, R_OPT, U8),
#endif
};
static struct lwm2m_engine_obj_inst inst[MAX_INSTANCE_COUNT];
static struct lwm2m_engine_res res[MAX_INSTANCE_COUNT][ACCEL_MAX_ID];
static struct lwm2m_engine_res_inst
res_inst[MAX_INSTANCE_COUNT][RESOURCE_INSTANCE_COUNT];
static struct lwm2m_engine_obj_inst *accel_create(uint16_t obj_inst_id)
{
int index, avail = -1, i = 0, j = 0;
/* Check that there is no other instance with this ID */
for (index = 0; index < ARRAY_SIZE(inst); index++) {
if (inst[index].obj && inst[index].obj_inst_id == obj_inst_id) {
LOG_ERR("Can not create instance - "
"already existing: %u", obj_inst_id);
return NULL;
}
/* Save first available slot index */
if (avail < 0 && !inst[index].obj) {
avail = index;
}
}
if (avail < 0) {
LOG_ERR("Can not create instance - no more room: %u",
obj_inst_id);
return NULL;
}
/* Set default values */
(void)memset(&accel_data[avail], 0, sizeof(accel_data[avail]));
(void)memset(res[avail], 0,
sizeof(res[avail][0]) * ARRAY_SIZE(res[avail]));
init_res_instance(res_inst[avail], ARRAY_SIZE(res_inst[avail]));
/* initialize instance resource data */
INIT_OBJ_RES_DATA(X_VALUE_RID, res[avail], i, res_inst[avail], j,
&accel_data[avail].x_value,
sizeof(accel_data[avail].x_value));
INIT_OBJ_RES_DATA(Y_VALUE_RID, res[avail], i, res_inst[avail], j,
&accel_data[avail].y_value,
sizeof(accel_data[avail].y_value));
INIT_OBJ_RES_DATA(Z_VALUE_RID, res[avail], i, res_inst[avail], j,
&accel_data[avail].z_value,
sizeof(accel_data[avail].z_value));
INIT_OBJ_RES_OPTDATA(SENSOR_UNITS_RID, res[avail], i,
res_inst[avail], j);
INIT_OBJ_RES_DATA(MIN_RANGE_VALUE_RID, res[avail], i, res_inst[avail],
j, &accel_data[avail].min_range,
sizeof(accel_data[avail].min_range));
INIT_OBJ_RES_DATA(MAX_RANGE_VALUE_RID, res[avail], i, res_inst[avail],
j, &accel_data[avail].max_range,
sizeof(accel_data[avail].max_range));
#if defined(CONFIG_LWM2M_IPSO_ACCELEROMETER_VERSION_1_1)
INIT_OBJ_RES_OPTDATA(APPLICATION_TYPE_RID, res[avail], i,
res_inst[avail], j);
INIT_OBJ_RES_OPTDATA(TIMESTAMP_RID, res[avail], i, res_inst[avail], j);
INIT_OBJ_RES_OPTDATA(FRACTIONAL_TIMESTAMP_RID, res[avail], i,
res_inst[avail], j);
INIT_OBJ_RES_OPTDATA(MEASUREMENT_QUALITY_INDICATOR_RID, res[avail],
i, res_inst[avail], j);
INIT_OBJ_RES_OPTDATA(MEASUREMENT_QUALITY_LEVEL_RID, res[avail], i,
res_inst[avail], j);
#endif
inst[avail].resources = res[avail];
inst[avail].resource_count = i;
LOG_DBG("Create IPSO Accelerometer instance: %d", obj_inst_id);
return &inst[avail];
}
static int ipso_accel_init(const struct device *dev)
{
accel.obj_id = IPSO_OBJECT_ACCELEROMETER_ID;
accel.version_major = ACCEL_VERSION_MAJOR;
accel.version_minor = ACCEL_VERSION_MINOR;
accel.is_core = false;
accel.fields = fields;
accel.field_count = ARRAY_SIZE(fields);
accel.max_instance_count = ARRAY_SIZE(inst);
accel.create_cb = accel_create;
lwm2m_register_obj(&accel);
return 0;
}
SYS_INIT(ipso_accel_init, APPLICATION, CONFIG_KERNEL_INIT_PRIORITY_DEFAULT);
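/* Hedged usage sketch (illustration only): creating an instance and pushing a sample from
 * application code. The engine helpers and the "3313/0/5702" (X value) path are assumptions
 * based on the public Zephyr LwM2M API, not part of this file.
 *
 *   lwm2m_engine_create_obj_inst("3313/0");
 *   double x = 0.98;
 *   lwm2m_engine_set_float("3313/0/5702", &x);
 */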
| 2,255 |
626 | <filename>maildown/commands.py
from cleo.commands import Command
from maildown import backends
available_backends = dict(aws=backends.AwsBackend)
class InitCommand(Command):
"""
Configures Maildown for use
init
{--backend=aws : The email backend to use. Defaults to AWS SES }
{options?* : Arguments to pass to the backend's login methods, e.g. `access_key=1234`}
"""
def handle(self):
__backend = available_backends.get(self.option("backend"))
if not __backend:
return self.line(
f'No backend called {self.option("backend")} exists', "error"
)
backend = __backend()
kwargs = dict()
for arg in self.argument("options"):
key, val = arg.split("=")
kwargs[key] = val
backend.login(**kwargs)
self.info("Initiated successfully")
class VerifyCommand(Command):
"""
Verifies your ownership of an email address. Must be done prior to sending any messages
verify
{email-address : The email address that you want to verify}
{--backend=aws : The email backend to use. Defaults to AWS SES }
"""
def handle(self):
email = self.argument("email-address")
__backend = available_backends.get(self.option("backend"))
if not __backend:
return self.line(
f'No backend called {self.option("backend")} exists', "error"
)
backend = __backend()
verified = backend.verify_address(email)
if verified:
self.info("This email address has already been verified")
else:
self.info(
f"Email sent to {email}. You must click the link in this email to verify ownership before "
f"you can send any emails"
)
class SendCommand(Command):
"""
Send an email to a list of recipients
send
{sender : The source email address (you must have verified ownership)}
{subject : The subject line of the email}
{--c|content=? : The content of the email to send}
{--backend=aws : The email backend to use. Defaults to AWS SES }
{--f|file-path=? : A path to a file containing content to send}
{--t|theme=? : A path to a css file to be applied to the email}
{--e|variable=* : Context variables to pass to the email, e.g. `-e name=Chris`}
{recipients?* : A list of email addresses to send the mail to}
"""
def handle(self):
__backend = available_backends.get(self.option("backend"))
if not __backend:
return self.line(
f'No backend called {self.option("backend")} exists', "error"
)
backend = __backend()
sender = self.argument("sender")
subject = self.argument("subject")
content = self.option("content")
file_path = self.option("file-path")
theme = self.option("theme")
recipients = self.argument("recipients")
variables = self.option("variable")
environment = dict()
for var in variables:
key, val = var.split("=")
environment[key] = val
if not recipients:
self.line("You must supply at least one recipient", "error")
return
if not any([content, file_path]) or all([content, file_path]):
self.line(
"You must provide either the content or file_path argument only",
"error",
)
return
kwargs = dict(
sender=sender,
subject=subject,
content=content,
file_path=file_path,
to=recipients,
context=environment,
)
if theme:
kwargs["theme"] = theme
backend.send(**kwargs)
self.info("Messages added to queue")
| 1,698 |
852 | #
# WARNING: This file is in the L1T configuration critical path.
#
# All changes must be explicitly discussed with the L1T offline coordinator.
#
import FWCore.ParameterSet.Config as cms
GlobalParametersRcdSource = cms.ESSource("EmptyESSource",
recordName = cms.string('L1TGlobalParametersRcd'),
iovIsRunNotTime = cms.bool(True),
firstValid = cms.vuint32(1)
)
#GlobalParameters = cms.ESProducer("L1TGlobalParamsESProducer",
GlobalParameters = cms.ESProducer("StableParametersTrivialProducer",
# bx in event
#NumberBxInEvent = cms.int32(5),
# trigger decision
# number of physics trigger algorithms
NumberPhysTriggers = cms.uint32(512),
# trigger objects
# muons
NumberL1Muon = cms.uint32(8),
# e/gamma and isolated e/gamma objects
NumberL1EGamma = cms.uint32(12),
# jets
NumberL1Jet = cms.uint32(12),
# taus
NumberL1Tau = cms.uint32(12),
# hardware
# number of maximum chips defined in the xml file
NumberChips = cms.uint32(1),
# number of pins on the GTL condition chips
PinsOnChip = cms.uint32(512),
# correspondence "condition chip - GTL algorithm word" in the hardware
# e.g.: chip 2: 0 - 95; chip 1: 96 - 128 (191)
OrderOfChip = cms.vint32(1),
)
| 490 |
309 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Script to load the config, run the simulations, and plot them, for the multi-players case.
"""
from __future__ import division, print_function # Python 2 compatibility
__author__ = "<NAME>"
__version__ = "0.9"
from Environment import EvaluatorMultiPlayers, notify
from example_of_configuration_multiplayers import configuration
configuration['showplot'] = True
N_players = len(configuration["successive_players"])
# List to keep all the EvaluatorMultiPlayers objects
# (build each row independently so the environments do not share the same list object)
evaluators = [[None] * N_players for _ in range(len(configuration["environment"]))]
if __name__ != '__main__':
from sys import exit
exit(0)
for playersId, players in enumerate(configuration["successive_players"]):
print("\n\n\nConsidering the list of players :\n", players) # DEBUG
configuration['playersId'] = playersId
configuration['players'] = players
evaluation = EvaluatorMultiPlayers(configuration)
# Start the evaluation and then print final ranking and plot, for each environment
M = evaluation.nbPlayers
N = len(evaluation.envs)
for envId, env in enumerate(evaluation.envs):
# Evaluate just that env
evaluation.startOneEnv(envId, env)
evaluators[envId][playersId] = evaluation
#
# Compare different MP strategies on the same figures
#
N = len(configuration["environment"])
for envId, env in enumerate(configuration["environment"]):
e0, eothers = evaluators[envId][0], evaluators[envId][1:]
M = e0.nbPlayers
print("\nGiving all the vector of final regrets ...")
e0.printLastRegrets(envId, evaluators=eothers)
print("\nGiving the final ranking ...")
e0.printFinalRankingAll(envId, evaluators=eothers)
print("\nGiving the mean and std running times ...")
e0.printRunningTimes(envId, evaluators=eothers)
e0.plotRunningTimes(envId, evaluators=eothers)
print("\n\n- Plotting the centralized regret for all 'players' values")
e0.plotRegretCentralized(envId, normalized=False, evaluators=eothers)
print("\n\n- Plotting the centralized regret for all 'players' values, in semilogx scale")
e0.plotRegretCentralized(envId, semilogx=True, normalized=False, evaluators=eothers)
print("\n\n- Plotting the centralized regret for all 'players' values, in semilogy scale")
e0.plotRegretCentralized(envId, semilogy=True, normalized=False, evaluators=eothers)
print("\n\n- Plotting the centralized regret for all 'players' values, in loglog scale")
e0.plotRegretCentralized(envId, loglog=True, normalized=False, evaluators=eothers)
print("\n\n- Plotting the centralized fairness (STD)")
e0.plotFairness(envId, fairness='STD', evaluators=eothers)
print("\n- Plotting the cumulated total nb of collision as a function of time for all 'players' values")
e0.plotNbCollisions(envId, cumulated=True, evaluators=eothers)
print("\n\n- Plotting the number of switches as a function of time for all 'players' values")
e0.plotNbSwitchsCentralized(envId, cumulated=True, evaluators=eothers)
print("\n- Plotting the histograms of regrets")
e0.plotLastRegrets(envId, sharex=True, sharey=True, evaluators=eothers)
# e0.plotLastRegrets(envId, all_on_separate_figures=True, evaluators=eothers)
# Done
print("Done for simulations example_of_main_multiplayers_more.py ...")
notify("Done for simulations example_of_main_multiplayers_more.py ...")
| 1,155 |
5,169 | <filename>Specs/c/a/e/UIVVMaterialComponent/0.5.1/UIVVMaterialComponent.podspec.json
{
"name": "UIVVMaterialComponent",
"version": "0.5.1",
"summary": "Beautiful Material Component for iOS.",
"description": "The SDK is a completely customizable widget that can be used in any iOS app.",
"homepage": "https://github.com/vinod1988/UIVVMaterialComponent",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"social_media_url": "https://twitter.com/in_vvishwakarma",
"platforms": {
"ios": "11.0"
},
"swift_versions": "4.2",
"source": {
"git": "https://github.com/vinod1988/UIVVMaterialComponent.git",
"tag": "0.5.1"
},
"source_files": "UIVVMaterialComponent/Classes/**/*",
"exclude_files": "UIVVMaterialComponent/UIVVMaterialComponent/*.plist",
"swift_version": "4.2"
}
| 315 |
8,772 | <reponame>hsartoris-bard/cas
package org.apereo.cas.config;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.cloud.bootstrap.config.PropertySourceLocator;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* This is {@link RestfulCloudConfigBootstrapConfiguration}.
*
* @author <NAME>
* @since 6.2.0
*/
@Configuration(value = "restfulCloudConfigBootstrapConfiguration", proxyBeanMethods = false)
public class RestfulCloudConfigBootstrapConfiguration {
@Bean
@ConditionalOnMissingBean(name = "restfulPropertySourceLocator")
public PropertySourceLocator restfulPropertySourceLocator() {
return new RestfulPropertySourceLocator();
}
}
| 237 |
532 | """Static skip connection layout of ``@skippable`` modules."""
from typing import Dict, Iterable, List, Tuple
from torch import nn
from torchgpipe.skip.namespace import Namespace
__all__: List[str] = []
class SkipLayout:
"""Represents a skip connection layout across partitions."""
# Skip routes indexed by 'ns, name': {(ns, name): (prev_j, next_j), ...}
by_ns_name: Dict[Tuple[Namespace, str], Tuple[int, int]]
# Skip routes indexed by partition number 'j': [[next_j]: [(prev_j, ns, name), ...], ...]
by_partition: List[List[Tuple[int, Namespace, str]]]
def __init__(self,
num_partitions: int,
skip_routes: Dict[Tuple[Namespace, str], Tuple[int, int]],
) -> None:
# The skip routes are already indexed by 'ns, name'.
self.by_ns_name = skip_routes
# Index skip routes by partition number 'j'.
self.by_partition = [[] for _ in range(num_partitions)]
for (ns, name), (prev_j, next_j) in skip_routes.items():
self.by_partition[next_j].append((prev_j, ns, name))
for p in self.by_partition:
p.sort()
def copy_policy(self, next_j: int) -> Iterable[Tuple[int, Namespace, str]]:
"""Generates skip routes for the given destination partition number.
The skip routes are sorted by source partition number in ascending
order.
Yields:
Each tuple of (source partition number, namespace, name).
"""
for prev_j, ns, name in self.by_partition[next_j]:
if prev_j == next_j:
# This skip tensor will be popped at the same partition where
# it is stashed. In this case, copy is not required.
continue
yield (prev_j, ns, name)
def requires_copy(self, ns: Namespace, name: str) -> bool:
"""Whether the given namespace and name requires partition-to-partition
copy or not.
"""
prev_j, next_j = self.by_ns_name.get((ns, name), (-1, -1))
return prev_j != next_j
def inspect_skip_layout(partitions: List[nn.Sequential]) -> SkipLayout:
"""Inspects the skip connection layout in the given partitions."""
# NOTE(sublee): Hide circular import inside this subroutine. Circular
# import is not ideal but placing this logic near to SkipLayout may
# increase cohesion of code.
from torchgpipe.skip.skippable import Skippable
skip_routes: Dict[Tuple[Namespace, str], Tuple[int, int]] = {}
stashed_at: Dict[Tuple[Namespace, str], int] = {}
for j, partition in enumerate(partitions):
for layer in partition:
if not isinstance(layer, Skippable):
continue
for ns, name in layer.stashable():
stashed_at[(ns, name)] = j
for ns, name in layer.poppable():
prev_j = stashed_at.pop((ns, name))
skip_routes[(ns, name)] = (prev_j, j)
return SkipLayout(len(partitions), skip_routes)
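# Hedged usage sketch (illustration only; the two skippable layers are assumptions, not part
# of this module):
#
#   ns = Namespace()
#   partitions = [nn.Sequential(stash_layer), nn.Sequential(pop_layer)]
#   layout = inspect_skip_layout(partitions)
#   layout.requires_copy(ns, 'skip')   # True when stash and pop live on different partitions
#   list(layout.copy_policy(1))        # e.g. [(0, ns, 'skip')] for a route into partition 1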
| 1,264 |
1,745 | /* Copyright (C) 2012-2014 <NAME> <<EMAIL>>
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef LIBSIMDPP_SIMDPP_TYPES_INT8X32_H
#define LIBSIMDPP_SIMDPP_TYPES_INT8X32_H
#ifndef LIBSIMDPP_SIMD_H
#error "This file must be included through simd.h"
#endif
#include <simdpp/setup_arch.h>
#include <simdpp/types/fwd.h>
#include <simdpp/types/any.h>
#include <simdpp/detail/construct_eval.h>
#include <cstdint>
namespace simdpp {
namespace SIMDPP_ARCH_NAMESPACE {
#if SIMDPP_USE_AVX2
/** Class representing 32x 8-bit signed integer vector
*/
template<>
class int8<32, void> : public any_int8<32, int8<32,void>> {
public:
static const unsigned type_tag = SIMDPP_TAG_INT;
using element_type = int8_t;
using base_vector_type = int8<32>;
using expr_type = void;
#if SIMDPP_USE_AVX2
using native_type = __m256i;
#endif
SIMDPP_INL int8<32>() = default;
SIMDPP_INL int8<32>(const int8<32> &) = default;
SIMDPP_INL int8<32> &operator=(const int8<32> &) = default;
template<class E> SIMDPP_INL int8<32>(const int8<32,E>& d) { *this = d.eval(); }
template<class E> SIMDPP_INL int8<32>(const uint8<32,E>& d) { *this = d.eval(); }
template<class V> SIMDPP_INL explicit int8<32>(const any_vec<32,V>& d)
{
*this = bit_cast<int8<32>>(d.wrapped().eval());
}
template<class V> SIMDPP_INL int8<32>& operator=(const any_vec<32,V>& d)
{
*this = bit_cast<int8<32>>(d.wrapped().eval()); return *this;
}
/// Construct from the underlying vector type
SIMDPP_INL int8<32>(const native_type& d) : d_(d) {}
SIMDPP_INL int8<32>& operator=(const native_type& d) { d_ = d; return *this; }
/// Convert to the underlying vector type
#if !SIMDPP_DISABLE_DEPRECATED_CONVERSION_OPERATOR_TO_NATIVE_TYPES
SIMDPP_INL operator native_type() const SIMDPP_IMPLICIT_CONVERSION_DEPRECATION_MSG
{ return d_; }
#endif
SIMDPP_INL native_type native() const { return d_; }
template<class E> SIMDPP_INL int8<32>(const expr_vec_construct<E>& e)
{
detail::construct_eval_wrapper(*this, e.expr());
}
template<class E> SIMDPP_INL int8<32>& operator=(const expr_vec_construct<E>& e)
{
detail::construct_eval_wrapper(*this, e.expr()); return *this;
}
/// Access base vectors
SIMDPP_INL const int8<32>& vec(unsigned) const { return *this; }
SIMDPP_INL int8<32>& vec(unsigned) { return *this; }
SIMDPP_INL int8<32> eval() const { return *this; }
private:
native_type d_;
};
/** Class representing 32x 8-bit unsigned integer vector
*/
template<>
class uint8<32, void> : public any_int8<32, uint8<32,void>> {
public:
static const unsigned type_tag = SIMDPP_TAG_UINT;
using element_type = uint8_t;
using base_vector_type = uint8<32>;
using expr_type = void;
#if SIMDPP_USE_AVX2
using native_type = __m256i;
#endif
SIMDPP_INL uint8<32>() = default;
SIMDPP_INL uint8<32>(const uint8<32> &) = default;
SIMDPP_INL uint8<32> &operator=(const uint8<32> &) = default;
template<class E> SIMDPP_INL uint8<32>(const uint8<32,E>& d) { *this = d.eval(); }
template<class E> SIMDPP_INL uint8<32>(const int8<32,E>& d) { *this = d.eval(); }
template<class V> SIMDPP_INL explicit uint8<32>(const any_vec<32,V>& d)
{
*this = bit_cast<uint8<32>>(d.wrapped().eval());
}
template<class V> SIMDPP_INL uint8<32>& operator=(const any_vec<32,V>& d)
{
*this = bit_cast<uint8<32>>(d.wrapped().eval()); return *this;
}
/// Construct from the underlying vector type
SIMDPP_INL uint8<32>(const native_type& d) : d_(d) {}
SIMDPP_INL uint8<32>& operator=(const native_type& d) { d_ = d; return *this; }
/// Convert to the underlying vector type
#if !SIMDPP_DISABLE_DEPRECATED_CONVERSION_OPERATOR_TO_NATIVE_TYPES
SIMDPP_INL operator native_type() const SIMDPP_IMPLICIT_CONVERSION_DEPRECATION_MSG
{ return d_; }
#endif
SIMDPP_INL native_type native() const { return d_; }
template<class E> SIMDPP_INL uint8<32>(const expr_vec_construct<E>& e)
{
detail::construct_eval_wrapper(*this, e.expr());
}
template<class E> SIMDPP_INL uint8<32>& operator=(const expr_vec_construct<E>& e)
{
detail::construct_eval_wrapper(*this, e.expr()); return *this;
}
/// Access base vectors
SIMDPP_INL const uint8<32>& vec(unsigned) const { return *this; }
SIMDPP_INL uint8<32>& vec(unsigned) { return *this; }
SIMDPP_INL uint8<32> eval() const { return *this; }
private:
native_type d_;
};
/// Class representing possibly optimized mask data for 32x 8-bit integer
/// vector
template<>
class mask_int8<32, void> : public any_int8<32, mask_int8<32,void>> {
public:
static const unsigned type_tag = SIMDPP_TAG_MASK_INT;
using base_vector_type = mask_int16v;
using expr_type = void;
#if SIMDPP_USE_AVX512VL
using native_type = __mmask32;
#elif SIMDPP_USE_AVX2
using native_type = __m256i;
#endif
SIMDPP_INL mask_int8<32>() = default;
SIMDPP_INL mask_int8<32>(const mask_int8<32> &) = default;
SIMDPP_INL mask_int8<32> &operator=(const mask_int8<32> &) = default;
SIMDPP_INL mask_int8<32>(const native_type& d) : d_(d) {}
#if (SIMDPP_USE_AVX2 && !SIMDPP_USE_AVX512VL)
SIMDPP_INL mask_int8<32>(const uint8<32>& d) : d_(d.native()) {}
#endif
/// Convert to the underlying vector type
#if !SIMDPP_DISABLE_DEPRECATED_CONVERSION_OPERATOR_TO_NATIVE_TYPES
SIMDPP_INL operator native_type() const SIMDPP_IMPLICIT_CONVERSION_DEPRECATION_MSG
{ return d_; }
#endif
SIMDPP_INL native_type native() const { return d_; }
/// Access the underlying type
SIMDPP_INL uint8<32> unmask() const
{
#if SIMDPP_USE_AVX512VL
return _mm256_movm_epi8(d_);
#elif SIMDPP_USE_AVX2
return uint8<32>(d_);
#endif
}
SIMDPP_INL const mask_int8<32>& vec(unsigned) const { return *this; }
SIMDPP_INL mask_int8<32>& vec(unsigned) { return *this; }
SIMDPP_INL mask_int8<32> eval() const { return *this; }
private:
native_type d_;
};
#endif // SIMDPP_USE_AVX2
} // namespace SIMDPP_ARCH_NAMESPACE
} // namespace simdpp
#endif
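// Hedged usage sketch (illustration only, requires AVX2; make_uint/add are assumed from the
// wider libsimdpp API and are not declared in this header):
//
//   simdpp::uint8<32> a = simdpp::make_uint(1);
//   simdpp::uint8<32> b = simdpp::make_uint(2);
//   simdpp::uint8<32> c = simdpp::add(a, b);   // every lane holds 3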
| 2,718 |
435 | <filename>pycon-us-2016/videos/sebastian-vetter-click-a-pleasure-to-write-a-pleasure-to-use-pycon-2016.json<gh_stars>100-1000
{
"copyright_text": "Standard YouTube License",
"description": "Speaker: <NAME>\n\nWe have a wide variety of packages and modules in Python that help build commandline tools in different ways. One of the more recent contenders is 'click'. It uses a very intuitive approach to create simple CLIs as well as complex ones. In this talk, I will introduce building CLIs with 'click' and illustrate some of its advantages.\n\nSlides can be found at: https://speakerdeck.com/pycon2016 and https://github.com/PyCon/2016-slides",
"duration": 1827,
"id": 5175,
"language": "eng",
"recorded": "2016-05-31",
"related_urls": [
"https://github.com/PyCon/2016-slides",
"https://speakerdeck.com/pycon2016"
],
"slug": "sebastian-vetter-click-a-pleasure-to-write-a-pleasure-to-use-pycon-2016",
"speakers": [
"<NAME>"
],
"tags": [],
"thumbnail_url": "https://i.ytimg.com/vi/SDyHLG2ltSY/maxresdefault.jpg",
"title": "Click: A Pleasure To Write, A Pleasure To Use",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=SDyHLG2ltSY"
}
]
}
| 457 |
412 | <filename>regression/ansi-c/float_constant2/main.c
#include <assert.h>
int main()
{
#if defined(__GNUC__) && !defined(__clang__)
// accepted by GCC, but not Clang
assert(0 < 50.0d);
assert(0 < 1.0w);
assert(0 < 1.0q);
assert(0 < 1.0W);
assert(0 < 1.0Q);
assert(0 < 1.0df);
assert(0 < 1.0dd);
assert(0 < 1.0dl);
#endif
return 0;
}
| 165 |
364 | // The MIT License (MIT)
// Copyright (c) 2016, Microsoft
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#pragma once
#include <istream> // std::istream used as a parameter.
namespace BitFunnel
{
class Version;
//*************************************************************************
//
// PackedArray provides a memory efficient packed array of n-bit records
// where n is at least 1 and less than 57.
//
// THREAD SAFETY: PackedArray is not thread safe.
//
// TODO: REVIEW: Should PackedArray take a useVirtualAlloc parameter or
// should it just decide which allocator to use. It seems it should pick
// the allocator. File formats should probably not save the useVirtualAlloc
// parameter as the decision whether to use VirtualAlloc may be different
// across machines and OS versions.
//
//*************************************************************************
class PackedArray
{
public:
// Constructs a PackedArray with 'capacity' entries, each with
// 'bitsPerEntry' bits. If useVirtualAlloc is true, the underlying buffer
// will be allocated with VirtualAlloc(). This allows for very large
// PackedArrays that would cause malloc() to fail. If useVirtualAlloc is
// false, the underlying buffer will be allocated with new [].
PackedArray(size_t capacity, unsigned bitsPerEntry, bool useVirtualAlloc);
// Construct a PackedArray from data that was previously persisted to a
// stream with the Write() method.
PackedArray(std::istream& input);
// Destroys the packed array. Its underlying buffer is released with
// either VirtualFree() or delete [], depending on the value of
// m_useVirtualAlloc.
~PackedArray();
// Saves the dimensions and contents of the PackedArray to a stream.
void Write(std::ostream& output) const;
// Return the number of slots in the PackedArray.
size_t GetCapacity() const;
// Returns the value at the specified position in the array.
uint64_t Get(size_t index) const;
// Sets the value at the specified position in the array.
void Set(size_t index, uint64_t value);
//
// Static methods below are provided for other classes that want to use
// PackedArray functionality, but for whatever reason can't use an
// instance of PackedArray.
//
// Returns the size of the buffer in quad words.
static size_t GetBufferSize(size_t capacity,
unsigned bitsPerEntry);
// Allocates the underlying buffer, with space for 'capacity' entries,
// each with 'bitsPerEntry' entries.
static uint64_t* AllocateBuffer(size_t capacity,
unsigned bitsPerEntry,
bool useVirtualAlloc);
// Returns the value at the specified index in a packed array of
// where each entry has 'bitsPerEntry' bits. The mask should be set
// to (1 << bitsPerEntry) - 1. Does not perform bounds checking
// on index.
static uint64_t Get(size_t index,
unsigned bitsPerEntry,
uint64_t mask,
uint64_t* buffer);
// Sets the value at the specified index in a packed array of
// where each entry has 'bitsPerEntry' bits. The mask should be set
// to (1 << bitsPerEntry) - 1. Does not perform bounds checking
// on index.
static void Set(size_t index,
unsigned bitsPerEntry,
uint64_t mask,
uint64_t* buffer,
uint64_t value);
// Returns the maximum number of bits supported in an entry.
static unsigned GetMaxBitsPerEntry();
private:
static const Version c_version;
static const unsigned c_maxBitsPerEntry = 56;
bool m_useVirtualAlloc;
size_t m_capacity;
unsigned m_bitsPerEntry;
uint64_t m_mask;
// Buffer that holds packed array values.
uint64_t* m_buffer;
};
}
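// Hedged usage sketch (illustration only; the sizes and values are assumptions, not part of
// this header): a million 12-bit slots backed by VirtualAlloc.
//
//   BitFunnel::PackedArray counts(1000000, 12, true);
//   counts.Set(42, 0xABC);
//   uint64_t value = counts.Get(42);   // == 0xABC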
| 1,927 |
1,255 | <gh_stars>1000+
'''
Language parser for JavaScript
'''
import re
from .code_reader import CodeReader
from .clike import CCppCommentsMixin
from .js_style_language_states import JavaScriptStyleLanguageStates
from .js_style_regex_expression import js_style_regex_expression
class TypeScriptReader(CodeReader, CCppCommentsMixin):
# pylint: disable=R0903
ext = ['ts']
language_names = ['typescript', 'ts']
_conditions = set(['if', 'elseif', 'for', 'while', '&&', '||', '?',
'catch', 'case'])
def __init__(self, context):
super(TypeScriptReader, self).__init__(context)
self.parallel_states = [TypeScriptStates(context)]
@staticmethod
@js_style_regex_expression
def generate_tokens(source_code, addition='', token_class=None):
addition = addition +\
r"|(?:\w+\?)"
return CodeReader.generate_tokens(source_code, addition, token_class)
class TypeScriptStates(JavaScriptStyleLanguageStates):
def _expecting_func_opening_bracket(self, token):
if token == ':':
self.next(self._expecting_default)
return
super(TypeScriptStates, self)._expecting_func_opening_bracket(token)
def _expecting_default(self, token):
self.next(self._function_return_type)
if token == '{':
self.read_object()
def _function_return_type(self, token):
if token == ';':
self.next(self._state_global)
elif token == '{':
self.next(self._expecting_func_opening_bracket, token)
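# Hedged usage sketch (illustration only): this reader is normally driven by lizard itself;
# lizard.analyze_file is assumed from lizard's public API and is not defined in this module.
#
#   import lizard
#   info = lizard.analyze_file("example.ts")
#   print([f.cyclomatic_complexity for f in info.function_list])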
| 637 |
995 | <filename>scripts/external_libs/scapy-2.3.1/python3/scapy/layers/hsrp.py
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) <NAME> <<EMAIL>>
## This program is published under a GPLv2 license
#############################################################################
## ##
## hsrp.py --- HSRP protocol support for Scapy ##
## ##
## Copyright (C) 2010 <NAME> mathieu.renard(at)gmail.com ##
## ##
## This program is free software; you can redistribute it and/or modify it ##
## under the terms of the GNU General Public License version 2 as ##
## published by the Free Software Foundation; version 2. ##
## ##
## This program is distributed in the hope that it will be useful, but ##
## WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ##
## General Public License for more details. ##
## ##
#############################################################################
## HSRP Version 1
## Ref. RFC 2281
## HSRP Version 2
## Ref. http://www.smartnetworks.jp/2006/02/hsrp_8_hsrp_version_2.html
##
## $Log: hsrp.py,v $
## Revision 0.2 2011/05/01 15:23:34 mrenard
## Cleanup code
"""
HSRP (Hot Standby Router Protocol): proprietary redundancy protocol for Cisco routers.
"""
from scapy.fields import *
from scapy.packet import *
from scapy.layers.inet import UDP
class HSRP(Packet):
name = "HSRP"
fields_desc = [
ByteField("version", 0),
ByteEnumField("opcode", 0, {0: "Hello", 1: "Coup", 2: "Resign", 3: "Advertise"}),
ByteEnumField("state", 16, {0: "Initial", 1: "Learn", 2: "Listen", 4: "Speak", 8: "Standby", 16: "Active"}),
ByteField("hellotime", 3),
ByteField("holdtime", 10),
ByteField("priority", 120),
ByteField("group", 1),
ByteField("reserved", 0),
StrFixedLenField("auth", "cisco" + "\00" * 3, 8),
IPField("virtualIP", "192.168.1.1")]
def guess_payload_class(self, payload):
if self.underlayer.len > 28:
return HSRPmd5
else:
return Packet.guess_payload_class(self, payload)
class HSRPmd5(Packet):
name = "HSRP MD5 Authentication"
fields_desc = [
ByteEnumField("type", 4, {4: "MD5 authentication"}),
ByteField("len", None),
ByteEnumField("algo", 0, {1: "MD5"}),
ByteField("padding", 0x00),
XShortField("flags", 0x00),
IPField("sourceip", None),
XIntField("keyid", 0x00),
StrFixedLenField("authdigest", "\00" * 16, 16)]
def post_build(self, p, pay):
if self.len is None and pay:
l = len(pay)
p = p[:1] + hex(l)[30:] + p[30:]
return p
bind_layers(UDP, HSRP, dport=1985, sport=1985)
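# Hedged usage sketch (illustration only; the addresses are assumptions, not part of this
# file, and IP must be imported from scapy.layers.inet):
#
#   pkt = IP(dst="224.0.0.2") / UDP(sport=1985, dport=1985) / \
#         HSRP(group=1, priority=120, virtualIP="192.168.1.254")
#   pkt.show()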
| 1,565 |
8,747 | // Copyright 2015-2021 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*******************************************************************************
* NOTICE
* The hal is not public api, don't use in application code.
* See readme.md in hal/include/hal/readme.md
******************************************************************************/
// The LL layer for ESP32 PCNT register operations
#pragma once
#include <stdlib.h>
#include <stdbool.h>
#include "soc/pcnt_struct.h"
#include "hal/pcnt_types.h"
#ifdef __cplusplus
extern "C" {
#endif
#define PCNT_LL_GET_HW(num) (((num) == 0) ? (&PCNT) : NULL)
#define PCNT_LL_MAX_GLITCH_WIDTH 1023
typedef enum {
PCNT_LL_EVENT_THRES1,
PCNT_LL_EVENT_THRES0,
PCNT_LL_EVENT_LOW_LIMIT,
PCNT_LL_EVENT_HIGH_LIMIT,
PCNT_LL_EVENT_ZERO_CROSS,
PCNT_LL_EVENT_MAX
} pcnt_ll_event_id_t;
#define PCNT_LL_EVENT_MASK ((1 << PCNT_LL_EVENT_MAX) - 1)
/**
* @brief Set PCNT channel edge action
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param channel PCNT channel number
* @param pos_act Counter action when detecting positive edge
* @param neg_act Counter action when detecting negative edge
*/
static inline void pcnt_ll_set_edge_action(pcnt_dev_t *hw, uint32_t unit, uint32_t channel, pcnt_channel_edge_action_t pos_act, pcnt_channel_edge_action_t neg_act)
{
if (channel == 0) {
hw->conf_unit[unit].conf0.ch0_pos_mode = pos_act;
hw->conf_unit[unit].conf0.ch0_neg_mode = neg_act;
} else {
hw->conf_unit[unit].conf0.ch1_pos_mode = pos_act;
hw->conf_unit[unit].conf0.ch1_neg_mode = neg_act;
}
}
/**
* @brief Set PCNT channel level action
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param channel PCNT channel number
* @param high_act Counter action when control signal is high level
* @param low_act Counter action when control signal is low level
*/
static inline void pcnt_ll_set_level_action(pcnt_dev_t *hw, uint32_t unit, uint32_t channel, pcnt_channel_level_action_t high_act, pcnt_channel_level_action_t low_act)
{
if (channel == 0) {
hw->conf_unit[unit].conf0.ch0_hctrl_mode = high_act;
hw->conf_unit[unit].conf0.ch0_lctrl_mode = low_act;
} else {
hw->conf_unit[unit].conf0.ch1_hctrl_mode = high_act;
hw->conf_unit[unit].conf0.ch1_lctrl_mode = low_act;
}
}
/**
* @brief Get pulse counter value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit Pulse Counter unit number
* @return PCNT count value (a signed integer)
*/
static inline int pcnt_ll_get_count(pcnt_dev_t *hw, uint32_t unit)
{
typeof(hw->cnt_unit[unit]) cnt_reg = hw->cnt_unit[unit];
int16_t value = cnt_reg.cnt_val;
return value;
}
/**
* @brief Pause PCNT counter of PCNT unit
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
*/
static inline void pcnt_ll_stop_count(pcnt_dev_t *hw, uint32_t unit)
{
hw->ctrl.val |= 1 << (2 * unit + 1);
}
/**
* @brief Resume counting for PCNT counter
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number, select from uint32_t
*/
static inline void pcnt_ll_start_count(pcnt_dev_t *hw, uint32_t unit)
{
hw->ctrl.val &= ~(1 << (2 * unit + 1));
}
/**
* @brief Clear PCNT counter value to zero
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number, select from uint32_t
*/
static inline void pcnt_ll_clear_count(pcnt_dev_t *hw, uint32_t unit)
{
hw->ctrl.val |= 1 << (2 * unit);
hw->ctrl.val &= ~(1 << (2 * unit));
}
/**
* @brief Enable PCNT interrupt for PCNT unit
* @note Each PCNT unit has five watch point events that share the same interrupt bit.
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit_mask PCNT units mask
* @param enable True to enable interrupt, False to disable interrupt
*/
static inline void pcnt_ll_enable_intr(pcnt_dev_t *hw, uint32_t unit_mask, bool enable)
{
if (enable) {
hw->int_ena.val |= unit_mask;
} else {
hw->int_ena.val &= ~unit_mask;
}
}
/**
* @brief Get PCNT interrupt status
*
* @param hw Peripheral PCNT hardware instance address.
* @return Interrupt status word
*/
__attribute__((always_inline)) static inline uint32_t pcnt_ll_get_intr_status(pcnt_dev_t *hw)
{
return hw->int_st.val;
}
/**
* @brief Clear PCNT interrupt status
*
* @param hw Peripheral PCNT hardware instance address.
* @param status value to clear interrupt status
*/
__attribute__((always_inline)) static inline void pcnt_ll_clear_intr_status(pcnt_dev_t *hw, uint32_t status)
{
hw->int_clr.val = status;
}
/**
* @brief Enable PCNT high limit event
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param enable true to enable, false to disable
*/
static inline void pcnt_ll_enable_high_limit_event(pcnt_dev_t *hw, uint32_t unit, bool enable)
{
hw->conf_unit[unit].conf0.thr_h_lim_en = enable;
}
/**
* @brief Enable PCNT low limit event
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param enable true to enable, false to disable
*/
static inline void pcnt_ll_enable_low_limit_event(pcnt_dev_t *hw, uint32_t unit, bool enable)
{
hw->conf_unit[unit].conf0.thr_l_lim_en = enable;
}
/**
* @brief Enable PCNT zero cross event
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param enable true to enable, false to disable
*/
static inline void pcnt_ll_enable_zero_cross_event(pcnt_dev_t *hw, uint32_t unit, bool enable)
{
hw->conf_unit[unit].conf0.thr_zero_en = enable;
}
/**
* @brief Enable PCNT threshold event
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param thres Threshold ID
* @param enable true to enable, false to disable
*/
static inline void pcnt_ll_enable_thres_event(pcnt_dev_t *hw, uint32_t unit, uint32_t thres, bool enable)
{
if (thres == 0) {
hw->conf_unit[unit].conf0.thr_thres0_en = enable;
} else {
hw->conf_unit[unit].conf0.thr_thres1_en = enable;
}
}
/**
* @brief Disable all PCNT threshold events
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit unit number
*/
static inline void pcnt_ll_disable_all_events(pcnt_dev_t *hw, uint32_t unit)
{
hw->conf_unit[unit].conf0.val &= ~(PCNT_LL_EVENT_MASK << 11);
}
/**
* @brief Set PCNT high limit value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param value PCNT high limit value
*/
static inline void pcnt_ll_set_high_limit_value(pcnt_dev_t *hw, uint32_t unit, int value)
{
typeof(hw->conf_unit[unit].conf2) conf2_reg = hw->conf_unit[unit].conf2;
conf2_reg.cnt_h_lim = value;
hw->conf_unit[unit].conf2 = conf2_reg;
}
/**
* @brief Set PCNT low limit value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param value PCNT low limit value
*/
static inline void pcnt_ll_set_low_limit_value(pcnt_dev_t *hw, uint32_t unit, int value)
{
typeof(hw->conf_unit[unit].conf2) conf2_reg = hw->conf_unit[unit].conf2;
conf2_reg.cnt_l_lim = value;
hw->conf_unit[unit].conf2 = conf2_reg;
}
/**
* @brief Set PCNT threshold value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param thres Threshold ID
* @param value PCNT threshold value
*/
static inline void pcnt_ll_set_thres_value(pcnt_dev_t *hw, uint32_t unit, uint32_t thres, int value)
{
typeof(hw->conf_unit[unit].conf1) conf1_reg = hw->conf_unit[unit].conf1;
if (thres == 0) {
conf1_reg.cnt_thres0 = value;
} else {
conf1_reg.cnt_thres1 = value;
}
hw->conf_unit[unit].conf1 = conf1_reg;
}
/**
* @brief Get PCNT high limit value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @return PCNT high limit value
*/
static inline int pcnt_ll_get_high_limit_value(pcnt_dev_t *hw, uint32_t unit)
{
typeof(hw->conf_unit[unit].conf2) conf2_reg = hw->conf_unit[unit].conf2;
int16_t value = conf2_reg.cnt_h_lim;
return value;
}
/**
* @brief Get PCNT low limit value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @return PCNT high limit value
*/
static inline int pcnt_ll_get_low_limit_value(pcnt_dev_t *hw, uint32_t unit)
{
typeof(hw->conf_unit[unit].conf2) conf2_reg = hw->conf_unit[unit].conf2;
int16_t value = conf2_reg.cnt_l_lim;
return value;
}
/**
* @brief Get PCNT threshold value
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param thres Threshold ID
* @return PCNT threshold value
*/
static inline int pcnt_ll_get_thres_value(pcnt_dev_t *hw, uint32_t unit, uint32_t thres)
{
int16_t value;
typeof(hw->conf_unit[unit].conf1) conf1_reg = hw->conf_unit[unit].conf1;
if (thres == 0) {
value = conf1_reg.cnt_thres0;
} else {
value = conf1_reg.cnt_thres1;
}
return value;
}
/**
* @brief Get PCNT unit runtime status
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @return PCNT unit runtime status
*/
static inline uint32_t pcnt_ll_get_unit_status(pcnt_dev_t *hw, uint32_t unit)
{
return hw->status_unit[unit].val;
}
/**
* @brief Get PCNT count sign
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @return Count sign
*/
static inline pcnt_unit_count_sign_t pcnt_ll_get_count_sign(pcnt_dev_t *hw, uint32_t unit)
{
return hw->status_unit[unit].val & 0x03;
}
/**
* @brief Get PCNT event status
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @return Event status word
*/
static inline uint32_t pcnt_ll_get_event_status(pcnt_dev_t *hw, uint32_t unit)
{
return hw->status_unit[unit].val >> 2;
}
/**
* @brief Set PCNT glitch filter threshold
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param filter_val PCNT signal filter value, counter in APB_CLK cycles.
* Any pulses lasting shorter than this will be ignored when the filter is enabled.
*/
static inline void pcnt_ll_set_glitch_filter_thres(pcnt_dev_t *hw, uint32_t unit, uint32_t filter_val)
{
hw->conf_unit[unit].conf0.filter_thres = filter_val;
}
/**
* @brief Get PCNT glitch filter threshold
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @return glitch filter threshold
*/
static inline uint32_t pcnt_ll_get_glitch_filter_thres(pcnt_dev_t *hw, uint32_t unit)
{
return hw->conf_unit[unit].conf0.filter_thres;
}
/**
* @brief Enable PCNT glitch filter
*
* @param hw Peripheral PCNT hardware instance address.
* @param unit PCNT unit number
* @param enable True to enable the filter, False to disable the filter
*/
static inline void pcnt_ll_enable_glitch_filter(pcnt_dev_t *hw, uint32_t unit, bool enable)
{
hw->conf_unit[unit].conf0.filter_en = enable;
}
#ifdef __cplusplus
}
#endif
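/* Hedged usage sketch (illustration only): count rising edges on unit 0, channel 0 while the
 * control signal is high. The PCNT_CHANNEL_*_ACTION_* constants are assumed from
 * hal/pcnt_types.h and are not defined in this header.
 *
 *   pcnt_dev_t *hw = PCNT_LL_GET_HW(0);
 *   pcnt_ll_set_edge_action(hw, 0, 0, PCNT_CHANNEL_EDGE_ACTION_INCREASE,
 *                           PCNT_CHANNEL_EDGE_ACTION_HOLD);
 *   pcnt_ll_set_level_action(hw, 0, 0, PCNT_CHANNEL_LEVEL_ACTION_KEEP,
 *                            PCNT_CHANNEL_LEVEL_ACTION_HOLD);
 *   pcnt_ll_clear_count(hw, 0);
 *   pcnt_ll_start_count(hw, 0);
 *   int count = pcnt_ll_get_count(hw, 0);
 */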
| 4,396 |
1,350 | <reponame>Manny27nyc/azure-sdk-for-java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.mixedreality.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.resourcemanager.mixedreality.fluent.models.AccountKeysInner;
import com.azure.resourcemanager.mixedreality.fluent.models.RemoteRenderingAccountInner;
import com.azure.resourcemanager.mixedreality.models.AccountKeyRegenerateRequest;
/** An instance of this class provides access to all the operations defined in RemoteRenderingAccountsClient. */
public interface RemoteRenderingAccountsClient {
/**
* List Remote Rendering Accounts by Subscription.
*
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return result of the request to get resource collection.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<RemoteRenderingAccountInner> list();
/**
* List Remote Rendering Accounts by Subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return result of the request to get resource collection.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<RemoteRenderingAccountInner> list(Context context);
/**
* List Resources by Resource Group.
*
* @param resourceGroupName Name of an Azure resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return result of the request to get resource collection.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<RemoteRenderingAccountInner> listByResourceGroup(String resourceGroupName);
/**
* List Resources by Resource Group.
*
* @param resourceGroupName Name of an Azure resource group.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return result of the request to get resource collection.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<RemoteRenderingAccountInner> listByResourceGroup(String resourceGroupName, Context context);
/**
* Delete a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Name of an Mixed Reality Account.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
void delete(String resourceGroupName, String accountName);
/**
* Delete a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Name of an Mixed Reality Account.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<Void> deleteWithResponse(String resourceGroupName, String accountName, Context context);
/**
* Retrieve a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Name of an Mixed Reality Account.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return remoteRenderingAccount Response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
RemoteRenderingAccountInner getByResourceGroup(String resourceGroupName, String accountName);
/**
* Retrieve a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Name of an Mixed Reality Account.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return remoteRenderingAccount Response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<RemoteRenderingAccountInner> getByResourceGroupWithResponse(
String resourceGroupName, String accountName, Context context);
/**
* Updating a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Name of an Mixed Reality Account.
* @param remoteRenderingAccount Remote Rendering Account parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return remoteRenderingAccount Response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
RemoteRenderingAccountInner update(
String resourceGroupName, String accountName, RemoteRenderingAccountInner remoteRenderingAccount);
/**
* Updating a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
* @param accountName Name of an Mixed Reality Account.
* @param remoteRenderingAccount Remote Rendering Account parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Remote Rendering Account response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<RemoteRenderingAccountInner> updateWithResponse(
String resourceGroupName,
String accountName,
RemoteRenderingAccountInner remoteRenderingAccount,
Context context);
/**
     * Create or update a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
     * @param accountName Name of a Mixed Reality Account.
* @param remoteRenderingAccount Remote Rendering Account parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Remote Rendering Account response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
RemoteRenderingAccountInner create(
String resourceGroupName, String accountName, RemoteRenderingAccountInner remoteRenderingAccount);
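    // Example (hypothetical usage sketch, not generated code): creating an account through an
    // instance of this client. The "accountsClient" variable, the resource names and the
    // withLocation(...) setter are assumptions for illustration only.
    //
    //   RemoteRenderingAccountInner desired = new RemoteRenderingAccountInner().withLocation("westus2");
    //   RemoteRenderingAccountInner created = accountsClient.create("myResourceGroup", "myAccount", desired);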
/**
     * Create or update a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
     * @param accountName Name of a Mixed Reality Account.
* @param remoteRenderingAccount Remote Rendering Account parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the Remote Rendering Account response.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<RemoteRenderingAccountInner> createWithResponse(
String resourceGroupName,
String accountName,
RemoteRenderingAccountInner remoteRenderingAccount,
Context context);
/**
     * List both keys of a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
     * @param accountName Name of a Mixed Reality Account.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the developer keys of the account.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
AccountKeysInner listKeys(String resourceGroupName, String accountName);
/**
     * List both keys of a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
     * @param accountName Name of a Mixed Reality Account.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the developer keys of the account.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<AccountKeysInner> listKeysWithResponse(String resourceGroupName, String accountName, Context context);
/**
     * Regenerate the specified key of a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
     * @param accountName Name of a Mixed Reality Account.
* @param regenerate Required information for key regeneration.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the developer keys of the account.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
AccountKeysInner regenerateKeys(
String resourceGroupName, String accountName, AccountKeyRegenerateRequest regenerate);
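    // Example (hypothetical usage sketch, not generated code): reading the current keys and then
    // rotating one of them. The "accountsClient" variable and the withSerial(...) setter are
    // assumptions for illustration; consult the AccountKeyRegenerateRequest model for the exact
    // field names.
    //
    //   AccountKeysInner current = accountsClient.listKeys("myResourceGroup", "myAccount");
    //   AccountKeysInner rotated = accountsClient.regenerateKeys(
    //       "myResourceGroup", "myAccount", new AccountKeyRegenerateRequest().withSerial(2));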
/**
     * Regenerate the specified key of a Remote Rendering Account.
*
* @param resourceGroupName Name of an Azure resource group.
     * @param accountName Name of a Mixed Reality Account.
* @param regenerate Required information for key regeneration.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the developer keys of the account.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<AccountKeysInner> regenerateKeysWithResponse(
String resourceGroupName, String accountName, AccountKeyRegenerateRequest regenerate, Context context);
}
| 3,747 |
569 | <gh_stars>100-1000
/*
Copyright 1995-2015 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, contact:
Environmental Systems Research Institute, Inc.
Attn: Contracts Dept
380 New York Street
Redlands, California, USA 92373
email: <EMAIL>
*/
package com.esri.core.geometry;
import com.esri.core.geometry.OperatorCutLocal;
import java.util.ArrayList;
import java.util.Arrays;
class Cutter {
static class CompareVertices {
int m_orderIndex;
EditShape m_editShape;
CompareVertices(int orderIndex, EditShape editShape) {
m_orderIndex = orderIndex;
m_editShape = editShape;
}
int _compareVertices(int v1, int v2) {
Point2D pt1 = new Point2D();
m_editShape.getXY(v1, pt1);
Point2D pt2 = new Point2D();
m_editShape.getXY(v2, pt2);
int res = pt1.compare(pt2);
if (res != 0)
return res;
int z1 = m_editShape.getUserIndex(v1, m_orderIndex);
int z2 = m_editShape.getUserIndex(v2, m_orderIndex);
if (z1 < z2)
return -1;
if (z1 == z2)
return 0;
return 1;
}
}
static class CutterVertexComparer extends
AttributeStreamOfInt32.IntComparator {
CompareVertices m_compareVertices;
CutterVertexComparer(CompareVertices _compareVertices) {
m_compareVertices = _compareVertices;
}
@Override
public int compare(int v1, int v2) {
return m_compareVertices._compareVertices(v1, v2);
}
}
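	// A CutEvent records a single intersection found between a cuttee segment and a cutter
	// segment: the vertex and part of each shape involved, the parametric scalars along both
	// segments, and the intersection count (1 for a crossing or touch, 2 for an overlapping
	// interval, in which case the second scalar pair marks the end of the overlap).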
static class CutEvent {
int m_ivertexCuttee;
int m_ipartCuttee;
double m_scalarCuttee0;
double m_scalarCuttee1;
int m_count;
int m_ivertexCutter;
int m_ipartCutter;
double m_scalarCutter0;
double m_scalarCutter1;
CutEvent(int ivertexCuttee, int ipartCuttee, double scalarCuttee0,
double scalarCuttee1, int count, int ivertexCutter,
int ipartCutter, double scalarCutter0, double scalarCutter1) {
m_ivertexCuttee = ivertexCuttee;
m_ipartCuttee = ipartCuttee;
m_scalarCuttee0 = scalarCuttee0;
m_scalarCuttee1 = scalarCuttee1;
m_count = count;
m_ivertexCutter = ivertexCutter;
m_ipartCutter = ipartCutter;
m_scalarCutter0 = scalarCutter0;
m_scalarCutter1 = scalarCutter1;
}
}
static EditShape CutPolyline(boolean bConsiderTouch, Polyline cuttee,
Polyline cutter, double tolerance,
ArrayList<OperatorCutLocal.CutPair> cutPairs,
AttributeStreamOfInt32 segmentCounts, ProgressTracker progressTracker) {
if (cuttee.isEmpty()) {
OperatorCutLocal.CutPair cutPair;
cutPair = new OperatorCutLocal.CutPair(cuttee,
OperatorCutLocal.Side.Uncut, -1, -1, NumberUtils.NaN(),
OperatorCutLocal.Side.Uncut, -1, -1, NumberUtils.NaN(), -1,
-1, NumberUtils.NaN(), -1, -1, NumberUtils.NaN());
cutPairs.add(cutPair);
return null;
}
EditShape editShape = new EditShape();
int cutteeHandle = editShape.addGeometry(cuttee);
int cutterHandle = editShape.addGeometry(cutter);
CrackAndCluster.execute(editShape, tolerance, progressTracker, true);
int order = 0;
int orderIndex = editShape.createUserIndex();
for (int igeometry = editShape.getFirstGeometry(); igeometry != -1; igeometry = editShape
.getNextGeometry(igeometry))
for (int ipath = editShape.getFirstPath(igeometry); ipath != -1; ipath = editShape
.getNextPath(ipath))
for (int ivertex = editShape.getFirstVertex(ipath), i = 0, n = editShape
.getPathSize(ipath); i < n; ivertex = editShape
.getNextVertex(ivertex), i++)
editShape.setUserIndex(ivertex, orderIndex, order++);
ArrayList<CutEvent> cutEvents = _getCutEvents(orderIndex, editShape);
_Cut(bConsiderTouch, false, cutEvents, editShape, cutPairs,
segmentCounts);
return editShape;
}
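	// Example (hypothetical sketch of how this helper is driven; the tolerance and the input
	// polylines are illustrative only): the caller collects the resulting cut pairs and reads
	// the piece geometry and side information from each OperatorCutLocal.CutPair.
	//
	//   ArrayList<OperatorCutLocal.CutPair> pairs = new ArrayList<OperatorCutLocal.CutPair>();
	//   Cutter.CutPolyline(false, cuttee, cutter, 0.001, pairs, null, null);
	//   for (OperatorCutLocal.CutPair pair : pairs) {
	//       // each pair carries one piece of the cuttee plus bookkeeping about the cut
	//   }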
private static ArrayList<CutEvent> _getCutEvents(int orderIndex,
EditShape editShape) {
int pointCount = editShape.getTotalPointCount();
if (pointCount == 0)
return null;
// Sort vertices lexicographically
		// First, copy all vertices to an array.
AttributeStreamOfInt32 vertices = new AttributeStreamOfInt32(0);
for (int igeometry = editShape.getFirstGeometry(); igeometry != -1; igeometry = editShape
.getNextGeometry(igeometry))
for (int ipath = editShape.getFirstPath(igeometry); ipath != -1; ipath = editShape
.getNextPath(ipath))
for (int ivertex = editShape.getFirstVertex(ipath), i = 0, n = editShape
.getPathSize(ipath); i < n; ivertex = editShape
.getNextVertex(ivertex), i++)
vertices.add(ivertex);
// Sort
CompareVertices compareVertices = new CompareVertices(orderIndex,
editShape);
vertices.Sort(0, pointCount, new CutterVertexComparer(compareVertices));
// Find Cut Events
ArrayList<CutEvent> cutEvents = new ArrayList<CutEvent>(0);
ArrayList<CutEvent> cutEventsTemp = new ArrayList<CutEvent>(0);
int eventIndex = editShape.createUserIndex();
int eventIndexTemp = editShape.createUserIndex();
int cutteeHandle = editShape.getFirstGeometry();
int cutterHandle = editShape.getNextGeometry(cutteeHandle);
Point2D pointCuttee = new Point2D();
Point2D pointCutter = new Point2D();
int ivertexCuttee = vertices.get(0);
int ipartCuttee = editShape.getPathFromVertex(ivertexCuttee);
int igeometryCuttee = editShape.getGeometryFromPath(ipartCuttee);
editShape.getXY(ivertexCuttee, pointCuttee);
int istart = 1;
int ivertex = 0;
while (istart < pointCount - 1) {
boolean bCutEvent = false;
for (int i = istart; i < pointCount; i++) {
if (i == ivertex)
continue;
int ivertexCutter = vertices.get(i);
int ipartCutter = editShape.getPathFromVertex(ivertexCutter);
int igeometryCutter = editShape
.getGeometryFromPath(ipartCutter);
editShape.getXY(ivertexCutter, pointCutter);
if (pointCuttee.isEqual(pointCutter)) {
boolean bCondition = igeometryCuttee == cutteeHandle
&& igeometryCutter == cutterHandle;
if (bCondition)
bCutEvent = _cutteeCutterEvents(eventIndex,
eventIndexTemp, editShape, cutEvents,
cutEventsTemp, ipartCuttee, ivertexCuttee,
ipartCutter, ivertexCutter);
} else
break;
}
if (bCutEvent || ivertex == istart - 1) {
if (bCutEvent && (ivertex == istart - 1))
istart--;
if (++ivertex == pointCount)
break;
ivertexCuttee = vertices.get(ivertex);
ipartCuttee = editShape.getPathFromVertex(ivertexCuttee);
igeometryCuttee = editShape.getGeometryFromPath(ipartCuttee);
editShape.getXY(ivertexCuttee, pointCuttee);
}
if (!bCutEvent)
istart = ivertex + 1;
}
ArrayList<CutEvent> cutEventsSorted = new ArrayList<CutEvent>(0);
// Sort CutEvents
int icutEvent;
int icutEventTemp;
for (int igeometry = editShape.getFirstGeometry(); igeometry != -1; igeometry = editShape
.getNextGeometry(igeometry)) {
for (int ipath = editShape.getFirstPath(igeometry); ipath != -1; ipath = editShape
.getNextPath(ipath)) {
for (int iv = editShape.getFirstVertex(ipath), i = 0, n = editShape
.getPathSize(ipath); i < n; iv = editShape
.getNextVertex(iv), i++) {
icutEventTemp = editShape.getUserIndex(iv, eventIndexTemp);
if (icutEventTemp >= 0) {
// _ASSERT(cutEventsTemp.get(icutEventTemp).m_ivertexCuttee
// == iv);
while (icutEventTemp < cutEventsTemp.size()
&& cutEventsTemp.get(icutEventTemp).m_ivertexCuttee == iv)
cutEventsSorted.add(cutEventsTemp
.get(icutEventTemp++));
}
icutEvent = editShape.getUserIndex(iv, eventIndex);
if (icutEvent >= 0) {
// _ASSERT(cutEvents->Get(icutEvent)->m_ivertexCuttee ==
// iv);
while (icutEvent < cutEvents.size()
&& cutEvents.get(icutEvent).m_ivertexCuttee == iv)
cutEventsSorted.add(cutEvents.get(icutEvent++));
}
}
}
}
// _ASSERT(cutEvents->Size() + cutEventsTemp->Size() ==
// cutEventsSorted->Size());
editShape.removeUserIndex(eventIndex);
editShape.removeUserIndex(eventIndexTemp);
return cutEventsSorted;
}
static boolean _cutteeCutterEvents(int eventIndex, int eventIndexTemp,
EditShape editShape, ArrayList<CutEvent> cutEvents,
ArrayList<CutEvent> cutEventsTemp, int ipartCuttee,
int ivertexCuttee, int ipartCutter, int ivertexCutter) {
int ilastVertexCuttee = editShape.getLastVertex(ipartCuttee);
int ilastVertexCutter = editShape.getLastVertex(ipartCutter);
int ifirstVertexCuttee = editShape.getFirstVertex(ipartCuttee);
int ifirstVertexCutter = editShape.getFirstVertex(ipartCutter);
int ivertexCutteePrev = editShape.getPrevVertex(ivertexCuttee);
int ivertexCutterPrev = editShape.getPrevVertex(ivertexCutter);
boolean bEndEnd = false;
boolean bEndStart = false;
boolean bStartEnd = false;
boolean bStartStart = false;
if (ivertexCuttee != ifirstVertexCuttee) {
if (ivertexCutter != ifirstVertexCutter)
bEndEnd = _cutteeEndCutterEndEvent(eventIndex, editShape,
cutEvents, ipartCuttee, ivertexCutteePrev, ipartCutter,
ivertexCutterPrev);
if (ivertexCutter != ilastVertexCutter)
bEndStart = _cutteeEndCutterStartEvent(eventIndex, editShape,
cutEvents, ipartCuttee, ivertexCutteePrev, ipartCutter,
ivertexCutter);
}
if (ivertexCuttee != ilastVertexCuttee) {
if (ivertexCutter != ifirstVertexCutter)
bStartEnd = _cutteeStartCutterEndEvent(eventIndexTemp,
editShape, cutEventsTemp, ipartCuttee, ivertexCuttee,
ipartCutter, ivertexCutterPrev, ifirstVertexCuttee);
if (ivertexCutter != ilastVertexCutter)
bStartStart = _cutteeStartCutterStartEvent(eventIndexTemp,
editShape, cutEventsTemp, ipartCuttee, ivertexCuttee,
ipartCutter, ivertexCutter, ifirstVertexCuttee);
}
if (bEndEnd && bEndStart && bStartEnd) {
int iendstart = cutEvents.size() - 1;
int istartend = (bStartStart ? cutEventsTemp.size() - 2
: cutEventsTemp.size() - 1);
if (cutEventsTemp.get(istartend).m_count == 2) {
// Replace bEndEnd with bEndStart, and remove duplicate
// bEndStart (get rid of bEndEnd)
cutEvents.set(iendstart - 1, cutEvents.get(iendstart));
cutEvents.remove(cutEvents.size() - 1);
}
} else if (bEndEnd && bEndStart && bStartStart) {
int istartstart = cutEventsTemp.size() - 1;
if (cutEventsTemp.get(istartstart).m_count == 2) {
// Remove bEndStart
CutEvent lastEvent = cutEvents.get(cutEvents.size() - 1);
cutEvents.remove(cutEvents.get(cutEvents.size() - 1));
int icutEvent = editShape.getUserIndex(
lastEvent.m_ivertexCuttee, eventIndex);
if (icutEvent == cutEvents.size())
editShape.setUserIndex(lastEvent.m_ivertexCuttee,
eventIndex, -1);
}
}
return bEndEnd || bEndStart || bStartEnd || bStartStart;
}
private static boolean _cutteeEndCutterEndEvent(int eventIndex,
EditShape editShape, ArrayList<CutEvent> cutEvents,
int ipartCuttee, int ivertexCuttee, int ipartCutter,
int ivertexCutter) {
Segment segmentCuttee;
Segment segmentCutter;
Line lineCuttee = new Line();
Line lineCutter = new Line();
double[] scalarsCuttee = new double[2];
double[] scalarsCutter = new double[2];
CutEvent cutEvent;
segmentCuttee = editShape.getSegment(ivertexCuttee);
if (segmentCuttee == null) {
editShape.queryLineConnector(ivertexCuttee, lineCuttee);
segmentCuttee = lineCuttee;
}
segmentCutter = editShape.getSegment(ivertexCutter);
if (segmentCutter == null) {
editShape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
int count = segmentCuttee.intersect(segmentCutter, null, scalarsCuttee,
scalarsCutter, 0.0);
// _ASSERT(count > 0);
int icutEvent;
		// If count == 2 (i.e. when they overlap), then this event would have
// been discovered by _CutteeStartCutterStartEvent at the previous index
if (count < 2) {
cutEvent = new CutEvent(ivertexCuttee, ipartCuttee,
scalarsCuttee[0], NumberUtils.NaN(), count, ivertexCutter,
ipartCutter, scalarsCutter[0], NumberUtils.NaN());
cutEvents.add(cutEvent);
icutEvent = editShape.getUserIndex(ivertexCuttee, eventIndex);
if (icutEvent < 0)
editShape.setUserIndex(ivertexCuttee, eventIndex,
cutEvents.size() - 1);
}
return true;
}
private static boolean _cutteeEndCutterStartEvent(int eventIndex,
EditShape editShape, ArrayList<CutEvent> cutEvents,
int ipartCuttee, int ivertexCuttee, int ipartCutter,
int ivertexCutter) {
Segment segmentCuttee;
Segment segmentCutter;
Line lineCuttee = new Line();
Line lineCutter = new Line();
double[] scalarsCuttee = new double[2];
double[] scalarsCutter = new double[2];
CutEvent cutEvent;
segmentCuttee = editShape.getSegment(ivertexCuttee);
if (segmentCuttee == null) {
editShape.queryLineConnector(ivertexCuttee, lineCuttee);
segmentCuttee = lineCuttee;
}
segmentCutter = editShape.getSegment(ivertexCutter);
if (segmentCutter == null) {
editShape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
int count = segmentCuttee.intersect(segmentCutter, null, scalarsCuttee,
scalarsCutter, 0.0);
// _ASSERT(count > 0);
int icutEvent;
		// If count == 2 (i.e. when they overlap), then this event would have
// been discovered by _CutteeStartCutterEndEvent at the previous index
if (count < 2) {
cutEvent = new CutEvent(ivertexCuttee, ipartCuttee,
scalarsCuttee[0], NumberUtils.NaN(), count, ivertexCutter,
ipartCutter, scalarsCutter[0], NumberUtils.NaN());
cutEvents.add(cutEvent);
icutEvent = editShape.getUserIndex(ivertexCuttee, eventIndex);
if (icutEvent < 0)
editShape.setUserIndex(ivertexCuttee, eventIndex,
cutEvents.size() - 1);
return true;
}
return false;
}
private static boolean _cutteeStartCutterEndEvent(int eventIndex,
EditShape editShape, ArrayList<CutEvent> cutEvents,
int ipartCuttee, int ivertexCuttee, int ipartCutter,
int ivertexCutter, int ifirstVertexCuttee) {
Segment segmentCuttee;
Segment segmentCutter;
Line lineCuttee = new Line();
Line lineCutter = new Line();
double[] scalarsCuttee = new double[2];
double[] scalarsCutter = new double[2];
CutEvent cutEvent;
segmentCuttee = editShape.getSegment(ivertexCuttee);
if (segmentCuttee == null) {
editShape.queryLineConnector(ivertexCuttee, lineCuttee);
segmentCuttee = lineCuttee;
}
segmentCutter = editShape.getSegment(ivertexCutter);
if (segmentCutter == null) {
editShape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
int count = segmentCuttee.intersect(segmentCutter, null, scalarsCuttee,
scalarsCutter, 0.0);
// _ASSERT(count > 0);
int icutEvent;
if (count == 2) {
cutEvent = new CutEvent(ivertexCuttee, ipartCuttee,
scalarsCuttee[0], scalarsCuttee[1], count, ivertexCutter,
ipartCutter, scalarsCutter[0], scalarsCutter[1]);
cutEvents.add(cutEvent);
icutEvent = editShape.getUserIndex(ivertexCuttee, eventIndex);
if (icutEvent < 0)
editShape.setUserIndex(ivertexCuttee, eventIndex,
cutEvents.size() - 1);
return true;
} else {
boolean bCutEvent = false;
if (ivertexCuttee == ifirstVertexCuttee) {
cutEvent = new CutEvent(ivertexCuttee, ipartCuttee,
scalarsCuttee[0], NumberUtils.NaN(), count,
ivertexCutter, ipartCutter, scalarsCutter[0],
NumberUtils.NaN());
cutEvents.add(cutEvent);
icutEvent = editShape.getUserIndex(ivertexCuttee, eventIndex);
if (icutEvent < 0)
editShape.setUserIndex(ivertexCuttee, eventIndex,
cutEvents.size() - 1);
bCutEvent = true;
}
return bCutEvent;
}
}
private static boolean _cutteeStartCutterStartEvent(int eventIndex,
EditShape editShape, ArrayList<CutEvent> cutEvents,
int ipartCuttee, int ivertexCuttee, int ipartCutter,
int ivertexCutter, int ifirstVertexCuttee) {
Segment segmentCuttee;
Segment segmentCutter;
Line lineCuttee = new Line();
Line lineCutter = new Line();
double[] scalarsCuttee = new double[2];
double[] scalarsCutter = new double[2];
CutEvent cutEvent;
segmentCuttee = editShape.getSegment(ivertexCuttee);
if (segmentCuttee == null) {
editShape.queryLineConnector(ivertexCuttee, lineCuttee);
segmentCuttee = lineCuttee;
}
segmentCutter = editShape.getSegment(ivertexCutter);
if (segmentCutter == null) {
editShape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
int count = segmentCuttee.intersect(segmentCutter, null, scalarsCuttee,
scalarsCutter, 0.0);
// _ASSERT(count > 0);
int icutEvent;
if (count == 2) {
cutEvent = new CutEvent(ivertexCuttee, ipartCuttee,
scalarsCuttee[0], scalarsCuttee[1], count, ivertexCutter,
ipartCutter, scalarsCutter[0], scalarsCutter[1]);
cutEvents.add(cutEvent);
icutEvent = editShape.getUserIndex(ivertexCuttee, eventIndex);
if (icutEvent < 0)
editShape.setUserIndex(ivertexCuttee, eventIndex,
cutEvents.size() - 1);
return true;
} else {
boolean bCutEvent = false;
if (ivertexCuttee == ifirstVertexCuttee) {
cutEvent = new CutEvent(ivertexCuttee, ipartCuttee,
scalarsCuttee[0], NumberUtils.NaN(), count,
ivertexCutter, ipartCutter, scalarsCutter[0],
NumberUtils.NaN());
cutEvents.add(cutEvent);
icutEvent = editShape.getUserIndex(ivertexCuttee, eventIndex);
if (icutEvent < 0)
editShape.setUserIndex(ivertexCuttee, eventIndex,
cutEvents.size() - 1);
bCutEvent = true;
}
return bCutEvent;
}
}
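	// _Cut walks each path of the cuttee once, replaying the sorted cut events that fall on its
	// segments. Overlapping stretches (count == 2) are emitted as Side.Coincident pieces,
	// ordinary crossings alternate the side between Left and Right, and whatever remains at the
	// end of a path is emitted as the last piece (or as Side.Uncut when no cut occurred). When
	// cutPairs is null, only per-piece segment counts are accumulated into segmentCounts.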
static void _Cut(boolean bConsiderTouch, boolean bLocalCutsOnly,
ArrayList<CutEvent> cutEvents, EditShape shape,
ArrayList<OperatorCutLocal.CutPair> cutPairs,
AttributeStreamOfInt32 segmentCounts) {
OperatorCutLocal.CutPair cutPair;
Point2D[] tangents = new Point2D[4];
tangents[0] = new Point2D();
tangents[1] = new Point2D();
tangents[2] = new Point2D();
tangents[3] = new Point2D();
Point2D tangent0 = new Point2D();
Point2D tangent1 = new Point2D();
Point2D tangent2 = new Point2D();
Point2D tangent3 = new Point2D();
SegmentBuffer segmentBufferCuttee = null;
if (cutPairs != null) {
segmentBufferCuttee = new SegmentBuffer();
segmentBufferCuttee.createLine();
}
Segment segmentCuttee = null;
int icutEvent = 0;
MultiPath multipath = null;
Line lineCuttee = new Line();
Line lineCutter = new Line();
int polyline = shape.getFirstGeometry();
for (int ipath = shape.getFirstPath(polyline); ipath != -1; ipath = shape
.getNextPath(ipath)) {
int cut;
int cutPrev = OperatorCutLocal.Side.Uncut;
int ipartCuttee = -1;
int ivertexCuttee = -1;
double scalarCuttee = NumberUtils.NaN();
int ipartCutteePrev = -1;
int ivertexCutteePrev = -1;
double scalarCutteePrev = NumberUtils.NaN();
int ipartCutter = -1;
int ivertexCutter = -1;
double scalarCutter = NumberUtils.NaN();
int ipartCutterPrev = -1;
int ivertexCutterPrev = -1;
double scalarCutterPrev = NumberUtils.NaN();
			boolean bNoCutYet = true; // Indicates whether a cut has occurred for
// the current part
			boolean bCoincidentNotAdded = false; // Indicates whether the
			// current coincident
			// multipath has not yet been
			// added to cutPairs
boolean bCurrentMultiPathNotAdded = true; // Indicates whether there
// is a multipath not
// yet added to cutPairs
// (left, right, or
// undefined)
boolean bStartNewPath = true;
boolean bCreateNewMultiPath = true;
int segmentCount = 0;
ipartCutteePrev = ipath;
scalarCutteePrev = 0.0;
for (int ivertex = shape.getFirstVertex(ipath), n = shape
.getPathSize(ipath), i = 0; i < n; ivertex = shape
.getNextVertex(ivertex), i++) {
segmentCuttee = shape.getSegment(ivertex);
if (segmentCuttee == null) {
if (!shape.queryLineConnector(ivertex, lineCuttee))
continue;
segmentCuttee = lineCuttee;
}
if (ivertexCutteePrev == -1)
ivertexCutteePrev = ivertex;
double lastScalarCuttee = 0.0; // last scalar along the current
// segment
while (icutEvent < cutEvents.size()
&& ivertex == cutEvents.get(icutEvent).m_ivertexCuttee) {
ipartCuttee = cutEvents.get(icutEvent).m_ipartCuttee;
ivertexCuttee = cutEvents.get(icutEvent).m_ivertexCuttee;
scalarCuttee = cutEvents.get(icutEvent).m_scalarCuttee0;
ipartCutter = cutEvents.get(icutEvent).m_ipartCutter;
ivertexCutter = cutEvents.get(icutEvent).m_ivertexCutter;
scalarCutter = cutEvents.get(icutEvent).m_scalarCutter0;
if (cutEvents.get(icutEvent).m_count == 2) {
// We have an overlap
if (!bCoincidentNotAdded) {
ipartCutteePrev = ipartCuttee;
ivertexCutteePrev = ivertexCuttee;
scalarCutteePrev = scalarCuttee;
ipartCutterPrev = ipartCutter;
ivertexCutterPrev = ivertexCutter;
scalarCutterPrev = scalarCutter;
cutPrev = OperatorCutLocal.Side.Coincident;
// Create new multipath
if (cutPairs != null)
multipath = new Polyline();
else
segmentCount = 0;
bCreateNewMultiPath = false;
bStartNewPath = true;
}
scalarCuttee = cutEvents.get(icutEvent).m_scalarCuttee1;
scalarCutter = cutEvents.get(icutEvent).m_scalarCutter1;
if (cutPairs != null) {
segmentCuttee.cut(lastScalarCuttee,
cutEvents.get(icutEvent).m_scalarCuttee1,
segmentBufferCuttee);
multipath.addSegment(segmentBufferCuttee.get(),
bStartNewPath);
} else
segmentCount++;
lastScalarCuttee = scalarCuttee;
bCoincidentNotAdded = true;
bNoCutYet = false;
bStartNewPath = false;
if (icutEvent + 1 == cutEvents.size()
|| cutEvents.get(icutEvent + 1).m_count != 2
|| cutEvents.get(icutEvent + 1).m_ivertexCuttee == ivertexCuttee
&& cutEvents.get(icutEvent + 1).m_scalarCuttee0 != lastScalarCuttee) {
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(
(Geometry) multipath,
OperatorCutLocal.Side.Coincident,
ipartCuttee, ivertexCuttee,
scalarCuttee, cutPrev, ipartCutteePrev,
ivertexCutteePrev, scalarCutteePrev,
ipartCutter, ivertexCutter,
scalarCutter, ipartCutterPrev,
ivertexCutterPrev, scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
ipartCutteePrev = ipartCuttee;
ivertexCutteePrev = ivertexCuttee;
scalarCutteePrev = scalarCuttee;
ipartCutterPrev = ipartCutter;
ivertexCutterPrev = ivertexCutter;
scalarCutterPrev = scalarCutter;
cutPrev = OperatorCutLocal.Side.Coincident;
bNoCutYet = false;
bCoincidentNotAdded = false;
bCreateNewMultiPath = true;
bStartNewPath = true;
}
icutEvent++;
continue;
}
int ivertexCutteePlus = shape.getNextVertex(ivertexCuttee);
int ivertexCutterPlus = shape.getNextVertex(ivertexCutter);
int ivertexCutterMinus = shape.getPrevVertex(ivertexCutter);
if (icutEvent < cutEvents.size() - 1
&& cutEvents.get(icutEvent + 1).m_ivertexCuttee == ivertexCutteePlus
&& cutEvents.get(icutEvent + 1).m_ivertexCutter == ivertexCutter
&& cutEvents.get(icutEvent + 1).m_count == 2) {
if (scalarCuttee != lastScalarCuttee) {
if (bCreateNewMultiPath) {
if (cutPairs != null)
multipath = new Polyline();
else
segmentCount = 0;
}
if (icutEvent > 0
&& cutEvents.get(icutEvent - 1).m_ipartCuttee == ipartCuttee) {
if (cutPrev == OperatorCutLocal.Side.Right)
cut = OperatorCutLocal.Side.Left;
else if (cutPrev == OperatorCutLocal.Side.Left)
cut = OperatorCutLocal.Side.Right;
else
cut = OperatorCutLocal.Side.Undefined;
} else
cut = OperatorCutLocal.Side.Undefined;
if (cutPairs != null) {
segmentCuttee.cut(lastScalarCuttee,
scalarCuttee, segmentBufferCuttee);
multipath.addSegment(segmentBufferCuttee.get(),
bStartNewPath);
cutPair = new OperatorCutLocal.CutPair(
multipath, cut, ipartCuttee,
ivertexCuttee, scalarCuttee, cutPrev,
ipartCutteePrev, ivertexCutteePrev,
scalarCutteePrev, ipartCutter,
ivertexCutter, scalarCutter,
ipartCutterPrev, ivertexCutterPrev,
scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCount++;
segmentCounts.add(segmentCount);
}
lastScalarCuttee = scalarCuttee;
ipartCutteePrev = ipartCuttee;
ivertexCutteePrev = ivertexCuttee;
scalarCutteePrev = scalarCuttee;
ipartCutterPrev = ipartCutter;
ivertexCutterPrev = ivertexCutter;
scalarCutterPrev = scalarCutter;
cutPrev = cut;
bCurrentMultiPathNotAdded = false;
bNoCutYet = false;
bCreateNewMultiPath = true;
bStartNewPath = true;
}
icutEvent++;
continue;
}
boolean bContinue = _cutterTangents(bConsiderTouch, shape,
cutEvents, icutEvent, tangent0, tangent1);
if (bContinue) {
icutEvent++;
continue;
}
_cutteeTangents(shape, cutEvents, icutEvent, ipath,
ivertex, tangent2, tangent3);
boolean bCut = false;
boolean bTouch = false;
boolean bCutRight = true;
if (!tangent0.isEqual(tangent2)
&& !tangent1.isEqual(tangent2)
&& !tangent0.isEqual(tangent3)
&& !tangent1.isEqual(tangent3)) {
tangents[0].setCoords(tangent0);
tangents[1].setCoords(tangent1);
tangents[2].setCoords(tangent2);
tangents[3].setCoords(tangent3);
Arrays.sort(tangents, new Point2D.CompareVectors());
// SORTARRAY(tangents, Point2D,
// Point2D::_CompareVectors);
Point2D value0 = (Point2D) tangents[0];
Point2D value1 = (Point2D) tangents[1];
Point2D value2 = (Point2D) tangents[2];
Point2D value3 = (Point2D) tangents[3];
if (value0.isEqual(tangent0)) {
if (value1.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = false;
}
} else if (value3.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = true;
}
} else {
bCut = true;
bCutRight = value1.isEqual(tangent2);
}
} else if (value1.isEqual(tangent0)) {
if (value2.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = false;
}
} else if (value0.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = true;
}
} else {
bCut = true;
bCutRight = value2.isEqual(tangent2);
}
} else if (value2.isEqual(tangent0)) {
if (value3.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = false;
}
} else if (value1.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = true;
}
} else {
bCut = true;
bCutRight = value3.isEqual(tangent2);
}
} else {
if (value0.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = false;
}
} else if (value2.isEqual(tangent1)) {
if (!bConsiderTouch)
bCut = false;
else {
bCut = true;
bTouch = true;
bCutRight = true;
}
} else {
bCut = true;
bCutRight = value0.isEqual(tangent2);
}
}
}
if (bCut) {
boolean bIsFirstSegmentInPath = (ivertex == ivertexCuttee);
if (scalarCuttee != lastScalarCuttee
|| bIsFirstSegmentInPath
&& lastScalarCuttee == 0.0) {
if (bCreateNewMultiPath) {
if (cutPairs != null)
multipath = new Polyline();
else
segmentCount = 0;
}
if (cutPairs != null) {
segmentCuttee.cut(lastScalarCuttee,
scalarCuttee, segmentBufferCuttee);
multipath.addSegment(segmentBufferCuttee.get(),
bStartNewPath);
} else
segmentCount++;
}
if (bCutRight) {
if (cutPrev != OperatorCutLocal.Side.Right
|| bLocalCutsOnly) {
if (scalarCuttee != lastScalarCuttee
|| bIsFirstSegmentInPath
&& lastScalarCuttee == 0.0
|| bLocalCutsOnly) {
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(
multipath,
OperatorCutLocal.Side.Right,
ipartCuttee, ivertexCuttee,
scalarCuttee, cutPrev,
ipartCutteePrev,
ivertexCutteePrev,
scalarCutteePrev, ipartCutter,
ivertexCutter, scalarCutter,
ipartCutterPrev,
ivertexCutterPrev,
scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
}
if (!bTouch)
cutPrev = OperatorCutLocal.Side.Right;
else if (icutEvent == cutEvents.size() - 2
|| cutEvents.get(icutEvent + 2).m_ipartCuttee != ipartCuttee)
cutPrev = OperatorCutLocal.Side.Left;
} else {
if (scalarCuttee != lastScalarCuttee
|| bIsFirstSegmentInPath
&& lastScalarCuttee == 0.0
|| bLocalCutsOnly) {
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(
multipath,
OperatorCutLocal.Side.Undefined,
ipartCuttee, ivertexCuttee,
scalarCuttee, cutPrev,
ipartCutteePrev,
ivertexCutteePrev,
scalarCutteePrev, ipartCutter,
ivertexCutter, scalarCutter,
ipartCutterPrev,
ivertexCutterPrev,
scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
}
cutPrev = OperatorCutLocal.Side.Right;
}
} else {
if (cutPrev != OperatorCutLocal.Side.Left
|| bLocalCutsOnly) {
if (scalarCuttee != lastScalarCuttee
|| bIsFirstSegmentInPath
&& lastScalarCuttee == 0.0
|| bLocalCutsOnly) {
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(
multipath,
OperatorCutLocal.Side.Left,
ipartCuttee, ivertexCuttee,
scalarCuttee, cutPrev,
ipartCutteePrev,
ivertexCutteePrev,
scalarCutteePrev, ipartCutter,
ivertexCutter, scalarCutter,
ipartCutterPrev,
ivertexCutterPrev,
scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
}
if (!bTouch)
cutPrev = OperatorCutLocal.Side.Left;
else if (icutEvent == cutEvents.size() - 2
|| cutEvents.get(icutEvent + 2).m_ipartCuttee != ipartCuttee)
cutPrev = OperatorCutLocal.Side.Right;
} else {
if (scalarCuttee != lastScalarCuttee
|| bIsFirstSegmentInPath
&& lastScalarCuttee == 0.0
|| bLocalCutsOnly) {
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(
multipath,
OperatorCutLocal.Side.Undefined,
ipartCuttee, ivertexCuttee,
scalarCuttee, cutPrev,
ipartCutteePrev,
ivertexCutteePrev,
scalarCutteePrev, ipartCutter,
ivertexCutter, scalarCutter,
ipartCutterPrev,
ivertexCutterPrev,
scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
}
cutPrev = OperatorCutLocal.Side.Left;
}
}
if (scalarCuttee != lastScalarCuttee
|| bIsFirstSegmentInPath
&& lastScalarCuttee == 0.0 || bLocalCutsOnly) {
lastScalarCuttee = scalarCuttee;
ipartCutteePrev = ipartCuttee;
ivertexCutteePrev = ivertexCuttee;
scalarCutteePrev = scalarCuttee;
ipartCutterPrev = ipartCutter;
ivertexCutterPrev = ivertexCutter;
scalarCutterPrev = scalarCutter;
bCurrentMultiPathNotAdded = false;
bNoCutYet = false;
bCreateNewMultiPath = true;
bStartNewPath = true;
}
}
icutEvent++;
}
if (lastScalarCuttee != 1.0) {
if (bCreateNewMultiPath) {
if (cutPairs != null)
multipath = new Polyline();
else
segmentCount = 0;
}
if (cutPairs != null) {
segmentCuttee.cut(lastScalarCuttee, 1.0,
segmentBufferCuttee);
multipath.addSegment(segmentBufferCuttee.get(),
bStartNewPath);
} else
segmentCount++;
bCreateNewMultiPath = false;
bStartNewPath = false;
bCurrentMultiPathNotAdded = true;
}
}
if (bCurrentMultiPathNotAdded) {
scalarCuttee = 1.0;
ivertexCuttee = shape.getLastVertex(ipath);
ivertexCuttee = shape.getPrevVertex(ivertexCuttee);
ipartCutter = -1;
ivertexCutter = -1;
scalarCutter = NumberUtils.NaN();
if (bNoCutYet) {
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(multipath,
OperatorCutLocal.Side.Uncut, ipartCuttee,
ivertexCuttee, scalarCuttee, cutPrev,
ipartCutteePrev, ivertexCutteePrev,
scalarCutteePrev, ipartCutter, ivertexCutter,
scalarCutter, ipartCutterPrev,
ivertexCutterPrev, scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
} else {
if (cutPrev == OperatorCutLocal.Side.Right)
cut = OperatorCutLocal.Side.Left;
else if (cutPrev == OperatorCutLocal.Side.Left)
cut = OperatorCutLocal.Side.Right;
else
cut = OperatorCutLocal.Side.Undefined;
if (cutPairs != null) {
cutPair = new OperatorCutLocal.CutPair(multipath, cut,
ipartCuttee, ivertexCuttee, scalarCuttee,
cutPrev, ipartCutteePrev, ivertexCutteePrev,
scalarCutteePrev, ipartCutter, ivertexCutter,
scalarCutter, ipartCutterPrev,
ivertexCutterPrev, scalarCutterPrev);
cutPairs.add(cutPair);
} else {
segmentCounts.add(segmentCount);
}
}
}
}
}
static boolean _cutterTangents(boolean bConsiderTouch, EditShape shape,
ArrayList<CutEvent> cutEvents, int icutEvent, Point2D tangent0,
Point2D tangent1) {
double scalarCutter = cutEvents.get(icutEvent).m_scalarCutter0;
if (scalarCutter == 1.0)
return _cutterEndTangents(bConsiderTouch, shape, cutEvents,
icutEvent, tangent0, tangent1);
if (scalarCutter == 0.0)
return _cutterStartTangents(bConsiderTouch, shape, cutEvents,
icutEvent, tangent0, tangent1);
throw GeometryException.GeometryInternalError();
}
static boolean _cutterEndTangents(boolean bConsiderTouch, EditShape shape,
ArrayList<CutEvent> cutEvents, int icutEvent, Point2D tangent0,
Point2D tangent1) {
Line lineCutter = new Line();
Segment segmentCutter;
int ivertexCuttee = cutEvents.get(icutEvent).m_ivertexCuttee;
int ipartCutter = cutEvents.get(icutEvent).m_ipartCutter;
int ivertexCutter = cutEvents.get(icutEvent).m_ivertexCutter;
int ivertexCutteePrev = -1;
int ipartCutterPrev = -1;
int ivertexCutterPrev = -1;
int countPrev = -1;
if (!bConsiderTouch && icutEvent > 0) {
CutEvent cutEvent = cutEvents.get(icutEvent - 1);
ivertexCutteePrev = cutEvent.m_ivertexCuttee;
ipartCutterPrev = cutEvent.m_ipartCutter;
ivertexCutterPrev = cutEvent.m_ivertexCutter;
countPrev = cutEvent.m_count;
}
int ivertexCutteeNext = -1;
int ipartCutterNext = -1;
int ivertexCutterNext = -1;
int countNext = -1;
if (icutEvent < cutEvents.size() - 1) {
CutEvent cutEvent = cutEvents.get(icutEvent + 1);
ivertexCutteeNext = cutEvent.m_ivertexCuttee;
ipartCutterNext = cutEvent.m_ipartCutter;
ivertexCutterNext = cutEvent.m_ivertexCutter;
countNext = cutEvent.m_count;
}
int ivertexCutteePlus = shape.getNextVertex(ivertexCuttee);
int ivertexCutterPlus = shape.getNextVertex(ivertexCutter);
if (!bConsiderTouch) {
if ((icutEvent > 0 && ivertexCutteePrev == ivertexCuttee
&& ipartCutterPrev == ipartCutter
&& ivertexCutterPrev == ivertexCutterPlus && countPrev == 2)
|| (icutEvent < cutEvents.size() - 1
&& ivertexCutteeNext == ivertexCutteePlus
&& ipartCutterNext == ipartCutter
&& ivertexCutterNext == ivertexCutterPlus && countNext == 2)) {
segmentCutter = shape.getSegment(ivertexCutter);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
tangent1.setCoords(segmentCutter._getTangent(1.0));
tangent0.negate(tangent1);
tangent1.normalize();
tangent0.normalize();
return false;
}
if (icutEvent < cutEvents.size() - 1
&& ivertexCutteeNext == ivertexCuttee
&& ipartCutterNext == ipartCutter
&& ivertexCutterNext == ivertexCutterPlus) {
segmentCutter = shape.getSegment(ivertexCutter);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
tangent0.setCoords(segmentCutter._getTangent(1.0));
segmentCutter = shape.getSegment(ivertexCutterPlus);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutterPlus, lineCutter);
segmentCutter = lineCutter;
}
tangent1.setCoords(segmentCutter._getTangent(0.0));
tangent0.negate();
tangent1.normalize();
tangent0.normalize();
return false;
}
return true;
}
if (icutEvent == cutEvents.size() - 1
|| ivertexCutteeNext != ivertexCuttee
|| ipartCutterNext != ipartCutter
|| ivertexCutterNext != ivertexCutterPlus || countNext == 2) {
segmentCutter = shape.getSegment(ivertexCutter);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
tangent1.setCoords(segmentCutter._getTangent(1.0));
tangent0.negate(tangent1);
tangent1.normalize();
tangent0.normalize();
return false;
}
segmentCutter = shape.getSegment(ivertexCutter);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
tangent0.setCoords(segmentCutter._getTangent(1.0));
segmentCutter = shape.getSegment(ivertexCutterPlus);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutterPlus, lineCutter);
segmentCutter = lineCutter;
}
tangent1.setCoords(segmentCutter._getTangent(0.0));
tangent0.negate();
tangent1.normalize();
tangent0.normalize();
return false;
}
static boolean _cutterStartTangents(boolean bConsiderTouch,
EditShape shape, ArrayList<CutEvent> cutEvents, int icutEvent,
Point2D tangent0, Point2D tangent1) {
Line lineCutter = new Line();
Segment segmentCutter;
int ivertexCuttee = cutEvents.get(icutEvent).m_ivertexCuttee;
int ipartCutter = cutEvents.get(icutEvent).m_ipartCutter;
int ivertexCutter = cutEvents.get(icutEvent).m_ivertexCutter;
int ivertexCutteeNext = -1;
int ipartCutterNext = -1;
int ivertexCutterNext = -1;
int countNext = -1;
if (!bConsiderTouch && icutEvent < cutEvents.size() - 1) {
CutEvent cutEvent = cutEvents.get(icutEvent + 1);
ivertexCutteeNext = cutEvent.m_ivertexCuttee;
ipartCutterNext = cutEvent.m_ipartCutter;
ivertexCutterNext = cutEvent.m_ivertexCutter;
countNext = cutEvent.m_count;
}
int ivertexCutteePrev = -1;
int ipartCutterPrev = -1;
int ivertexCutterPrev = -1;
int countPrev = -1;
if (icutEvent > 0) {
CutEvent cutEvent = cutEvents.get(icutEvent - 1);
ivertexCutteePrev = cutEvent.m_ivertexCuttee;
ipartCutterPrev = cutEvent.m_ipartCutter;
ivertexCutterPrev = cutEvent.m_ivertexCutter;
countPrev = cutEvent.m_count;
}
int ivertexCutteePlus = shape.getNextVertex(ivertexCuttee);
int ivertexCutterMinus = shape.getPrevVertex(ivertexCutter);
if (!bConsiderTouch) {
if ((icutEvent > 0 && ivertexCutteePrev == ivertexCuttee
&& ipartCutterPrev == ipartCutter
&& ivertexCutterPrev == ivertexCutterMinus && countPrev == 2)
|| (icutEvent < cutEvents.size() - 1
&& ivertexCutteeNext == ivertexCutteePlus
&& ipartCutterNext == ipartCutter
&& ivertexCutterNext == ivertexCutterMinus && countNext == 2)) {
segmentCutter = shape.getSegment(ivertexCutter);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
tangent1.setCoords(segmentCutter._getTangent(0.0));
tangent0.negate(tangent1);
tangent1.normalize();
tangent0.normalize();
return false;
}
return true;
}
if (icutEvent == 0 || ivertexCutteePrev != ivertexCuttee
|| ipartCutterPrev != ipartCutter
|| ivertexCutterPrev != ivertexCutterMinus || countPrev == 2) {
segmentCutter = shape.getSegment(ivertexCutter);
if (segmentCutter == null) {
shape.queryLineConnector(ivertexCutter, lineCutter);
segmentCutter = lineCutter;
}
tangent1.setCoords(segmentCutter._getTangent(0.0));
tangent0.negate(tangent1);
tangent1.normalize();
tangent0.normalize();
return false;
}
// Already processed the event
return true;
}
static boolean _cutteeTangents(EditShape shape,
ArrayList<CutEvent> cutEvents, int icutEvent, int ipath,
int ivertex, Point2D tangent2, Point2D tangent3) {
Line lineCuttee = new Line();
Segment segmentCuttee = shape.getSegment(ivertex);
if (segmentCuttee == null) {
shape.queryLineConnector(ivertex, lineCuttee);
segmentCuttee = lineCuttee;
}
CutEvent cutEvent = cutEvents.get(icutEvent);
int ivertexCuttee = cutEvent.m_ivertexCuttee;
double scalarCuttee = cutEvent.m_scalarCuttee0;
int ivertexCutteePlus = shape.getNextVertex(ivertexCuttee);
if (scalarCuttee == 1.0) {
tangent2.setCoords(segmentCuttee._getTangent(1.0));
if (ivertexCutteePlus != -1
&& ivertexCutteePlus != shape.getLastVertex(ipath)) {
segmentCuttee = shape.getSegment(ivertexCutteePlus);
if (segmentCuttee == null) {
shape.queryLineConnector(ivertexCutteePlus, lineCuttee);
segmentCuttee = lineCuttee;
}
tangent3.setCoords(segmentCuttee._getTangent(0.0));
segmentCuttee = shape.getSegment(ivertexCuttee);
if (segmentCuttee == null) {
shape.queryLineConnector(ivertexCuttee, lineCuttee);
segmentCuttee = lineCuttee;
}
} else
tangent3.setCoords(tangent2);
tangent2.negate();
tangent3.normalize();
tangent2.normalize();
return false;
}
if (scalarCuttee == 0.0) {
tangent3.setCoords(segmentCuttee._getTangent(scalarCuttee));
tangent2.negate(tangent3);
tangent3.normalize();
tangent2.normalize();
return false;
}
throw GeometryException.GeometryInternalError();
}
}
| 20,239 |
347 | <gh_stars>100-1000
package org.ovirt.engine.core.utils.network.predicate;
import java.util.function.Predicate;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
public final class InterfaceByNetworkNamePredicate implements Predicate<VdsNetworkInterface> {
final String hostManagementNetworkName;
public InterfaceByNetworkNamePredicate(String hostManagementNetworkName) {
this.hostManagementNetworkName = hostManagementNetworkName;
}
@Override
public boolean test(VdsNetworkInterface iface) {
return iface.getNetworkName() != null && iface.getNetworkName().equals(hostManagementNetworkName);
}
}
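// Example (hypothetical usage sketch): selecting the interface that carries the management
// network. The "interfaces" collection and the network name are assumptions for illustration.
//
//   Optional<VdsNetworkInterface> mgmtIface = interfaces.stream()
//           .filter(new InterfaceByNetworkNamePredicate("ovirtmgmt"))
//           .findFirst();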
| 203 |
4,857 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@Category({MapReduceTests.class, LargeTests.class})
public class TestHBaseMRTestingUtility {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHBaseMRTestingUtility.class);
@Test
public void testMRYarnConfigsPopulation() throws IOException {
Map<String, String> dummyProps = new HashMap<>();
dummyProps.put("mapreduce.jobtracker.address", "dummyhost:11234");
dummyProps.put("yarn.resourcemanager.address", "dummyhost:11235");
dummyProps.put("mapreduce.jobhistory.address", "dummyhost:11236");
dummyProps.put("yarn.resourcemanager.scheduler.address", "dummyhost:11237");
dummyProps.put("mapreduce.jobhistory.webapp.address", "dummyhost:11238");
dummyProps.put("yarn.resourcemanager.webapp.address", "dummyhost:11239");
HBaseTestingUtil hbt = new HBaseTestingUtil();
// populate the mr props to the Configuration instance
for (Map.Entry<String, String> entry : dummyProps.entrySet()) {
hbt.getConfiguration().set(entry.getKey(), entry.getValue());
}
for (Map.Entry<String,String> entry : dummyProps.entrySet()) {
assertTrue("The Configuration for key " + entry.getKey() +" and value: " + entry.getValue() +
" is not populated correctly", hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()));
}
hbt.startMiniMapReduceCluster();
// Confirm that MiniMapReduceCluster overwrites the mr properties and updates the Configuration
for (Map.Entry<String,String> entry : dummyProps.entrySet()) {
assertFalse("The MR prop: " + entry.getValue() + " is not overwritten when map reduce mini"+
"cluster is started", hbt.getConfiguration().get(entry.getKey()).equals(entry.getValue()));
}
hbt.shutdownMiniMapReduceCluster();
}
}
| 1,017 |
425 | <gh_stars>100-1000
{
"name" : "the rice grain mayhem",
"author" : {
"name" : "<NAME>",
"github" : "nicoptere",
"website" : "http://www.barradeau.com",
"twitter" : "nicoptere"
},
"description" : "the casual rice grain badassery.",
"tags" : [ "rice", "grains" ],
"interaction" : {
"mouse" : false,
"keyboard" : false,
"touch" : false
},
"instructions" : "watch'em move",
"colour_scheme" : "dark",
"mobile_friendly" : false,
"slug" : "nicoptere/the-rice-grain-mayhem"
} | 270 |
734 | <gh_stars>100-1000
package com.cheikh.lazywaimai.ui.adapter.holder;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import butterknife.BindView;
import com.cheikh.lazywaimai.R;
import com.cheikh.lazywaimai.base.BaseViewHolder;
import com.cheikh.lazywaimai.model.bean.CartInfo;
import com.cheikh.lazywaimai.util.StringFetcher;
/**
* author: cheikh.wang on 16/11/24
* email: <EMAIL>
*/
public class ExtraFeeItemViewHolder extends BaseViewHolder<CartInfo.ExtraFee> {
@BindView(R.id.txt_name)
TextView nameTxt;
@BindView(R.id.txt_price)
TextView priceTxt;
@BindView(R.id.txt_desc)
TextView descTxt;
public ExtraFeeItemViewHolder(View itemView) {
super(itemView);
}
public void bind(CartInfo.ExtraFee item) {
nameTxt.setText(item.getName());
priceTxt.setText(StringFetcher.getString(R.string.label_price, item.getPrice()));
if (!TextUtils.isEmpty(item.getDescription())) {
descTxt.setText(item.getDescription());
descTxt.setVisibility(View.VISIBLE);
} else {
descTxt.setVisibility(View.GONE);
}
}
}
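// Example (hypothetical usage sketch): how an adapter might create and bind this holder. The
// layout id and the "extraFee" variable are assumptions for illustration.
//
//   View itemView = LayoutInflater.from(parent.getContext())
//           .inflate(R.layout.item_extra_fee, parent, false);
//   ExtraFeeItemViewHolder holder = new ExtraFeeItemViewHolder(itemView);
//   holder.bind(extraFee);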
| 506 |
1,376 | <reponame>maestro-1/gino
import asyncio
import pytest
from .models import db, User, UserType
pytestmark = pytest.mark.asyncio
async def test(bind):
await User.create(nickname="test")
assert isinstance(await User.query.gino.first(), User)
bind.update_execution_options(return_model=False)
assert not isinstance(await User.query.gino.first(), User)
async with db.acquire() as conn:
assert isinstance(
await conn.execution_options(return_model=True).first(User.query), User
)
assert not isinstance(
await User.query.execution_options(return_model=False).gino.first(), User
)
assert isinstance(
await User.query.execution_options(return_model=True).gino.first(), User
)
assert not isinstance(await User.query.gino.first(), User)
bind.update_execution_options(return_model=True)
assert isinstance(await User.query.gino.first(), User)
# noinspection PyProtectedMember
async def test_compiled_first_not_found(bind):
async with bind.acquire() as conn:
with pytest.raises(LookupError, match="No such execution option"):
result = conn._execute("SELECT NOW()", (), {})
result.context._compiled_first_opt("nonexist")
# noinspection PyUnusedLocal
async def test_query_ext(bind):
q = User.query
assert q.gino.query is q
u = await User.create(nickname="test")
assert isinstance(await User.query.gino.first(), User)
row = await User.query.gino.return_model(False).first()
assert not isinstance(row, User)
assert row == (
u.id,
"test",
{"age": 18, "birthday": "1970-01-01T00:00:00.000000"},
UserType.USER,
None,
)
row = await User.query.gino.model(None).first()
assert not isinstance(row, User)
assert row == (
u.id,
"test",
{"age": 18, "birthday": "1970-01-01T00:00:00.000000"},
UserType.USER,
None,
)
row = await db.select([User.id, User.nickname, User.type]).gino.first()
assert not isinstance(row, User)
assert row == (u.id, "test", UserType.USER)
user = await db.select([User.id, User.nickname, User.type]).gino.model(User).first()
assert isinstance(user, User)
assert user.id is not None
assert user.nickname == "test"
assert user.type == UserType.USER
with pytest.raises(asyncio.TimeoutError):
await db.select([db.func.SLEEP(1), User.id]).gino.timeout(0.1).status()
| 1,013 |
468 | <filename>Examples/libffmpeghelper/audiodecoding.cpp<gh_stars>100-1000
#include "stdafx.h"
struct AudioDecoderContext
{
AVCodec *codec;
AVCodecContext *av_codec_context;
AVPacket av_raw_packet;
AVFrame *frame;
};
struct AudioResamplerContext
{
SwrContext *swr_context;
uint8_t **out_data;
int out_linesize;
int64_t out_nb_samples;
int out_sample_rate;
int out_channels;
AVSampleFormat out_sample_format;
};
int create_audio_decoder(int codec_id, int bits_per_coded_sample, void **handle)
{
if (!handle)
return -1;
auto context = static_cast<AudioDecoderContext *>(av_mallocz(sizeof(AudioDecoderContext)));
if (!context)
return -2;
context->codec = avcodec_find_decoder(static_cast<AVCodecID>(codec_id));
if (!context->codec)
{
remove_audio_decoder(context);
return -3;
}
context->av_codec_context = avcodec_alloc_context3(context->codec);
if (!context->av_codec_context)
{
remove_audio_decoder(context);
return -4;
}
if (codec_id == AV_CODEC_ID_PCM_MULAW || codec_id == AV_CODEC_ID_PCM_ALAW)
{
context->av_codec_context->sample_rate = 8000;
context->av_codec_context->channels = 1;
}
context->av_codec_context->bits_per_coded_sample = bits_per_coded_sample;
if (avcodec_open2(context->av_codec_context, context->codec, nullptr) < 0)
{
remove_audio_decoder(context);
return -5;
}
context->frame = av_frame_alloc();
if (!context->frame)
{
remove_audio_decoder(context);
return -6;
}
av_init_packet(&context->av_raw_packet);
*handle = context;
return 0;
}
int set_audio_decoder_extradata(void *handle, void *extradata, int extradataLength)
{
#if _DEBUG
if (!handle || !extradata || !extradataLength)
return -1;
#endif
auto context = static_cast<AudioDecoderContext *>(handle);
if (!context->av_codec_context->extradata || context->av_codec_context->extradata_size < extradataLength)
{
av_free(context->av_codec_context->extradata);
context->av_codec_context->extradata = static_cast<uint8_t*>(av_malloc(extradataLength + AV_INPUT_BUFFER_PADDING_SIZE));
if (!context->av_codec_context->extradata)
return -2;
}
context->av_codec_context->extradata_size = extradataLength;
memcpy(context->av_codec_context->extradata, extradata, extradataLength);
memset(context->av_codec_context->extradata + extradataLength, 0, AV_INPUT_BUFFER_PADDING_SIZE);
avcodec_close(context->av_codec_context);
if (avcodec_open2(context->av_codec_context, context->codec, nullptr) < 0)
return -3;
return 0;
}
int decode_audio_frame(void *handle, void *rawBuffer, int rawBufferLength, int *sampleRate, int *bitsPerSample, int *channels)
{
#if _DEBUG
if (!handle || !rawBuffer || !rawBufferLength || !sampleRate || !bitsPerSample || !channels)
return -1;
if (reinterpret_cast<uintptr_t>(rawBuffer) % 4 != 0)
return -2;
#endif
auto context = static_cast<AudioDecoderContext *>(handle);
context->av_raw_packet.data = static_cast<uint8_t *>(rawBuffer);
context->av_raw_packet.size = rawBufferLength;
int got_frame;
const int len = avcodec_decode_audio4(context->av_codec_context, context->frame, &got_frame, &context->av_raw_packet);
if (len != rawBufferLength)
return -3;
if (got_frame)
{
*sampleRate = context->av_codec_context->sample_rate;
*bitsPerSample = av_get_bytes_per_sample(context->av_codec_context->sample_fmt) * 8;
*channels = context->av_codec_context->channels;
return 0;
}
return -4;
}
int get_decoded_audio_frame(void *handle, void **outBuffer, int *outDataSize)
{
#if _DEBUG
if (!handle)
return -1;
#endif
auto context = static_cast<AudioDecoderContext *>(handle);
*reinterpret_cast<uint8_t **>(outBuffer) = context->frame->data[0];
	*outDataSize = av_samples_get_buffer_size(nullptr, context->av_codec_context->channels, context->frame->nb_samples, context->av_codec_context->sample_fmt, 1);
return 0;
}
void remove_audio_decoder(void *handle)
{
if (!handle)
return;
auto context = static_cast<AudioDecoderContext *>(handle);
if (context->av_codec_context)
{
avcodec_close(context->av_codec_context);
av_free(context->av_codec_context);
}
av_frame_free(&context->frame);
av_free(context);
}
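// Example (hypothetical usage sketch of the decoder half of this API; the codec id, packet
// variables and error handling are assumptions for illustration):
//
//   void *decoder = nullptr;
//   if (create_audio_decoder(AV_CODEC_ID_PCM_MULAW, 8, &decoder) == 0)
//   {
//       int sampleRate, bitsPerSample, channels;
//       if (decode_audio_frame(decoder, packetData, packetSize, &sampleRate, &bitsPerSample, &channels) == 0)
//       {
//           void *pcm; int pcmSize;
//           get_decoded_audio_frame(decoder, &pcm, &pcmSize);
//       }
//       remove_audio_decoder(decoder);
//   }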
int create_audio_resampler(void *decoderHandle, int outSampleRate, int outBitsPerSample, int outChannels, void **handle)
{
#if _DEBUG
if (!handle)
return -1;
#endif
const auto decoder_context = static_cast<AudioDecoderContext *>(decoderHandle);
#if _DEBUG
if (!decoder_context)
return -2;
#endif
const int out_sample_rate = outSampleRate != 0 ? outSampleRate : decoder_context->av_codec_context->sample_rate;
AVSampleFormat out_sample_format;
if (outBitsPerSample != 0)
{
if (outBitsPerSample == 8)
out_sample_format = AV_SAMPLE_FMT_U8;
else if (outBitsPerSample == 16)
out_sample_format = AV_SAMPLE_FMT_S16;
else
return -3;
}
else
out_sample_format = decoder_context->av_codec_context->sample_fmt;
int out_channels;
int64_t out_channel_layout;
if (outChannels != 0)
{
out_channels = outChannels;
out_channel_layout = av_get_default_channel_layout(outChannels);
}
else
{
out_channel_layout = decoder_context->av_codec_context->channel_layout;
out_channels = decoder_context->av_codec_context->channels;
}
const auto resampler_context = static_cast<AudioResamplerContext *>(av_mallocz(sizeof(AudioResamplerContext)));
if (!resampler_context)
return -4;
const int64_t in_channel_layout = decoder_context->av_codec_context->channel_layout;
resampler_context->swr_context = swr_alloc_set_opts(nullptr, out_channel_layout, out_sample_format, out_sample_rate, in_channel_layout,
decoder_context->av_codec_context->sample_fmt, decoder_context->av_codec_context->sample_rate, 0, nullptr);
if (resampler_context->swr_context == nullptr)
{
av_free(resampler_context);
return -5;
}
if(swr_init(resampler_context->swr_context) < 0)
{
remove_audio_resampler(resampler_context);
return -6;
}
resampler_context->out_sample_rate = out_sample_rate;
resampler_context->out_channels = out_channels;
resampler_context->out_sample_format = out_sample_format;
*handle = resampler_context;
return 0;
}
int resample_decoded_audio_frame(void *decoderHandle, void *resamplerHandle, void **outBuffer, int *outDataSize)
{
#if _DEBUG
if (!decoderHandle || !resamplerHandle || !outBuffer || !outDataSize)
return -1;
#endif
const auto decoder_context = static_cast<AudioDecoderContext *>(decoderHandle);
const auto resampler_context = static_cast<AudioResamplerContext *>(resamplerHandle);
const int out_nb_samples = static_cast<int>(av_rescale_rnd(swr_get_delay(resampler_context->swr_context, decoder_context->frame->sample_rate) +
decoder_context->frame->nb_samples, resampler_context->out_sample_rate,
decoder_context->frame->sample_rate, AV_ROUND_UP));
if (out_nb_samples > resampler_context->out_nb_samples)
{
if (resampler_context->out_data)
av_freep(&resampler_context->out_data[0]);
if (av_samples_alloc_array_and_samples(&resampler_context->out_data, &resampler_context->out_linesize, resampler_context->out_channels,
out_nb_samples, resampler_context->out_sample_format, 0) < 0)
return -2;
resampler_context->out_nb_samples = out_nb_samples;
}
const int ret = swr_convert(resampler_context->swr_context, resampler_context->out_data, out_nb_samples, const_cast<const uint8_t **>(decoder_context->frame->data), decoder_context->frame->nb_samples);
if(ret < 0)
return -3;
*reinterpret_cast<uint8_t **>(outBuffer) = resampler_context->out_data[0];
	*outDataSize = av_samples_get_buffer_size(&resampler_context->out_linesize, resampler_context->out_channels,
	                                          ret, resampler_context->out_sample_format, 1);
return 0;
}
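/*
 * Minimal usage sketch (illustrative only, not part of the exported API):
 * it shows how the handles returned by the functions above are meant to flow
 * together. The decoder handle is assumed to come from the decoder-creation
 * and decoding functions defined earlier in this file, so treat
 * `decoderHandle` as given here.
 *
 *   void *resampler = nullptr;
 *   // request 44.1 kHz, 16-bit, stereo output; pass 0 to keep a source parameter
 *   if (create_audio_resampler(decoderHandle, 44100, 16, 2, &resampler) == 0)
 *   {
 *       void *buffer = nullptr;
 *       int size = 0;
 *       // call once per successfully decoded frame
 *       if (resample_decoded_audio_frame(decoderHandle, resampler, &buffer, &size) == 0)
 *       {
 *           // `buffer` points at `size` bytes of converted samples owned by the resampler
 *       }
 *       remove_audio_resampler(resampler);
 *   }
 */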
void remove_audio_resampler(void *handle)
{
if (!handle)
return;
auto resampler_context = static_cast<AudioResamplerContext *>(handle);
if (resampler_context->out_data)
{
av_freep(&resampler_context->out_data[0]);
av_freep(&resampler_context->out_data);
}
swr_free(&resampler_context->swr_context);
av_free(resampler_context);
} | 3,227 |
793 | # coding: utf-8
import os
from os import path
import copy
import shutil
import unittest
from unittest import mock
import rarfile
from tests import create_test_directory
from tests.unit import assets_path
from tests.unit.getsubtitles import get_function as get_f
from getsub.models import Video
def get_function(**kwargs):
return get_f("process_archive", **kwargs)
class TestProcessArchive(unittest.TestCase):
test_dir = "testPA"
test_video = Video(path.join(test_dir, "video.mkv"), test_dir)
test_dir_structure = {}
def tearDown(self):
if path.exists(TestProcessArchive.test_dir):
shutil.rmtree(TestProcessArchive.test_dir)
def test_unsupported_archive(self):
process_archive = get_function()
err, subnames = process_archive(TestProcessArchive.test_video, b"", ".tar")
self.assertEqual((err, subnames), ("unsupported file type .tar", []))
def test_invalid_archive(self):
process_archive = get_function()
self.assertRaises(
rarfile.BadRarFile,
process_archive,
TestProcessArchive.test_video,
b"",
".7z",
)
def test_empty_archive(self):
with open(path.join(assets_path, "empty.zip"), "rb") as f:
data = f.read()
process_archive = get_function()
err, subnames = process_archive(TestProcessArchive.test_video, data, ".zip")
self.assertEqual((err, subnames), ("no subtitle in this archive", []))
def test_fail_guess(self):
with open(path.join(assets_path, "archive.zip"), "rb") as f:
data = f.read()
process_archive = get_function()
err, subnames = process_archive(TestProcessArchive.test_video, data, ".zip")
self.assertEqual((err, subnames), ("no guess result in auto mode", []))
@mock.patch("builtins.input", side_effect=["1"])
def test_choose_subtitle(self, mock_input):
create_test_directory(
TestProcessArchive.test_dir_structure,
parent_dir=TestProcessArchive.test_dir,
)
with open(path.join(assets_path, "archive.zip"), "rb") as f:
data = f.read()
test_video = Video(path.join(TestProcessArchive.test_dir, "sub1.mkv"))
process_archive = get_function(single=True)
err, subnames = process_archive(test_video, data, ".zip")
self.assertEqual((err, subnames), ("", [["dir1/sub1.ass", ".ass"]]))
def test_save_both_subtitles_success(self):
create_test_directory(
TestProcessArchive.test_dir_structure,
parent_dir=TestProcessArchive.test_dir,
)
with open(path.join(assets_path, "archive.zip"), "rb") as f:
data = f.read()
test_video = Video(path.join(TestProcessArchive.test_dir, "sub.mkv"))
process_archive = get_function(both=True)
err, subnames = process_archive(test_video, data, ".zip")
self.assertTrue(
"sub.ass" in os.listdir(TestProcessArchive.test_dir)
and "sub.srt" in os.listdir(TestProcessArchive.test_dir)
)
def test_save_both_subtitles_fail(self):
create_test_directory(
TestProcessArchive.test_dir_structure,
parent_dir=TestProcessArchive.test_dir,
)
with open(path.join(assets_path, "archive.zip"), "rb") as f:
data = f.read()
process_archive = get_function(both=True)
test_video = Video(path.join(TestProcessArchive.test_dir, "sub1.mkv"))
err, subnames = process_archive(test_video, data, ".zip")
self.assertTrue(
"sub1.ass" in os.listdir(TestProcessArchive.test_dir)
and "sub1.srt" not in os.listdir(TestProcessArchive.test_dir)
)
def test_delete_existed_subtitles(self):
dir_structure = copy.copy(TestProcessArchive.test_dir_structure)
dir_structure["sub1.ass"] = None
dir_structure["sub1.srt"] = None
create_test_directory(
dir_structure, parent_dir=TestProcessArchive.test_dir,
)
with open(path.join(assets_path, "archive.zip"), "rb") as f:
data = f.read()
process_archive = get_function(both=True)
test_video = Video(path.join(TestProcessArchive.test_dir, "sub1.mkv"))
err, subnames = process_archive(test_video, data, ".zip")
self.assertTrue(
"sub1.ass" in os.listdir(TestProcessArchive.test_dir)
and "sub1.srt" not in os.listdir(TestProcessArchive.test_dir)
)
def test_identifier(self):
dir_structure = copy.copy(TestProcessArchive.test_dir_structure)
dir_structure["sub1.ass"] = None
dir_structure["sub1.zh.srt"] = None
create_test_directory(
dir_structure, parent_dir=TestProcessArchive.test_dir,
)
with open(path.join(assets_path, "archive.zip"), "rb") as f:
data = f.read()
process_archive = get_function()
test_video = Video(
path.join(TestProcessArchive.test_dir, "sub1.mkv"), identifier=".zh"
)
err, subnames = process_archive(test_video, data, ".zip")
self.assertTrue(
"sub1.ass" in os.listdir(TestProcessArchive.test_dir)
and "sub1.zh.ass" in os.listdir(TestProcessArchive.test_dir)
and "sub1.zh.srt" not in os.listdir(TestProcessArchive.test_dir)
)
if __name__ == "__main__":
unittest.main()
| 2,442 |
2,206 | /*
*
* Copyright (c) 2006-2020, Speedment, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); You may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.speedment.runtime.compute.trait;
import com.speedment.runtime.compute.ToByte;
import com.speedment.runtime.compute.ToDouble;
import com.speedment.runtime.compute.ToInt;
import com.speedment.runtime.compute.ToLong;
/**
* Trait that describes an expression that has several
* {@link #multiply(int)}-methods for generating new expressions for the product
* of this value and something else.
*
* @param <T> the input type
*
* @author <NAME>
* @since 3.1.0
*/
public interface HasMultiply<T> {
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
HasMultiply<T> multiply(byte other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
@SuppressWarnings("overloads")
HasMultiply<T> multiply(ToByte<T> other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
HasMultiply<T> multiply(int other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
@SuppressWarnings("overloads")
HasMultiply<T> multiply(ToInt<T> other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
HasMultiply<T> multiply(long other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
@SuppressWarnings("overloads")
HasMultiply<T> multiply(ToLong<T> other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
ToDouble<T> multiply(double other);
/**
* Creates and returns an expression that returns the product of the result
* from the current expression and the other factor. For an example, if the
* result of the current expression was {@code 9} and the factor was set to
* {@code 3}, then the result of the returned expression would be {@code
* 27}.
* <p>
* If this expression is nullable and the result was {@code null}, then the
* result of the returned expression will also be {@code null}.
*
* @param other the other factor used for the multiplication
* @return the new expression
*/
@SuppressWarnings("overloads")
ToDouble<T> multiply(ToDouble<T> other);
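    /*
     * Minimal usage sketch (not part of the original interface): given an
     * expression {@code expr} implementing {@code HasMultiply<T>}, the
     * overloads above compose into new expressions, e.g.
     *
     *   HasMultiply<T> tripled = expr.multiply(3);   // integral factor
     *   ToDouble<T>    scaled  = expr.multiply(0.5); // double factor always yields ToDouble
     *
     * The concrete return type of the integral overloads depends on the
     * implementing expression (ToByte, ToInt, ToLong, ...), which is why they
     * are declared here with the generic {@code HasMultiply<T>} type.
     */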
} | 2,048 |
501 | #!/usr/bin/env python
#
# Author: <NAME> <<EMAIL>>
#
from pyscf import gto, scf, mcscf
'''
Symmetry is not immutable
In PySCF, symmetry is not a built-in data structure. Orbitals are stored in C1
symmetry, and the irreps and symmetry information are generated on the fly.
We can switch on symmetry for the CASSCF solver even if the Hartree-Fock
calculation was not optimized with symmetry.
'''
mol = gto.Mole()
mol.build(
atom = [['O' , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)]],
basis = 'cc-pvdz',
)
mf = scf.RHF(mol)
mf.kernel()
mol.build(0, 0, symmetry = 'C2v')
mc = mcscf.CASSCF(mf, 6, 8)
mc.kernel()
| 299 |
349 | try:
from public_config import *
except ImportError:
pass
PORT = 9032
SERVICE_NAME = 'project'
MT_PROJECT_ID = 15
JIRA_USER_ID = 127
| 58 |
2,338 | <filename>compiler-rt/lib/gwp_asan/optional/printf.h
//===-- printf.h ------------------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef GWP_ASAN_OPTIONAL_PRINTF_H_
#define GWP_ASAN_OPTIONAL_PRINTF_H_
namespace gwp_asan {
// ================================ Requirements ===============================
// This function is required to be provided by the supporting allocator iff the
// allocator wants to use any of the optional components.
// ================================ Description ================================
// This function shall produce output according to a strict subset of the C
// standard library's printf() family. This function must support printing the
// following formats:
// 1. integers: "%([0-9]*)?(z|ll)?{d,u,x,X}"
// 2. pointers: "%p"
// 3. strings: "%[-]([0-9]*)?(\\.\\*)?s"
// 4. chars: "%c"
// This function must be implemented in a signal-safe manner, and thus must not
// malloc().
// =================================== Notes ===================================
// This function has a slightly different signature than the C standard
// library's printf(). Notably, it returns 'void' rather than 'int'.
typedef void (*Printf_t)(const char *Format, ...);
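// ================================= Example ===================================
// A minimal sketch of a conforming provider (illustrative only; whether
// vsnprintf meets the signal-safety requirements of your environment is an
// assumption the supporting allocator has to verify):
//
//   void ExamplePrintf(const char *Format, ...) {
//     char Buffer[512];
//     va_list Args;
//     va_start(Args, Format);
//     vsnprintf(Buffer, sizeof(Buffer), Format, Args);
//     va_end(Args);
//     write(STDERR_FILENO, Buffer, strlen(Buffer));
//   }
//
// The allocator then passes a pointer to such a function wherever the optional
// components ask for a Printf_t.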
} // namespace gwp_asan
#endif // GWP_ASAN_OPTIONAL_PRINTF_H_
| 441 |
897 | <gh_stars>100-1000
'''
Implement next permutation, which rearranges numbers into the lexicographically next greater permutation of numbers.
If such an arrangement is not possible, it must rearrange it as the lowest possible order (i.e., sorted in ascending order).
The replacement must be in place and use only constant extra memory.
Ex1:
Input :[1, 2, 3]
output:[1, 3, 2]
Ex2:
Input :[3, 2, 1]
output:[1, 2, 3]
Ex3:
Input :[1, 1, 5]
output:[1, 5, 1]
'''
def permute(arr):
    #arrays shorter than two elements have only one permutation
    if len(arr)<2:
        return arr
    inverse=len(arr)-2
    #scan from the right for the first element that is smaller than its successor
    while inverse>=0 and arr[inverse]>=arr[inverse+1]:
        inverse-=1
    #the whole array is descending: wrap around to the lowest (ascending) order
    if inverse<0:
        arr.reverse()
        return arr
    #scanning from the right, swap that pivot with the first (i.e. smallest) element larger than it
    for i in reversed(range(inverse, len(arr))):
        if arr[i]>arr[inverse]:
            arr[i], arr[inverse]=arr[inverse],arr[i]
            break
    #reverse the suffix so it is back in ascending order
    arr[inverse+1:]= reversed(arr[inverse+1:])
    #result
    return arr
if __name__ == "__main__":
arr= list(map(int, input("Enter the list: ").split()))
print(permute(arr))
'''
Time Complexity: O(n)
Space Complexity: O(1)
INPUT:
Enter the list:
1 2 3
OUTPUT:
[1,3,2]
'''
| 537 |
451 | // File Automatically generated by eLiSe
#include "general/all.h"
#include "private/all.h"
#include "cEqAppui_X_C2MEbner.h"
cEqAppui_X_C2MEbner::cEqAppui_X_C2MEbner():
cElCompiledFonc(1)
{
AddIntRef (cIncIntervale("Intr",0,15));
AddIntRef (cIncIntervale("Orient",15,21));
Close(false);
}
void cEqAppui_X_C2MEbner::ComputeVal()
{
double tmp0_ = mCompCoord[15];
double tmp1_ = mCompCoord[16];
double tmp2_ = cos(tmp1_);
double tmp3_ = sin(tmp0_);
double tmp4_ = cos(tmp0_);
double tmp5_ = sin(tmp1_);
double tmp6_ = mCompCoord[17];
double tmp7_ = mCompCoord[18];
double tmp8_ = mLocXTer-tmp7_;
double tmp9_ = sin(tmp6_);
double tmp10_ = -(tmp9_);
double tmp11_ = -(tmp5_);
double tmp12_ = cos(tmp6_);
double tmp13_ = mCompCoord[19];
double tmp14_ = mLocYTer-tmp13_;
double tmp15_ = mCompCoord[20];
double tmp16_ = mLocZTer-tmp15_;
double tmp17_ = mLocEbner_State_0_0*mLocEbner_State_0_0;
double tmp18_ = tmp17_*0.666667;
double tmp19_ = mLocYIm*mLocYIm;
double tmp20_ = tmp19_-tmp18_;
double tmp21_ = mLocXIm*mLocXIm;
double tmp22_ = tmp21_-tmp18_;
mVal[0] = (mCompCoord[1]+mCompCoord[0]*((tmp4_*tmp2_*(tmp8_)+tmp3_*tmp2_*(tmp14_)+tmp5_*(tmp16_))/((-(tmp3_)*tmp10_+tmp4_*tmp11_*tmp12_)*(tmp8_)+(tmp4_*tmp10_+tmp3_*tmp11_*tmp12_)*(tmp14_)+tmp2_*tmp12_*(tmp16_))))-(((1+mCompCoord[3])*mLocXIm+mCompCoord[4]*mLocYIm)-mCompCoord[5]*2*(tmp22_)+mCompCoord[6]*mLocXIm*mLocYIm+mCompCoord[7]*(tmp20_)+mCompCoord[9]*mLocXIm*(tmp20_)+mCompCoord[11]*mLocYIm*(tmp22_)+mCompCoord[13]*(tmp22_)*(tmp20_));
}
void cEqAppui_X_C2MEbner::ComputeValDeriv()
{
double tmp0_ = mCompCoord[15];
double tmp1_ = mCompCoord[16];
double tmp2_ = cos(tmp1_);
double tmp3_ = sin(tmp0_);
double tmp4_ = cos(tmp0_);
double tmp5_ = sin(tmp1_);
double tmp6_ = mCompCoord[17];
double tmp7_ = mCompCoord[18];
double tmp8_ = mLocXTer-tmp7_;
double tmp9_ = sin(tmp6_);
double tmp10_ = -(tmp9_);
double tmp11_ = -(tmp5_);
double tmp12_ = cos(tmp6_);
double tmp13_ = mCompCoord[19];
double tmp14_ = mLocYTer-tmp13_;
double tmp15_ = mCompCoord[20];
double tmp16_ = mLocZTer-tmp15_;
double tmp17_ = mLocEbner_State_0_0*mLocEbner_State_0_0;
double tmp18_ = tmp17_*0.666667;
double tmp19_ = mLocYIm*mLocYIm;
double tmp20_ = tmp19_-tmp18_;
double tmp21_ = mLocXIm*mLocXIm;
double tmp22_ = tmp21_-tmp18_;
double tmp23_ = tmp4_*tmp2_;
double tmp24_ = tmp23_*(tmp8_);
double tmp25_ = tmp3_*tmp2_;
double tmp26_ = tmp25_*(tmp14_);
double tmp27_ = tmp24_+tmp26_;
double tmp28_ = tmp5_*(tmp16_);
double tmp29_ = tmp27_+tmp28_;
double tmp30_ = -(tmp3_);
double tmp31_ = tmp30_*tmp10_;
double tmp32_ = tmp4_*tmp11_;
double tmp33_ = tmp32_*tmp12_;
double tmp34_ = tmp31_+tmp33_;
double tmp35_ = (tmp34_)*(tmp8_);
double tmp36_ = tmp4_*tmp10_;
double tmp37_ = tmp3_*tmp11_;
double tmp38_ = tmp37_*tmp12_;
double tmp39_ = tmp36_+tmp38_;
double tmp40_ = (tmp39_)*(tmp14_);
double tmp41_ = tmp35_+tmp40_;
double tmp42_ = tmp2_*tmp12_;
double tmp43_ = tmp42_*(tmp16_);
double tmp44_ = tmp41_+tmp43_;
double tmp45_ = (tmp29_)/(tmp44_);
double tmp46_ = mLocXIm*mLocYIm;
double tmp47_ = -(1);
double tmp48_ = tmp47_*tmp3_;
double tmp49_ = mCompCoord[0];
double tmp50_ = tmp47_*tmp5_;
double tmp51_ = -(tmp2_);
double tmp52_ = ElSquare(tmp44_);
double tmp53_ = -(tmp12_);
double tmp54_ = tmp47_*tmp9_;
mVal[0] = (mCompCoord[1]+tmp49_*(tmp45_))-(((1+mCompCoord[3])*mLocXIm+mCompCoord[4]*mLocYIm)-mCompCoord[5]*2*(tmp22_)+mCompCoord[6]*tmp46_+mCompCoord[7]*(tmp20_)+mCompCoord[9]*mLocXIm*(tmp20_)+mCompCoord[11]*mLocYIm*(tmp22_)+mCompCoord[13]*(tmp22_)*(tmp20_));
mCompDer[0][0] = tmp45_;
mCompDer[0][1] = 1;
mCompDer[0][2] = 0;
mCompDer[0][3] = -(mLocXIm);
mCompDer[0][4] = -(mLocYIm);
mCompDer[0][5] = -(-(2*(tmp22_)));
mCompDer[0][6] = -(tmp46_);
mCompDer[0][7] = -(tmp20_);
mCompDer[0][8] = 0;
mCompDer[0][9] = -(mLocXIm*(tmp20_));
mCompDer[0][10] = 0;
mCompDer[0][11] = -(mLocYIm*(tmp22_));
mCompDer[0][12] = 0;
mCompDer[0][13] = -((tmp22_)*(tmp20_));
mCompDer[0][14] = 0;
mCompDer[0][15] = (((tmp48_*tmp2_*(tmp8_)+tmp23_*(tmp14_))*(tmp44_)-(tmp29_)*((-(tmp4_)*tmp10_+tmp48_*tmp11_*tmp12_)*(tmp8_)+(tmp48_*tmp10_+tmp33_)*(tmp14_)))/tmp52_)*tmp49_;
mCompDer[0][16] = (((tmp50_*tmp4_*(tmp8_)+tmp50_*tmp3_*(tmp14_)+tmp2_*(tmp16_))*(tmp44_)-(tmp29_)*(tmp51_*tmp4_*tmp12_*(tmp8_)+tmp51_*tmp3_*tmp12_*(tmp14_)+tmp50_*tmp12_*(tmp16_)))/tmp52_)*tmp49_;
mCompDer[0][17] = (-((tmp29_)*((tmp53_*tmp30_+tmp54_*tmp32_)*(tmp8_)+(tmp53_*tmp4_+tmp54_*tmp37_)*(tmp14_)+tmp54_*tmp2_*(tmp16_)))/tmp52_)*tmp49_;
mCompDer[0][18] = ((tmp47_*tmp23_*(tmp44_)-(tmp29_)*tmp47_*(tmp34_))/tmp52_)*tmp49_;
mCompDer[0][19] = ((tmp47_*tmp25_*(tmp44_)-(tmp29_)*tmp47_*(tmp39_))/tmp52_)*tmp49_;
mCompDer[0][20] = ((tmp50_*(tmp44_)-(tmp29_)*tmp47_*tmp42_)/tmp52_)*tmp49_;
}
void cEqAppui_X_C2MEbner::ComputeValDerivHessian()
{
ELISE_ASSERT(false,"Foncteur cEqAppui_X_C2MEbner Has no Der Sec");
}
void cEqAppui_X_C2MEbner::SetEbner_State_0_0(double aVal){ mLocEbner_State_0_0 = aVal;}
void cEqAppui_X_C2MEbner::SetXIm(double aVal){ mLocXIm = aVal;}
void cEqAppui_X_C2MEbner::SetXTer(double aVal){ mLocXTer = aVal;}
void cEqAppui_X_C2MEbner::SetYIm(double aVal){ mLocYIm = aVal;}
void cEqAppui_X_C2MEbner::SetYTer(double aVal){ mLocYTer = aVal;}
void cEqAppui_X_C2MEbner::SetZTer(double aVal){ mLocZTer = aVal;}
double * cEqAppui_X_C2MEbner::AdrVarLocFromString(const std::string & aName)
{
if (aName == "Ebner_State_0_0") return & mLocEbner_State_0_0;
if (aName == "XIm") return & mLocXIm;
if (aName == "XTer") return & mLocXTer;
if (aName == "YIm") return & mLocYIm;
if (aName == "YTer") return & mLocYTer;
if (aName == "ZTer") return & mLocZTer;
return 0;
}
cElCompiledFonc::cAutoAddEntry cEqAppui_X_C2MEbner::mTheAuto("cEqAppui_X_C2MEbner",cEqAppui_X_C2MEbner::Alloc);
cElCompiledFonc * cEqAppui_X_C2MEbner::Alloc()
{ return new cEqAppui_X_C2MEbner();
}
| 3,097 |
852 | <reponame>ckamtsikis/cmssw<filename>RecoEcal/EgammaClusterProducers/test/testRecoEcal_cfgV2.py
import FWCore.ParameterSet.Config as cms
process = cms.Process('testRecoEcal')
process.load('RecoEcal.Configuration.RecoEcal_cff')
process.load("Configuration.StandardSequences.Geometry_cff")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1000)
)
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
## global tags:
#process.GlobalTag.globaltag = cms.string('GR_R_37X_V6A::All')
process.GlobalTag.globaltag = cms.string('START38_V8::All')
process.load("Configuration.StandardSequences.MagneticField_cff")
process.source = cms.Source("PoolSource",
# debugVerbosity = cms.untracked.uint32(1),
# debugFlag = cms.untracked.bool(True),
fileNames = cms.untracked.vstring(
'/store/relval/CMSSW_3_9_0_pre1/RelValZEE/GEN-SIM-RECO/START38_V8-v1/0009/1A21AD3E-F89A-DF11-A955-002618943865.root',
# '/store/data/Run2010A/EG/RECO/Jun14thReReco_v1/0000/062F305D-A278-DF11-A9B3-003048F0E3B2.root',
#'/store/relval/CMSSW_3_1_0_pre4/RelValSingleElectronPt35/GEN-SIM-RECO/IDEAL_30X_v1/0003/F602BF32-2916-DE11-B4DE-000423D8FA38.root',
#'/store/relval/CMSSW_3_1_0_pre4/RelValSingleElectronPt35/GEN-SIM-RECO/IDEAL_30X_v1/0003/F698554C-AB16-DE11-8C19-001617E30D06.root'
)
)
process.out = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring(
'drop *',
'keep *_*_*_testRecoEcal'),
fileName = cms.untracked.string('output_testRecoEcal.root')
)
process.p = cms.Path(process.ecalClusters)
process.outpath = cms.EndPath(process.out)
| 744 |
5,169 | <reponame>Gantios/Specs<filename>Specs/3/0/f/SdkDemo/3.1.3/SdkDemo.podspec.json
{
"name": "SdkDemo",
"version": "3.1.3",
"license": {
"type": "BSD",
"text": "Copyright (C) 2010 Apple Inc. All Rights Reserved."
},
"homepage": "https://github.com/BizzonInfo/SDKDemo",
"authors": {
"Abiram": "<EMAIL>"
},
"summary": "The Sample demo done by BizzonInfoSolutions.",
"source": {
"git": "https://github.com/BizzonInfo/SDKDemo.git",
"branch": "xyz",
"tag": "3.1.3"
},
"source_files": "SdkDemoFramework/**/*.{h,m}",
"platforms": {
"osx": "10.7",
"ios": "5.0"
}
}
| 292 |
432 | /*
* modified for Lites 1.1
*
* Aug 1995, <NAME> (<EMAIL>)
* University of Utah, Department of Computer Science
*
* $FreeBSD: src/sys/gnu/ext2fs/ext2_linux_ialloc.c,v 1.13.2.2 2001/08/14 18:03:19 gallatin Exp $
*/
/*
* linux/fs/ext2/ialloc.c
*
* Copyright (C) 1992, 1993, 1994, 1995
* <NAME> (<EMAIL>)
* Laboratoire MASI - Institut Blaise Pascal
* Universite Pierre et Marie Curie (Paris VI)
*
* BSD ufs-inspired inode and directory allocation by
* <NAME> (<EMAIL>), 1993
*/
/*
* The free inodes are managed by bitmaps. A file system contains several
* blocks groups. Each group contains 1 bitmap block for blocks, 1 bitmap
* block for inodes, N blocks for the inode table and data blocks.
*
* The file system contains group descriptors which are located after the
* super block. Each descriptor contains the number of the bitmap block and
* the free blocks count in the block. The descriptors are loaded in memory
* when a file system is mounted (see ext2_read_super).
*/
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/buf.h>
#include <sys/proc.h>
#include <sys/mount.h>
#include <sys/vnode.h>
#include "quota.h"
#include "inode.h"
#include "ext2_mount.h"
#include "ext2_extern.h"
#include "ext2_fs.h"
#include "ext2_fs_sb.h"
#include "fs.h"
#include <sys/stat.h>
#include <sys/buf2.h>
#include <sys/thread2.h>
#ifdef __x86_64__
#include "ext2_bitops.h"
#endif
/* this is supposed to mark a buffer dirty and ready for delayed writing
*/
void
mark_buffer_dirty(struct buf *bh)
{
crit_enter();
bh->b_flags |= B_DIRTY;
crit_exit();
}
struct ext2_group_desc *
get_group_desc(struct mount *mp, unsigned int block_group,
struct buf **bh)
{
struct ext2_sb_info *sb = VFSTOEXT2(mp)->um_e2fs;
unsigned long group_desc;
unsigned long desc;
struct ext2_group_desc *gdp;
if (block_group >= sb->s_groups_count)
panic ("get_group_desc: "
"block_group >= groups_count - "
"block_group = %d, groups_count = %lu",
block_group, sb->s_groups_count);
group_desc = block_group / EXT2_DESC_PER_BLOCK(sb);
desc = block_group % EXT2_DESC_PER_BLOCK(sb);
if (!sb->s_group_desc[group_desc])
panic ( "get_group_desc:"
"Group descriptor not loaded - "
"block_group = %d, group_desc = %lu, desc = %lu",
block_group, group_desc, desc);
gdp = (struct ext2_group_desc *)
sb->s_group_desc[group_desc]->b_data;
if (bh)
*bh = sb->s_group_desc[group_desc];
return gdp + desc;
}
static void
read_inode_bitmap(struct mount *mp, unsigned long block_group,
unsigned int bitmap_nr)
{
struct ext2_sb_info *sb = VFSTOEXT2(mp)->um_e2fs;
struct ext2_group_desc *gdp;
struct buf *bh;
int error;
gdp = get_group_desc (mp, block_group, NULL);
if ((error = bread (VFSTOEXT2(mp)->um_devvp,
fsbtodoff(sb, gdp->bg_inode_bitmap),
sb->s_blocksize, &bh)) != 0)
panic ( "read_inode_bitmap:"
"Cannot read inode bitmap - "
"block_group = %lu, inode_bitmap = %lu",
block_group, (unsigned long) gdp->bg_inode_bitmap);
sb->s_inode_bitmap_number[bitmap_nr] = block_group;
sb->s_inode_bitmap[bitmap_nr] = bh;
LCK_BUF(bh)
}
/*
* load_inode_bitmap loads the inode bitmap for a blocks group
*
* It maintains a cache for the last bitmaps loaded. This cache is managed
* with a LRU algorithm.
*
* Notes:
* 1/ There is one cache per mounted file system.
* 2/ If the file system contains less than EXT2_MAX_GROUP_LOADED groups,
* this function reads the bitmap without maintaining a LRU cache.
*/
static int
load_inode_bitmap(struct mount *mp, unsigned int block_group)
{
struct ext2_sb_info *sb = VFSTOEXT2(mp)->um_e2fs;
int i, j;
unsigned long inode_bitmap_number;
struct buf *inode_bitmap;
if (block_group >= sb->s_groups_count)
panic ("load_inode_bitmap:"
"block_group >= groups_count - "
"block_group = %d, groups_count = %lu",
block_group, sb->s_groups_count);
if (sb->s_loaded_inode_bitmaps > 0 &&
sb->s_inode_bitmap_number[0] == block_group)
return 0;
if (sb->s_groups_count <= EXT2_MAX_GROUP_LOADED) {
if (sb->s_inode_bitmap[block_group]) {
if (sb->s_inode_bitmap_number[block_group] !=
block_group)
panic ( "load_inode_bitmap:"
"block_group != inode_bitmap_number");
else
return block_group;
} else {
read_inode_bitmap (mp, block_group, block_group);
return block_group;
}
}
for (i = 0; i < sb->s_loaded_inode_bitmaps &&
sb->s_inode_bitmap_number[i] != block_group;
i++)
;
if (i < sb->s_loaded_inode_bitmaps &&
sb->s_inode_bitmap_number[i] == block_group) {
inode_bitmap_number = sb->s_inode_bitmap_number[i];
inode_bitmap = sb->s_inode_bitmap[i];
for (j = i; j > 0; j--) {
sb->s_inode_bitmap_number[j] =
sb->s_inode_bitmap_number[j - 1];
sb->s_inode_bitmap[j] =
sb->s_inode_bitmap[j - 1];
}
sb->s_inode_bitmap_number[0] = inode_bitmap_number;
sb->s_inode_bitmap[0] = inode_bitmap;
} else {
if (sb->s_loaded_inode_bitmaps < EXT2_MAX_GROUP_LOADED)
sb->s_loaded_inode_bitmaps++;
else
ULCK_BUF(sb->s_inode_bitmap[EXT2_MAX_GROUP_LOADED - 1])
for (j = sb->s_loaded_inode_bitmaps - 1; j > 0; j--) {
sb->s_inode_bitmap_number[j] =
sb->s_inode_bitmap_number[j - 1];
sb->s_inode_bitmap[j] =
sb->s_inode_bitmap[j - 1];
}
read_inode_bitmap (mp, block_group, 0);
}
return 0;
}
void
ext2_free_inode(struct inode *inode)
{
struct ext2_sb_info *sb;
struct buf *bh;
struct buf *bh2;
unsigned long block_group;
unsigned long bit;
int bitmap_nr;
struct ext2_group_desc *gdp;
struct ext2_super_block *es;
if (!inode)
return;
if (inode->i_nlink) {
kprintf ("ext2_free_inode: inode has nlink=%d\n",
inode->i_nlink);
return;
}
ext2_debug ("freeing inode %lu\n", inode->i_number);
sb = inode->i_e2fs;
lock_super (DEVVP(inode));
if (inode->i_number < EXT2_FIRST_INO(sb) ||
inode->i_number > sb->s_es->s_inodes_count) {
kprintf ("free_inode reserved inode or nonexistent inode");
unlock_super (DEVVP(inode));
return;
}
es = sb->s_es;
block_group = (inode->i_number - 1) / EXT2_INODES_PER_GROUP(sb);
bit = (inode->i_number - 1) % EXT2_INODES_PER_GROUP(sb);
bitmap_nr = load_inode_bitmap (ITOV(inode)->v_mount, block_group);
bh = sb->s_inode_bitmap[bitmap_nr];
if (!clear_bit (bit, bh->b_data))
kprintf ( "ext2_free_inode:"
"bit already cleared for inode %lu",
(unsigned long)inode->i_number);
else {
gdp = get_group_desc (ITOV(inode)->v_mount, block_group, &bh2);
gdp->bg_free_inodes_count++;
if (S_ISDIR(inode->i_mode))
gdp->bg_used_dirs_count--;
mark_buffer_dirty(bh2);
es->s_free_inodes_count++;
}
mark_buffer_dirty(bh);
/*** XXX
if (sb->s_flags & MS_SYNCHRONOUS) {
ll_rw_block (WRITE, 1, &bh);
wait_on_buffer (bh);
}
***/
sb->s_dirt = 1;
unlock_super (DEVVP(inode));
}
#if 0 /* linux */
/*
* This function increments the inode version number
*
* This may be used one day by the NFS server
*/
static void
inc_inode_version(struct inode *inode, struct ext2_group_desc *gdp, int mode)
{
unsigned long inode_block;
struct buf *bh;
struct ext2_inode *raw_inode;
inode_block = gdp->bg_inode_table + (((inode->i_number - 1) %
EXT2_INODES_PER_GROUP(inode->i_sb)) /
EXT2_INODES_PER_BLOCK(inode->i_sb));
bh = bread (inode->i_sb->s_dev, dbtob(inode_block), inode->i_sb->s_blocksize);
if (!bh) {
kprintf ("inc_inode_version Cannot load inode table block - "
"inode=%lu, inode_block=%lu\n",
inode->i_number, inode_block);
inode->u.ext2_i.i_version = 1;
return;
}
raw_inode = ((struct ext2_inode *) bh->b_data) +
(((inode->i_number - 1) %
EXT2_INODES_PER_GROUP(inode->i_sb)) %
EXT2_INODES_PER_BLOCK(inode->i_sb));
raw_inode->i_version++;
inode->u.ext2_i.i_version = raw_inode->i_version;
bdwrite (bh);
}
#endif /* linux */
/*
* There are two policies for allocating an inode. If the new inode is
* a directory, then a forward search is made for a block group with both
* free space and a low directory-to-inode ratio; if that fails, then of
* the groups with above-average free space, that group with the fewest
* directories already is chosen.
*
* For other inodes, search forward from the parent directory\'s block
* group to find a free inode.
*/
/*
 * this function has been reduced to the actual 'find the inode number' part
*/
ino_t
ext2_new_inode(const struct inode *dir, int mode)
{
struct ext2_sb_info *sb;
struct buf *bh;
struct buf *bh2;
int i, j, avefreei;
int bitmap_nr;
struct ext2_group_desc *gdp;
struct ext2_group_desc *tmp;
struct ext2_super_block *es;
if (!dir)
return 0;
sb = dir->i_e2fs;
lock_super (DEVVP(dir));
es = sb->s_es;
repeat:
gdp = NULL; i=0;
if (S_ISDIR(mode)) {
avefreei = es->s_free_inodes_count /
sb->s_groups_count;
/* I am not yet convinced that this next bit is necessary.
i = dir->u.ext2_i.i_block_group;
for (j = 0; j < sb->u.ext2_sb.s_groups_count; j++) {
tmp = get_group_desc (sb, i, &bh2);
if ((tmp->bg_used_dirs_count << 8) <
tmp->bg_free_inodes_count) {
gdp = tmp;
break;
}
else
i = ++i % sb->u.ext2_sb.s_groups_count;
}
*/
if (!gdp) {
for (j = 0; j < sb->s_groups_count; j++) {
tmp = get_group_desc(ITOV(dir)->v_mount,j,&bh2);
if (tmp->bg_free_inodes_count &&
tmp->bg_free_inodes_count >= avefreei) {
if (!gdp ||
(tmp->bg_free_blocks_count >
gdp->bg_free_blocks_count)) {
i = j;
gdp = tmp;
}
}
}
}
}
else
{
/*
* Try to place the inode in its parent directory
*/
i = dir->i_block_group;
tmp = get_group_desc (ITOV(dir)->v_mount, i, &bh2);
if (tmp->bg_free_inodes_count)
gdp = tmp;
else
{
/*
* Use a quadratic hash to find a group with a
* free inode
*/
for (j = 1; j < sb->s_groups_count; j <<= 1) {
i += j;
if (i >= sb->s_groups_count)
i -= sb->s_groups_count;
tmp = get_group_desc(ITOV(dir)->v_mount,i,&bh2);
if (tmp->bg_free_inodes_count) {
gdp = tmp;
break;
}
}
}
if (!gdp) {
/*
* That failed: try linear search for a free inode
*/
i = dir->i_block_group + 1;
for (j = 2; j < sb->s_groups_count; j++) {
if (++i >= sb->s_groups_count)
i = 0;
tmp = get_group_desc(ITOV(dir)->v_mount,i,&bh2);
if (tmp->bg_free_inodes_count) {
gdp = tmp;
break;
}
}
}
}
if (!gdp) {
unlock_super (DEVVP(dir));
return 0;
}
bitmap_nr = load_inode_bitmap (ITOV(dir)->v_mount, i);
bh = sb->s_inode_bitmap[bitmap_nr];
if ((j = find_first_zero_bit ((unsigned long *) bh->b_data,
EXT2_INODES_PER_GROUP(sb))) <
EXT2_INODES_PER_GROUP(sb)) {
if (set_bit (j, bh->b_data)) {
kprintf ( "ext2_new_inode:"
"bit already set for inode %d", j);
goto repeat;
}
/* Linux now does the following:
mark_buffer_dirty(bh);
if (sb->s_flags & MS_SYNCHRONOUS) {
ll_rw_block (WRITE, 1, &bh);
wait_on_buffer (bh);
}
*/
mark_buffer_dirty(bh);
} else {
if (gdp->bg_free_inodes_count != 0) {
kprintf ( "ext2_new_inode:"
"Free inodes count corrupted in group %d",
i);
unlock_super (DEVVP(dir));
return 0;
}
goto repeat;
}
j += i * EXT2_INODES_PER_GROUP(sb) + 1;
if (j < EXT2_FIRST_INO(sb) || j > es->s_inodes_count) {
kprintf ( "ext2_new_inode:"
"reserved inode or inode > inodes count - "
"block_group = %d,inode=%d", i, j);
unlock_super (DEVVP(dir));
return 0;
}
gdp->bg_free_inodes_count--;
if (S_ISDIR(mode))
gdp->bg_used_dirs_count++;
mark_buffer_dirty(bh2);
es->s_free_inodes_count--;
/* mark_buffer_dirty(sb->u.ext2_sb.s_sbh, 1); */
sb->s_dirt = 1;
unlock_super (DEVVP(dir));
return j;
}
#ifdef unused
static unsigned long
ext2_count_free_inodes(struct mount *mp)
{
#ifdef EXT2FS_DEBUG
struct ext2_sb_info *sb = VFSTOEXT2(mp)->um_e2fs;
struct ext2_super_block *es;
unsigned long desc_count, bitmap_count, x;
int bitmap_nr;
struct ext2_group_desc *gdp;
int i;
lock_super (VFSTOEXT2(mp)->um_devvp);
es = sb->s_es;
desc_count = 0;
bitmap_count = 0;
gdp = NULL;
for (i = 0; i < sb->s_groups_count; i++) {
gdp = get_group_desc (mp, i, NULL);
desc_count += gdp->bg_free_inodes_count;
bitmap_nr = load_inode_bitmap (mp, i);
x = ext2_count_free (sb->s_inode_bitmap[bitmap_nr],
EXT2_INODES_PER_GROUP(sb) / 8);
ext2_debug ("group %d: stored = %d, counted = %lu\n",
i, gdp->bg_free_inodes_count, x);
bitmap_count += x;
}
ext2_debug("stored = %lu, computed = %lu, %lu\n",
es->s_free_inodes_count, desc_count, bitmap_count);
unlock_super (VFSTOEXT2(mp)->um_devvp);
return desc_count;
#else
return VFSTOEXT2(mp)->um_e2fsb->s_free_inodes_count;
#endif
}
#endif /* unused */
#ifdef LATER
void
ext2_check_inodes_bitmap(struct mount *mp)
{
struct ext2_super_block *es;
unsigned long desc_count, bitmap_count, x;
int bitmap_nr;
struct ext2_group_desc *gdp;
int i;
lock_super (sb);
es = sb->u.ext2_sb.s_es;
desc_count = 0;
bitmap_count = 0;
gdp = NULL;
for (i = 0; i < sb->u.ext2_sb.s_groups_count; i++) {
gdp = get_group_desc (sb, i, NULL);
desc_count += gdp->bg_free_inodes_count;
bitmap_nr = load_inode_bitmap (sb, i);
x = ext2_count_free (sb->u.ext2_sb.s_inode_bitmap[bitmap_nr],
EXT2_INODES_PER_GROUP(sb) / 8);
if (gdp->bg_free_inodes_count != x)
kprintf ( "ext2_check_inodes_bitmap:"
"Wrong free inodes count in group %d, "
"stored = %d, counted = %lu", i,
gdp->bg_free_inodes_count, x);
bitmap_count += x;
}
if (es->s_free_inodes_count != bitmap_count)
kprintf ( "ext2_check_inodes_bitmap:"
"Wrong free inodes count in super block, "
"stored = %lu, counted = %lu",
(unsigned long) es->s_free_inodes_count, bitmap_count);
unlock_super (sb);
}
#endif
| 6,341 |
375 | <reponame>wulinyun/lumify
package io.lumify.sphinx;
import java.io.*;
public class WavFileUtil {
// see https://ccrma.stanford.edu/courses/422/projects/WaveFormat/
public static void fixWavHeaders(File wavFile, File wavFileNoHeaders) throws IOException {
byte[] buffer = new byte[1024];
InputStream in = new FileInputStream(wavFile);
OutputStream out = new FileOutputStream(wavFileNoHeaders);
try {
int read;
// read RIFF head
read = in.read(buffer, 0, 12);
if (read < 12) {
throw new IOException("Could not read RIFF header");
}
out.write(buffer, 0, 12);
// skip non-data subchunks
while (true) {
read = in.read(buffer, 0, 8);
if (read < 8) {
throw new IOException("Could not read subchunk");
}
String subchunkName = new String(buffer, 0, 4);
if (subchunkName.equals("data")) {
out.write(buffer, 0, 8);
break;
}
                // subchunk size is a little-endian unsigned 32-bit value; mask each byte to avoid sign extension
                int chunkSize = (buffer[4] & 0xFF) | ((buffer[5] & 0xFF) << 8) | ((buffer[6] & 0xFF) << 16) | ((buffer[7] & 0xFF) << 24);
while (chunkSize > 0) {
read = Math.min(chunkSize, buffer.length);
in.read(buffer, 0, read);
chunkSize -= read;
}
}
// copy remaining data
while ((read = in.read(buffer)) > 0) {
out.write(buffer, 0, read);
}
} finally {
in.close();
out.close();
}
}
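    // Minimal usage sketch (file names are placeholders): fixWavHeaders copies
    // the 12-byte RIFF header and the raw "data" subchunk, dropping every other
    // subchunk it encounters in between.
    //
    //   WavFileUtil.fixWavHeaders(new File("in.wav"), new File("fixed.wav"));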
}
| 926 |
1,473 | /*
* Copyright 2021 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.rocketmq.interceptor;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.List;
import java.util.Map;
import org.apache.rocketmq.common.message.Message;
import org.apache.rocketmq.common.message.MessageExt;
import com.navercorp.pinpoint.bootstrap.context.Header;
import com.navercorp.pinpoint.bootstrap.context.MethodDescriptor;
import com.navercorp.pinpoint.bootstrap.context.SpanEventRecorder;
import com.navercorp.pinpoint.bootstrap.context.SpanRecorder;
import com.navercorp.pinpoint.bootstrap.context.Trace;
import com.navercorp.pinpoint.bootstrap.context.TraceContext;
import com.navercorp.pinpoint.bootstrap.context.TraceId;
import com.navercorp.pinpoint.bootstrap.interceptor.SpanRecursiveAroundInterceptor;
import com.navercorp.pinpoint.bootstrap.logging.PLogger;
import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory;
import com.navercorp.pinpoint.bootstrap.sampler.SamplingFlagUtils;
import com.navercorp.pinpoint.bootstrap.util.NumberUtils;
import com.navercorp.pinpoint.common.plugin.util.HostAndPort;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.common.util.StringUtils;
import com.navercorp.pinpoint.plugin.rocketmq.RocketMQConstants;
import com.navercorp.pinpoint.plugin.rocketmq.description.EntryPointMethodDescriptor;
import com.navercorp.pinpoint.plugin.rocketmq.field.accessor.ChannelFutureGetter;
import com.navercorp.pinpoint.plugin.rocketmq.field.accessor.ChannelTablesAccessor;
/**
* @author messi-gao
*/
public class ConsumerMessageEntryPointInterceptor extends SpanRecursiveAroundInterceptor {
protected static final String SCOPE_NAME = "##ROCKETMQ_ENTRY_POINT_START_TRACE";
protected static final EntryPointMethodDescriptor ENTRY_POINT_METHOD_DESCRIPTOR =
new EntryPointMethodDescriptor();
public ConsumerMessageEntryPointInterceptor(TraceContext traceContext, MethodDescriptor methodDescriptor) {
super(traceContext, methodDescriptor, SCOPE_NAME);
traceContext.cacheApi(ENTRY_POINT_METHOD_DESCRIPTOR);
}
@Override
protected void doInBeforeTrace(SpanEventRecorder recorder, Object target, Object[] args) {
recorder.recordServiceType(RocketMQConstants.ROCKETMQ_CLIENT_INTERNAL);
}
@Override
protected void doInAfterTrace(SpanEventRecorder recorder, Object target, Object[] args, Object result,
Throwable throwable) {
recorder.recordApi(methodDescriptor);
recorder.recordException(throwable);
}
@Override
protected Trace createTrace(Object target, Object[] args) {
final List<MessageExt> msgs = (List<MessageExt>) args[0];
if (msgs.isEmpty()) {
return null;
}
return createTrace(target, msgs);
}
private Trace createTrace(Object target, List<MessageExt> msgs) {
TraceFactoryProvider.TraceFactory traceFactory = TraceFactoryProvider.get(msgs);
return traceFactory.createTrace(target, traceContext, msgs);
}
private static class TraceFactoryProvider {
private static TraceFactory get(List<MessageExt> msgs) {
if (msgs.size() == 1) {
return new SupportContinueTraceFactory();
} else {
return new DefaultTraceFactory();
}
}
private interface TraceFactory {
Trace createTrace(Object target, TraceContext traceContext, List<MessageExt> msgs);
}
private static class DefaultTraceFactory implements TraceFactory {
final PLogger logger = PLoggerFactory.getLogger(this.getClass());
final boolean isDebug = logger.isDebugEnabled();
@Override
public Trace createTrace(Object target, TraceContext traceContext, List<MessageExt> msgs) {
return createTrace0(target, traceContext, msgs);
}
Trace createTrace0(Object target, TraceContext traceContext, List<MessageExt> msgs) {
final Trace trace = traceContext.newTraceObject();
if (trace.canSampled()) {
final SpanRecorder recorder = trace.getSpanRecorder();
recordRootSpan(target, recorder, msgs);
if (isDebug) {
logger.debug("TraceID not exist. start new trace.");
}
} else {
if (isDebug) {
logger.debug("TraceID not exist. camSampled is false. skip trace.");
}
}
return trace;
}
void recordRootSpan(Object target, SpanRecorder recorder, List<MessageExt> msgs) {
recordRootSpan(target, recorder, msgs, null, null);
}
void recordRootSpan(Object target, SpanRecorder recorder, List<MessageExt> msgs,
String parentApplicationName,
String parentApplicationType) {
recorder.recordServiceType(RocketMQConstants.ROCKETMQ_CLIENT);
recorder.recordApi(ENTRY_POINT_METHOD_DESCRIPTOR);
final MessageExt messageExt = msgs.get(0);
String acceptorHost = null;
if (msgs.size() == 1) {
acceptorHost = messageExt.getUserProperty(RocketMQConstants.ACCEPTOR_HOST);
}
if (StringUtils.isEmpty(acceptorHost)) {
acceptorHost = RocketMQConstants.UNKNOWN;
}
recorder.recordRemoteAddress(acceptorHost);
recorder.recordAcceptorHost(acceptorHost);
recordEndPoint(target, recorder, messageExt);
final String topic = messageExt.getTopic();
recorder.recordRpcName(createRpcName(recorder, topic, msgs));
recorder.recordAttribute(RocketMQConstants.ROCKETMQ_TOPIC_ANNOTATION_KEY, topic);
if (StringUtils.hasText(parentApplicationName) && StringUtils.hasText(parentApplicationType)) {
recorder.recordParentApplication(parentApplicationName, NumberUtils
.parseShort(parentApplicationType, ServiceType.UNDEFINED.getCode()));
}
}
private void recordEndPoint(Object target, SpanRecorder recorder, MessageExt messageExt) {
String endPointAddress = RocketMQConstants.UNKNOWN;
ChannelTablesAccessor channelTablesAccessor = (ChannelTablesAccessor) target;
Map<String, Object> channelTables = channelTablesAccessor._$PINPOINT$_getChannelTables();
SocketAddress socketAddress = messageExt.getStoreHost();
String brokenAddr = getEndPoint(socketAddress);
ChannelFutureGetter channelFutureGetter = (ChannelFutureGetter) channelTables.get(brokenAddr);
if (channelFutureGetter != null) {
SocketAddress consumerAddress =
channelFutureGetter._$PINPOINT$_getChannelFuture().channel().localAddress();
endPointAddress = getEndPoint(consumerAddress);
}
recorder.recordEndPoint(endPointAddress);
recorder.recordAttribute(RocketMQConstants.ROCKETMQ_BROKER_SERVER_STATUS_ANNOTATION_KEY,
brokenAddr);
}
private String getEndPoint(SocketAddress socketAddress) {
if (socketAddress instanceof InetSocketAddress) {
final InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress;
final InetAddress remoteAddress = inetSocketAddress.getAddress();
if (remoteAddress != null) {
return HostAndPort.toHostAndPortString(remoteAddress.getHostAddress(),
inetSocketAddress.getPort());
}
                // Warning: avoid an unnecessary DNS lookup here (InetSocketAddress.getHostName() may trigger one)
final String hostName = inetSocketAddress.getHostName();
if (hostName != null) {
return HostAndPort.toHostAndPortString(hostName, inetSocketAddress.getPort());
}
}
return null;
}
private String createRpcName(SpanRecorder recorder, String topic, List<MessageExt> msgs) {
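                // Example of the resulting name (topic, partition and offset values
                // are made up): "rocketmq://topic=orders?partition=3&offset=42" for a
                // single message, or "rocketmq://topic=orders?batch=16" for a batch.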
final StringBuilder rpcName = new StringBuilder("rocketmq://");
rpcName.append("topic=").append(topic);
if (msgs.size() == 1) {
MessageExt messageExt = msgs.get(0);
int queueId = messageExt.getQueueId();
long commitLogOffset = messageExt.getCommitLogOffset();
rpcName.append("?partition=").append(queueId);
rpcName.append("&offset=").append(commitLogOffset);
recorder.recordAttribute(RocketMQConstants.ROCKETMQ_PARTITION_ANNOTATION_KEY, queueId);
recorder.recordAttribute(RocketMQConstants.ROCKETMQ_OFFSET_ANNOTATION_KEY, commitLogOffset);
} else {
rpcName.append("?batch=").append(msgs.size());
recorder.recordAttribute(RocketMQConstants.ROCKETMQ_BATCH_ANNOTATION_KEY, msgs.size());
}
return rpcName.toString();
}
}
private static class SupportContinueTraceFactory extends DefaultTraceFactory {
@Override
public Trace createTrace(Object target, TraceContext traceContext, List<MessageExt> msgs) {
final MessageExt messageExt = msgs.get(0);
if (!SamplingFlagUtils.isSamplingFlag(
messageExt.getUserProperty(Header.HTTP_FLAGS.name()))) {
final Trace trace = traceContext.disableSampling();
if (isDebug) {
logger.debug("remotecall sampling flag found. skip trace");
}
return trace;
}
final TraceId traceId = populateTraceIdFromHeaders(traceContext, messageExt);
if (traceId != null) {
return createContinueTrace(target, traceContext, msgs, traceId);
} else {
return createTrace0(target, traceContext, msgs);
}
}
private TraceId populateTraceIdFromHeaders(TraceContext traceContext, MessageExt messageExt) {
final String transactionId = messageExt.getUserProperty(Header.HTTP_TRACE_ID.toString());
final String spanID = messageExt.getUserProperty(Header.HTTP_SPAN_ID.toString());
final String parentSpanID = messageExt.getUserProperty(Header.HTTP_PARENT_SPAN_ID.toString());
final String flags = messageExt.getUserProperty(Header.HTTP_FLAGS.toString());
if (transactionId == null || spanID == null || parentSpanID == null || flags == null) {
return null;
}
return traceContext.createTraceId(transactionId, Long.parseLong(parentSpanID),
Long.parseLong(spanID), Short.parseShort(flags));
}
private Trace createContinueTrace(Object target, TraceContext traceContext, List<MessageExt> msgs,
TraceId traceId) {
if (isDebug) {
logger.debug("TraceID exist. continue trace. traceId:{}", traceId);
}
final Message consumerRecord = msgs.get(0);
final boolean isAsyncSend = Boolean.parseBoolean(
consumerRecord.getUserProperty(RocketMQConstants.IS_ASYNC_SEND));
final String parentApplicationName = consumerRecord.getUserProperty(
Header.HTTP_PARENT_APPLICATION_NAME.toString());
final String parentApplicationType = consumerRecord.getUserProperty(
Header.HTTP_PARENT_APPLICATION_TYPE.toString());
final Trace trace;
if (isAsyncSend) {
trace = traceContext.continueAsyncTraceObject(traceId);
} else {
trace = traceContext.continueTraceObject(traceId);
}
if (trace.canSampled()) {
final SpanRecorder recorder = trace.getSpanRecorder();
recordRootSpan(target, recorder, msgs, parentApplicationName, parentApplicationType);
}
return trace;
}
}
}
}
| 6,032 |
787 | /*
* Java Genetic Algorithm Library (@__identifier__@).
* Copyright (c) @__year__@ <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author:
* <NAME> (<EMAIL>)
*/
package io.jenetics.stat;
import static java.lang.Double.NaN;
import static java.lang.Math.sqrt;
import static java.util.Objects.requireNonNull;
import io.jenetics.internal.math.DoubleAdder;
/**
* Base class for statistical moments calculation.
*
* @see <a href="http://people.xiph.org/~tterribe/notes/homs.html">
* Computing Higher-Order Moments Online</a>
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @since 3.0
* @version 6.0
*/
abstract class MomentStatistics {
// the sample count.
private long _n = 0L;
// Variables used for statistical moments.
private final DoubleAdder _m1 = new DoubleAdder();
private final DoubleAdder _m2 = new DoubleAdder();
private final DoubleAdder _m3 = new DoubleAdder();
private final DoubleAdder _m4 = new DoubleAdder();
/**
* Update the moments with the given {@code value}.
*
* @param value the value which is used to update this statistical moments.
*/
void accept(final double value) {
++_n;
final double n = _n;
final double d = value - _m1.value();
final double dN = d/n;
final double dN2 = dN*dN;
final double t1 = d*dN*(n - 1.0);
_m1.add(dN);
_m4.add(t1*dN2*(n*n - 3.0*n + 3.0))
.add(6.0*dN2*_m2.value() - 4.0*dN*_m3.value());
_m3.add(t1*dN*(n - 2.0) - 3.0*dN*_m2.value());
_m2.add(t1);
}
/**
* Combines the state of another {@code Moments} object into this one.
*
* @see <a href="http://people.xiph.org/~tterribe/notes/homs.html">
* Computing Higher-Order Moments Online</a>
*/
void combine(final MomentStatistics b) {
requireNonNull(b);
final double m2 = _m2.value();
final double m3 = _m3.value();
final double pn = _n;
final double n = _n + b._n;
final double nn = n*n;
final double d = b._m1.value() - _m1.value();
final double dd = d*d;
_n += b._n;
_m1.add(d*b._n/n);
_m2.add(b._m2).add(dd*pn*b._n/n);
_m3.add(b._m3)
.add(dd*d*(pn*b._n*(pn - b._n)/nn))
.add(3.0*d*(pn*b._m2.value() - b._n*m2)/n);
final double bnbn = (double)b._n*(double)b._n;
_m4.add(b._m4)
.add(dd*dd*(pn*b._n*(pn*pn - pn*b._n + bnbn)/(nn*n)))
.add(6.0*dd*(pn*pn*b._m2.value() + bnbn*m2)/nn)
.add(4.0*d*(pn*b._m3.value() - b._n*m3)/n);
}
/**
* Returns the count of values recorded.
*
* @return the count of recorded values
*/
public long count() {
return _n;
}
/**
* Return the arithmetic mean of values recorded, or {@code Double.NaN} if
* no values have been recorded.
*
* @return the arithmetic mean of values, or zero if none
*/
public double mean() {
return _n == 0L ? NaN : _m1.value();
}
/**
* Return the variance of values recorded, or {@code Double.NaN} if no
* values have been recorded.
*
* @return the variance of values, or {@code NaN} if none
*/
public double variance() {
double var = NaN;
if (_n == 1L) {
var = _m2.value();
} else if (_n > 1L) {
var = _m2.value()/(_n - 1.0);
}
return var;
}
/**
* Return the skewness of values recorded, or {@code Double.NaN} if less
* than two values have been recorded.
*
* @see <a href="https://en.wikipedia.org/wiki/Skewness">Skewness</a>
*
* @return the skewness of values, or {@code NaN} if less than two values
* have been recorded
*/
public double skewness() {
double skewness = NaN;
if (_n >= 3L) {
final double var = _m2.value()/(_n - 1.0);
skewness = var < 10E-20
? 0.0d
: (_n*_m3.value())/((_n - 1.0)*(_n - 2.0)*sqrt(var)*var);
}
return skewness;
}
/**
* Return the kurtosis of values recorded, or {@code Double.NaN} if less
* than four values have been recorded.
*
* @see <a href="https://en.wikipedia.org/wiki/Kurtosis">Kurtosis</a>
*
* @return the kurtosis of values, or {@code NaN} if less than four values
* have been recorded
*/
public double kurtosis() {
double kurtosis = NaN;
if (_n > 3L) {
final double var = _m2.value()/(_n - 1);
kurtosis = _n <= 3L || var < 10E-20
? 0.0
: (_n*(_n + 1.0)*_m4.value() -
3.0*_m2.value()*_m2.value()*(_n - 1.0))/
((_n - 1.0)*(_n - 2.0)*(_n - 3.0)*var*var);
}
return kurtosis;
}
final boolean sameState(final MomentStatistics statistics) {
return _n == statistics._n &&
_m1.sameState(statistics._m1) &&
_m2.sameState(statistics._m2) &&
_m3.sameState(statistics._m3) &&
_m4.sameState(statistics._m4);
}
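	/*
	 * Usage sketch (illustrative; this class is abstract, so a concrete
	 * subclass is assumed): feed samples through accept(double), read
	 * mean()/variance()/skewness()/kurtosis() afterwards, and use
	 * combine(MomentStatistics) to merge partial results computed on separate
	 * chunks of the data, e.g. from parallel streams.
	 */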
}
| 2,088 |
438 | <filename>forge/tests/test_github.py<gh_stars>100-1000
# Copyright 2017 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time, os
from forge.tasks import TaskError
from forge.github import Github
from tempfile import mkdtemp
from shutil import rmtree
# github will deactivate this token if it detects it in our source, so
# we obfuscate it slightly
numbers = [48, 49, 51, 99, 99, 101, 52, 51, 48, 53, 54, 100, 57, 56, 97, 50,
55, 97, 54, 53, 55, 55, 49, 48, 49, 55, 48, 54, 55, 102, 100, 48,
102, 57, 49, 51, 97, 48, 102, 51]
token = "".join(chr(c) for c in numbers)
def test_list():
gh = Github(token)
repos = gh.list("forgeorg")
assert repos == [(u'forgeorg/foo', u'https://github.com/forgeorg/foo.git')]
def test_pull():
gh = Github(token)
repos = gh.list("forgeorg")
name, url = repos[0]
output = mkdtemp()
gh.pull(url, os.path.join(output, name))
assert os.path.exists(os.path.join(output, name, "README.md"))
rmtree(output)
def test_exists():
gh = Github(token)
assert gh.exists("https://github.com/forgeorg/foo.git")
assert not gh.exists("https://github.com/forgeorg/nosuchrepo.git")
unauth_gh = Github(None)
try:
unauth_gh.exists("https://github.com/forgeorg/nosuchrepo.git")
assert False
except TaskError, e:
assert "Authentication failed" in str(e)
def test_clone():
gh = Github(token)
output = mkdtemp()
gh.clone("https://github.com/forgeorg/foo.git", os.path.join(output, 'foo'))
assert os.path.exists(os.path.join(output, 'foo', "README.md"))
rmtree(output)
def test_remote():
gh = Github(token)
base = mkdtemp()
target = os.path.join(base, 'foo')
gh.clone("https://github.com/forgeorg/foo.git", target)
assert os.path.exists(os.path.join(target, "README.md"))
# XXX: this is necessary because of the injected token
remote = gh.remote(target)
assert remote.endswith("github.com/forgeorg/foo.git"), remote
assert gh.remote(base) == None
rmtree(base)
| 960 |
854 | __________________________________________________________________________________________________
class Solution {
public:
int findCelebrity(int n) {
vector<bool> candidate(n, true);
for (int i = 0; i < n; ++i) {
for (int j = 0; j < n; ++j) {
if (candidate[i] && i != j) {
if (knows(i, j) || !knows(j, i)) {
candidate[i] = false;
break;
} else {
candidate[j] = false;
}
}
}
if (candidate[i]) return i;
}
return -1;
}
};
__________________________________________________________________________________________________
class Solution {
public:
int findCelebrity(int n) {
for (int i = 0, j = 0; i < n; ++i) {
for (j = 0; j < n; ++j) {
if (i != j && (knows(i, j) || !knows(j, i))) break;
}
if (j == n) return i;
}
return -1;
}
};
__________________________________________________________________________________________________
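// The variant below first narrows the field to a single candidate in one O(n)
// pass: starting from person 0, whenever the current candidate knows i, the
// candidate cannot be the celebrity, so i takes over. A second O(n) pass then
// verifies that the survivor knows nobody and is known by everybody,
// returning -1 otherwise.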
class Solution {
public:
int findCelebrity(int n) {
int res = 0;
for (int i = 0; i < n; ++i) {
if (knows(res, i)) res = i;
}
for (int i = 0; i < n; ++i) {
if (res != i && (knows(res, i) || !knows(i, res))) return -1;
}
return res;
}
}; | 737 |
631 | <reponame>kst9395/javalite<gh_stars>100-1000
package org.javalite.activeweb.mojo;
import freemarker.template.TemplateException;
import org.javalite.activeweb.EndpointFinder;
import org.javalite.activeweb.Format;
import org.javalite.activeweb.OpenAPITemplateManager;
import org.javalite.common.Util;
import java.io.IOException;
public class Generator {
public String generate(String templateFile, EndpointFinder endpointFinder, Format format) throws TemplateException, IOException {
String templateContent = Util.readFile(templateFile);
String content = endpointFinder.getOpenAPIDocs(templateContent, format);
OpenAPITemplateManager m = new OpenAPITemplateManager();
return m.process(content);
}
}
| 250 |
348 | <reponame>chamberone/Leaflet.PixiOverlay<gh_stars>100-1000
{"nom":"Ménil","circ":"2ème circonscription","dpt":"Mayenne","inscrits":683,"abs":345,"votants":338,"blancs":29,"nuls":6,"exp":303,"res":[{"nuance":"MDM","nom":"<NAME>","voix":175},{"nuance":"LR","nom":"<NAME>","voix":128}]} | 116 |
456 | // SPDX-License-Identifier: BSD-3-Clause
// Copyright (c) 2020 <NAME>
// All rights reserved.
#include <djvCore/OS.h>
#include <djvCore/Memory.h>
#include <djvCore/String.h>
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif // WIN32_LEAN_AND_MEAN
#ifndef NOMINMAX
#define NOMINMAX
#endif // NOMINMAX
#include <windows.h>
#include <Shlobj.h>
#include <shellapi.h>
#include <stdlib.h>
#include <VersionHelpers.h>
#include <array>
#include <codecvt>
#include <locale>
//#pragma optimize("", off)
namespace djv
{
namespace Core
{
namespace OS
{
namespace
{
enum class Windows
{
Unknown,
_7,
_8,
_8_1,
_10,
Count
};
std::string getLabel(Windows value)
{
const std::array<std::string, static_cast<size_t>(Windows::Count)> data =
{
"Unknown",
"Windows 7",
"Windows 8",
"Windows 8.1",
"Windows 10"
};
return data[static_cast<size_t>(value)];
}
typedef void (WINAPI *PGNSI)(LPSYSTEM_INFO);
Windows windowsVersion()
{
Windows out = Windows::Unknown;
if (IsWindows10OrGreater())
{
out = Windows::_10;
}
else if (IsWindows8Point1OrGreater())
{
out = Windows::_8_1;
}
else if (IsWindows8OrGreater())
{
out = Windows::_8;
}
else if (IsWindows7OrGreater())
{
out = Windows::_7;
}
return out;
}
} // namespace
std::string getInformation()
{
return getLabel(windowsVersion());
}
size_t getRAMSize()
{
MEMORYSTATUSEX statex;
statex.dwLength = sizeof(statex);
GlobalMemoryStatusEx(&statex);
return statex.ullTotalPhys;
}
std::string getUserName()
{
WCHAR tmp[String::cStringLength] = { 0 };
DWORD size = String::cStringLength;
GetUserNameW(tmp, &size);
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> utf16;
return std::string(utf16.to_bytes(tmp));
}
int getTerminalWidth()
{
int out = 80;
HANDLE h = GetStdHandle(STD_OUTPUT_HANDLE);
if (h != INVALID_HANDLE_VALUE)
{
CONSOLE_SCREEN_BUFFER_INFO info;
if (GetConsoleScreenBufferInfo(h, &info))
{
out = info.dwSize.X;
}
}
return out;
}
bool getEnv(const std::string& name, std::string& out)
{
size_t size = 0;
wchar_t * p = 0;
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> utf16;
if (0 == _wdupenv_s(&p, &size, utf16.from_bytes(name).c_str()))
{
if (p)
{
out = utf16.to_bytes(p);
free(p);
return true;
}
}
if (p)
{
free(p);
}
return false;
}
bool setEnv(const std::string& name, const std::string& value)
{
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> utf16;
return _wputenv_s(utf16.from_bytes(name).c_str(), utf16.from_bytes(value).c_str()) == 0;
}
bool clearEnv(const std::string& name)
{
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> utf16;
return _wputenv_s(utf16.from_bytes(name).c_str(), utf16.from_bytes(std::string()).c_str()) == 0;
}
void openURL(const std::string& value)
{
ShellExecute(0, 0, value.c_str(), 0, 0, SW_SHOW);
}
} // namespace OS
} // namespace Core
} // namespace djv
| 3,028 |
6,197 | package com.kickstarter.ui;
public final class ArgumentsKey {
private ArgumentsKey() {}
public static final String CANCEL_PLEDGE_PROJECT = "com.kickstarter.ui.fragments.CancelPledgeFragment.project";
public static final String DISCOVERY_SORT_POSITION = "argument_discovery_position";
public static final String NEW_CARD_MODAL = "com.kickstarter.ui.fragments.NewCardFragment.modal";
public static final String NEW_CARD_PROJECT = "com.kickstarter.ui.fragments.NewCardFragment.project";
  public static final String PLEDGE_PLEDGE_DATA = "com.kickstarter.ui.fragments.PledgeFragment.pledge_data";
public static final String PLEDGE_PLEDGE_REASON = "com.kickstarter.ui.fragments.PledgeFragment.pledge_reason";
public static final String PROJECT_PAGER_POSITION = "com.kickstarter.ui.fragments.position";
}
| 264 |
562 | import pkgutil
def import_submodules(context, root_module, path):
"""
Import all submodules and register them in the ``context`` namespace.
>>> import_submodules(locals(), __name__, __path__)
"""
for loader, name, _ in pkgutil.walk_packages(path, root_module + "."):
module = loader.find_module(name).load_module(name)
pkg_names = getattr(module, "__all__", None)
for k, v in list(vars(module).items()):
if not k.startswith("_") and (pkg_names is None or k in pkg_names):
context[k] = v
context[name] = module
| 243 |
350 | <reponame>nathantspencer/SimpleElastix<gh_stars>100-1000
{
"name" : "ApproximateSignedDistanceMapImageFilter",
"template_code_filename" : "ImageFilter",
"template_test_filename" : "ImageFilter",
"doc" : "Docs",
"number_of_inputs" : 1,
"pixel_types" : "IntegerPixelIDTypeList",
"output_pixel_type" : "float",
"members" : [
{
"name" : "InsideValue",
"type" : "double",
"default" : "1u",
"pixeltype" : "Input",
"briefdescriptionSet" : "",
"detaileddescriptionSet" : "Set/Get intensity value representing the interior of objects in the mask.",
"briefdescriptionGet" : "",
"detaileddescriptionGet" : "Set/Get intensity value representing the interior of objects in the mask."
},
{
"name" : "OutsideValue",
"type" : "double",
"default" : "0u",
"pixeltype" : "Input",
"briefdescriptionSet" : "",
"detaileddescriptionSet" : "Set/Get intensity value representing non-objects in the mask.",
"briefdescriptionGet" : "",
"detaileddescriptionGet" : "Set/Get intensity value representing the interior of objects in the mask."
}
],
"tests" : [
{
"tag" : "default",
"description" : "Default parameter settings",
"tolerance" : "0.01",
"settings" : [],
"inputs" : [
"Input/2th_cthead1.png"
]
},
{
"tag" : "modified_parms",
"description" : "Setting InsideValue and OutsideValuew",
"tolerance" : "0.01",
"settings" : [
{
"parameter" : "InsideValue",
"value" : "100"
},
{
"parameter" : "OutsideValue",
"value" : "0"
}
],
"inputs" : [
"Input/2th_cthead1.png"
]
}
],
"briefdescription" : "Create a map of the approximate signed distance from the boundaries of a binary image.",
"detaileddescription" : "The ApproximateSignedDistanceMapImageFilter takes as input a binary image and produces a signed distance map. Each pixel value in the output contains the approximate distance from that pixel to the nearest \"object\" in the binary image. This filter differs from the DanielssonDistanceMapImageFilter in that it calculates the distance to the \"object edge\" for pixels within the object.\n\nNegative values in the output indicate that the pixel at that position is within an object in the input image. The absolute value of a negative pixel represents the approximate distance to the nearest object boundary pixel.\n\nWARNING: This filter requires that the output type be floating-point. Otherwise internal calculations will not be performed to the appropriate precision, resulting in completely incorrect (read: zero-valued) output.\n\nThe distances computed by this filter are Chamfer distances, which are only an approximation to Euclidian distances, and are not as exact approximations as those calculated by the DanielssonDistanceMapImageFilter . On the other hand, this filter is faster.\n\nThis filter requires that an \"inside value\" and \"outside value\" be set as parameters. The \"inside value\" is the intensity value of the binary image which corresponds to objects, and the \"outside value\" is the intensity of the background. (A typical binary image often represents objects as black (0) and background as white (usually 255), or vice-versa.) Note that this filter is slightly faster if the inside value is less than the outside value. Otherwise an extra iteration through the image is required.\n\nThis filter uses the FastChamferDistanceImageFilter and the IsoContourDistanceImageFilter internally to perform the distance calculations.\n\n\\see DanielssonDistanceMapImageFilter \n\n\n\\see SignedDanielssonDistanceMapImageFilter \n\n\n\\see SignedMaurerDistanceMapImageFilter \n\n\n\\see FastChamferDistanceImageFilter \n\n\n\\see IsoContourDistanceImageFilter \n\n\n\\author <NAME>",
"itk_module" : "ITKDistanceMap",
"itk_group" : "DistanceMap"
}
| 1,281 |
1,039 | package com.android.gpstest.io;
import android.content.Context;
import android.os.Environment;
import android.util.Log;
import android.widget.Toast;
import com.android.gpstest.Application;
import com.android.gpstest.R;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* A base implementation of a GNSS logger to store information to a file. Originally from https://github.com/google/gps-measurement-tools/tree/master/GNSSLogger,
* modified for GPSTest.
*/
public abstract class BaseFileLogger implements FileLogger {
protected final String TAG = this.getClass().getName();
protected static final String FILE_PREFIX = "gnss_log";
protected final Context context;
protected BufferedWriter fileWriter;
protected File file;
protected boolean isStarted = false;
protected File baseDirectory;
public BaseFileLogger(Context context) {
this.context = context;
}
public File getFile() {
return file;
}
public File getBaseDirectory() {
return baseDirectory;
}
/**
* Defines the file extension to be used in implementations, following the ".". So "json" would
* be used for files with the ".json" extension.
* @return the file extension to be used, following the ".". So "json" would
     * be used for files with the ".json" extension.
*/
abstract String getFileExtension();
/**
* Initialize file by adding a header, if desired for the given implementation
*
* @param writer writer to use when writing file
* @param filePath path to the current file
*/
abstract void writeFileHeader(BufferedWriter writer, String filePath);
/**
     * Called after files have finished initializing within startLog() but prior to returning from startLog(), if
* additional init is required for a specific file logging implementation
     * @param fileWriter the writer for the newly opened log file
* @param isNewFile true if the file is new, or false if it already existed and was re-opened
* @return true if the operation was successful, false if it was not
*/
abstract boolean postFileInit(BufferedWriter fileWriter, boolean isNewFile);
/**
* Start a file logging process
*
* @param existingFile The existing file if file logging is to be continued, or null if a
* new file should be created.
* @param date The date and time to use for the file name
* @return true if a new file was created, false if an existing file was used
*/
public synchronized boolean startLog(File existingFile, Date date) {
boolean isNewFile = false;
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
baseDirectory = new File(Environment.getExternalStorageDirectory(), FILE_PREFIX);
baseDirectory.mkdirs();
} else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state)) {
logError("Cannot write to external storage.");
return false;
} else {
logError("Cannot read external storage.");
return false;
}
String currentFilePath;
if (existingFile != null) {
// Use existing file
currentFilePath = existingFile.getAbsolutePath();
BufferedWriter writer;
try {
writer = new BufferedWriter(new FileWriter(existingFile, true));
} catch (IOException e) {
logException("Could not open file: " + currentFilePath, e);
return false;
}
if (!closeOldFileWriter()) {
return false;
}
file = existingFile;
fileWriter = writer;
isNewFile = false;
} else {
// Create new logging file
SimpleDateFormat formatter = new SimpleDateFormat("yyy_MM_dd_HH_mm_ss");
String fileName = String.format("%s_%s." + getFileExtension(), FILE_PREFIX, formatter.format(date));
File currentFile = new File(baseDirectory, fileName);
currentFilePath = currentFile.getAbsolutePath();
BufferedWriter writer;
try {
writer = new BufferedWriter(new FileWriter(currentFile, true));
} catch (IOException e) {
logException("Could not open file: " + currentFilePath, e);
return false;
}
writeFileHeader(writer, currentFilePath);
if (!closeOldFileWriter()) {
return false;
}
file = currentFile;
fileWriter = writer;
Log.d(TAG, Application.Companion.getApp().getString(R.string.logging_to_new_file, currentFilePath));
isNewFile = true;
}
boolean postInit = postFileInit(fileWriter, isNewFile);
if (!postInit) {
return false;
}
isStarted = true;
return isNewFile;
}
private boolean closeOldFileWriter() {
if (fileWriter != null) {
try {
fileWriter.close();
} catch (IOException e) {
logException(Application.Companion.getApp().getString(R.string.unable_to_close_all_file_streams), e);
return false;
}
}
return true;
}
/**
* Returns true if the logger is already started, or false if it is not
*
* @return
*/
public synchronized boolean isStarted() {
return isStarted;
}
public synchronized void close() {
if (fileWriter != null) {
try {
fileWriter.flush();
fileWriter.close();
fileWriter = null;
isStarted = false;
} catch (IOException e) {
logException("Unable to close all file streams.", e);
return;
}
}
}
protected void logException(String errorMessage, Exception e) {
Log.e(TAG, errorMessage, e);
Toast.makeText(context, errorMessage, Toast.LENGTH_LONG).show();
}
protected void logError(String errorMessage) {
Log.e(TAG, errorMessage);
Toast.makeText(context, errorMessage, Toast.LENGTH_LONG).show();
}
}
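/*
 * Illustrative sketch only (not from GPSTest): a minimal concrete subclass showing how the
 * abstract hooks above fit together. It assumes the FileLogger interface declares no further
 * abstract methods; the "csv" extension and header text are made up for this example.
 *
 * class ExampleCsvFileLogger extends BaseFileLogger {
 *     ExampleCsvFileLogger(Context context) {
 *         super(context);
 *     }
 *
 *     @Override
 *     String getFileExtension() {
 *         return "csv"; // yields file names like gnss_log_2021_01_01_12_00_00.csv
 *     }
 *
 *     @Override
 *     void writeFileHeader(BufferedWriter writer, String filePath) {
 *         try {
 *             // Write a simple one-line header identifying the file
 *             writer.write("# Example GNSS log: " + filePath);
 *             writer.newLine();
 *         } catch (IOException e) {
 *             logException("Could not write file header", e);
 *         }
 *     }
 *
 *     @Override
 *     boolean postFileInit(BufferedWriter fileWriter, boolean isNewFile) {
 *         return true; // no extra initialization needed in this sketch
 *     }
 * }
 */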
| 2,604 |
417 | #include <sys/mman.h>
#include <stdlib.h>
#include <cstdint>
#include <cstring>
#include <future>
#include <iostream>
#include <thread>
#include <vector>
#include <gflags/gflags.h>
#include "util.h"
DEFINE_int32(num_runs, 100,
"Number of times to zero the pages (per page count)");
DEFINE_int32(num_pages_min, 1, "Minimum number of pages to zero");
DEFINE_int32(num_pages_max, 50, "Maximum number of pages to zero");
DEFINE_int32(num_threads, 1,
"Number of threads on which to try the experiment at once.");
DEFINE_bool(touch_after_zero, false,
"Whether to actually try touching the pages we zero.");
struct Result {
std::uint64_t memsetCycles;
std::uint64_t madviseDontneedCycles;
std::uint64_t madviseDontneedWillneedCycles;
Result()
: memsetCycles(0),
madviseDontneedCycles(0),
madviseDontneedWillneedCycles(0) {}
void accum(const Result& other) {
memsetCycles += other.memsetCycles;
madviseDontneedCycles += other.madviseDontneedCycles;
madviseDontneedWillneedCycles += other.madviseDontneedWillneedCycles;
}
};
void maybeTouchPages(void* beginv, std::size_t length) {
char* begin = static_cast<char*>(beginv);
if (FLAGS_touch_after_zero) {
for (char* ptr = begin; ptr != begin + length; ptr += 4096) {
*ptr = 0;
}
}
}
void zeroMemset(void* ptr, std::size_t size) {
std::memset(ptr, 0, size);
}
void zeroMadviseDontneed(void* ptr, std::size_t size) {
int err = madvise(ptr, size, MADV_DONTNEED);
if (err != 0) {
std::cerr << "Couldn't madvise(... MADV_DONTNEED); error was "
<< err << std::endl;
exit(1);
}
}
void zeroMadviseDontneedWillneed(void* ptr, std::size_t size) {
int err = madvise(ptr, size, MADV_DONTNEED);
if (err != 0) {
std::cerr << "Couldn't madvise(..., MADV_DONTNEED); error was "
<< err << std::endl;
exit(1);
}
err = madvise(ptr, size, MADV_WILLNEED);
if (err != 0) {
std::cerr << "Couldn't madvise(..., MAP_POPULATE); error was "
<< err << std::endl;
exit(1);
}
}
Result runTest(std::size_t size) {
Result result;
void *ptr;
int err = posix_memalign(&ptr, 4096, size);
if (err != 0) {
std::cerr << "Couldn't allocate; error was " << err << std::endl;
exit(1);
}
// Touch all the pages from this thread.
std::memset(ptr, 0, size);
// Touch all the pages from another thread.
std::async(std::launch::async, std::memset, ptr, 0, size).get();
// We'll probably be dealing with uncached memory here; we care about this
// difference when pulling memory out of an inactive state.
util::flushCache(ptr, size);
result.memsetCycles = util::runTimed([&]() {
zeroMemset(ptr, size);
maybeTouchPages(ptr, size);
});
util::flushCache(ptr, size);
result.madviseDontneedCycles = util::runTimed([&]() {
zeroMadviseDontneed(ptr, size);
maybeTouchPages(ptr, size);
});
util::flushCache(ptr, size);
result.madviseDontneedWillneedCycles = util::runTimed([&]() {
zeroMadviseDontneedWillneed(ptr, size);
maybeTouchPages(ptr, size);
});
return result;
}
int main(int argc, char** argv) {
std::string usage =
"This program benchmarks memset vs madvise for zeroing memory.\n"
"Sample usage:\n";
usage += argv[0];
usage += " --num_pages_min=20 --num_pagse_max=50 --num_runs=30 ";
usage += "--num_threads=4 --touch_after_zero=true";
gflags::SetUsageMessage(usage);
gflags::ParseCommandLineFlags(&argc, &argv, true);
for (int i = FLAGS_num_pages_min; i <= FLAGS_num_pages_max; ++i) {
Result sum;
for (int j = 0; j < FLAGS_num_runs; ++j) {
std::vector<std::future<Result>> results;
for (int k = 0; k < FLAGS_num_threads; ++k) {
results.push_back(std::async(std::launch::async, runTest, 4096 * i));
}
for (int k = 0; k < FLAGS_num_threads; ++k) {
sum.accum(results[k].get());
}
}
std::cout << "When zeroing " << i << " pages (averaging across "
<< FLAGS_num_runs << " runs of " << FLAGS_num_threads << " threads:\n"
<< " memset: " << sum.memsetCycles / FLAGS_num_runs << " cycles\n"
<< " madvise(..., MADV_DONTNEED): "
<< sum.madviseDontneedCycles / FLAGS_num_runs << " cycles\n"
<< " madvise(..., MADV_DONTNEED); madvise(..., MADV_WILLNEED): "
<< sum.madviseDontneedWillneedCycles / FLAGS_num_runs << " cycles\n";
}
return 0;
}
| 1,818 |
648 | <reponame>jcgeer/synthea
{"resourceType":"ValueSet","id":"us-core-condition-code","text":{"status":"generated","div":"<div xmlns=\"http://www.w3.org/1999/xhtml\"><h2>US Core Condition Code</h2><div><p>This describes the problem. Diagnosis/Problem List is broadly defined as a series of brief statements that catalog a patient's medical, nursing, dental, social, preventative and psychiatric events and issues that are relevant to that patient's healthcare (e.g., signs, symptoms, and defined conditions). ICD-10 is appropriate for Diagnosis information, and ICD-9 for historical information.</p>\n</div><p><b>Copyright Statement:</b> This value set includes content from SNOMED CT, which is copyright © 2002+ International Health Terminology Standards Development Organisation (IHTSDO), and distributed by agreement between IHTSDO and HL7. Implementer use of SNOMED CT is not covered by this agreement. ICD-9 and ICD-10 are copyrighted by the World Health Organization (WHO) which owns and publishes the classification. See https://www.who.int/classifications/icd/en. WHO has authorized the development of an adaptation of ICD-9 and ICD-10 to ICD-9-CM to ICD-10-CM for use in the United States for U.S. government purposes. </p><p>This value set includes codes from the following code systems:</p><ul><li>Include these codes as defined in <a href=\"http://www.snomed.org/\"><code>http://snomed.info/sct</code></a><table class=\"none\"><tr><td style=\"white-space:nowrap\"><b>Code</b></td><td><b>Display</b></td></tr><tr><td><a href=\"http://browser.ihtsdotools.org/?perspective=full&conceptId1=160245001\">160245001</a></td><td>No current problems or disability</td><td/></tr></table></li><li>Include codes from <a href=\"http://www.snomed.org/\"><code>http://snomed.info/sct</code></a> where concept is-a 404684003 (Clinical finding (finding))</li><li>Include codes from <a href=\"http://www.snomed.org/\"><code>http://snomed.info/sct</code></a> where concept is-a 243796009 (Context-dependent category)</li><li>Include all codes defined in <code>http://hl7.org/fhir/sid/icd-10-cm</code></li><li>Include all codes defined in <code>http://hl7.org/fhir/sid/icd-9-cm</code></li></ul></div>"},"url":"http://hl7.org/fhir/us/core/ValueSet/us-core-condition-code","version":"3.1.0","name":"USCoreConditionCode","title":"US Core Condition Code","status":"active","date":"2019-05-21T00:00:00+10:00","publisher":"HL7 US Realm Steering Committee","contact":[{"telecom":[{"system":"other","value":"http://hl7.org/fhir"}]}],"description":"This describes the problem. Diagnosis/Problem List is broadly defined as a series of brief statements that catalog a patient's medical, nursing, dental, social, preventative and psychiatric events and issues that are relevant to that patient's healthcare (e.g., signs, symptoms, and defined conditions). ICD-10 is appropriate for Diagnosis information, and ICD-9 for historical information.","jurisdiction":[{"coding":[{"system":"urn:iso:std:iso:3166","code":"US","display":"United States of America"}]}],"copyright":"This value set includes content from SNOMED CT, which is copyright © 2002+ International Health Terminology Standards Development Organisation (IHTSDO), and distributed by agreement between IHTSDO and HL7. Implementer use of SNOMED CT is not covered by this agreement. ICD-9 and ICD-10 are copyrighted by the World Health Organization (WHO) which owns and publishes the classification. See https://www.who.int/classifications/icd/en. 
WHO has authorized the development of an adaptation of ICD-9 and ICD-10 to ICD-9-CM to ICD-10-CM for use in the United States for U.S. government purposes. ","compose":{"include":[{"system":"http://snomed.info/sct","concept":[{"code":"160245001"}]},{"system":"http://snomed.info/sct","filter":[{"property":"concept","op":"is-a","value":"404684003"}]},{"system":"http://snomed.info/sct","filter":[{"property":"concept","op":"is-a","value":"243796009"}]},{"system":"http://hl7.org/fhir/sid/icd-10-cm"},{"system":"http://hl7.org/fhir/sid/icd-9-cm"}]}} | 1,190 |
60,067 | <gh_stars>1000+
#ifndef THC_GENERIC_FILE
#error "You must define THC_GENERIC_FILE before including THCGenerateBoolType.h"
#endif
#define scalar_t bool
#define ureal bool
#define accreal int64_t
#define Real Bool
#define CReal CudaBool
#define THC_REAL_IS_BOOL
#line 1 THC_GENERIC_FILE
#include THC_GENERIC_FILE
#undef scalar_t
#undef ureal
#undef accreal
#undef Real
#undef CReal
#undef THC_REAL_IS_BOOL
#ifndef THCGenerateBoolType
#undef THC_GENERIC_FILE
#endif
| 190 |
357 | import smtplib, traceback
from email.mime.text import MIMEText
from email.header import Header
from config.mail_config import mail_config
from log.logger import logger as log
def send_mail(subject, content, receiver):
message = MIMEText(content, 'plain', 'utf-8')
message['From'] = Header(mail_config["user"], 'utf-8')
message['To'] = Header(mail_config["to"], 'utf-8')
message['Subject'] = Header(subject, 'utf-8')
try:
smtpObj = smtplib.SMTP()
smtpObj.connect(mail_config["host"], 25) # 25 为 SMTP 端口号
smtpObj.login(mail_config["user"], mail_config["passwd"])
smtpObj.sendmail(mail_config["user"], receiver, message.as_string())
log.info("mail sent success.")
except smtplib.SMTPException as e:
log.error("mail sent failed")
log.error(str(e))
log.error(traceback.format_exc()) | 349 |
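# Illustrative usage sketch (subject, body, and recipient below are made up; real values
# come from the caller and config/mail_config):
#
#   send_mail("Disk usage alert",
#             "Disk usage exceeded 90% on host-01",
#             ["ops@example.com"])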
679 | <gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _SD_PPTIN_HXX
#define _SD_PPTIN_HXX
#include <filter/msfilter/svdfppt.hxx>
#include <svx/msdffdef.hxx>
#include <diadef.h>
#include <svx/svdtypes.hxx>
#include <filter/msfilter/msfiltertracer.hxx>
#include <com/sun/star/uno/Any.h>
#include <boost/shared_ptr.hpp>
class SdDrawDocument;
class SfxMedium;
/*************************************************************************
|*
|* local import
|*
\************************************************************************/
class SdPage;
class SdAnimationInfo;
struct PptInteractiveInfoAtom;
class Ppt97Animation;
typedef boost::shared_ptr< Ppt97Animation > Ppt97AnimationPtr;
typedef ::std::map < SdrObject*, Ppt97AnimationPtr > tAnimationMap;
typedef std::vector< std::pair< SdrObject*, Ppt97AnimationPtr > > tAnimationVector;
class ImplSdPPTImport : public SdrPowerPointImport
{
SfxMedium& mrMed;
SvStorage& mrStorage;
// SvStream* mpPicStream;
DffRecordHeader maDocHd;
List maSlideNameList;
sal_Bool mbDocumentFound;
sal_uInt32 mnFilterOptions;
SdDrawDocument* mpDoc;
PresChange mePresChange;
SdrLayerID mnBackgroundLayerID;
SdrLayerID mnBackgroundObjectsLayerID;
tAnimationMap maAnimations;
void SetHeaderFooterPageSettings( SdPage* pPage, const PptSlidePersistEntry* pMasterPersist );
void ImportPageEffect( SdPage* pPage, const sal_Bool bNewAnimationsUsed );
void FillSdAnimationInfo( SdAnimationInfo* pInfo, PptInteractiveInfoAtom* pIAtom, String aMacroName );
virtual SdrObject* ProcessObj( SvStream& rSt, DffObjData& rData, void* pData, Rectangle& rTextRect, SdrObject* pObj );
virtual SdrObject* ApplyTextObj( PPTTextObj* pTextObj, SdrTextObj* pText, SdPage* pPage,
SfxStyleSheet*, SfxStyleSheet** ) const;
public:
String ReadSound( sal_uInt32 nSoundRef ) const;
String ReadMedia( sal_uInt32 nMediaRef ) const;
ImplSdPPTImport( SdDrawDocument* pDoc, SvStorage& rStorage, SfxMedium& rMed, PowerPointImportParam& );
~ImplSdPPTImport();
sal_Bool Import();
};
class SdPPTImport
{
ImplSdPPTImport* pFilter;
public:
SdPPTImport( SdDrawDocument* pDoc, SvStream& rDocStream, SvStorage& rStorage, SfxMedium& rMed, MSFilterTracer* pTracer = NULL );
~SdPPTImport();
sal_Bool Import();
};
#endif // _SD_PPTIN_HXX
| 1,125 |
468 | <filename>Bin/GLFunctions/Misc/OES_geometry_shader_Include.h
#define GLI_INCLUDE_GL_OES_GEOMETRY_SHADER
enum Main {
//GL_GEOMETRY_SHADER_OES = 0x8DD9,
//GL_GEOMETRY_SHADER_BIT_OES = 0x00000004,
//GL_GEOMETRY_LINKED_VERTICES_OUT_OES = 0x8916,
//GL_GEOMETRY_LINKED_INPUT_TYPE_OES = 0x8917,
//GL_GEOMETRY_LINKED_OUTPUT_TYPE_OES = 0x8918,
//GL_GEOMETRY_SHADER_INVOCATIONS_OES = 0x887F,
//GL_LAYER_PROVOKING_VERTEX_OES = 0x825E,
//GL_LINES_ADJACENCY_OES = 0x000A,
//GL_LINE_STRIP_ADJACENCY_OES = 0x000B,
//GL_TRIANGLES_ADJACENCY_OES = 0x000C,
//GL_TRIANGLE_STRIP_ADJACENCY_OES = 0x000D,
//GL_MAX_GEOMETRY_UNIFORM_COMPONENTS_OES = 0x8DDF,
//GL_MAX_GEOMETRY_UNIFORM_BLOCKS_OES = 0x8A2C,
//GL_MAX_COMBINED_GEOMETRY_UNIFORM_COMPONENTS_OES = 0x8A32,
//GL_MAX_GEOMETRY_INPUT_COMPONENTS_OES = 0x9123,
//GL_MAX_GEOMETRY_OUTPUT_COMPONENTS_OES = 0x9124,
//GL_MAX_GEOMETRY_OUTPUT_VERTICES_OES = 0x8DE0,
//GL_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS_OES = 0x8DE1,
//GL_MAX_GEOMETRY_SHADER_INVOCATIONS_OES = 0x8E5A,
//GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_OES = 0x8C29,
//GL_MAX_GEOMETRY_ATOMIC_COUNTER_BUFFERS_OES = 0x92CF,
//GL_MAX_GEOMETRY_ATOMIC_COUNTERS_OES = 0x92D5,
//GL_MAX_GEOMETRY_IMAGE_UNIFORMS_OES = 0x90CD,
//GL_MAX_GEOMETRY_SHADER_STORAGE_BLOCKS_OES = 0x90D7,
//GL_FIRST_VERTEX_CONVENTION_OES = 0x8E4D,
//GL_LAST_VERTEX_CONVENTION_OES = 0x8E4E,
//GL_UNDEFINED_VERTEX_OES = 0x8260,
//GL_PRIMITIVES_GENERATED_OES = 0x8C87,
//GL_FRAMEBUFFER_DEFAULT_LAYERS_OES = 0x9312,
//GL_MAX_FRAMEBUFFER_LAYERS_OES = 0x9317,
//GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_OES = 0x8DA8,
//GL_FRAMEBUFFER_ATTACHMENT_LAYERED_OES = 0x8DA7,
//GL_REFERENCED_BY_GEOMETRY_SHADER_OES = 0x9309,
};
void glFramebufferTextureOES(GLenum[Main] target, GLenum[Main] attachment, GLuint texture, GLint level);
| 1,646 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#pragma once
#include <cstdint>
namespace document { class Bucket; }
namespace vespa::config::content::internal {
class InternalStorDistributionType;
}
namespace storage::lib { class ClusterStateBundle; }
namespace search::bmcluster {
/*
* Interface class for describing cluster toplogy and how messages are
* routed from feeders into the cluster.
*/
class IBmDistribution {
public:
using DistributionConfigBuilder = vespa::config::content::internal::InternalStorDistributionType;
using DistributionConfig = const DistributionConfigBuilder;
virtual ~IBmDistribution() = default;
virtual uint32_t get_num_nodes() const = 0;
virtual uint32_t get_service_layer_node_idx(const document::Bucket & bucket) const = 0;
virtual uint32_t get_distributor_node_idx(const document::Bucket & bucket) const = 0;
virtual DistributionConfig get_distribution_config() const = 0;
virtual storage::lib::ClusterStateBundle get_cluster_state_bundle() const = 0;
};
};
| 331 |
1,555 |
struct Foo {
int a;
int b;
int c;
int d;
};
int f(struct Foo s)
{
if(s.a != 1)
return 1;
if(s.b != 2)
return 2;
if(s.c != 3)
return 3;
if(s.d != 4)
return 4;
return 0;
}
int
main()
{
struct Foo s;
s.a = 1;
s.b = 2;
s.c = 3;
s.d = 4;
return f(s);
}
| 166 |
496 | #define BOOST_TEST_MAIN
#include <boost/test/included/unit_test.hpp>
// std
#include <array>
#include <string>
#include <tuple>
#include <vector>
// internal
#include <poac/util/meta.hpp>
// 1. std::optional<std::size_t> index_of(const SinglePassRange& rng, const T& t)
// 2. inline auto index_of(InputIterator first, InputIterator last, const T& value)
BOOST_AUTO_TEST_CASE( poac_util_types_index_of_test )
{
using poac::util::meta::index_of;
std::vector<std::string> test_case{"0", "1", "2"};
BOOST_CHECK( index_of(test_case.begin(), test_case.end(), "1") == 1 );
BOOST_CHECK( index_of(test_case.begin(), test_case.end(), "10") == 3 ); // out of range
BOOST_CHECK( index_of(test_case.cbegin(), test_case.cend(), "0") == 0 );
}
// inline auto index_of_if(InputIterator first, InputIterator last, Predicate pred)
BOOST_AUTO_TEST_CASE( poac_util_types_index_of_if_test )
{
using poac::util::meta::index_of_if;
std::vector<std::string> test_case{"0", "1", "2"};
BOOST_CHECK(
index_of_if(
test_case.cbegin(),
test_case.cend(),
[](auto& x){ return x == "0"; }
) == 0
);
}
// bool duplicate(const SinglePassRange& rng)
BOOST_AUTO_TEST_CASE( poac_util_types_duplicate_test )
{
using poac::util::meta::duplicate;
std::vector<std::string> test_case{"0", "0", "2"};
BOOST_CHECK( duplicate(test_case) );
test_case = {"0", "1", "2"};
BOOST_CHECK( !duplicate(test_case) );
}
// 1. std::vector<T> ptree_to_vector(const U& pt, const K& key)
// 2. std::vector<T> ptree_to_vector(const U &pt)
BOOST_AUTO_TEST_CASE( poac_util_types_ptree_to_vector_test )
{
using poac::util::meta::to_vector;
boost::property_tree::ptree pt;
std::vector<std::string> test_case{ "0", "1", "2" };
boost::property_tree::ptree children;
{
boost::property_tree::ptree child;
child.put("", "0");
children.push_back(std::make_pair("", child));
}
{
boost::property_tree::ptree child;
child.put("", "1");
children.push_back(std::make_pair("", child));
}
{
boost::property_tree::ptree child;
child.put("", "2");
children.push_back(std::make_pair("", child));
}
pt.add_child("data", children);
BOOST_CHECK(to_vector<std::string>(pt, "data") == test_case ); // 1
BOOST_CHECK(to_vector<std::string>(children) == test_case ); // 2
}
BOOST_AUTO_TEST_CASE( poac_util_meta_are_all_same_test )
{
using poac::util::meta::are_all_same;
using poac::util::meta::are_all_same_v;
static_assert(are_all_same<int, int, int>::value);
static_assert(are_all_same_v<int, int, int>);
static_assert(std::negation_v<are_all_same<int, std::string, int>>);
static_assert(std::negation_v<are_all_same<std::string, int, int>>);
static_assert(std::negation_v<are_all_same<int, int, std::string>>);
}
BOOST_AUTO_TEST_CASE( poac_util_meta_is_specialization_test )
{
using poac::util::meta::is_specialization;
static_assert(is_specialization<std::vector<int>, std::vector>::value);
static_assert(is_specialization<std::map<int, int>, std::map>::value);
static_assert(is_specialization<std::map<int, std::vector<int>>, std::map>::value);
static_assert(std::negation_v<is_specialization<std::map<int, std::vector<int>>, std::vector>>);
}
BOOST_AUTO_TEST_CASE( poac_util_meta_is_tuple_test )
{
using poac::util::meta::is_tuple;
using poac::util::meta::is_tuple_v;
static_assert(is_tuple_v<std::tuple<int>>);
static_assert(is_tuple_v<std::tuple<int, std::string>>);
static_assert(std::negation_v<is_tuple<std::vector<int>>>);
}
BOOST_AUTO_TEST_CASE( poac_util_meta_to_array_test )
{
using poac::util::meta::to_array;
constexpr std::array<int, 3> test_case{
0, 1, 2
};
constexpr std::tuple<int, int, int> res1 = std::make_tuple(0, 1, 2);
constexpr std::array<int, 3> res = to_array(res1);
BOOST_CHECK( res == test_case );
}
| 1,758 |
353 | /*
* junixsocket
*
* Copyright 2009-2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.newsclub.net.unix;
import java.io.FileDescriptor;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.SocketException;
import java.util.concurrent.atomic.AtomicBoolean;
import com.kohlschutter.annotations.compiletime.SuppressFBWarnings;
/**
* A {@link DatagramSocket} implementation that works with AF_UNIX Unix domain sockets.
*
* @author <NAME>
*/
public final class AFUNIXDatagramSocket extends DatagramSocket implements AFUNIXSomeSocket,
AFUNIXSocketExtensions {
private static final InetSocketAddress WILDCARD_ADDRESS = new InetSocketAddress(0);
private final AFUNIXDatagramSocketImpl impl;
private final AncillaryDataSupport ancillaryDataSupport;
private final AtomicBoolean created = new AtomicBoolean(false);
private final AtomicBoolean deleteOnClose = new AtomicBoolean(true);
private final AFUNIXDatagramChannel channel = new AFUNIXDatagramChannel(this);
private AFUNIXDatagramSocket(final AFUNIXDatagramSocketImpl impl) throws IOException {
super(impl);
this.impl = impl;
this.ancillaryDataSupport = impl.ancillaryDataSupport;
}
/**
* Returns a new {@link AFUNIXDatagramSocket} instance.
*
* @return The new instance.
* @throws IOException on error.
*/
public static AFUNIXDatagramSocket newInstance() throws IOException {
return new AFUNIXDatagramSocket(new AFUNIXDatagramSocketImpl((FileDescriptor) null));
}
static AFUNIXDatagramSocket newInstance(FileDescriptor fdObj, int localPort, int remotePort)
throws IOException {
if (fdObj == null) {
return newInstance();
}
if (!fdObj.valid()) {
throw new SocketException("Invalid file descriptor");
}
int status = NativeUnixSocket.socketStatus(fdObj);
if (status == NativeUnixSocket.SOCKETSTATUS_INVALID) {
throw new SocketException("Not a valid socket");
}
AFUNIXDatagramSocket socket = new AFUNIXDatagramSocket(new AFUNIXDatagramSocketImpl(fdObj));
socket.getAFImpl().updatePorts(localPort, remotePort);
switch (status) {
case NativeUnixSocket.SOCKETSTATUS_CONNECTED:
socket.internalDummyConnect();
break;
case NativeUnixSocket.SOCKETSTATUS_BOUND:
socket.internalDummyBind();
break;
case NativeUnixSocket.SOCKETSTATUS_UNKNOWN:
break;
default:
throw new IllegalStateException("Invalid socketStatus response: " + status);
}
return socket;
}
@Override
public void connect(InetAddress address, int port) {
throw new IllegalArgumentException("Cannot connect to InetAddress");
}
/**
* Reads the next received packet without actually removing it from the queue.
*
* In other words, once a packet is received, calling this method multiple times in a row will not
* have further effects on the packet contents.
*
* This call still blocks until at least one packet has been received and added to the queue.
*
* @param p The packet.
* @throws IOException on error.
*/
public void peek(DatagramPacket p) throws IOException {
synchronized (p) {
if (isClosed()) {
throw new SocketException("Socket is closed");
}
getAFImpl().peekData(p);
}
}
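  /*
   * Illustrative usage sketch (socket setup omitted; the buffer size is arbitrary): peek()
   * blocks until a datagram is queued but does not consume it, so a subsequent receive()
   * returns the same packet.
   *
   *   DatagramPacket p = new DatagramPacket(new byte[1024], 1024);
   *   sock.peek(p);    // inspect the payload; the packet stays in the queue
   *   sock.receive(p); // now the packet is actually removed from the queue
   */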
@Override
public void send(DatagramPacket p) throws IOException {
synchronized (p) {
if (isClosed()) {
throw new SocketException("Socket is closed");
}
if (!isBound()) {
internalDummyBind();
}
getAFImpl().send(p);
}
}
void internalDummyConnect() throws SocketException {
super.connect(AFUNIXSocketAddress.INTERNAL_DUMMY_DONT_CONNECT);
}
void internalDummyBind() throws SocketException {
bind(AFUNIXSocketAddress.INTERNAL_DUMMY_BIND);
}
@Override
public synchronized void connect(SocketAddress addr) throws SocketException {
if (!isBound()) {
internalDummyBind();
}
internalDummyConnect();
try {
getAFImpl().connect(AFUNIXSocketAddress.preprocessSocketAddress(addr, null));
} catch (SocketException e) {
throw e;
} catch (IOException e) {
throw (SocketException) new SocketException(e.getMessage()).initCause(e);
}
}
@Override
public synchronized AFUNIXSocketAddress getRemoteSocketAddress() {
return getAFImpl().getRemoteSocketAddress();
}
@Override
public boolean isConnected() {
return super.isConnected() || impl.isConnected();
}
@Override
public boolean isBound() {
return super.isBound() || impl.isBound();
}
@Override
public void close() {
// IMPORTANT This method must not be synchronized on "this",
// otherwise we can't unblock a pending read
if (isClosed()) {
return;
}
getAFImpl().close();
boolean wasBound = isBound();
if (wasBound && deleteOnClose.get()) {
InetAddress addr = getLocalAddress();
if (AFUNIXSocketAddress.isSupportedAddress(addr)) {
try {
AFUNIXSocketAddress socketAddress = AFUNIXSocketAddress.unwrap(addr, 0);
if (socketAddress.hasFilename()) {
if (!socketAddress.getFile().delete()) {
// ignore
}
}
} catch (IOException e) {
// ignore
}
}
}
super.close();
}
@Override
public synchronized void bind(SocketAddress addr) throws SocketException {
if (isClosed()) {
throw new SocketException("Socket is closed");
}
if (isBound()) {
if (addr == AFUNIXSocketAddress.INTERNAL_DUMMY_BIND) { // NOPMD
return;
}
throw new SocketException("already bound");
}
super.bind(AFUNIXSocketAddress.INTERNAL_DUMMY_BIND);
if (addr == null || WILDCARD_ADDRESS.equals(addr)) {
return;
}
AFUNIXSocketAddress epoint = AFUNIXSocketAddress.preprocessSocketAddress(addr, null);
try {
getAFImpl().bind(epoint);
} catch (SocketException e) {
getAFImpl().close();
throw e;
}
}
@Override
public AFUNIXSocketAddress getLocalSocketAddress() {
if (isClosed()) {
return null;
}
if (!isBound()) {
return null;
}
try {
return AFUNIXSocketAddress.unwrap(getLocalAddress(), getLocalPort());
} catch (SocketException e) {
return null;
}
}
/**
* Checks if this {@link AFUNIXDatagramSocket}'s bound filename should be removed upon
* {@link #close()}.
*
* Deletion is not guaranteed, especially when not supported (e.g., addresses in the abstract
* namespace).
*
* @return {@code true} if an attempt is made to delete the socket file upon {@link #close()}.
*/
public boolean isDeleteOnClose() {
return deleteOnClose.get();
}
/**
* Enables/disables deleting this {@link AFUNIXDatagramSocket}'s bound filename upon
* {@link #close()}.
*
* Deletion is not guaranteed, especially when not supported (e.g., addresses in the abstract
* namespace).
*
* @param b Enabled if {@code true}.
*/
public void setDeleteOnClose(boolean b) {
deleteOnClose.set(b);
}
AFUNIXDatagramSocketImpl getAFImpl() {
if (created.compareAndSet(false, true)) {
try {
getSoTimeout(); // trigger create via java.net.Socket
} catch (SocketException e) {
// ignore
}
}
return impl;
}
@Override
public int getAncillaryReceiveBufferSize() {
return ancillaryDataSupport.getAncillaryReceiveBufferSize();
}
@Override
public void setAncillaryReceiveBufferSize(int size) {
ancillaryDataSupport.setAncillaryReceiveBufferSize(size);
}
@Override
public void ensureAncillaryReceiveBufferSize(int minSize) {
ancillaryDataSupport.ensureAncillaryReceiveBufferSize(minSize);
}
@Override
public FileDescriptor[] getReceivedFileDescriptors() throws IOException {
return ancillaryDataSupport.getReceivedFileDescriptors();
}
@Override
public void clearReceivedFileDescriptors() {
ancillaryDataSupport.clearReceivedFileDescriptors();
}
@Override
public void setOutboundFileDescriptors(FileDescriptor... fdescs) throws IOException {
if (fdescs != null && fdescs.length > 0 && !isConnected()) {
throw new SocketException("Not connected");
}
ancillaryDataSupport.setOutboundFileDescriptors(fdescs);
}
@Override
public boolean hasOutboundFileDescriptors() {
return ancillaryDataSupport.hasOutboundFileDescriptors();
}
@Override
public AFUNIXSocketCredentials getPeerCredentials() throws IOException {
if (isClosed() || !isConnected()) {
throw new SocketException("Not connected");
}
return impl.getPeerCredentials();
}
@Override
public boolean isClosed() {
return super.isClosed() || getAFImpl().isClosed();
}
@SuppressFBWarnings("EI_EXPOSE_REP")
@Override
public AFUNIXDatagramChannel getChannel() {
return channel;
}
@Override
public FileDescriptor getFileDescriptor() throws IOException {
return getAFImpl().getFileDescriptor();
}
}
| 3,386 |
2,219 | <filename>src/base/test/test_file_util_win.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/test/test_file_util.h"
#include <aclapi.h>
#include <stddef.h>
#include <wchar.h>
#include <windows.h>
#include <memory>
#include "base/check_op.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/memory/ptr_util.h"
#include "base/notreached.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "base/threading/platform_thread.h"
#include "base/win/scoped_handle.h"
#include "base/win/shlwapi.h"
namespace base {
namespace {
struct PermissionInfo {
PSECURITY_DESCRIPTOR security_descriptor;
ACL dacl;
};
// Gets a blob indicating the permission information for |path|.
// |length| is the length of the blob. Zero on failure.
// Returns the blob pointer, or NULL on failure.
void* GetPermissionInfo(const FilePath& path, size_t* length) {
DCHECK(length);
*length = 0;
PACL dacl = nullptr;
PSECURITY_DESCRIPTOR security_descriptor;
if (GetNamedSecurityInfo(path.value().c_str(), SE_FILE_OBJECT,
DACL_SECURITY_INFORMATION, nullptr, nullptr, &dacl,
nullptr, &security_descriptor) != ERROR_SUCCESS) {
return nullptr;
}
DCHECK(dacl);
*length = sizeof(PSECURITY_DESCRIPTOR) + dacl->AclSize;
PermissionInfo* info = reinterpret_cast<PermissionInfo*>(new char[*length]);
info->security_descriptor = security_descriptor;
memcpy(&info->dacl, dacl, dacl->AclSize);
return info;
}
// Restores the permission information for |path|, given the blob retrieved
// using |GetPermissionInfo()|.
// |info| is the pointer to the blob.
// |length| is the length of the blob.
// Either |info| or |length| may be NULL/0, in which case nothing happens.
bool RestorePermissionInfo(const FilePath& path, void* info, size_t length) {
if (!info || !length)
return false;
PermissionInfo* perm = reinterpret_cast<PermissionInfo*>(info);
DWORD rc = SetNamedSecurityInfo(const_cast<wchar_t*>(path.value().c_str()),
SE_FILE_OBJECT, DACL_SECURITY_INFORMATION,
nullptr, nullptr, &perm->dacl, nullptr);
LocalFree(perm->security_descriptor);
char* char_array = reinterpret_cast<char*>(info);
delete [] char_array;
return rc == ERROR_SUCCESS;
}
std::unique_ptr<wchar_t[]> ToCStr(const std::basic_string<wchar_t>& str) {
size_t size = str.size() + 1;
std::unique_ptr<wchar_t[]> ptr = std::make_unique<wchar_t[]>(size);
wcsncpy(ptr.get(), str.c_str(), size);
return ptr;
}
} // namespace
bool DieFileDie(const FilePath& file, bool recurse) {
// It turns out that to not induce flakiness a long timeout is needed.
const int kIterations = 25;
const TimeDelta kTimeout = TimeDelta::FromSeconds(10) / kIterations;
if (!PathExists(file))
return true;
// Sometimes Delete fails, so try a few more times. Divide the timeout
// into short chunks, so that if a try succeeds, we won't delay the test
// for too long.
for (int i = 0; i < kIterations; ++i) {
bool success;
if (recurse)
success = DeletePathRecursively(file);
else
success = DeleteFile(file);
if (success)
return true;
PlatformThread::Sleep(kTimeout);
}
return false;
}
void SyncPageCacheToDisk() {
// Approximating this with noop. The proper implementation would require
// administrator privilege:
// https://docs.microsoft.com/en-us/windows/desktop/api/FileAPI/nf-fileapi-flushfilebuffers
}
bool EvictFileFromSystemCache(const FilePath& file) {
win::ScopedHandle file_handle(
CreateFile(file.value().c_str(), GENERIC_READ | GENERIC_WRITE, 0, nullptr,
OPEN_EXISTING, FILE_FLAG_NO_BUFFERING, nullptr));
if (!file_handle.IsValid())
return false;
// Re-write the file time information to trigger cache eviction for the file.
// This function previously overwrote the entire file without buffering, but
// local experimentation validates this simplified and *much* faster approach:
// [1] Sysinternals RamMap no longer lists these files as cached afterwards.
// [2] Telemetry performance test startup.cold.blank_page reports sane values.
BY_HANDLE_FILE_INFORMATION bhi = {0};
CHECK(::GetFileInformationByHandle(file_handle.Get(), &bhi));
CHECK(::SetFileTime(file_handle.Get(), &bhi.ftCreationTime,
&bhi.ftLastAccessTime, &bhi.ftLastWriteTime));
return true;
}
// Deny |permission| on the file |path|, for the current user.
bool DenyFilePermission(const FilePath& path, DWORD permission) {
PACL old_dacl;
PSECURITY_DESCRIPTOR security_descriptor;
std::unique_ptr<TCHAR[]> path_ptr = ToCStr(path.value().c_str());
if (GetNamedSecurityInfo(path_ptr.get(), SE_FILE_OBJECT,
DACL_SECURITY_INFORMATION, nullptr, nullptr,
&old_dacl, nullptr,
&security_descriptor) != ERROR_SUCCESS) {
return false;
}
std::unique_ptr<TCHAR[]> current_user = ToCStr(std::wstring(L"CURRENT_USER"));
EXPLICIT_ACCESS new_access = {
permission,
DENY_ACCESS,
0,
{nullptr, NO_MULTIPLE_TRUSTEE, TRUSTEE_IS_NAME, TRUSTEE_IS_USER,
current_user.get()}};
PACL new_dacl;
if (SetEntriesInAcl(1, &new_access, old_dacl, &new_dacl) != ERROR_SUCCESS) {
LocalFree(security_descriptor);
return false;
}
DWORD rc = SetNamedSecurityInfo(path_ptr.get(), SE_FILE_OBJECT,
DACL_SECURITY_INFORMATION, nullptr, nullptr,
new_dacl, nullptr);
LocalFree(security_descriptor);
LocalFree(new_dacl);
return rc == ERROR_SUCCESS;
}
bool MakeFileUnreadable(const FilePath& path) {
return DenyFilePermission(path, GENERIC_READ);
}
bool MakeFileUnwritable(const FilePath& path) {
return DenyFilePermission(path, GENERIC_WRITE);
}
FilePermissionRestorer::FilePermissionRestorer(const FilePath& path)
: path_(path), info_(nullptr), length_(0) {
info_ = GetPermissionInfo(path_, &length_);
DCHECK(info_);
DCHECK_NE(0u, length_);
}
FilePermissionRestorer::~FilePermissionRestorer() {
if (!RestorePermissionInfo(path_, info_, length_))
NOTREACHED();
}
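// Illustrative usage sketch (the path and test macro below are assumptions, not part of this
// file): temporarily deny write access during a test and restore the original ACL on scope exit.
//
//   base::FilePath path(FILE_PATH_LITERAL("C:\\temp\\example.txt"));
//   {
//     base::FilePermissionRestorer restore_permissions(path);
//     CHECK(base::MakeFileUnwritable(path));
//     // ... exercise code that must cope with an unwritable file ...
//   }  // original permissions are restored here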
} // namespace base
| 2,487 |
1,682 | <reponame>haroldl/rest.li
/*
Copyright (c) 2016 LinkedIn Corp.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.linkedin.restli.common.testutils;
import com.linkedin.data.ByteString;
import com.linkedin.data.DataComplex;
import com.linkedin.data.DataList;
import com.linkedin.data.DataMap;
import com.linkedin.data.Null;
import com.linkedin.data.schema.DataSchemaUtil;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Compare data objects, and return easy to understand error messages.
* Inspired by JSONassert [https://github.com/skyscreamer/jsonassert] which has identical error messages for JSON.
*
* @author <NAME>
*/
public class DataCompare
{
private final Options _options;
private DataCompare()
{
_options = new Options(true, true);
}
private DataCompare(Options options)
{
_options = options;
}
/**
* Compare the expected and actual data objects, and return a comparison result.
*
* @param expected expected data object
* @param actual actual data object
* @return comparison result
*/
public static Result compare(Object expected, Object actual)
{
Result compareResult = new Result();
new DataCompare().compare("", expected, actual, compareResult);
return compareResult;
}
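  /*
   * Illustrative usage sketch (keys and values are made up):
   *
   *   DataMap expected = new DataMap();
   *   expected.put("name", "alice");
   *   DataMap actual = new DataMap();
   *   actual.put("name", "bob");
   *   DataCompare.Result result = DataCompare.compare(expected, actual);
   *   if (result.hasError()) {
   *     System.out.println(result); // prints the path, expected value, and actual value
   *   }
   */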
/**
   * Compare the expected and actual data objects according to the given options, and return a comparison result.
*
* @param expected expected data object
* @param actual actual data object
   * @param options comparison options
* @return comparison result
*/
public static Result compare(DataComplex expected, DataComplex actual, Options options)
{
Result compareResult = new Result();
new DataCompare(options).compare("", expected, actual, compareResult);
return compareResult;
}
private void compare(String path, Object expected, Object actual, Result result)
{
if (expected.getClass().isAssignableFrom(actual.getClass()))
{
if (expected instanceof DataMap && actual instanceof DataMap)
{
compareDataMap(path, (DataMap) expected, (DataMap) actual, result);
}
else if (expected instanceof DataList && actual instanceof DataList)
{
compareDataList(path, (DataList) expected, (DataList) actual, result);
}
else if (!expected.equals(actual))
{
result.mismatchedValue(path, expected, actual);
}
else
{
assert expected.equals(actual);
}
}
else if (expected instanceof Number && actual instanceof Number)
{
compareNumbers(path, (Number) expected, (Number) actual, result);
}
else if (isStringLike(expected) && isStringLike(actual))
{
compareStringLike(path, expected, actual, result);
}
else
{
result.mismatchedType(path, expected, actual);
}
}
private void compareDataMap(String path, DataMap expected, DataMap actual, Result result)
{
checkDataMapKeysExpectedInActual(path, expected, actual, result);
checkDataMapKeysActualInExpected(path, expected, actual, result);
}
// Check that actual data map contains all the keys in expected data map, and that the values match
private void checkDataMapKeysExpectedInActual(String path, DataMap expected, DataMap actual, Result result)
{
Set<String> expectedKeys = expected.keySet();
for (String key : expectedKeys)
{
Object expectedValue = expected.get(key);
if (actual.containsKey(key))
{
Object actualValue = actual.get(key);
compare(qualify(path, key), expectedValue, actualValue, result);
} else
{
result.missing(path, key);
}
}
}
// Check that expected data map contains all the keys in actual data map
private static void checkDataMapKeysActualInExpected(String path, DataMap expected, DataMap actual, Result result)
{
actual.keySet().forEach(key -> {
if (!expected.containsKey(key))
{
result.unexpected(path, key);
}
});
}
private void compareDataList(String path, DataList expected, DataList actual, Result result)
{
if (expected.size() != actual.size())
{
result.addMessage(path + "[] Expected " + expected.size() + " values but got " + actual.size());
return;
}
if (_options._dataListComparator != null) {
expected.sort(_options._dataListComparator);
actual.sort(_options._dataListComparator);
}
for (int index = 0; index < expected.size(); ++index)
{
Object expectedItem = expected.get(index);
Object actualItem = actual.get(index);
compare(path + "[" + index + "]", expectedItem, actualItem, result);
}
}
private void compareNumbers(String path, Number expected, Number actual, Result result)
{
if (expected.getClass().isAssignableFrom(actual.getClass()))
{ // compare by value for same type
if (!expected.equals(actual))
{
result.mismatchedValue(path, expected, actual);
}
}
else if (_options._shouldCoerceNumbers)
{ // coerce to BigDecimal and compare by value if coercion is enabled
BigDecimal expectedBigDecimal = new BigDecimal(expected.toString());
BigDecimal actualBigDecimal = new BigDecimal(actual.toString());
if (expectedBigDecimal.compareTo(actualBigDecimal) != 0)
{
result.mismatchedValue(path, expected, actual);
}
}
else
{
result.mismatchedType(path, expected, actual);
}
}
private void compareStringLike(String path, Object expected, Object actual, Result result)
{
if (expected.getClass().isAssignableFrom(actual.getClass()))
{
if (!expected.equals(actual))
{
result.mismatchedValue(path, expected, actual);
}
}
else if (_options._shouldCoerceByteStrings)
{
if (expected instanceof ByteString && actual instanceof String)
{
compareByteString(path, (ByteString) expected, (String) actual, result);
}
else if (expected instanceof String && actual instanceof ByteString)
{
compareByteString(path, (String) expected, (ByteString) actual, result);
}
else
{
result.mismatchedType(path, expected, actual);
}
}
else
{
result.mismatchedType(path, expected, actual);
}
}
private boolean isStringLike(Object object)
{
return object instanceof String || object instanceof ByteString;
}
private void compareByteString(String path, ByteString expected, String actual, Result result)
{
if (!expected.asAvroString().equals(actual))
{
result.mismatchedValue(path, expected.asAvroString(), actual);
}
}
private void compareByteString(String path, String expected, ByteString actual, Result result)
{
if (!expected.equals(actual.asAvroString()))
{
result.mismatchedValue(path, expected, actual.asAvroString());
}
}
/**
* The options used to configure comparison of data objects.
*/
public static class Options
{
/**
* When comparing numbers for equality, whether to coerce numbers to double and compare by value,
* or to compare using equals.
*/
private final boolean _shouldCoerceNumbers;
/**
* When comparing a bytestring and a string, whether to coerce the bytestring to a string and compare,
* or to compare using equals.
*/
private final boolean _shouldCoerceByteStrings;
/**
* When comparing DataList, use the non-null comparator to sort and then compare.
*/
private final Comparator<? super Object> _dataListComparator;
public Options(boolean shouldCoerceNumbers, boolean shouldCoerceByteStrings)
{
this(shouldCoerceNumbers, shouldCoerceByteStrings, null);
}
public Options(boolean shouldCoerceNumbers, boolean shouldCoerceByteStrings,
Comparator<? super Object> dataListComparator)
{
_shouldCoerceNumbers = shouldCoerceNumbers;
_shouldCoerceByteStrings = shouldCoerceByteStrings;
_dataListComparator = dataListComparator;
}
}
/**
* The result of comparing data objects.
*/
public static class Result
{
private final List<String> _messages = new ArrayList<>();
private Result()
{
}
/**
* Whether the expected and actual data objects did not match
*/
public boolean hasError()
{
return !_messages.isEmpty();
}
@Override
public String toString()
{
return "\n" + _messages.stream().collect(Collectors.joining("\n\n")) + "\n";
}
private void addMessage(String message)
{
_messages.add(message);
}
private void missing(String path, Object expectedKey)
{
_messages.add(path
+ "\nExpected: "
+ expectedKey.toString()
+ "\n but none found");
}
private void unexpected(String path, Object unexpectedKey)
{
_messages.add(path
+ "\nUnexpected: "
+ unexpectedKey);
}
private void mismatchedValue(String path, Object expected, Object actual)
{
_messages.add(path
+ "\nExpected: "
+ expected.toString()
+ "\n got: "
+ actual.toString());
}
private void mismatchedType(String path, Object expected, Object actual)
{
_messages.add(path
+ "\nExpected: "
+ describeType(expected)
+ "\n got: " + describeType(actual));
}
}
private static String qualify(String prefix, String key)
{
boolean isUnionMemberKey = key.contains(".");
String valueToAppend;
if (isUnionMemberKey)
{
// union member keys for named types are very verbose, so shorten them to their simple names
// e.g. a prefix "foo" with union value "com.linkedin.restli.common.EmptyRecord" will have path "foo{EmptyRecord}"
valueToAppend = "{" + key.substring(key.lastIndexOf(".") + 1) + "}";
}
else
{
valueToAppend = "." + key;
}
return "".equals(prefix) ? key : prefix + valueToAppend;
}
private static String describeType(Object value)
{
Class<?> valueClass = value.getClass();
if (valueClass == Null.class)
{
return "null";
}
else if (isPrimitiveClass(valueClass))
{
return DataSchemaUtil.classToPrimitiveDataSchema(value.getClass()).getUnionMemberKey();
}
else
{
assert isComplexClass(valueClass);
if (valueClass == DataMap.class)
{
return "data map";
}
else
{
assert valueClass == DataList.class;
return "data list";
}
}
}
private static boolean isComplexClass(Class<?> clazz)
{
return clazz == DataMap.class || clazz == DataList.class;
}
private static boolean isPrimitiveClass(Class<?> clazz)
{
return clazz == String.class
|| clazz == Integer.class
|| clazz == Double.class
|| clazz == Boolean.class
|| clazz == Long.class
|| clazz == Float.class
|| clazz == ByteString.class
|| clazz == Null.class;
}
}
| 4,201 |
1,275 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.query.optimizer.filter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.pinot.common.request.Expression;
import org.apache.pinot.common.request.ExpressionType;
import org.apache.pinot.common.request.FilterOperator;
import org.apache.pinot.common.request.Function;
import org.apache.pinot.common.utils.request.FilterQueryTree;
import org.apache.pinot.common.utils.request.RequestUtils;
import org.apache.pinot.pql.parsers.pql2.ast.FilterKind;
import org.apache.pinot.spi.data.FieldSpec;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.data.Schema;
/**
* The {@code MergeRangeFilterOptimizer} merges multiple RANGE predicates on the same column joined by AND by taking
* their intersection. It also pulls up the merged predicate in the absence of other predicates.
*
* NOTE: This optimizer follows the {@link FlattenAndOrFilterOptimizer}, so all the AND/OR filters are already
* flattened.
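 *
 * Illustrative example (not from the original source): assuming a single-value INT column {@code col}, the filter
 * {@code col > 10 AND col <= 20 AND col > 15} would be rewritten into the single merged range {@code (15, 20]},
 * i.e. {@code col > 15 AND col <= 20}.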
*/
public class MergeRangeFilterOptimizer implements FilterOptimizer {
@Override
public FilterQueryTree optimize(FilterQueryTree filterQueryTree, @Nullable Schema schema) {
if (schema == null) {
return filterQueryTree;
}
FilterOperator operator = filterQueryTree.getOperator();
if (operator == FilterOperator.AND) {
List<FilterQueryTree> children = filterQueryTree.getChildren();
Map<String, Range> rangeMap = new HashMap<>();
List<FilterQueryTree> newChildren = new ArrayList<>();
boolean recreateFilter = false;
// Iterate over all the child filters to create and merge ranges
for (FilterQueryTree child : children) {
FilterOperator childOperator = child.getOperator();
assert childOperator != FilterOperator.AND;
if (childOperator == FilterOperator.OR) {
child.getChildren().replaceAll(c -> optimize(c, schema));
newChildren.add(child);
} else if (childOperator == FilterOperator.RANGE) {
String column = child.getColumn();
FieldSpec fieldSpec = schema.getFieldSpecFor(column);
if (fieldSpec == null || !fieldSpec.isSingleValueField()) {
// Skip optimizing transform expression and multi-value column
// NOTE: We cannot optimize multi-value column because [0, 10] will match filter "col < 1 AND col > 9", but
// not the merged one.
newChildren.add(child);
continue;
}
// Create a range and merge with current range if exists
Range range = Range.getRange(child.getValue().get(0), fieldSpec.getDataType());
Range currentRange = rangeMap.get(column);
if (currentRange == null) {
rangeMap.put(column, range);
} else {
currentRange.intersect(range);
recreateFilter = true;
}
} else {
newChildren.add(child);
}
}
if (recreateFilter) {
if (newChildren.isEmpty() && rangeMap.size() == 1) {
// Single range without other filters
Map.Entry<String, Range> entry = rangeMap.entrySet().iterator().next();
return getRangeFilterQueryTree(entry.getKey(), entry.getValue());
} else {
for (Map.Entry<String, Range> entry : rangeMap.entrySet()) {
newChildren.add(getRangeFilterQueryTree(entry.getKey(), entry.getValue()));
}
return new FilterQueryTree(null, null, FilterOperator.AND, newChildren);
}
} else {
return filterQueryTree;
}
} else if (operator == FilterOperator.OR) {
filterQueryTree.getChildren().replaceAll(c -> optimize(c, schema));
return filterQueryTree;
} else {
return filterQueryTree;
}
}
/**
* Helper method to construct a RANGE predicate FilterQueryTree from the given column and range.
*/
private static FilterQueryTree getRangeFilterQueryTree(String column, Range range) {
return new FilterQueryTree(column, Collections.singletonList(range.getRangeString()), FilterOperator.RANGE, null);
}
@Override
public Expression optimize(Expression filterExpression, @Nullable Schema schema) {
if (schema == null || filterExpression.getType() != ExpressionType.FUNCTION) {
return filterExpression;
}
Function function = filterExpression.getFunctionCall();
String operator = function.getOperator();
if (operator.equals(FilterKind.AND.name())) {
List<Expression> children = function.getOperands();
Map<String, Range> rangeMap = new HashMap<>();
List<Expression> newChildren = new ArrayList<>();
boolean recreateFilter = false;
// Iterate over all the child filters to create and merge ranges
for (Expression child : children) {
Function childFunction = child.getFunctionCall();
FilterKind filterKind = FilterKind.valueOf(childFunction.getOperator());
assert filterKind != FilterKind.AND;
if (filterKind == FilterKind.OR) {
childFunction.getOperands().replaceAll(o -> optimize(o, schema));
newChildren.add(child);
} else if (filterKind.isRange()) {
List<Expression> operands = childFunction.getOperands();
Expression lhs = operands.get(0);
if (lhs.getType() != ExpressionType.IDENTIFIER) {
// Skip optimizing transform expression
newChildren.add(child);
continue;
}
String column = lhs.getIdentifier().getName();
FieldSpec fieldSpec = schema.getFieldSpecFor(column);
if (fieldSpec == null || !fieldSpec.isSingleValueField()) {
// Skip optimizing multi-value column
// NOTE: We cannot optimize multi-value column because [0, 10] will match filter "col < 1 AND col > 9", but
// not the merged one.
newChildren.add(child);
continue;
}
// Create a range and merge with current range if exists
DataType dataType = fieldSpec.getDataType();
Range range = getRange(filterKind, operands, dataType);
Range currentRange = rangeMap.get(column);
if (currentRange == null) {
rangeMap.put(column, range);
} else {
currentRange.intersect(range);
recreateFilter = true;
}
} else {
newChildren.add(child);
}
}
if (recreateFilter) {
if (newChildren.isEmpty() && rangeMap.size() == 1) {
// Single range without other filters
Map.Entry<String, Range> entry = rangeMap.entrySet().iterator().next();
return getRangeFilterExpression(entry.getKey(), entry.getValue());
} else {
for (Map.Entry<String, Range> entry : rangeMap.entrySet()) {
newChildren.add(getRangeFilterExpression(entry.getKey(), entry.getValue()));
}
function.setOperands(newChildren);
return filterExpression;
}
} else {
return filterExpression;
}
} else if (operator.equals(FilterKind.OR.name())) {
function.getOperands().replaceAll(c -> optimize(c, schema));
return filterExpression;
} else {
return filterExpression;
}
}
/**
* Helper method to create a Range from the given filter kind, operands and data type.
*/
private static Range getRange(FilterKind filterKind, List<Expression> operands, DataType dataType) {
switch (filterKind) {
case GREATER_THAN:
return new Range(getComparable(operands.get(1), dataType), false, null, false);
case GREATER_THAN_OR_EQUAL:
return new Range(getComparable(operands.get(1), dataType), true, null, false);
case LESS_THAN:
return new Range(null, false, getComparable(operands.get(1), dataType), false);
case LESS_THAN_OR_EQUAL:
return new Range(null, false, getComparable(operands.get(1), dataType), true);
case BETWEEN:
return new Range(getComparable(operands.get(1), dataType), true, getComparable(operands.get(2), dataType),
true);
case RANGE:
return Range.getRange(operands.get(1).getLiteral().getStringValue(), dataType);
default:
throw new IllegalStateException("Unsupported filter kind: " + filterKind);
}
}
/**
* Helper method to create a Comparable from the given literal expression and data type.
*/
@SuppressWarnings("rawtypes")
private static Comparable getComparable(Expression literalExpression, DataType dataType) {
return dataType.convertInternal(literalExpression.getLiteral().getFieldValue().toString());
}
/**
* Helper method to construct a RANGE predicate filter Expression from the given column and range.
*/
private static Expression getRangeFilterExpression(String column, Range range) {
Expression rangeFilter = RequestUtils.getFunctionExpression(FilterKind.RANGE.name());
rangeFilter.getFunctionCall().setOperands(Arrays.asList(RequestUtils.createIdentifierExpression(column),
RequestUtils.getLiteralExpression(range.getRangeString())));
return rangeFilter;
}
}
| 3,679 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_framework.hxx"
#include <uifactory/menubarfactory.hxx>
//_________________________________________________________________________________________________________________
// my own includes
//_________________________________________________________________________________________________________________
#include <threadhelp/resetableguard.hxx>
#include "services.h"
#include <uielement/menubarwrapper.hxx>
//_________________________________________________________________________________________________________________
// interface includes
//_________________________________________________________________________________________________________________
#include <com/sun/star/util/XURLTransformer.hpp>
#include <com/sun/star/frame/XFrame.hpp>
#include <com/sun/star/frame/XModel.hpp>
#include <com/sun/star/lang/XInitialization.hpp>
#include <com/sun/star/ui/XModuleUIConfigurationManagerSupplier.hpp>
#ifndef _COM_SUN_STAR_UI_XUICONFIGURATIONMANAGERSUPPLIER_HPP_
#include <com/sun/star/ui/XUIConfigurationManagerSupplier.hpp>
#endif
//_________________________________________________________________________________________________________________
// includes of other projects
//_________________________________________________________________________________________________________________
#ifndef _VCL_MENU_HXX_
#include <vcl/menu.hxx>
#endif
#include <vcl/svapp.hxx>
#include <tools/urlobj.hxx>
#include <rtl/ustrbuf.hxx>
#include <rtl/logfile.hxx>
//_________________________________________________________________________________________________________________
// Defines
//_________________________________________________________________________________________________________________
//
using namespace com::sun::star::uno;
using namespace com::sun::star::lang;
using namespace com::sun::star::frame;
using namespace com::sun::star::beans;
using namespace com::sun::star::util;
using namespace ::com::sun::star::ui;
namespace framework
{
//*****************************************************************************************************************
// XInterface, XTypeProvider, XServiceInfo
//*****************************************************************************************************************
DEFINE_XSERVICEINFO_ONEINSTANCESERVICE ( MenuBarFactory ,
::cppu::OWeakObject ,
SERVICENAME_MENUBARFACTORY ,
IMPLEMENTATIONNAME_MENUBARFACTORY
)
DEFINE_INIT_SERVICE ( MenuBarFactory, {} )
MenuBarFactory::MenuBarFactory( const ::com::sun::star::uno::Reference< ::com::sun::star::lang::XMultiServiceFactory >& xServiceManager ) :
ThreadHelpBase()
, m_xServiceManager( xServiceManager )
, m_xModuleManager( xServiceManager->createInstance( SERVICENAME_MODULEMANAGER ), UNO_QUERY )
{
}
MenuBarFactory::MenuBarFactory( const ::com::sun::star::uno::Reference< ::com::sun::star::lang::XMultiServiceFactory >& xServiceManager,bool ) :
ThreadHelpBase(&Application::GetSolarMutex())
, m_xServiceManager( xServiceManager )
, m_xModuleManager( xServiceManager->createInstance( SERVICENAME_MODULEMANAGER ), UNO_QUERY )
{
}
MenuBarFactory::~MenuBarFactory()
{
}
// XUIElementFactory
Reference< XUIElement > SAL_CALL MenuBarFactory::createUIElement(
const ::rtl::OUString& ResourceURL,
const Sequence< PropertyValue >& Args )
throw ( ::com::sun::star::container::NoSuchElementException, ::com::sun::star::lang::IllegalArgumentException, ::com::sun::star::uno::RuntimeException )
{
// SAFE
ResetableGuard aLock( m_aLock );
MenuBarWrapper* pMenuBarWrapper = new MenuBarWrapper( m_xServiceManager );
Reference< ::com::sun::star::ui::XUIElement > xMenuBar( (OWeakObject *)pMenuBarWrapper, UNO_QUERY );
Reference< ::com::sun::star::frame::XModuleManager > xModuleManager = m_xModuleManager;
aLock.unlock();
CreateUIElement(ResourceURL,Args,"MenuOnly","private:resource/menubar/",xMenuBar,xModuleManager,m_xServiceManager);
return xMenuBar;
}
void MenuBarFactory::CreateUIElement(const ::rtl::OUString& ResourceURL
, const Sequence< PropertyValue >& Args
,const char* _pExtraMode
,const char* _pAsciiName
,const Reference< ::com::sun::star::ui::XUIElement >& _xMenuBar
,const ::com::sun::star::uno::Reference< ::com::sun::star::frame::XModuleManager >& _xModuleManager
,const ::com::sun::star::uno::Reference< ::com::sun::star::lang::XMultiServiceFactory >& _xServiceManager)
{
Reference< XUIConfigurationManager > xCfgMgr;
Reference< XUIConfigurationManager > xConfigSource;
Reference< XFrame > xFrame;
rtl::OUString aResourceURL( ResourceURL );
sal_Bool bPersistent( sal_True );
sal_Bool bExtraMode( sal_False );
for ( sal_Int32 n = 0; n < Args.getLength(); n++ )
{
if ( Args[n].Name.equalsAscii( "ConfigurationSource" ))
Args[n].Value >>= xConfigSource;
else if ( Args[n].Name.equalsAscii( "Frame" ))
Args[n].Value >>= xFrame;
else if ( Args[n].Name.equalsAscii( "ResourceURL" ))
Args[n].Value >>= aResourceURL;
else if ( Args[n].Name.equalsAscii( "Persistent" ))
Args[n].Value >>= bPersistent;
else if ( _pExtraMode && Args[n].Name.equalsAscii( _pExtraMode ))
Args[n].Value >>= bExtraMode;
} // for ( sal_Int32 n = 0; n < Args.getLength(); n++ )
if ( aResourceURL.indexOf( rtl::OUString::createFromAscii(_pAsciiName)) != 0 )
throw IllegalArgumentException();
// Identify frame and determine document based ui configuration manager/module ui configuration manager
if ( xFrame.is() && !xConfigSource.is() )
{
bool bHasSettings( false );
Reference< XModel > xModel;
Reference< XController > xController = xFrame->getController();
if ( xController.is() )
xModel = xController->getModel();
if ( xModel.is() )
{
Reference< XUIConfigurationManagerSupplier > xUIConfigurationManagerSupplier( xModel, UNO_QUERY );
if ( xUIConfigurationManagerSupplier.is() )
{
xCfgMgr = xUIConfigurationManagerSupplier->getUIConfigurationManager();
bHasSettings = xCfgMgr->hasSettings( aResourceURL );
}
}
if ( !bHasSettings )
{
rtl::OUString aModuleIdentifier = _xModuleManager->identify( Reference< XInterface >( xFrame, UNO_QUERY ));
if ( aModuleIdentifier.getLength() )
{
Reference< ::com::sun::star::ui::XModuleUIConfigurationManagerSupplier > xModuleCfgSupplier(
_xServiceManager->createInstance( SERVICENAME_MODULEUICONFIGURATIONMANAGERSUPPLIER ), UNO_QUERY );
xCfgMgr = xModuleCfgSupplier->getUIConfigurationManager( aModuleIdentifier );
bHasSettings = xCfgMgr->hasSettings( aResourceURL );
}
}
}
PropertyValue aPropValue;
Sequence< Any > aPropSeq( _pExtraMode ? 5 : 4);
aPropValue.Name = rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "Frame" ));
aPropValue.Value <<= xFrame;
aPropSeq[0] <<= aPropValue;
aPropValue.Name = rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "ConfigurationSource" ));
aPropValue.Value <<= xCfgMgr;
aPropSeq[1] <<= aPropValue;
aPropValue.Name = rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "ResourceURL" ));
aPropValue.Value <<= aResourceURL;
aPropSeq[2] <<= aPropValue;
aPropValue.Name = rtl::OUString( RTL_CONSTASCII_USTRINGPARAM( "Persistent" ));
aPropValue.Value <<= bPersistent;
aPropSeq[3] <<= aPropValue;
if ( _pExtraMode )
{
aPropValue.Name = rtl::OUString::createFromAscii(_pExtraMode);
aPropValue.Value <<= bExtraMode;
aPropSeq[4] <<= aPropValue;
}
vos::OGuard aGuard( Application::GetSolarMutex() );
Reference< XInitialization > xInit( _xMenuBar, UNO_QUERY );
xInit->initialize( aPropSeq );
}
} // namespace framework
| 3,548 |
775 | package com.lauzy.freedom.lbehavior.activity;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.BottomNavigationView;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.view.View;
import android.view.animation.BounceInterpolator;
import android.widget.FrameLayout;
import com.lauzy.freedom.lbehavior.R;
import com.lauzy.freedom.lbehavior.fragment.Demo1Fragment;
import com.lauzy.freedom.lbehavior.fragment.Demo2Fragment;
import com.lauzy.freedom.lbehavior.fragment.Demo3Fragment;
import com.lauzy.freedom.lbehavior.fragment.Demo4Fragment;
import com.lauzy.freedom.lbehaviorlib.behavior.CommonBehavior;
public class Demo3ActivityWithFragment extends AppCompatActivity implements BottomNavigationView.OnNavigationItemSelectedListener {
private FrameLayout mFrameLayout;
private Demo1Fragment mDemo1Fragment;
private Demo2Fragment mDemo2Fragment;
private Demo3Fragment mDemo3Fragment;
private Demo4Fragment mDemo4Fragment;
private Toolbar mToolbar;
private CommonBehavior mToolBarBehavior;
private BottomNavigationView mBottomMainNavigation;
private CommonBehavior mBottomBehavior;
private FloatingActionButton mFloatingActionButton;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.layout_demo_fragment_activity);
mFrameLayout = findViewById(R.id.frame_main);
mFloatingActionButton = findViewById(R.id.fab_mode);
mToolbar = findViewById(R.id.toolbar_common);
mBottomMainNavigation = findViewById(R.id.bottom_main_navigation);
loadFragment(savedInstanceState);
mToolBarBehavior = CommonBehavior.from(mToolbar);
mBottomBehavior = CommonBehavior.from(mBottomMainNavigation);
MenuItem item = mBottomMainNavigation.getMenu().getItem(0);
        onNavigationItemSelected(item); // select the first item by default
mBottomMainNavigation.setOnNavigationItemSelectedListener(this);
mToolBarBehavior.setMinScrollY(50);
mToolBarBehavior.setScrollYDistance(100);
mToolBarBehavior.setDuration(1000);
mToolBarBehavior.setInterpolator(new BounceInterpolator());
mBottomBehavior.setMinScrollY(20);
mBottomBehavior.setScrollYDistance(100);
mBottomBehavior.setDuration(1000);
mBottomBehavior.setInterpolator(new BounceInterpolator());
CommonBehavior floatActionBehavior = CommonBehavior.from(mFloatingActionButton);
floatActionBehavior.setMinScrollY(20);
floatActionBehavior.setScrollYDistance(100);
floatActionBehavior.setDuration(1000);
floatActionBehavior.setInterpolator(new BounceInterpolator());
}
@Override
public boolean onNavigationItemSelected(MenuItem item) {
FragmentManager manager = getSupportFragmentManager();
FragmentTransaction transaction = manager.beginTransaction();
switch (item.getItemId()) {
case R.id.menu_main_item_beauty:
mToolbar.setVisibility(View.VISIBLE);
mToolBarBehavior.isEnableScroll(true);
transaction.show(mDemo1Fragment).hide(mDemo2Fragment)
.hide(mDemo3Fragment).hide(mDemo4Fragment);
break;
case R.id.menu_main_item_android:
mToolbar.setVisibility(View.VISIBLE);
mToolBarBehavior.isEnableScroll(false);
transaction.show(mDemo2Fragment).hide(mDemo3Fragment)
.hide(mDemo1Fragment).hide(mDemo4Fragment);
break;
case R.id.menu_main_item_category:
                mToolbar.setVisibility(View.GONE); // hide the toolbar to test the animation inside the fragment
mToolBarBehavior.isEnableScroll(false);
transaction.show(mDemo3Fragment).hide(mDemo2Fragment)
.hide(mDemo1Fragment).hide(mDemo4Fragment);
break;
case R.id.menu_main_item_mine:
mToolbar.setVisibility(View.VISIBLE);
transaction.show(mDemo4Fragment).hide(mDemo3Fragment)
.hide(mDemo1Fragment).hide(mDemo2Fragment);
break;
}
transaction.commit();
return true;
}
private void loadFragment(Bundle savedInstanceState) {
if (savedInstanceState == null) {
mDemo1Fragment = new Demo1Fragment();
mDemo2Fragment = new Demo2Fragment();
mDemo3Fragment = new Demo3Fragment();
mDemo4Fragment = new Demo4Fragment();
getSupportFragmentManager().beginTransaction()
.add(R.id.frame_main, mDemo1Fragment, mDemo1Fragment.getClass().getSimpleName())
.add(R.id.frame_main, mDemo2Fragment, mDemo2Fragment.getClass().getSimpleName())
.add(R.id.frame_main, mDemo3Fragment, mDemo3Fragment.getClass().getSimpleName())
.add(R.id.frame_main, mDemo4Fragment, mDemo4Fragment.getClass().getSimpleName())
.show(mDemo1Fragment)
.hide(mDemo2Fragment)
.hide(mDemo3Fragment)
.hide(mDemo4Fragment)
.commit();
} else {
mDemo1Fragment = (Demo1Fragment) getSupportFragmentManager()
.findFragmentByTag(Demo1Fragment.class.getSimpleName());
mDemo2Fragment = (Demo2Fragment) getSupportFragmentManager()
.findFragmentByTag(Demo2Fragment.class.getSimpleName());
mDemo3Fragment = (Demo3Fragment) getSupportFragmentManager()
.findFragmentByTag(Demo3Fragment.class.getSimpleName());
mDemo4Fragment = (Demo4Fragment) getSupportFragmentManager()
.findFragmentByTag(Demo4Fragment.class.getSimpleName());
getSupportFragmentManager().beginTransaction()
.show(mDemo1Fragment)
.hide(mDemo2Fragment)
.hide(mDemo3Fragment)
.hide(mDemo4Fragment)
.commit();
}
}
}
| 2,875 |
683 | <reponame>evoToBetter/opentelemetry-java-instrumentation<gh_stars>100-1000
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.vaadin;
import static io.opentelemetry.javaagent.instrumentation.vaadin.VaadinTracer.tracer;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.takesArgument;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation;
import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer;
import net.bytebuddy.asm.Advice;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
// add spans around calls to methods with @ClientCallable annotation
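// Illustrative only (hypothetical component method, not part of this instrumentation): a handler such as
//   @ClientCallable void refresh(String filter) { ... }
// declared on a Vaadin Component would be covered by the advice below, which starts a span before
// PublishedServerEventHandlerRpcHandler.invokeMethod dispatches to it and ends the span (recording any
// thrown Throwable) afterwards.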
public class ClientCallableRpcInstrumentation implements TypeInstrumentation {
@Override
public ElementMatcher<TypeDescription> typeMatcher() {
return named("com.vaadin.flow.server.communication.rpc.PublishedServerEventHandlerRpcHandler");
}
@Override
public void transform(TypeTransformer transformer) {
transformer.applyAdviceToMethod(
named("invokeMethod")
.and(takesArgument(0, named("com.vaadin.flow.component.Component")))
.and(takesArgument(1, named(Class.class.getName())))
.and(takesArgument(2, named(String.class.getName())))
.and(takesArgument(3, named("elemental.json.JsonArray")))
.and(takesArgument(4, named(int.class.getName()))),
this.getClass().getName() + "$InvokeMethodAdvice");
}
@SuppressWarnings("unused")
public static class InvokeMethodAdvice {
@Advice.OnMethodEnter(suppress = Throwable.class)
public static void onEnter(
@Advice.Argument(1) Class<?> componentClass,
@Advice.Argument(2) String methodName,
@Advice.Local("otelContext") Context context,
@Advice.Local("otelScope") Scope scope) {
context = tracer().startClientCallableSpan(componentClass, methodName);
scope = context.makeCurrent();
}
@Advice.OnMethodExit(onThrowable = Throwable.class, suppress = Throwable.class)
public static void onExit(
@Advice.Thrown Throwable throwable,
@Advice.Local("otelContext") Context context,
@Advice.Local("otelScope") Scope scope) {
scope.close();
tracer().endSpan(context, throwable);
}
}
}
| 899 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.team.ide;
import java.awt.event.ActionEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.io.IOException;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import javax.swing.Action;
import javax.swing.Icon;
import javax.swing.JFileChooser;
import org.netbeans.api.project.FileOwnerQuery;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectInformation;
import org.netbeans.api.project.ProjectManager;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.ui.OpenProjects;
import org.netbeans.modules.team.ide.spi.IDEProject;
import org.netbeans.modules.team.ide.spi.ProjectServices;
import org.netbeans.spi.project.ui.support.CommonProjectActions;
import org.netbeans.spi.project.ui.support.ProjectChooser;
import org.openide.explorer.ExplorerManager;
import org.openide.filesystems.FileChangeAdapter;
import org.openide.filesystems.FileEvent;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.loaders.DataObject;
import org.openide.nodes.Node;
import org.openide.util.Exceptions;
import org.openide.util.Lookup;
import org.openide.windows.TopComponent;
import org.openide.windows.WindowManager;
/**
*
* @author <NAME>
*/
@org.openide.util.lookup.ServiceProvider(service=org.netbeans.modules.team.ide.spi.ProjectServices.class)
public class ProjectServicesImpl implements ProjectServices {
/** Listener on OpenProjects, bridging to the registered IDEProject.OpenListener listeners. */
private static ProjectOpenListener projectOpenListener;
/** Registered listeners from the UI of team project sources. Notified when
* new projects get opened. */
private static List<Reference<IDEProject.OpenListener>> ideProjectOpenListeners;
@Override
public FileObject[] getOpenProjectsDirectories() {
Project[] openProjects = OpenProjects.getDefault().getOpenProjects();
if (openProjects.length == 0) {
return null;
}
FileObject[] directories = new FileObject[openProjects.length];
for (int i = 0; i < openProjects.length; i++) {
Project project = openProjects[i];
directories[i] = project.getProjectDirectory();
}
return directories;
}
@Override
public FileObject getMainProjectDirectory() {
Project p = OpenProjects.getDefault().getMainProject();
return p != null ? p.getProjectDirectory() : null;
}
@Override
public FileObject getFileOwnerDirectory(FileObject fileObject) {
Project project = FileOwnerQuery.getOwner(fileObject);
return project != null ? project.getProjectDirectory() : null;
}
@Override
public FileObject[] getCurrentSelection() {
Node[] nodes = TopComponent.getRegistry().getActivatedNodes();
if(nodes == null) {
return null;
}
List<FileObject> ret = new ArrayList<FileObject>();
for(Node node : nodes) {
Lookup nodeLookup = node.getLookup();
Collection<? extends Project> projects = nodeLookup.lookupAll(Project.class);
if(projects != null && !projects.isEmpty()) {
for (Project project : projects) {
ret.add(project.getProjectDirectory());
}
} else {
DataObject dataObj = nodeLookup.lookup(DataObject.class);
if (dataObj != null) {
FileObject fileObj = dataObj.getPrimaryFile();
if (fileObj != null) {
ret.add(fileObj);
}
}
}
}
return ret.toArray(new FileObject[ret.size()]);
}
public FileObject[] getProjectDirectories(Lookup lookup) {
Collection<? extends Project> projects = lookup.lookupAll(Project.class);
if(projects == null) {
return null;
}
List<FileObject> ret = new ArrayList<FileObject>();
for (Project project : projects) {
ret.add(project.getProjectDirectory());
}
return ret.toArray(new FileObject[ret.size()]);
}
@Override
public <T> T runAfterProjectOpenFinished(Callable<T> operation) throws Exception {
// wait until projects are opened
OpenProjects.getDefault().openProjects().get();
return operation.call();
}
@Override
public boolean openProject(URL url) {
Project p = getProject(url);
if (p == null) {
return false;
}
OpenProjects.getDefault().open(new Project[] { p }, false);
TopComponent projectsTC = WindowManager.getDefault().findTopComponent("projectTabLogical_tc"); // NOI18N
projectsTC.requestActive();
ExplorerManager em = ((ExplorerManager.Provider) projectsTC).getExplorerManager();
Node root = em.getRootContext();
Node projNode = null;
for (Node n : root.getChildren().getNodes()) {
Project prj = n.getLookup().lookup(Project.class);
if (prj != null && prj.getProjectDirectory().equals(p.getProjectDirectory())) {
projNode = n;
break;
}
}
if (projNode == null) { // fallback
projNode = root.getChildren().findChild(ProjectUtils.getInformation(p).getName());
}
if (projNode != null) {
try {
em.setSelectedNodes(new Node[] { projNode });
} catch (Exception ignore) { // may ignore it
}
}
return true;
}
@Override
public void openOtherProject(File workingDir) {
chooseAndOpenProjects(workingDir, true);
}
@Override
public File[] chooseProjects(File workingDir) {
return chooseAndOpenProjects(workingDir, false);
}
private File[] chooseAndOpenProjects(File workingDir, boolean open) {
if (workingDir != null) {
ProjectChooser.setProjectsFolder(workingDir);
}
JFileChooser chooser = ProjectChooser.projectChooser();
if (workingDir != null) {
chooser.setCurrentDirectory(workingDir);
}
chooser.setMultiSelectionEnabled(true);
File[] projectDirs;
int option = chooser.showOpenDialog(WindowManager.getDefault().getMainWindow());
if (option == JFileChooser.APPROVE_OPTION) {
if (chooser.isMultiSelectionEnabled()) {
projectDirs = chooser.getSelectedFiles();
} else {
projectDirs = new File[] { chooser.getSelectedFile() };
}
if (open) {
ArrayList<Project> projects = new ArrayList<Project>(projectDirs.length);
for (File d : projectDirs) {
try {
Project p = ProjectManager.getDefault().findProject(FileUtil.toFileObject(d));
if (p != null) {
projects.add(p);
}
} catch (IOException ex) {
Exceptions.printStackTrace(ex);
} catch (IllegalArgumentException ex) {
Exceptions.printStackTrace(ex);
}
}
if (!projects.isEmpty()) {
OpenProjects.getDefault().open(projects.toArray(new Project[projects.size()]), false);
}
WindowManager.getDefault().findTopComponent("projectTabLogical_tc").requestActive(); // NOI18N
}
} else {
projectDirs = new File[0];
}
return projectDirs;
}
@Override
public void reopenProjectsFromNewLocation(File[] oldLocations, File[] newLocations) {
List<Project> projectsToClose = new ArrayList<Project>();
List<Project> projectsToOpen = new ArrayList<Project>();
ProjectManager.getDefault().clearNonProjectCache();
for (int i=0; i < oldLocations.length; i++) {
Project prj = FileOwnerQuery.getOwner(FileUtil.toFileObject(oldLocations[i]));
if (prj != null) {
projectsToClose.add(prj);
}
}
for (int i=0; i < newLocations.length; i++) {
Project prj = FileOwnerQuery.getOwner(FileUtil.toFileObject(newLocations[i]));
if (prj != null) {
projectsToOpen.add(prj);
}
}
projectsToClose.remove(null);
projectsToOpen.remove(null);
OpenProjects.getDefault().close(projectsToClose.toArray(new Project[projectsToClose.size()]));
OpenProjects.getDefault().open(projectsToOpen.toArray(new Project[projectsToOpen.size()]), false);
}
@Override
public void createNewProject(File workingDir) {
Action newProjectAction = CommonProjectActions.newProjectAction();
if (newProjectAction != null) {
ProjectChooser.setProjectsFolder(workingDir);
newProjectAction.actionPerformed(new ActionEvent(this,
ActionEvent.ACTION_PERFORMED, "command")); // NOI18N
}
}
@Override
public IDEProject getIDEProject(URL url) {
Project p = getProject(url);
return p != null ? createIDEProject(p) : null;
}
@Override
public IDEProject[] getOpenProjects() {
Project[] openProjects = OpenProjects.getDefault().getOpenProjects();
IDEProject[] ideProjects = new IDEProject[openProjects.length];
for (int i=0; i < openProjects.length; i++) {
ideProjects[i] = createIDEProject(openProjects[i]);
}
return ideProjects;
}
private static Project getProject(URL url) {
try {
return FileOwnerQuery.getOwner(url.toURI());
} catch (URISyntaxException ex) {
Exceptions.printStackTrace(ex);
return null;
}
}
private static IDEProject createIDEProject(Project p) {
ProjectInformation pi = ProjectUtils.getInformation(p);
return new NbProject(pi.getDisplayName(), pi.getIcon(), p.getProjectDirectory().toURL());
}
@Override
public synchronized void addProjectOpenListener(IDEProject.OpenListener listener) {
if (ideProjectOpenListeners == null) {
ideProjectOpenListeners = new LinkedList<Reference<IDEProject.OpenListener>>();
} else {
Iterator<Reference<IDEProject.OpenListener>> it = ideProjectOpenListeners.iterator();
while (it.hasNext()) {
Reference<IDEProject.OpenListener> r = it.next();
IDEProject.OpenListener l = r.get();
if (l == null || l == listener) {
it.remove(); // also doing cleanup of GC'ed references
}
}
}
ideProjectOpenListeners.add(new WeakReference<IDEProject.OpenListener>(listener));
if (projectOpenListener == null) {
projectOpenListener = new ProjectOpenListener();
OpenProjects.getDefault().addPropertyChangeListener(projectOpenListener);
}
}
@Override
public synchronized void removeProjectOpenListener(IDEProject.OpenListener listener) {
if (ideProjectOpenListeners != null) {
Iterator<Reference<IDEProject.OpenListener>> it = ideProjectOpenListeners.iterator();
while (it.hasNext()) {
Reference<IDEProject.OpenListener> r = it.next();
IDEProject.OpenListener l = r.get();
if (l == null || l == listener) {
it.remove(); // also doing cleanup of GC'ed references
}
}
if (ideProjectOpenListeners.isEmpty()) {
ideProjectOpenListeners = null;
if (projectOpenListener != null) {
OpenProjects.getDefault().removePropertyChangeListener(projectOpenListener);
projectOpenListener = null;
}
}
}
}
private static synchronized IDEProject.OpenListener[] getIDEProjectOpenListeners() {
if (ideProjectOpenListeners == null) {
return null;
}
List<IDEProject.OpenListener> listenerList = new ArrayList<IDEProject.OpenListener>(ideProjectOpenListeners.size());
Iterator<Reference<IDEProject.OpenListener>> it = ideProjectOpenListeners.iterator();
while (it.hasNext()) {
Reference<IDEProject.OpenListener> r = it.next();
IDEProject.OpenListener l = r.get();
if (l == null) {
it.remove(); // also doing cleanup of GC'ed references
} else {
listenerList.add(l);
}
}
if (ideProjectOpenListeners.isEmpty()) {
ideProjectOpenListeners = null;
if (projectOpenListener != null) {
OpenProjects.getDefault().removePropertyChangeListener(projectOpenListener);
projectOpenListener = null;
}
return null;
}
return listenerList.toArray(new IDEProject.OpenListener[listenerList.size()]);
}
private static class NbProject extends IDEProject {
private ProjectDeleteListener projectDeleteListener;
NbProject(String displayName, Icon icon, URL url) {
super(displayName, icon, url);
}
@Override
public synchronized boolean addDeleteListener(DeleteListener l) {
boolean added = super.addDeleteListener(l);
if (added && projectDeleteListener == null) {
Project p = getProject(getURL());
if (p != null) {
FileObject projDir = p.getProjectDirectory();
projectDeleteListener = new ProjectDeleteListener(projDir.toURL(), this);
projDir.addFileChangeListener(projectDeleteListener);
} else {
super.removeDeleteListener(l);
added = false;
}
}
return added;
}
@Override
public synchronized boolean removeDeleteListener(DeleteListener l) {
boolean removed = super.removeDeleteListener(l);
if (removed && getDeleteListeners().isEmpty() && projectDeleteListener != null) {
Project p = getProject(getURL());
if (p != null) {
FileObject projDir = p.getProjectDirectory();
projDir.removeFileChangeListener(projectDeleteListener);
projectDeleteListener = null;
}
}
return removed;
}
}
private static class ProjectOpenListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (!OpenProjects.PROPERTY_OPEN_PROJECTS.equals(evt.getPropertyName())
|| evt.getNewValue() == null) {
return;
}
IDEProject.OpenListener[] listeners = getIDEProjectOpenListeners();
if (listeners == null) {
return;
}
Project[] newProjects = (Project[])evt.getNewValue();
Project[] oldProjects = (Project[])evt.getOldValue();
List<Project> openedList;
if (oldProjects == null) {
openedList = Arrays.asList(newProjects);
} else {
openedList = new ArrayList<Project>();
openedList.addAll(Arrays.asList(newProjects));
openedList.removeAll(Arrays.asList(oldProjects));
}
if (!openedList.isEmpty()) {
IDEProject[] newlyOpened = new IDEProject[openedList.size()];
for (int i=0; i < newlyOpened.length; i++) {
newlyOpened[i] = createIDEProject(openedList.get(i));
}
for (IDEProject.OpenListener l : listeners) {
l.projectsOpened(newlyOpened);
}
}
}
}
private static class ProjectDeleteListener extends FileChangeAdapter {
private URL url;
private Reference<NbProject> projectRef;
ProjectDeleteListener(URL url, NbProject ideProject) {
this.url = url;
this.projectRef = new WeakReference<NbProject>(ideProject);
}
@Override
public void fileDeleted(FileEvent fe) {
if (fe.getFile().toURL().equals(url)) {
NbProject project = projectRef.get();
if (project != null) {
project.notifyDeleted();
} else {
fe.getFile().removeFileChangeListener(this);
}
}
}
}
}
| 7,733 |
17,037 | <filename>diagrams/elastic/observability.py
# This module is automatically generated by autogen.sh. DO NOT EDIT.
from . import _Elastic
class _Observability(_Elastic):
_type = "observability"
_icon_dir = "resources/elastic/observability"
class APM(_Observability):
_icon = "apm.png"
class Logs(_Observability):
_icon = "logs.png"
class Metrics(_Observability):
_icon = "metrics.png"
class Observability(_Observability):
_icon = "observability.png"
class Uptime(_Observability):
_icon = "uptime.png"
# Aliases
| 205 |
5,964 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file has been auto-generated by code_generator_v8.py. DO NOT MODIFY!
#ifndef ServicePortMatchOptions_h
#define ServicePortMatchOptions_h
#include "modules/ModulesExport.h"
#include "platform/heap/Handle.h"
#include "wtf/text/WTFString.h"
namespace blink {
class MODULES_EXPORT ServicePortMatchOptions {
ALLOW_ONLY_INLINE_ALLOCATION();
public:
ServicePortMatchOptions();
bool hasName() const { return !m_name.isNull(); }
String name() const { return m_name; }
void setName(String value) { m_name = value; }
bool hasTargetURL() const { return !m_targetURL.isNull(); }
String targetURL() const { return m_targetURL; }
void setTargetURL(String value) { m_targetURL = value; }
DECLARE_VIRTUAL_TRACE();
private:
String m_name;
String m_targetURL;
friend class V8ServicePortMatchOptions;
};
} // namespace blink
#endif // ServicePortMatchOptions_h
| 407 |
1,269 | <filename>styled-xml-parser/src/main/java/com/itextpdf/styledxmlparser/jsoup/parser/TokeniserState.java
/*
This file is part of the iText (R) project.
Copyright (c) 1998-2021 iText Group NV
Authors: iText Software.
This program is offered under a commercial and under the AGPL license.
For commercial licensing, contact us at https://itextpdf.com/sales. For AGPL licensing, see below.
AGPL licensing:
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package com.itextpdf.styledxmlparser.jsoup.parser;
import com.itextpdf.styledxmlparser.jsoup.nodes.DocumentType;
/**
* States and transition activations for the Tokeniser.
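 *
 * Illustrative walk-through (not from the original source): tokenising {@code <a href="x">} moves through
 * Data -> TagOpen -> TagName -> BeforeAttributeName -> AttributeName -> BeforeAttributeValue ->
 * AttributeValue_doubleQuoted -> AfterAttributeValue_quoted and back to Data, emitting a single start-tag token.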
*/
abstract class TokeniserState {
static TokeniserState Data = new TokeniserState() {
@Override
public String toString() {
return "Data";
}
// in data state, gather characters until a character reference or tag is found
void read(Tokeniser t, CharacterReader r) {
switch (r.current()) {
case '&':
t.advanceTransition(CharacterReferenceInData);
break;
case '<':
t.advanceTransition(TagOpen);
break;
case nullChar:
t.error(this); // NOT replacement character (oddly?)
t.emit(r.consume());
break;
case eof:
t.emit(new Token.EOF());
break;
default:
String data = r.consumeData();
t.emit(data);
break;
}
}
};
static TokeniserState CharacterReferenceInData = new TokeniserState() {
@Override
public String toString() {
return "CharacterReferenceInData";
}
// from & in data
void read(Tokeniser t, CharacterReader r) {
readCharRef(t, Data);
}
};
static TokeniserState Rcdata = new TokeniserState() {
@Override
public String toString() {
return "Rcdata";
}
/// handles data in title, textarea etc
void read(Tokeniser t, CharacterReader r) {
switch (r.current()) {
case '&':
t.advanceTransition(CharacterReferenceInRcdata);
break;
case '<':
t.advanceTransition(RcdataLessthanSign);
break;
case nullChar:
t.error(this);
r.advance();
t.emit(replacementChar);
break;
case eof:
t.emit(new Token.EOF());
break;
default:
String data = r.consumeData();
t.emit(data);
break;
}
}
};
static TokeniserState CharacterReferenceInRcdata = new TokeniserState() {
@Override
public String toString() {
return "CharacterReferenceInRcdata";
}
void read(Tokeniser t, CharacterReader r) {
readCharRef(t, Rcdata);
}
};
static TokeniserState Rawtext = new TokeniserState() {
@Override
public String toString() {
return "Rawtext";
}
void read(Tokeniser t, CharacterReader r) {
readRawData(t, r, this, RawtextLessthanSign);
}
};
static TokeniserState ScriptData = new TokeniserState() {
@Override
public String toString() {
return "ScriptData";
}
void read(Tokeniser t, CharacterReader r) {
readRawData(t, r, this, ScriptDataLessthanSign);
}
};
static TokeniserState PLAINTEXT = new TokeniserState() {
@Override
public String toString() {
return "PLAINTEXT";
}
void read(Tokeniser t, CharacterReader r) {
switch (r.current()) {
case nullChar:
t.error(this);
r.advance();
t.emit(replacementChar);
break;
case eof:
t.emit(new Token.EOF());
break;
default:
String data = r.consumeTo(nullChar);
t.emit(data);
break;
}
}
};
static TokeniserState TagOpen = new TokeniserState() {
@Override
public String toString() {
return "TagOpen";
}
// from < in data
void read(Tokeniser t, CharacterReader r) {
switch (r.current()) {
case '!':
t.advanceTransition(MarkupDeclarationOpen);
break;
case '/':
t.advanceTransition(EndTagOpen);
break;
case '?':
t.createBogusCommentPending();
t.advanceTransition(BogusComment);
break;
default:
if (r.matchesLetter()) {
t.createTagPending(true);
t.transition(TagName);
} else {
t.error(this);
t.emit('<'); // char that got us here
t.transition(Data);
}
break;
}
}
};
static TokeniserState EndTagOpen = new TokeniserState() {
@Override
public String toString() {
return "EndTagOpen";
}
void read(Tokeniser t, CharacterReader r) {
if (r.isEmpty()) {
t.eofError(this);
t.emit("</");
t.transition(Data);
} else if (r.matchesLetter()) {
t.createTagPending(false);
t.transition(TagName);
} else if (r.matches('>')) {
t.error(this);
t.advanceTransition(Data);
} else {
t.error(this);
t.createBogusCommentPending();
t.advanceTransition(BogusComment);
}
}
};
static TokeniserState TagName = new TokeniserState() {
@Override
public String toString() {
return "TagName";
}
// from < or </ in data, will have start or end tag pending
void read(Tokeniser t, CharacterReader r) {
// previous TagOpen state did NOT consume, will have a letter char in current
//String tagName = r.consumeToAnySorted(tagCharsSorted).toLowerCase();
String tagName = r.consumeTagName();
t.tagPending.appendTagName(tagName);
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeAttributeName);
break;
case '/':
t.transition(SelfClosingStartTag);
break;
case '<': // NOTE: out of spec, but clear author intent
r.unconsume();
t.error(this);
// intended fall through to next >
case '>':
t.emitTagPending();
t.transition(Data);
break;
case nullChar: // replacement
t.tagPending.appendTagName(replacementStr);
break;
case eof: // should emit pending tag?
t.eofError(this);
t.transition(Data);
break;
default: // buffer underrun
t.tagPending.appendTagName(c);
}
}
};
static TokeniserState RcdataLessthanSign = new TokeniserState() {
@Override
public String toString() {
return "RcdataLessthanSign";
}
// from < in rcdata
void read(Tokeniser t, CharacterReader r) {
if (r.matches('/')) {
t.createTempBuffer();
t.advanceTransition(RCDATAEndTagOpen);
} else if (r.matchesLetter() && t.appropriateEndTagName() != null && !r.containsIgnoreCase("</" + t.appropriateEndTagName())) {
// diverge from spec: got a start tag, but there's no appropriate end tag (</title>), so rather than
// consuming to EOF; break out here
t.tagPending = t.createTagPending(false).name(t.appropriateEndTagName());
t.emitTagPending();
t.transition(TagOpen); // straight into TagOpen, as we came from < and looks like we're on a start tag
} else {
t.emit("<");
t.transition(Rcdata);
}
}
};
static TokeniserState RCDATAEndTagOpen = new TokeniserState() {
@Override
public String toString() {
return "RCDATAEndTagOpen";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchesLetter()) {
t.createTagPending(false);
t.tagPending.appendTagName(r.current());
t.dataBuffer.append(r.current());
t.advanceTransition(RCDATAEndTagName);
} else {
t.emit("</");
t.transition(Rcdata);
}
}
};
static TokeniserState RCDATAEndTagName = new TokeniserState() {
@Override
public String toString() {
return "RCDATAEndTagName";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchesLetter()) {
String name = r.consumeLetterSequence();
t.tagPending.appendTagName(name);
t.dataBuffer.append(name);
return;
}
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
if (t.isAppropriateEndTagToken())
t.transition(BeforeAttributeName);
else
anythingElse(t, r);
break;
case '/':
if (t.isAppropriateEndTagToken())
t.transition(SelfClosingStartTag);
else
anythingElse(t, r);
break;
case '>':
if (t.isAppropriateEndTagToken()) {
t.emitTagPending();
t.transition(Data);
}
else
anythingElse(t, r);
break;
default:
anythingElse(t, r);
}
}
private void anythingElse(Tokeniser t, CharacterReader r) {
t.emit("</");
t.emit(t.dataBuffer);
r.unconsume();
t.transition(Rcdata);
}
};
static TokeniserState RawtextLessthanSign = new TokeniserState() {
@Override
public String toString() {
return "RawtextLessthanSign";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matches('/')) {
t.createTempBuffer();
t.advanceTransition(RawtextEndTagOpen);
} else {
t.emit('<');
t.transition(Rawtext);
}
}
};
static TokeniserState RawtextEndTagOpen = new TokeniserState() {
@Override
public String toString() {
return "RawtextEndTagOpen";
}
void read(Tokeniser t, CharacterReader r) {
readEndTag(t, r, RawtextEndTagName, Rawtext);
}
};
static TokeniserState RawtextEndTagName = new TokeniserState() {
@Override
public String toString() {
return "RawtextEndTagName";
}
void read(Tokeniser t, CharacterReader r) {
handleDataEndTag(t, r, Rawtext);
}
};
static TokeniserState ScriptDataLessthanSign = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataLessthanSign";
}
void read(Tokeniser t, CharacterReader r) {
switch (r.consume()) {
case '/':
t.createTempBuffer();
t.transition(ScriptDataEndTagOpen);
break;
case '!':
t.emit("<!");
t.transition(ScriptDataEscapeStart);
break;
case eof:
t.emit("<");
t.eofError(this);
t.transition(Data);
break;
default:
t.emit("<");
r.unconsume();
t.transition(ScriptData);
}
}
};
static TokeniserState ScriptDataEndTagOpen = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEndTagOpen";
}
void read(Tokeniser t, CharacterReader r) {
readEndTag(t, r, ScriptDataEndTagName, ScriptData);
}
};
static TokeniserState ScriptDataEndTagName = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEndTagName";
}
void read(Tokeniser t, CharacterReader r) {
handleDataEndTag(t, r, ScriptData);
}
};
static TokeniserState ScriptDataEscapeStart = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapeStart";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matches('-')) {
t.emit('-');
t.advanceTransition(ScriptDataEscapeStartDash);
} else {
t.transition(ScriptData);
}
}
};
static TokeniserState ScriptDataEscapeStartDash = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapeStartDash";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matches('-')) {
t.emit('-');
t.advanceTransition(ScriptDataEscapedDashDash);
} else {
t.transition(ScriptData);
}
}
};
static TokeniserState ScriptDataEscaped = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscaped";
}
void read(Tokeniser t, CharacterReader r) {
if (r.isEmpty()) {
t.eofError(this);
t.transition(Data);
return;
}
switch (r.current()) {
case '-':
t.emit('-');
t.advanceTransition(ScriptDataEscapedDash);
break;
case '<':
t.advanceTransition(ScriptDataEscapedLessthanSign);
break;
case nullChar:
t.error(this);
r.advance();
t.emit(replacementChar);
break;
default:
String data = r.consumeToAny('-', '<', nullChar);
t.emit(data);
}
}
};
static TokeniserState ScriptDataEscapedDash = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapedDash";
}
void read(Tokeniser t, CharacterReader r) {
if (r.isEmpty()) {
t.eofError(this);
t.transition(Data);
return;
}
char c = r.consume();
switch (c) {
case '-':
t.emit(c);
t.transition(ScriptDataEscapedDashDash);
break;
case '<':
t.transition(ScriptDataEscapedLessthanSign);
break;
case nullChar:
t.error(this);
t.emit(replacementChar);
t.transition(ScriptDataEscaped);
break;
default:
t.emit(c);
t.transition(ScriptDataEscaped);
}
}
};
static TokeniserState ScriptDataEscapedDashDash = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapedDashDash";
}
void read(Tokeniser t, CharacterReader r) {
if (r.isEmpty()) {
t.eofError(this);
t.transition(Data);
return;
}
char c = r.consume();
switch (c) {
case '-':
t.emit(c);
break;
case '<':
t.transition(ScriptDataEscapedLessthanSign);
break;
case '>':
t.emit(c);
t.transition(ScriptData);
break;
case nullChar:
t.error(this);
t.emit(replacementChar);
t.transition(ScriptDataEscaped);
break;
default:
t.emit(c);
t.transition(ScriptDataEscaped);
}
}
};
static TokeniserState ScriptDataEscapedLessthanSign = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapedLessthanSign";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchesLetter()) {
t.createTempBuffer();
t.dataBuffer.append(r.current());
t.emit("<");
t.emit(r.current());
t.advanceTransition(ScriptDataDoubleEscapeStart);
} else if (r.matches('/')) {
t.createTempBuffer();
t.advanceTransition(ScriptDataEscapedEndTagOpen);
} else {
t.emit('<');
t.transition(ScriptDataEscaped);
}
}
};
static TokeniserState ScriptDataEscapedEndTagOpen = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapedEndTagOpen";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchesLetter()) {
t.createTagPending(false);
t.tagPending.appendTagName(r.current());
t.dataBuffer.append(r.current());
t.advanceTransition(ScriptDataEscapedEndTagName);
} else {
t.emit("</");
t.transition(ScriptDataEscaped);
}
}
};
static TokeniserState ScriptDataEscapedEndTagName = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataEscapedEndTagName";
}
void read(Tokeniser t, CharacterReader r) {
handleDataEndTag(t, r, ScriptDataEscaped);
}
};
static TokeniserState ScriptDataDoubleEscapeStart = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataDoubleEscapeStart";
}
void read(Tokeniser t, CharacterReader r) {
handleDataDoubleEscapeTag(t, r, ScriptDataDoubleEscaped, ScriptDataEscaped);
}
};
static TokeniserState ScriptDataDoubleEscaped = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataDoubleEscaped";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.current();
switch (c) {
case '-':
t.emit(c);
t.advanceTransition(ScriptDataDoubleEscapedDash);
break;
case '<':
t.emit(c);
t.advanceTransition(ScriptDataDoubleEscapedLessthanSign);
break;
case nullChar:
t.error(this);
r.advance();
t.emit(replacementChar);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default:
String data = r.consumeToAny('-', '<', nullChar);
t.emit(data);
}
}
};
static TokeniserState ScriptDataDoubleEscapedDash = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataDoubleEscapedDash";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '-':
t.emit(c);
t.transition(ScriptDataDoubleEscapedDashDash);
break;
case '<':
t.emit(c);
t.transition(ScriptDataDoubleEscapedLessthanSign);
break;
case nullChar:
t.error(this);
t.emit(replacementChar);
t.transition(ScriptDataDoubleEscaped);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default:
t.emit(c);
t.transition(ScriptDataDoubleEscaped);
}
}
};
static TokeniserState ScriptDataDoubleEscapedDashDash = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataDoubleEscapedDashDash";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '-':
t.emit(c);
break;
case '<':
t.emit(c);
t.transition(ScriptDataDoubleEscapedLessthanSign);
break;
case '>':
t.emit(c);
t.transition(ScriptData);
break;
case nullChar:
t.error(this);
t.emit(replacementChar);
t.transition(ScriptDataDoubleEscaped);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default:
t.emit(c);
t.transition(ScriptDataDoubleEscaped);
}
}
};
static TokeniserState ScriptDataDoubleEscapedLessthanSign = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataDoubleEscapedLessthanSign";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matches('/')) {
t.emit('/');
t.createTempBuffer();
t.advanceTransition(ScriptDataDoubleEscapeEnd);
} else {
t.transition(ScriptDataDoubleEscaped);
}
}
};
static TokeniserState ScriptDataDoubleEscapeEnd = new TokeniserState() {
@Override
public String toString() {
return "ScriptDataDoubleEscapeEnd";
}
void read(Tokeniser t, CharacterReader r) {
handleDataDoubleEscapeTag(t,r, ScriptDataEscaped, ScriptDataDoubleEscaped);
}
};
static TokeniserState BeforeAttributeName = new TokeniserState() {
@Override
public String toString() {
return "BeforeAttributeName";
}
// from tagname <xxx
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
break; // ignore whitespace
case '/':
t.transition(SelfClosingStartTag);
break;
case '<': // NOTE: out of spec, but clear (spec has this as a part of the attribute name)
r.unconsume();
t.error(this);
// intended fall through as if >
case '>':
t.emitTagPending();
t.transition(Data);
break;
case nullChar:
r.unconsume();
t.error(this);
t.tagPending.newAttribute();
t.transition(AttributeName);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
case '"':
case '\'':
case '=':
t.error(this);
t.tagPending.newAttribute();
t.tagPending.appendAttributeName(c);
t.transition(AttributeName);
break;
default: // A-Z, anything else
t.tagPending.newAttribute();
r.unconsume();
t.transition(AttributeName);
}
}
};
static TokeniserState AttributeName = new TokeniserState() {
@Override
public String toString() {
return "AttributeName";
}
// from before attribute name
void read(Tokeniser t, CharacterReader r) {
String name = r.consumeToAnySorted(attributeNameCharsSorted);
t.tagPending.appendAttributeName(name);
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(AfterAttributeName);
break;
case '/':
t.transition(SelfClosingStartTag);
break;
case '=':
t.transition(BeforeAttributeValue);
break;
case '>':
t.emitTagPending();
t.transition(Data);
break;
case nullChar:
t.error(this);
t.tagPending.appendAttributeName(replacementChar);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
case '"':
case '\'':
case '<':
t.error(this);
t.tagPending.appendAttributeName(c);
break;
default: // buffer underrun
t.tagPending.appendAttributeName(c);
}
}
};
static TokeniserState AfterAttributeName = new TokeniserState() {
@Override
public String toString() {
return "AfterAttributeName";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
// ignore
break;
case '/':
t.transition(SelfClosingStartTag);
break;
case '=':
t.transition(BeforeAttributeValue);
break;
case '>':
t.emitTagPending();
t.transition(Data);
break;
case nullChar:
t.error(this);
t.tagPending.appendAttributeName(replacementChar);
t.transition(AttributeName);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
case '"':
case '\'':
case '<':
t.error(this);
t.tagPending.newAttribute();
t.tagPending.appendAttributeName(c);
t.transition(AttributeName);
break;
default: // A-Z, anything else
t.tagPending.newAttribute();
r.unconsume();
t.transition(AttributeName);
}
}
};
static TokeniserState BeforeAttributeValue = new TokeniserState() {
@Override
public String toString() {
return "BeforeAttributeValue";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
// ignore
break;
case '"':
t.transition(AttributeValue_doubleQuoted);
break;
case '&':
r.unconsume();
t.transition(AttributeValue_unquoted);
break;
case '\'':
t.transition(AttributeValue_singleQuoted);
break;
case nullChar:
t.error(this);
t.tagPending.appendAttributeValue(replacementChar);
t.transition(AttributeValue_unquoted);
break;
case eof:
t.eofError(this);
t.emitTagPending();
t.transition(Data);
break;
case '>':
t.error(this);
t.emitTagPending();
t.transition(Data);
break;
case '<':
case '=':
case '`':
t.error(this);
t.tagPending.appendAttributeValue(c);
t.transition(AttributeValue_unquoted);
break;
default:
r.unconsume();
t.transition(AttributeValue_unquoted);
}
}
};
static TokeniserState AttributeValue_doubleQuoted = new TokeniserState() {
@Override
public String toString() {
return "AttributeValue_doubleQuoted";
}
void read(Tokeniser t, CharacterReader r) {
String value = r.consumeAttributeQuoted(false);
if (value.length() > 0)
t.tagPending.appendAttributeValue(value);
else
t.tagPending.setEmptyAttributeValue();
char c = r.consume();
switch (c) {
case '"':
t.transition(AfterAttributeValue_quoted);
break;
case '&':
int[] ref = t.consumeCharacterReference('"', true);
if (ref != null)
t.tagPending.appendAttributeValue(ref);
else
t.tagPending.appendAttributeValue('&');
break;
case nullChar:
t.error(this);
t.tagPending.appendAttributeValue(replacementChar);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default: // hit end of buffer in first read, still in attribute
t.tagPending.appendAttributeValue(c);
}
}
};
static TokeniserState AttributeValue_singleQuoted = new TokeniserState() {
@Override
public String toString() {
return "AttributeValue_singleQuoted";
}
void read(Tokeniser t, CharacterReader r) {
String value = r.consumeAttributeQuoted(true);
if (value.length() > 0)
t.tagPending.appendAttributeValue(value);
else
t.tagPending.setEmptyAttributeValue();
char c = r.consume();
switch (c) {
case '\'':
t.transition(AfterAttributeValue_quoted);
break;
case '&':
int[] ref = t.consumeCharacterReference('\'', true);
if (ref != null)
t.tagPending.appendAttributeValue(ref);
else
t.tagPending.appendAttributeValue('&');
break;
case nullChar:
t.error(this);
t.tagPending.appendAttributeValue(replacementChar);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default: // hit end of buffer in first read, still in attribute
t.tagPending.appendAttributeValue(c);
}
}
};
static TokeniserState AttributeValue_unquoted = new TokeniserState() {
@Override
public String toString() {
return "AttributeValue_unquoted";
}
void read(Tokeniser t, CharacterReader r) {
String value = r.consumeToAnySorted(attributeValueUnquoted);
if (value.length() > 0)
t.tagPending.appendAttributeValue(value);
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeAttributeName);
break;
case '&':
int[] ref = t.consumeCharacterReference('>', true);
if (ref != null)
t.tagPending.appendAttributeValue(ref);
else
t.tagPending.appendAttributeValue('&');
break;
case '>':
t.emitTagPending();
t.transition(Data);
break;
case nullChar:
t.error(this);
t.tagPending.appendAttributeValue(replacementChar);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
case '"':
case '\'':
case '<':
case '=':
case '`':
t.error(this);
t.tagPending.appendAttributeValue(c);
break;
default: // hit end of buffer in first read, still in attribute
t.tagPending.appendAttributeValue(c);
}
}
};
// CharacterReferenceInAttributeValue state handled inline
static TokeniserState AfterAttributeValue_quoted = new TokeniserState() {
@Override
public String toString() {
return "AfterAttributeValue_quoted";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeAttributeName);
break;
case '/':
t.transition(SelfClosingStartTag);
break;
case '>':
t.emitTagPending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default:
r.unconsume();
t.error(this);
t.transition(BeforeAttributeName);
}
}
};
static TokeniserState SelfClosingStartTag = new TokeniserState() {
@Override
public String toString() {
return "SelfClosingStartTag";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '>':
t.tagPending.selfClosing = true;
t.emitTagPending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.transition(Data);
break;
default:
r.unconsume();
t.error(this);
t.transition(BeforeAttributeName);
}
}
};
static TokeniserState BogusComment = new TokeniserState() {
@Override
public String toString() {
return "BogusComment";
}
void read(Tokeniser t, CharacterReader r) {
            // rewind to capture the character that led us here
r.unconsume();
t.commentPending.append(r.consumeTo('>'));
char next = r.consume();
if (next == '>' || next == eof) {
t.emitCommentPending();
t.transition(Data);
}
}
};
static TokeniserState MarkupDeclarationOpen = new TokeniserState() {
@Override
public String toString() {
return "MarkupDeclarationOpen";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchConsume("--")) {
t.createCommentPending();
t.transition(CommentStart);
} else if (r.matchConsumeIgnoreCase("DOCTYPE")) {
t.transition(Doctype);
} else if (r.matchConsume("[CDATA[")) {
                // todo: should only treat this as CDATA when the current node is outside the HTML namespace; until namespace
                // is implemented properly, keep handling as cdata
//} else if (!t.currentNodeInHtmlNS() && r.matchConsume("[CDATA[")) {
t.createTempBuffer();
t.transition(CdataSection);
} else {
t.error(this);
t.createBogusCommentPending();
t.advanceTransition(BogusComment); // advance so this character gets in bogus comment data's rewind
}
}
};
static TokeniserState CommentStart = new TokeniserState() {
@Override
public String toString() {
return "CommentStart";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '-':
t.transition(CommentStartDash);
break;
case nullChar:
t.error(this);
t.commentPending.append(replacementChar);
t.transition(Comment);
break;
case '>':
t.error(this);
t.emitCommentPending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.emitCommentPending();
t.transition(Data);
break;
default:
r.unconsume();
t.transition(Comment);
}
}
};
static TokeniserState CommentStartDash = new TokeniserState() {
@Override
public String toString() {
return "CommentStartDash";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '-':
                    t.transition(CommentEnd);
break;
case nullChar:
t.error(this);
t.commentPending.append(replacementChar);
t.transition(Comment);
break;
case '>':
t.error(this);
t.emitCommentPending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.emitCommentPending();
t.transition(Data);
break;
default:
t.commentPending.append(c);
t.transition(Comment);
}
}
};
static TokeniserState Comment = new TokeniserState() {
@Override
public String toString() {
return "Comment";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.current();
switch (c) {
case '-':
t.advanceTransition(CommentEndDash);
break;
case nullChar:
t.error(this);
r.advance();
t.commentPending.append(replacementChar);
break;
case eof:
t.eofError(this);
t.emitCommentPending();
t.transition(Data);
break;
default:
t.commentPending.append(r.consumeToAny('-', nullChar));
}
}
};
static TokeniserState CommentEndDash = new TokeniserState() {
@Override
public String toString() {
return "CommentEndDash";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '-':
t.transition(CommentEnd);
break;
case nullChar:
t.error(this);
t.commentPending.append('-').append(replacementChar);
t.transition(Comment);
break;
case eof:
t.eofError(this);
t.emitCommentPending();
t.transition(Data);
break;
default:
t.commentPending.append('-').append(c);
t.transition(Comment);
}
}
};
static TokeniserState CommentEnd = new TokeniserState() {
@Override
public String toString() {
return "CommentEnd";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '>':
t.emitCommentPending();
t.transition(Data);
break;
case nullChar:
t.error(this);
t.commentPending.append("--").append(replacementChar);
t.transition(Comment);
break;
case '!':
t.error(this);
t.transition(CommentEndBang);
break;
case '-':
t.error(this);
t.commentPending.append('-');
break;
case eof:
t.eofError(this);
t.emitCommentPending();
t.transition(Data);
break;
default:
t.error(this);
t.commentPending.append("--").append(c);
t.transition(Comment);
}
}
};
static TokeniserState CommentEndBang = new TokeniserState() {
@Override
public String toString() {
return "CommentEndBang";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '-':
t.commentPending.append("--!");
t.transition(CommentEndDash);
break;
case '>':
t.emitCommentPending();
t.transition(Data);
break;
case nullChar:
t.error(this);
t.commentPending.append("--!").append(replacementChar);
t.transition(Comment);
break;
case eof:
t.eofError(this);
t.emitCommentPending();
t.transition(Data);
break;
default:
t.commentPending.append("--!").append(c);
t.transition(Comment);
}
}
};
static TokeniserState Doctype = new TokeniserState() {
@Override
public String toString() {
return "Doctype";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeDoctypeName);
break;
case eof:
t.eofError(this);
// note: fall through to > case
case '>': // catch invalid <!DOCTYPE>
t.error(this);
t.createDoctypePending();
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.transition(BeforeDoctypeName);
}
}
};
static TokeniserState BeforeDoctypeName = new TokeniserState() {
@Override
public String toString() {
return "BeforeDoctypeName";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchesLetter()) {
t.createDoctypePending();
t.transition(DoctypeName);
return;
}
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
break; // ignore whitespace
case nullChar:
t.error(this);
t.createDoctypePending();
t.doctypePending.name.append(replacementChar);
t.transition(DoctypeName);
break;
case eof:
t.eofError(this);
t.createDoctypePending();
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.createDoctypePending();
t.doctypePending.name.append(c);
t.transition(DoctypeName);
}
}
};
static TokeniserState DoctypeName = new TokeniserState() {
@Override
public String toString() {
return "DoctypeName";
}
void read(Tokeniser t, CharacterReader r) {
if (r.matchesLetter()) {
String name = r.consumeLetterSequence();
t.doctypePending.name.append(name);
return;
}
char c = r.consume();
switch (c) {
case '>':
t.emitDoctypePending();
t.transition(Data);
break;
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(AfterDoctypeName);
break;
case nullChar:
t.error(this);
t.doctypePending.name.append(replacementChar);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.doctypePending.name.append(c);
}
}
};
static TokeniserState AfterDoctypeName = new TokeniserState() {
@Override
public String toString() {
return "AfterDoctypeName";
}
void read(Tokeniser t, CharacterReader r) {
if (r.isEmpty()) {
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
return;
}
if (r.matchesAny('\t', '\n', '\r', '\f', ' '))
r.advance(); // ignore whitespace
else if (r.matches('>')) {
t.emitDoctypePending();
t.advanceTransition(Data);
} else if (r.matchConsumeIgnoreCase(DocumentType.PUBLIC_KEY)) {
t.doctypePending.pubSysKey = DocumentType.PUBLIC_KEY;
t.transition(AfterDoctypePublicKeyword);
} else if (r.matchConsumeIgnoreCase(DocumentType.SYSTEM_KEY)) {
t.doctypePending.pubSysKey = DocumentType.SYSTEM_KEY;
t.transition(AfterDoctypeSystemKeyword);
} else {
t.error(this);
t.doctypePending.forceQuirks = true;
t.advanceTransition(BogusDoctype);
}
}
};
static TokeniserState AfterDoctypePublicKeyword = new TokeniserState() {
@Override
public String toString() {
return "AfterDoctypePublicKeyword";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeDoctypePublicIdentifier);
break;
case '"':
t.error(this);
// set public id to empty string
t.transition(DoctypePublicIdentifier_doubleQuoted);
break;
case '\'':
t.error(this);
// set public id to empty string
t.transition(DoctypePublicIdentifier_singleQuoted);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.doctypePending.forceQuirks = true;
t.transition(BogusDoctype);
}
}
};
static TokeniserState BeforeDoctypePublicIdentifier = new TokeniserState() {
@Override
public String toString() {
return "BeforeDoctypePublicIdentifier";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
break;
case '"':
// set public id to empty string
t.transition(DoctypePublicIdentifier_doubleQuoted);
break;
case '\'':
// set public id to empty string
t.transition(DoctypePublicIdentifier_singleQuoted);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.doctypePending.forceQuirks = true;
t.transition(BogusDoctype);
}
}
};
static TokeniserState DoctypePublicIdentifier_doubleQuoted = new TokeniserState() {
@Override
public String toString() {
return "DoctypePublicIdentifier_doubleQuoted";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '"':
t.transition(AfterDoctypePublicIdentifier);
break;
case nullChar:
t.error(this);
t.doctypePending.publicIdentifier.append(replacementChar);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.doctypePending.publicIdentifier.append(c);
}
}
};
static TokeniserState DoctypePublicIdentifier_singleQuoted = new TokeniserState() {
@Override
public String toString() {
return "DoctypePublicIdentifier_singleQuoted";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\'':
t.transition(AfterDoctypePublicIdentifier);
break;
case nullChar:
t.error(this);
t.doctypePending.publicIdentifier.append(replacementChar);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.doctypePending.publicIdentifier.append(c);
}
}
};
static TokeniserState AfterDoctypePublicIdentifier = new TokeniserState() {
@Override
public String toString() {
return "AfterDoctypePublicIdentifier";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BetweenDoctypePublicAndSystemIdentifiers);
break;
case '>':
t.emitDoctypePending();
t.transition(Data);
break;
case '"':
t.error(this);
// system id empty
t.transition(DoctypeSystemIdentifier_doubleQuoted);
break;
case '\'':
t.error(this);
// system id empty
t.transition(DoctypeSystemIdentifier_singleQuoted);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.doctypePending.forceQuirks = true;
t.transition(BogusDoctype);
}
}
};
static TokeniserState BetweenDoctypePublicAndSystemIdentifiers = new TokeniserState() {
@Override
public String toString() {
return "BetweenDoctypePublicAndSystemIdentifiers";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
break;
case '>':
t.emitDoctypePending();
t.transition(Data);
break;
case '"':
t.error(this);
// system id empty
t.transition(DoctypeSystemIdentifier_doubleQuoted);
break;
case '\'':
t.error(this);
// system id empty
t.transition(DoctypeSystemIdentifier_singleQuoted);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.doctypePending.forceQuirks = true;
t.transition(BogusDoctype);
}
}
};
static TokeniserState AfterDoctypeSystemKeyword = new TokeniserState() {
@Override
public String toString() {
return "AfterDoctypeSystemKeyword";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeDoctypeSystemIdentifier);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case '"':
t.error(this);
// system id empty
t.transition(DoctypeSystemIdentifier_doubleQuoted);
break;
case '\'':
t.error(this);
// system id empty
t.transition(DoctypeSystemIdentifier_singleQuoted);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
}
}
};
static TokeniserState BeforeDoctypeSystemIdentifier = new TokeniserState() {
@Override
public String toString() {
return "BeforeDoctypeSystemIdentifier";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
break;
case '"':
// set system id to empty string
t.transition(DoctypeSystemIdentifier_doubleQuoted);
break;
case '\'':
                    // set system id to empty string
t.transition(DoctypeSystemIdentifier_singleQuoted);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.doctypePending.forceQuirks = true;
t.transition(BogusDoctype);
}
}
};
static TokeniserState DoctypeSystemIdentifier_doubleQuoted = new TokeniserState() {
@Override
public String toString() {
return "DoctypeSystemIdentifier_doubleQuoted";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '"':
t.transition(AfterDoctypeSystemIdentifier);
break;
case nullChar:
t.error(this);
t.doctypePending.systemIdentifier.append(replacementChar);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.doctypePending.systemIdentifier.append(c);
}
}
};
static TokeniserState DoctypeSystemIdentifier_singleQuoted = new TokeniserState() {
@Override
public String toString() {
return "DoctypeSystemIdentifier_singleQuoted";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\'':
t.transition(AfterDoctypeSystemIdentifier);
break;
case nullChar:
t.error(this);
t.doctypePending.systemIdentifier.append(replacementChar);
break;
case '>':
t.error(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.doctypePending.systemIdentifier.append(c);
}
}
};
static TokeniserState AfterDoctypeSystemIdentifier = new TokeniserState() {
@Override
public String toString() {
return "AfterDoctypeSystemIdentifier";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
break;
case '>':
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.eofError(this);
t.doctypePending.forceQuirks = true;
t.emitDoctypePending();
t.transition(Data);
break;
default:
t.error(this);
t.transition(BogusDoctype);
// NOT force quirks
}
}
};
static TokeniserState BogusDoctype = new TokeniserState() {
@Override
public String toString() {
return "BogusDoctype";
}
void read(Tokeniser t, CharacterReader r) {
char c = r.consume();
switch (c) {
case '>':
t.emitDoctypePending();
t.transition(Data);
break;
case eof:
t.emitDoctypePending();
t.transition(Data);
break;
default:
// ignore char
break;
}
}
};
static TokeniserState CdataSection = new TokeniserState() {
@Override
public String toString() {
return "CdataSection";
}
void read(Tokeniser t, CharacterReader r) {
String data = r.consumeTo("]]>");
t.dataBuffer.append(data);
if (r.matchConsume("]]>") || r.isEmpty()) {
t.emit(new Token.CData(t.dataBuffer.toString()));
t.transition(Data);
}// otherwise, buffer underrun, stay in data section
}
};
abstract void read(Tokeniser t, CharacterReader r);
static final char nullChar = '\u0000';
    // char searches. must be sorted, used in inSorted. MUST update TokeniserStateTest if more arrays are added.
static final char[] attributeNameCharsSorted = new char[]{nullChar, '\t', '\n', '\f', '\r', ' ', '"', '\'', '/', '<', '=', '>'};
static final char[] attributeValueUnquoted = new char[]{nullChar, '\t', '\n', '\f', '\r', ' ', '"', '&', '\'', '<', '=', '>', '`'};
private static final char replacementChar = Tokeniser.replacementChar;
private static final String replacementStr = String.valueOf(Tokeniser.replacementChar);
private static final char eof = CharacterReader.EOF;
/**
* Handles RawtextEndTagName, ScriptDataEndTagName, and ScriptDataEscapedEndTagName. Same body impl, just
* different else exit transitions.
*/
private static void handleDataEndTag(Tokeniser t, CharacterReader r, TokeniserState elseTransition) {
if (r.matchesLetter()) {
String name = r.consumeLetterSequence();
t.tagPending.appendTagName(name);
t.dataBuffer.append(name);
return;
}
boolean needsExitTransition = false;
if (t.isAppropriateEndTagToken() && !r.isEmpty()) {
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
t.transition(BeforeAttributeName);
break;
case '/':
t.transition(SelfClosingStartTag);
break;
case '>':
t.emitTagPending();
t.transition(Data);
break;
default:
t.dataBuffer.append(c);
needsExitTransition = true;
}
} else {
needsExitTransition = true;
}
if (needsExitTransition) {
t.emit("</");
t.emit(t.dataBuffer);
t.transition(elseTransition);
}
}
private static void readRawData(Tokeniser t, CharacterReader r, TokeniserState current, TokeniserState advance) {
switch (r.current()) {
case '<':
t.advanceTransition(advance);
break;
case nullChar:
t.error(current);
r.advance();
t.emit(replacementChar);
break;
case eof:
t.emit(new Token.EOF());
break;
default:
String data = r.consumeRawData();
t.emit(data);
break;
}
}
private static void readCharRef(Tokeniser t, TokeniserState advance) {
int[] c = t.consumeCharacterReference(null, false);
if (c == null)
t.emit('&');
else
t.emit(c);
t.transition(advance);
}
private static void readEndTag(Tokeniser t, CharacterReader r, TokeniserState a, TokeniserState b) {
if (r.matchesLetter()) {
t.createTagPending(false);
t.transition(a);
} else {
t.emit("</");
t.transition(b);
}
}
private static void handleDataDoubleEscapeTag(Tokeniser t, CharacterReader r, TokeniserState primary, TokeniserState fallback) {
if (r.matchesLetter()) {
String name = r.consumeLetterSequence();
t.dataBuffer.append(name);
t.emit(name);
return;
}
char c = r.consume();
switch (c) {
case '\t':
case '\n':
case '\r':
case '\f':
case ' ':
case '/':
case '>':
if (t.dataBuffer.toString().equals("script"))
t.transition(primary);
else
t.transition(fallback);
t.emit(c);
break;
default:
r.unconsume();
t.transition(fallback);
}
}
}
| 42,315 |
1,837 | <reponame>neoremind/protostuff
package io.protostuff;
import static org.junit.Assert.assertEquals;
import java.nio.ByteBuffer;
import java.util.List;
import org.junit.Test;
public class LinkBufferTest
{
@Test
public void testWriteLargeVar32() throws Exception
{
LinkBuffer b = new LinkBuffer(8);
b.writeVarInt32(Integer.MAX_VALUE);
assertEquals(1, b.getBuffers().size());
assertEquals(5, b.getBuffers().get(0).remaining());
}
@Test
public void testBasics() throws Exception
{
LinkBuffer buf = new LinkBuffer(8);
// put in 4 longs:
ByteBuffer bigBuf = ByteBuffer.allocate(100);
bigBuf.limit(100);
// each one of these writes gets its own byte buffer.
buf.writeByteBuffer(bigBuf); // 0
buf.writeByteArray(new byte[100]); // 1
buf.writeByteArray(new byte[2]); // 2
buf.writeByteArray(new byte[8]); // 3
buf.writeInt64(1);
buf.writeInt64(2);
buf.writeInt64(3);
buf.writeInt64(4);
List<ByteBuffer> lbb = buf.finish();
assertEquals(8, lbb.size());
assertEquals(100, lbb.get(0).remaining());
assertEquals(100, lbb.get(1).remaining());
assertEquals(2, lbb.get(2).remaining());
assertEquals(8, lbb.get(3).remaining());
for (int i = 3; i < lbb.size(); i++)
{
assertEquals(8, lbb.get(i).remaining());
}
}
@Test
public void testGetBuffers() throws Exception
{
LinkBuffer b = new LinkBuffer(8);
b.writeInt32(42);
b.writeInt32(43);
b.writeInt32(44);
List<ByteBuffer> buffers = b.getBuffers();
assertEquals(2, buffers.size());
assertEquals(8, buffers.get(0).remaining());
assertEquals(4, buffers.get(1).remaining());
assertEquals(42, buffers.get(0).getInt());
assertEquals(43, buffers.get(0).getInt());
assertEquals(44, buffers.get(1).getInt());
}
@Test
public void testGetBuffersAndAppendData() throws Exception
{
LinkBuffer b = new LinkBuffer(8);
b.writeInt32(42);
b.writeInt32(43);
b.writeInt32(44);
List<ByteBuffer> buffers = b.getBuffers();
b.writeInt32(45); // new data should not appear in buffers
assertEquals(2, buffers.size());
assertEquals(8, buffers.get(0).remaining());
assertEquals(4, buffers.get(1).remaining());
}
}
| 1,124 |
5,659 | from django.urls import re_path
from cms.test_utils.project.sampleapp import views
urlpatterns = [
re_path(r'^(?P<path>.+)$', views.parentapp_view, name='parentapp_view'),
]
| 70 |
335 | {
"word": "Warbird",
"definitions": [
"A vintage military aircraft."
],
"parts-of-speech": "Noun"
} | 57 |
2,996 | <filename>engine/src/main/java/org/terasology/engine/physics/bullet/shapes/BulletCollisionShape.java
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.physics.bullet.shapes;
import com.badlogic.gdx.physics.bullet.collision.btCollisionShape;
import org.terasology.joml.geom.AABBf;
import org.joml.Matrix4f;
import org.joml.Quaternionfc;
import org.joml.Vector3f;
import org.joml.Vector3fc;
import org.terasology.engine.physics.shapes.CollisionShape;
public abstract class BulletCollisionShape implements CollisionShape {
public btCollisionShape underlyingShape;
@Override
public AABBf getAABB(Vector3fc origin, Quaternionfc rotation, float scale) {
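        // Note: this computes the shape's axis-aligned bounds with an identity transform and then
        // translates them by origin; the rotation and scale parameters are not applied here.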
Vector3f min = new Vector3f();
Vector3f max = new Vector3f();
Matrix4f m = new Matrix4f();
underlyingShape.getAabb(m, min, max);
return new AABBf(min, max).translate(origin);
}
}
| 361 |
335 | <reponame>Safal08/Hacktoberfest-1
{
"word": "Normal",
"definitions": [
"Conforming to a standard; usual, typical, or expected.",
"(of a person) free from physical or mental disorders.",
"(of a line, ray, or other linear feature) intersecting a given line or surface at right angles.",
"(of a salt solution) containing the same salt concentration as the blood.",
"(of a solution) containing one gram-equivalent of solute per litre.",
"Denoting a fault or faulting in which a relative downward movement occurred in the strata situated on the upper side of the fault plane."
],
"parts-of-speech": "Adjective"
} | 219 |
31,928 | import os
import boto3
EDGE_PORT = 4566
def handler(event, context):
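    # Build an SQS client against the LocalStack edge endpoint, resolve the URL of the queue
    # named in the event, send the given message body, and return the new message id.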
protocol = "https" if os.environ.get("USE_SSL") else "http"
endpoint_url = "{}://{}:{}".format(protocol, os.environ["LOCALSTACK_HOSTNAME"], EDGE_PORT)
sqs = boto3.client(
"sqs", endpoint_url=endpoint_url, region_name=event["region_name"], verify=False
)
queue_url = sqs.get_queue_url(QueueName=event["queue_name"])["QueueUrl"]
rs = sqs.send_message(QueueUrl=queue_url, MessageBody=event["message"])
return rs["MessageId"]
| 213 |
3,486 | package com.thinkaurelius.titan.graphdb.database.idassigner;
import java.time.Duration;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.thinkaurelius.titan.core.TitanException;
import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.IDBlock;
import com.thinkaurelius.titan.diskstorage.IDAuthority;
import com.thinkaurelius.titan.diskstorage.util.time.Temporals;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author <NAME> (<EMAIL>)
*/
public class StandardIDPool implements IDPool {
private static final Logger log =
LoggerFactory.getLogger(StandardIDPool.class);
private static final IDBlock ID_POOL_EXHAUSTION = new IDBlock() {
@Override
public long numIds() {
throw new UnsupportedOperationException();
}
@Override
public long getId(long index) {
throw new UnsupportedOperationException();
}
};
private static final IDBlock UNINITIALIZED_BLOCK = new IDBlock() {
@Override
public long numIds() {
return 0;
}
@Override
public long getId(long index) {
throw new ArrayIndexOutOfBoundsException(0);
}
};
private static final int RENEW_ID_COUNT = 100;
private final IDAuthority idAuthority;
private final long idUpperBound; //exclusive
private final int partition;
private final int idNamespace;
private final Duration renewTimeout;
private final double renewBufferPercentage;
private IDBlock currentBlock;
private long currentIndex;
private long renewBlockIndex;
// private long nextID;
// private long currentMaxID;
// private long renewBufferID;
private volatile IDBlock nextBlock;
private Future<IDBlock> idBlockFuture;
private IDBlockGetter idBlockGetter;
private final ThreadPoolExecutor exec;
private volatile boolean closed;
private final Queue<Future<?>> closeBlockers;
public StandardIDPool(IDAuthority idAuthority, int partition, int idNamespace, long idUpperBound, Duration renewTimeout, double renewBufferPercentage) {
Preconditions.checkArgument(idUpperBound > 0);
this.idAuthority = idAuthority;
Preconditions.checkArgument(partition>=0);
this.partition = partition;
Preconditions.checkArgument(idNamespace>=0);
this.idNamespace = idNamespace;
this.idUpperBound = idUpperBound;
Preconditions.checkArgument(!renewTimeout.isZero(), "Renew-timeout must be positive");
this.renewTimeout = renewTimeout;
Preconditions.checkArgument(renewBufferPercentage>0.0 && renewBufferPercentage<=1.0,"Renew-buffer percentage must be in (0.0,1.0]");
this.renewBufferPercentage = renewBufferPercentage;
currentBlock = UNINITIALIZED_BLOCK;
currentIndex = 0;
renewBlockIndex = 0;
nextBlock = null;
// daemon=true would probably be fine too
exec = new ThreadPoolExecutor(1, 1, 0L, TimeUnit.MILLISECONDS,
new LinkedBlockingQueue<Runnable>(), new ThreadFactoryBuilder()
.setDaemon(false)
.setNameFormat("TitanID(" + partition + ")("+idNamespace+")[%d]")
.build());
//exec.allowCoreThreadTimeOut(false);
//exec.prestartCoreThread();
idBlockFuture = null;
closeBlockers = new ArrayDeque<>(4);
closed = false;
}
private synchronized void waitForIDBlockGetter() throws InterruptedException {
Stopwatch sw = Stopwatch.createStarted();
if (null != idBlockFuture) {
try {
nextBlock = idBlockFuture.get(renewTimeout.toMillis(), TimeUnit.MILLISECONDS);
} catch (ExecutionException e) {
String msg = String.format("ID block allocation on partition(%d)-namespace(%d) failed with an exception in %s",
partition, idNamespace, sw.stop());
throw new TitanException(msg, e);
} catch (TimeoutException e) {
String msg = String.format("ID block allocation on partition(%d)-namespace(%d) timed out in %s",
partition, idNamespace, sw.stop());
// Attempt to cancel the renewer
idBlockGetter.stopRequested();
if (idAuthority.supportsInterruption()) {
idBlockFuture.cancel(true);
} else {
// Attempt to clean one dead element out of closeBlockers every time we append to it
if (!closeBlockers.isEmpty()) {
Future<?> f = closeBlockers.peek();
if (null != f && f.isDone())
closeBlockers.remove();
}
closeBlockers.add(idBlockFuture);
}
throw new TitanException(msg, e);
} catch (CancellationException e) {
String msg = String.format("ID block allocation on partition(%d)-namespace(%d) was cancelled after %s",
partition, idNamespace, sw.stop());
throw new TitanException(msg, e);
} finally {
idBlockFuture = null;
}
// Allow InterruptedException to propagate up the stack
}
}
private synchronized void nextBlock() throws InterruptedException {
assert currentIndex == currentBlock.numIds();
Preconditions.checkState(!closed,"ID Pool has been closed for partition(%s)-namespace(%s) - cannot apply for new id block",
partition,idNamespace);
if (null == nextBlock && null == idBlockFuture) {
startIDBlockGetter();
}
if (null == nextBlock) {
waitForIDBlockGetter();
}
if (nextBlock == ID_POOL_EXHAUSTION)
throw new IDPoolExhaustedException("Exhausted ID Pool for partition(" + partition+")-namespace("+idNamespace+")");
currentBlock = nextBlock;
currentIndex = 0;
log.debug("ID partition({})-namespace({}) acquired block: [{}]", partition, idNamespace, currentBlock);
assert currentBlock.numIds()>0;
nextBlock = null;
assert RENEW_ID_COUNT>0;
renewBlockIndex = Math.max(0,currentBlock.numIds()-Math.max(RENEW_ID_COUNT, Math.round(currentBlock.numIds()*renewBufferPercentage)));
assert renewBlockIndex<currentBlock.numIds() && renewBlockIndex>=currentIndex;
}
@Override
public synchronized long nextID() {
assert currentIndex <= currentBlock.numIds();
if (currentIndex == currentBlock.numIds()) {
try {
nextBlock();
} catch (InterruptedException e) {
throw new TitanException("Could not renew id block due to interruption", e);
}
}
if (currentIndex == renewBlockIndex) {
startIDBlockGetter();
}
long returnId = currentBlock.getId(currentIndex);
currentIndex++;
if (returnId >= idUpperBound) throw new IDPoolExhaustedException("Reached id upper bound of " + idUpperBound);
log.trace("partition({})-namespace({}) Returned id: {}", partition, idNamespace, returnId);
return returnId;
}
@Override
public synchronized void close() {
closed=true;
try {
waitForIDBlockGetter();
} catch (InterruptedException e) {
throw new TitanException("Interrupted while waiting for id renewer thread to finish", e);
}
for (Future<?> closeBlocker : closeBlockers) {
try {
closeBlocker.get();
} catch (InterruptedException e) {
throw new TitanException("Interrupted while waiting for runaway ID renewer task " + closeBlocker, e);
} catch (ExecutionException e) {
log.debug("Runaway ID renewer task completed with exception", e);
}
}
exec.shutdownNow();
}
private synchronized void startIDBlockGetter() {
Preconditions.checkArgument(idBlockFuture == null, idBlockFuture);
if (closed) return; //Don't renew anymore if closed
//Renew buffer
log.debug("Starting id block renewal thread upon {}", currentIndex);
idBlockGetter = new IDBlockGetter(idAuthority, partition, idNamespace, renewTimeout);
idBlockFuture = exec.submit(idBlockGetter);
}
private static class IDBlockGetter implements Callable<IDBlock> {
private final Stopwatch alive;
private final IDAuthority idAuthority;
private final int partition;
private final int idNamespace;
private final Duration renewTimeout;
private volatile boolean stopRequested;
public IDBlockGetter(IDAuthority idAuthority, int partition, int idNamespace, Duration renewTimeout) {
this.idAuthority = idAuthority;
this.partition = partition;
this.idNamespace = idNamespace;
this.renewTimeout = renewTimeout;
this.alive = Stopwatch.createStarted();
}
private void stopRequested()
{
this.stopRequested = true;
}
@Override
public IDBlock call() {
Stopwatch running = Stopwatch.createStarted();
try {
if (stopRequested) {
log.debug("Aborting ID block retrieval on partition({})-namespace({}) after " +
"graceful shutdown was requested, exec time {}, exec+q time {}",
partition, idNamespace, running.stop(), alive.stop());
throw new TitanException("ID block retrieval aborted by caller");
}
IDBlock idBlock = idAuthority.getIDBlock(partition, idNamespace, renewTimeout);
log.debug("Retrieved ID block from authority on partition({})-namespace({}), " +
"exec time {}, exec+q time {}",
partition, idNamespace, running.stop(), alive.stop());
Preconditions.checkArgument(idBlock!=null && idBlock.numIds()>0);
return idBlock;
} catch (BackendException e) {
throw new TitanException("Could not acquire new ID block from storage", e);
} catch (IDPoolExhaustedException e) {
return ID_POOL_EXHAUSTION;
}
}
}
}
| 4,704 |
721 | package crazypants.enderio.conduits.network;
import java.util.UUID;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import com.enderio.core.common.util.BlockCoord;
import crazypants.enderio.base.EnderIO;
import crazypants.enderio.base.Log;
import crazypants.enderio.base.conduit.IConduit;
import crazypants.enderio.base.conduit.IConduitBundle;
import crazypants.enderio.base.conduit.registry.ConduitRegistry;
import crazypants.enderio.conduits.conduit.TileConduitBundle;
import crazypants.enderio.conduits.gui.ExternalConnectionContainer;
import crazypants.enderio.util.EnumReader;
import io.netty.buffer.ByteBuf;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.network.simpleimpl.MessageContext;
import net.minecraftforge.fml.relauncher.Side;
public abstract class AbstractConduitPacket<T extends IConduit> extends AbstractConduitBundlePacket {
private UUID uuid;
public AbstractConduitPacket() {
}
public AbstractConduitPacket(@Nonnull T conduit) {
super(conduit.getBundle().getEntity());
this.uuid = ConduitRegistry.getNetwork(conduit).getUUID();
}
protected Class<? extends IConduit> getConType() {
return ConduitRegistry.getNetwork(uuid).getBaseType();
}
@Override
public void write(@SuppressWarnings("null") @Nonnull ByteBuf buf) {
buf.writeLong(uuid.getMostSignificantBits());
buf.writeLong(uuid.getLeastSignificantBits());
}
@Override
public void read(@SuppressWarnings("null") @Nonnull ByteBuf buf) {
uuid = new UUID(buf.readLong(), buf.readLong());
}
@SuppressWarnings("unchecked")
public @Nullable T getConduit(MessageContext ctx) {
if (ctx.side == Side.SERVER) {
if (ctx.getServerHandler().player.openContainer instanceof ExternalConnectionContainer) {
final TileConduitBundle tileEntity = ((ExternalConnectionContainer) ctx.getServerHandler().player.openContainer).getTileEntity();
if (tileEntity == null || !tileEntity.getPos().equals(getPos())) {
Log.warn("Player " + ctx.getServerHandler().player.getName() + " tried to manipulate conduit while having another conduit's GUI open!");
return null;
}
} else {
if (BlockCoord.get(ctx.getServerHandler().player).distanceSq(getPos()) > EnderIO.proxy.getReachDistanceForPlayer(ctx.getServerHandler().player)) {
Log.warn("Player " + ctx.getServerHandler().player.getName() + " tried to manipulate conduit without having its GUI open or being near it!");
return null;
}
}
}
World world = getWorld(ctx);
TileEntity tileEntity = getTileEntity(world);
if (tileEntity instanceof IConduitBundle) {
return (T) ((IConduitBundle) tileEntity).getConduit(getConType());
}
return null;
}
public static abstract class Sided<T extends IConduit> extends AbstractConduitPacket<T> {
protected @Nonnull EnumFacing dir = EnumFacing.DOWN;
public Sided() {
}
public Sided(@Nonnull T con, @Nonnull EnumFacing dir) {
super(con);
this.dir = dir;
}
@Override
public void write(@Nonnull ByteBuf buf) {
super.write(buf);
buf.writeShort(dir.ordinal());
}
@Override
public void read(@Nonnull ByteBuf buf) {
super.read(buf);
dir = EnumReader.get(EnumFacing.class, buf.readShort());
}
}
}
| 1,230 |
681 | {
"description":"The second explicit group",
"displayName":"Explicit Group number two",
"aliasInOwner":"EG:II"
}
| 40 |
453 | /*
FUNCTION
<<strspn>>---find initial match
INDEX
strspn
SYNOPSIS
#include <string.h>
size_t strspn(const char *<[s1]>, const char *<[s2]>);
DESCRIPTION
This function computes the length of the initial segment of
the string pointed to by <[s1]> which consists entirely of
characters from the string pointed to by <[s2]> (excluding the
terminating null character).
RETURNS
<<strspn>> returns the length of the segment found.
PORTABILITY
<<strspn>> is ANSI C.
<<strspn>> requires no supporting OS subroutines.
QUICKREF
strspn ansi pure
*/
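/*
   Illustrative example (not part of the original sources): given
       const char *s = "2024-06-01T12:00";
   strspn(s, "0123456789") returns 4, the length of the initial run of
   digits, while strspn(s, "abc") returns 0.
*/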
#include <string.h>
size_t
_DEFUN (strspn, (s1, s2),
_CONST char *s1 _AND
_CONST char *s2)
{
_CONST char *s = s1;
_CONST char *c;
while (*s1)
{
for (c = s2; *c; c++)
{
if (*s1 == *c)
break;
}
if (*c == '\0')
break;
s1++;
}
return s1 - s;
}
| 364 |
732 | //
// DSCommonItem.h
// DSLolita
//
// Created by <NAME> on 15/5/25.
//  Copyright (c) 2015 samDing. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface DSCommonItem : NSObject
// Icon image name
@property (nonatomic , copy) NSString *icon;
// Title
@property (nonatomic , copy) NSString *title;
// Subtitle
@property (nonatomic , copy) NSString *subtitle;
// Numeric badge value shown on the right
@property (nonatomic , copy) NSString *badgeValue;
// The destination view controller class to push when this cell is tapped
@property (nonatomic , assign) Class destVcClass;
@property (nonatomic , copy) void (^operation)();
+ (instancetype)itemWithTitle:(NSString *)title icon:(NSString *)icon;
+ (instancetype)itemWithTitle:(NSString *)title;
@end
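// Illustrative usage (assumed; the names below are hypothetical and not from the original project):
//   DSCommonItem *item = [DSCommonItem itemWithTitle:@"Settings" icon:@"icon_settings"];
//   item.badgeValue = @"3";
//   item.destVcClass = [DSSettingsViewController class]; // hypothetical destination controller
//   item.operation = ^{ NSLog(@"cell tapped"); };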
| 291 |
14,668 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stddef.h>
#include <memory>
#include <utility>
#include <vector>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/containers/contains.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/memory/raw_ptr.h"
#include "base/run_loop.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/scoped_feature_list.h"
#include "base/threading/thread_task_runner_handle.h"
#include "base/values.h"
#include "components/crx_file/id_util.h"
#include "components/update_client/crx_update_item.h"
#include "components/update_client/update_client.h"
#include "content/public/browser/notification_service.h"
#include "content/public/test/browser_task_environment.h"
#include "content/public/test/test_utils.h"
#include "extensions/browser/allowlist_state.h"
#include "extensions/browser/blocklist_state.h"
#include "extensions/browser/disable_reason.h"
#include "extensions/browser/extension_prefs.h"
#include "extensions/browser/extension_registry.h"
#include "extensions/browser/extensions_test.h"
#include "extensions/browser/mock_extension_system.h"
#include "extensions/browser/notification_types.h"
#include "extensions/browser/test_extensions_browser_client.h"
#include "extensions/browser/updater/extension_downloader.h"
#include "extensions/browser/updater/extension_update_data.h"
#include "extensions/browser/updater/uninstall_ping_sender.h"
#include "extensions/browser/updater/update_service.h"
#include "extensions/common/extension_builder.h"
#include "extensions/common/extension_features.h"
#include "extensions/common/extension_urls.h"
#include "extensions/common/manifest_url_handlers.h"
#include "extensions/common/value_builder.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
namespace {
using UpdateClientEvents = update_client::UpdateClient::Observer::Events;
class FakeUpdateClient : public update_client::UpdateClient {
public:
FakeUpdateClient();
FakeUpdateClient(const FakeUpdateClient&) = delete;
FakeUpdateClient& operator=(const FakeUpdateClient&) = delete;
// Returns the data we've gotten from the CrxDataCallback for ids passed to
// the Update function.
std::vector<absl::optional<update_client::CrxComponent>>* data() {
return &data_;
}
// Used for tests that uninstall pings get requested properly.
struct UninstallPing {
std::string id;
base::Version version;
int reason;
UninstallPing(const std::string& id,
const base::Version& version,
int reason)
: id(id), version(version), reason(reason) {}
};
std::vector<UninstallPing>& uninstall_pings() { return uninstall_pings_; }
struct UpdateRequest {
std::vector<std::string> extension_ids;
update_client::Callback callback;
};
// update_client::UpdateClient
void AddObserver(Observer* observer) override {
if (observer)
observers_.push_back(observer);
}
void RemoveObserver(Observer* observer) override {}
void Install(const std::string& id,
CrxDataCallback crx_data_callback,
CrxStateChangeCallback crx_state_change_callback,
update_client::Callback callback) override {}
void Update(const std::vector<std::string>& ids,
CrxDataCallback crx_data_callback,
CrxStateChangeCallback crx_state_change_callback,
bool is_foreground,
update_client::Callback callback) override;
bool GetCrxUpdateState(
const std::string& id,
update_client::CrxUpdateItem* update_item) const override {
update_item->next_version = base::Version("2.0");
std::map<std::string, std::string> custom_attributes;
if (is_malware_update_item_)
custom_attributes["_malware"] = "true";
if (allowlist_state == extensions::ALLOWLIST_ALLOWLISTED)
custom_attributes["_esbAllowlist"] = "true";
else if (allowlist_state == extensions::ALLOWLIST_NOT_ALLOWLISTED)
      custom_attributes["_esbAllowlist"] = "false";
if (!custom_attributes.empty())
update_item->custom_updatecheck_data = custom_attributes;
return true;
}
bool IsUpdating(const std::string& id) const override { return false; }
void Stop() override {}
void SendUninstallPing(const update_client::CrxComponent& crx_component,
int reason,
update_client::Callback callback) override {
uninstall_pings_.emplace_back(crx_component.app_id, crx_component.version,
reason);
}
void SendRegistrationPing(const update_client::CrxComponent& crx_component,
update_client::Callback Callback) override {}
void FireEvent(Observer::Events event, const std::string& extension_id) {
for (Observer* observer : observers_)
observer->OnEvent(event, extension_id);
}
void set_delay_update() { delay_update_ = true; }
void set_is_malware_update_item() { is_malware_update_item_ = true; }
void set_allowlist_state(extensions::AllowlistState state) {
allowlist_state = state;
}
bool delay_update() const { return delay_update_; }
UpdateRequest& update_request(int index) { return delayed_requests_[index]; }
int num_update_requests() const {
return static_cast<int>(delayed_requests_.size());
}
void RunDelayedUpdate(
int index,
Observer::Events event = Observer::Events::COMPONENT_UPDATED) {
UpdateRequest& request = update_request(index);
for (const std::string& id : request.extension_ids)
FireEvent(event, id);
std::move(request.callback).Run(update_client::Error::NONE);
}
protected:
~FakeUpdateClient() override = default;
std::vector<absl::optional<update_client::CrxComponent>> data_;
std::vector<UninstallPing> uninstall_pings_;
std::vector<Observer*> observers_;
bool delay_update_;
bool is_malware_update_item_ = false;
extensions::AllowlistState allowlist_state = extensions::ALLOWLIST_UNDEFINED;
std::vector<UpdateRequest> delayed_requests_;
};
FakeUpdateClient::FakeUpdateClient() : delay_update_(false) {}
void FakeUpdateClient::Update(const std::vector<std::string>& ids,
CrxDataCallback crx_data_callback,
CrxStateChangeCallback crx_state_change_callback,
bool is_foreground,
update_client::Callback callback) {
data_ = std::move(crx_data_callback).Run(ids);
if (delay_update()) {
delayed_requests_.push_back({ids, std::move(callback)});
} else {
for (const std::string& id : ids)
FireEvent(Observer::Events::COMPONENT_UPDATED, id);
std::move(callback).Run(update_client::Error::NONE);
}
}
class UpdateFoundNotificationObserver : public content::NotificationObserver {
public:
UpdateFoundNotificationObserver(const std::string& id,
const std::string& version)
: id_(id), version_(version) {
registrar_.Add(this, extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
content::NotificationService::AllSources());
}
~UpdateFoundNotificationObserver() override {
registrar_.Remove(this, extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
content::NotificationService::AllSources());
}
void reset() { found_notification_ = false; }
bool found_notification() const { return found_notification_; }
private:
void Observe(int type,
const content::NotificationSource& source,
const content::NotificationDetails& details) override {
ASSERT_EQ(extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND, type);
EXPECT_EQ(id_, content::Details<extensions::UpdateDetails>(details)->id);
EXPECT_EQ(version_, content::Details<extensions::UpdateDetails>(details)
->version.GetString());
found_notification_ = true;
}
private:
content::NotificationRegistrar registrar_;
bool found_notification_ = false;
std::string id_;
std::string version_;
};
} // namespace
namespace extensions {
namespace {
// A global variable for controlling whether uninstalls should cause uninstall
// pings to be sent.
UninstallPingSender::FilterResult g_should_ping =
UninstallPingSender::DO_NOT_SEND_PING;
// Helper method to serve as an uninstall ping filter.
UninstallPingSender::FilterResult ShouldPing(const Extension* extension,
UninstallReason reason) {
return g_should_ping;
}
// A fake ExtensionSystem that lets us intercept calls to install new
// versions of an extension.
class FakeExtensionSystem : public MockExtensionSystem {
public:
using InstallUpdateCallback = MockExtensionSystem::InstallUpdateCallback;
explicit FakeExtensionSystem(content::BrowserContext* context)
: MockExtensionSystem(context) {}
~FakeExtensionSystem() override = default;
struct InstallUpdateRequest {
InstallUpdateRequest(const std::string& extension_id,
const base::FilePath& temp_dir,
bool install_immediately)
: extension_id(extension_id),
temp_dir(temp_dir),
install_immediately(install_immediately) {}
std::string extension_id;
base::FilePath temp_dir;
bool install_immediately;
};
std::vector<InstallUpdateRequest>* install_requests() {
return &install_requests_;
}
void set_install_callback(base::OnceClosure callback) {
next_install_callback_ = std::move(callback);
}
// ExtensionSystem override
void InstallUpdate(const std::string& extension_id,
const std::string& public_key,
const base::FilePath& temp_dir,
bool install_immediately,
InstallUpdateCallback install_update_callback) override {
base::DeletePathRecursively(temp_dir);
install_requests_.push_back(
InstallUpdateRequest(extension_id, temp_dir, install_immediately));
if (!next_install_callback_.is_null()) {
std::move(next_install_callback_).Run();
}
std::move(install_update_callback).Run(absl::nullopt);
}
void PerformActionBasedOnOmahaAttributes(
const std::string& extension_id,
const base::Value& attributes) override {
ExtensionRegistry* registry = ExtensionRegistry::Get(browser_context());
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("1").SetVersion("1.2").SetID(extension_id).Build();
const base::Value* malware_value = attributes.FindKey("_malware");
if (malware_value && malware_value->GetBool())
registry->AddDisabled(extension1);
else
registry->AddEnabled(extension1);
const base::Value* allowlist_value = attributes.FindKey("_esbAllowlist");
if (allowlist_value) {
bool is_allowlisted = allowlist_value->GetBool();
extension_allowlist_states_[extension_id] =
is_allowlisted ? ALLOWLIST_ALLOWLISTED : ALLOWLIST_NOT_ALLOWLISTED;
}
}
bool FinishDelayedInstallationIfReady(const std::string& extension_id,
bool install_immediately) override {
return false;
}
AllowlistState GetExtensionAllowlistState(const std::string& extension_id) {
if (!base::Contains(extension_allowlist_states_, extension_id))
return ALLOWLIST_UNDEFINED;
return extension_allowlist_states_[extension_id];
}
private:
std::vector<InstallUpdateRequest> install_requests_;
base::OnceClosure next_install_callback_;
base::flat_map<std::string, AllowlistState> extension_allowlist_states_;
};
class UpdateServiceTest : public ExtensionsTest {
public:
UpdateServiceTest() = default;
~UpdateServiceTest() override = default;
void SetUp() override {
ExtensionsTest::SetUp();
extensions_browser_client()->set_extension_system_factory(
&fake_extension_system_factory_);
extensions_browser_client()->SetUpdateClientFactory(base::BindRepeating(
&UpdateServiceTest::CreateUpdateClient, base::Unretained(this)));
update_service_ = UpdateService::Get(browser_context());
}
protected:
UpdateService* update_service() const { return update_service_; }
FakeUpdateClient* update_client() const { return update_client_.get(); }
update_client::UpdateClient* CreateUpdateClient() {
// We only expect that this will get called once, so consider it an error
// if our update_client_ is already non-null.
EXPECT_EQ(nullptr, update_client_.get());
update_client_ = base::MakeRefCounted<FakeUpdateClient>();
return update_client_.get();
}
// Helper function that creates a file at |relative_path| within |directory|
// and fills it with |content|.
bool AddFileToDirectory(const base::FilePath& directory,
const base::FilePath& relative_path,
const std::string& content) {
base::FilePath full_path = directory.Append(relative_path);
if (!CreateDirectory(full_path.DirName()))
return false;
int result = base::WriteFile(full_path, content.data(), content.size());
return (static_cast<size_t>(result) == content.size());
}
FakeExtensionSystem* extension_system() {
return static_cast<FakeExtensionSystem*>(
fake_extension_system_factory_.GetForBrowserContext(browser_context()));
}
void BasicUpdateOperations(bool install_immediately) {
// Create a temporary directory that a fake extension will live in and fill
// it with some test files.
base::ScopedTempDir temp_dir;
ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
base::FilePath foo_js(FILE_PATH_LITERAL("foo.js"));
base::FilePath bar_html(FILE_PATH_LITERAL("bar/bar.html"));
ASSERT_TRUE(AddFileToDirectory(temp_dir.GetPath(), foo_js, "hello"))
<< "Failed to write " << temp_dir.GetPath().value() << "/"
<< foo_js.value();
ASSERT_TRUE(AddFileToDirectory(temp_dir.GetPath(), bar_html, "world"));
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("Foo")
.SetVersion("1.0")
.SetID(crx_file::id_util::GenerateId("foo_extension"))
.SetPath(temp_dir.GetPath())
.Build();
ExtensionRegistry::Get(browser_context())->AddEnabled(extension1);
ExtensionUpdateCheckParams update_check_params;
update_check_params.update_info[extension1->id()] = ExtensionUpdateData();
update_check_params.install_immediately = install_immediately;
// Start an update check and verify that the UpdateClient was sent the right
// data.
bool executed = false;
update_service()->StartUpdateCheck(
update_check_params,
base::BindOnce([](bool* executed) { *executed = true; }, &executed));
ASSERT_TRUE(executed);
const auto* data = update_client()->data();
ASSERT_NE(nullptr, data);
ASSERT_EQ(1u, data->size());
ASSERT_EQ(data->at(0)->version, extension1->version());
update_client::CrxInstaller* installer = data->at(0)->installer.get();
ASSERT_NE(installer, nullptr);
// The GetInstalledFile method is used when processing differential updates
// to get a path to an existing file in an extension. We want to test a
// number of scenarios to be sure we handle invalid relative paths, don't
// accidentally return paths outside the extension's dir, etc.
base::FilePath tmp;
EXPECT_TRUE(installer->GetInstalledFile(foo_js.MaybeAsASCII(), &tmp));
EXPECT_EQ(temp_dir.GetPath().Append(foo_js), tmp) << tmp.value();
EXPECT_TRUE(installer->GetInstalledFile(bar_html.MaybeAsASCII(), &tmp));
EXPECT_EQ(temp_dir.GetPath().Append(bar_html), tmp) << tmp.value();
EXPECT_FALSE(installer->GetInstalledFile("does_not_exist", &tmp));
EXPECT_FALSE(installer->GetInstalledFile("does/not/exist", &tmp));
EXPECT_FALSE(installer->GetInstalledFile("/does/not/exist", &tmp));
EXPECT_FALSE(installer->GetInstalledFile("C:\\tmp", &tmp));
base::FilePath system_temp_dir;
ASSERT_TRUE(base::GetTempDir(&system_temp_dir));
EXPECT_FALSE(
installer->GetInstalledFile(system_temp_dir.MaybeAsASCII(), &tmp));
// Test the install callback.
base::ScopedTempDir new_version_dir;
ASSERT_TRUE(new_version_dir.CreateUniqueTempDir());
bool done = false;
installer->Install(
new_version_dir.GetPath(), std::string(), nullptr, base::DoNothing(),
base::BindOnce(
[](bool* done, const update_client::CrxInstaller::Result& result) {
*done = true;
EXPECT_EQ(0, result.error);
EXPECT_EQ(0, result.extended_error);
},
&done));
base::RunLoop run_loop;
extension_system()->set_install_callback(run_loop.QuitClosure());
run_loop.Run();
std::vector<FakeExtensionSystem::InstallUpdateRequest>* requests =
extension_system()->install_requests();
ASSERT_EQ(1u, requests->size());
const auto& request = requests->at(0);
EXPECT_EQ(request.extension_id, extension1->id());
EXPECT_EQ(request.temp_dir.value(), new_version_dir.GetPath().value());
EXPECT_EQ(install_immediately, request.install_immediately);
EXPECT_TRUE(done);
}
private:
raw_ptr<UpdateService> update_service_ = nullptr;
scoped_refptr<FakeUpdateClient> update_client_;
MockExtensionSystemFactory<FakeExtensionSystem>
fake_extension_system_factory_;
};
TEST_F(UpdateServiceTest, BasicUpdateOperations_InstallImmediately) {
BasicUpdateOperations(true);
}
TEST_F(UpdateServiceTest, BasicUpdateOperations_NotInstallImmediately) {
BasicUpdateOperations(false);
}
TEST_F(UpdateServiceTest, UninstallPings) {
UninstallPingSender sender(ExtensionRegistry::Get(browser_context()),
base::BindRepeating(&ShouldPing));
// Build 3 extensions.
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("1").SetVersion("1.2").Build();
scoped_refptr<const Extension> extension2 =
ExtensionBuilder("2").SetVersion("2.3").Build();
scoped_refptr<const Extension> extension3 =
ExtensionBuilder("3").SetVersion("3.4").Build();
EXPECT_TRUE(extension1->id() != extension2->id() &&
extension1->id() != extension3->id() &&
extension2->id() != extension3->id());
ExtensionRegistry* registry = ExtensionRegistry::Get(browser_context());
// Run tests for each uninstall reason.
for (int reason_val = static_cast<int>(UNINSTALL_REASON_FOR_TESTING);
reason_val < static_cast<int>(UNINSTALL_REASON_MAX); ++reason_val) {
UninstallReason reason = static_cast<UninstallReason>(reason_val);
// Start with 2 enabled and 1 disabled extensions.
EXPECT_TRUE(registry->AddEnabled(extension1)) << reason;
EXPECT_TRUE(registry->AddEnabled(extension2)) << reason;
EXPECT_TRUE(registry->AddDisabled(extension3)) << reason;
// Uninstall the first extension, instructing our filter not to send pings,
// and verify none were sent.
g_should_ping = UninstallPingSender::DO_NOT_SEND_PING;
EXPECT_TRUE(registry->RemoveEnabled(extension1->id())) << reason;
registry->TriggerOnUninstalled(extension1.get(), reason);
EXPECT_TRUE(update_client()->uninstall_pings().empty()) << reason;
// Uninstall the second and third extensions, instructing the filter to
// send pings, and make sure we got the expected data.
g_should_ping = UninstallPingSender::SEND_PING;
EXPECT_TRUE(registry->RemoveEnabled(extension2->id())) << reason;
registry->TriggerOnUninstalled(extension2.get(), reason);
EXPECT_TRUE(registry->RemoveDisabled(extension3->id())) << reason;
registry->TriggerOnUninstalled(extension3.get(), reason);
std::vector<FakeUpdateClient::UninstallPing>& pings =
update_client()->uninstall_pings();
ASSERT_EQ(2u, pings.size()) << reason;
EXPECT_EQ(extension2->id(), pings[0].id) << reason;
EXPECT_EQ(extension2->version(), pings[0].version) << reason;
EXPECT_EQ(reason, pings[0].reason) << reason;
EXPECT_EQ(extension3->id(), pings[1].id) << reason;
EXPECT_EQ(extension3->version(), pings[1].version) << reason;
EXPECT_EQ(reason, pings[1].reason) << reason;
pings.clear();
}
}
TEST_F(UpdateServiceTest, NoPerformAction) {
std::string extension_id = crx_file::id_util::GenerateId("id");
ExtensionRegistry* registry = ExtensionRegistry::Get(browser_context());
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("1").SetVersion("1.2").SetID(extension_id).Build();
EXPECT_TRUE(registry->AddEnabled(extension1));
update_client()->set_is_malware_update_item();
update_client()->set_delay_update();
ExtensionUpdateCheckParams update_check_params;
update_check_params.update_info[extension_id] = ExtensionUpdateData();
bool executed = false;
update_service()->StartUpdateCheck(
update_check_params,
base::BindOnce([](bool* executed) { *executed = true; }, &executed));
EXPECT_FALSE(executed);
const auto& request = update_client()->update_request(0);
EXPECT_THAT(request.extension_ids, testing::ElementsAre(extension_id));
update_client()->RunDelayedUpdate(
0, UpdateClientEvents::COMPONENT_CHECKING_FOR_UPDATES);
EXPECT_FALSE(registry->disabled_extensions().GetByID(extension_id));
EXPECT_EQ(extensions::ALLOWLIST_UNDEFINED,
extension_system()->GetExtensionAllowlistState(extension_id));
}
TEST_F(UpdateServiceTest, CheckOmahaMalwareAttributes) {
std::string extension_id = crx_file::id_util::GenerateId("id");
ExtensionRegistry* registry = ExtensionRegistry::Get(browser_context());
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("1").SetVersion("1.2").SetID(extension_id).Build();
EXPECT_TRUE(registry->AddEnabled(extension1));
update_client()->set_is_malware_update_item();
update_client()->set_delay_update();
ExtensionUpdateCheckParams update_check_params;
update_check_params.update_info[extension_id] = ExtensionUpdateData();
bool executed = false;
update_service()->StartUpdateCheck(
update_check_params,
base::BindOnce([](bool* executed) { *executed = true; }, &executed));
EXPECT_FALSE(executed);
const auto& request = update_client()->update_request(0);
EXPECT_THAT(request.extension_ids, testing::ElementsAre(extension_id));
update_client()->RunDelayedUpdate(0,
UpdateClientEvents::COMPONENT_NOT_UPDATED);
EXPECT_TRUE(registry->disabled_extensions().GetByID(extension_id));
}
TEST_F(UpdateServiceTest, CheckOmahaAllowlistAttributes) {
std::string extension_id = crx_file::id_util::GenerateId("id");
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("1").SetVersion("1.2").SetID(extension_id).Build();
update_client()->set_allowlist_state(extensions::ALLOWLIST_ALLOWLISTED);
update_client()->set_delay_update();
ExtensionUpdateCheckParams update_check_params;
update_check_params.update_info[extension_id] = ExtensionUpdateData();
bool executed = false;
update_service()->StartUpdateCheck(
update_check_params,
base::BindOnce([](bool* executed) { *executed = true; }, &executed));
EXPECT_FALSE(executed);
const auto& request = update_client()->update_request(0);
EXPECT_THAT(request.extension_ids, testing::ElementsAre(extension_id));
update_client()->RunDelayedUpdate(0,
UpdateClientEvents::COMPONENT_UPDATE_FOUND);
EXPECT_EQ(extensions::ALLOWLIST_ALLOWLISTED,
extension_system()->GetExtensionAllowlistState(extension_id));
}
TEST_F(UpdateServiceTest, CheckNoOmahaAttributes) {
std::string extension_id = crx_file::id_util::GenerateId("id");
ExtensionRegistry* registry = ExtensionRegistry::Get(browser_context());
scoped_refptr<const Extension> extension1 =
ExtensionBuilder("1").SetVersion("1.2").SetID(extension_id).Build();
EXPECT_TRUE(registry->AddDisabled(extension1));
update_client()->set_delay_update();
ExtensionUpdateCheckParams update_check_params;
update_check_params.update_info[extension_id] = ExtensionUpdateData();
bool executed = false;
update_service()->StartUpdateCheck(
update_check_params,
base::BindOnce([](bool* executed) { *executed = true; }, &executed));
EXPECT_FALSE(executed);
const auto& request = update_client()->update_request(0);
EXPECT_THAT(request.extension_ids, testing::ElementsAre(extension_id));
update_client()->RunDelayedUpdate(0,
UpdateClientEvents::COMPONENT_NOT_UPDATED);
EXPECT_TRUE(registry->enabled_extensions().GetByID(extension_id));
EXPECT_EQ(extensions::ALLOWLIST_UNDEFINED,
extension_system()->GetExtensionAllowlistState(extension_id));
}
TEST_F(UpdateServiceTest, UpdateFoundNotification) {
std::string extension_id = crx_file::id_util::GenerateId("id");
UpdateFoundNotificationObserver notification_observer(extension_id, "2.0");
// Fire UpdateClientEvents::COMPONENT_UPDATE_FOUND and verify that
// NOTIFICATION_EXTENSION_UPDATE_FOUND notification is sent.
update_client()->FireEvent(UpdateClientEvents::COMPONENT_UPDATE_FOUND,
extension_id);
EXPECT_TRUE(notification_observer.found_notification());
notification_observer.reset();
update_client()->FireEvent(UpdateClientEvents::COMPONENT_CHECKING_FOR_UPDATES,
extension_id);
EXPECT_FALSE(notification_observer.found_notification());
}
TEST_F(UpdateServiceTest, InProgressUpdate_Successful) {
base::HistogramTester histogram_tester;
update_client()->set_delay_update();
ExtensionUpdateCheckParams update_check_params;
// Extensions with empty IDs will be ignored.
update_check_params.update_info["A"] = ExtensionUpdateData();
update_check_params.update_info["B"] = ExtensionUpdateData();
update_check_params.update_info["C"] = ExtensionUpdateData();
update_check_params.update_info["D"] = ExtensionUpdateData();
update_check_params.update_info["E"] = ExtensionUpdateData();
bool executed = false;
update_service()->StartUpdateCheck(
update_check_params,
base::BindOnce([](bool* executed) { *executed = true; }, &executed));
EXPECT_FALSE(executed);
const auto& request = update_client()->update_request(0);
EXPECT_THAT(request.extension_ids,
testing::ElementsAre("A", "B", "C", "D", "E"));
update_client()->RunDelayedUpdate(0);
EXPECT_TRUE(executed);
}
// Incorrect deduplication of the same extension ID with different flags may
// lead to incorrect behaviour: a corrupted extension won't be reinstalled.
TEST_F(UpdateServiceTest, InProgressUpdate_DuplicateWithDifferentData) {
base::HistogramTester histogram_tester;
update_client()->set_delay_update();
ExtensionUpdateCheckParams uc1, uc2;
uc1.update_info["A"] = ExtensionUpdateData();
uc2.update_info["A"] = ExtensionUpdateData();
uc2.update_info["A"].install_source = "reinstall";
uc2.update_info["A"].is_corrupt_reinstall = true;
bool executed1 = false;
update_service()->StartUpdateCheck(
uc1,
base::BindOnce([](bool* executed) { *executed = true; }, &executed1));
EXPECT_FALSE(executed1);
bool executed2 = false;
update_service()->StartUpdateCheck(
uc2,
base::BindOnce([](bool* executed) { *executed = true; }, &executed2));
EXPECT_FALSE(executed2);
ASSERT_EQ(2, update_client()->num_update_requests());
{
const auto& request = update_client()->update_request(0);
EXPECT_THAT(request.extension_ids, testing::ElementsAre("A"));
}
{
const auto& request = update_client()->update_request(1);
EXPECT_THAT(request.extension_ids, testing::ElementsAre("A"));
}
update_client()->RunDelayedUpdate(0);
EXPECT_TRUE(executed1);
EXPECT_FALSE(executed2);
update_client()->RunDelayedUpdate(1);
EXPECT_TRUE(executed2);
}
TEST_F(UpdateServiceTest, InProgressUpdate_NonOverlapped) {
// 2 non-overlapping update requests.
base::HistogramTester histogram_tester;
update_client()->set_delay_update();
ExtensionUpdateCheckParams uc1, uc2;
uc1.update_info["A"] = ExtensionUpdateData();
uc1.update_info["B"] = ExtensionUpdateData();
uc1.update_info["C"] = ExtensionUpdateData();
uc2.update_info["D"] = ExtensionUpdateData();
uc2.update_info["E"] = ExtensionUpdateData();
bool executed1 = false;
update_service()->StartUpdateCheck(
uc1,
base::BindOnce([](bool* executed) { *executed = true; }, &executed1));
EXPECT_FALSE(executed1);
bool executed2 = false;
update_service()->StartUpdateCheck(
uc2,
base::BindOnce([](bool* executed) { *executed = true; }, &executed2));
EXPECT_FALSE(executed2);
ASSERT_EQ(2, update_client()->num_update_requests());
const auto& request1 = update_client()->update_request(0);
const auto& request2 = update_client()->update_request(1);
EXPECT_THAT(request1.extension_ids, testing::ElementsAre("A", "B", "C"));
EXPECT_THAT(request2.extension_ids, testing::ElementsAre("D", "E"));
update_client()->RunDelayedUpdate(0);
EXPECT_TRUE(executed1);
EXPECT_FALSE(executed2);
update_client()->RunDelayedUpdate(1);
EXPECT_TRUE(executed2);
}
} // namespace
} // namespace extensions
| 10,753 |
3,296 | <filename>bin/bbfcreate.c
#include "ccv.h"
#include <ctype.h>
#include <getopt.h>
static void exit_with_help(void)
{
printf(
"\n \033[1mUSAGE\033[0m\n\n bbfcreate [OPTION...]\n\n"
" \033[1mREQUIRED OPTIONS\033[0m\n\n"
" --positive-list : text file contains a list of positive files (cropped and scaled to the same size)\n"
" --background-list : text file contains a list of image files that don't contain any target objects\n"
" --negative-count : the number of negative examples we should collect from background files to initialize SVM\n"
" --working-dir : the directory to save progress and produce result model\n"
" --width : the width of positive image\n"
" --height : the height of positive image\n\n"
" \033[1mOTHER OPTIONS\033[0m\n\n"
" --base-dir : change the base directory so that the program can read images from there\n"
" --layer : how many layers needed for cascade classifier [DEFAULT TO 24]\n"
" --positive-criteria : what's the percentage of positive examples need to pass for the next layer [DEFAULT TO 0.9975]\n"
" --negative-criteria : what's the percentage of negative examples need to reject for the next layer [DEFAULT TO 0.5]\n"
" --balance : the balance weight for positive examples v.s. negative examples [DEFAULT TO 1.0]\n"
" --feature-number : how big our feature pool should be [DEFAULT TO 100 (thus, 100 * 100 = 10000 features)]\n\n"
);
exit(-1);
}
int main(int argc, char** argv)
{
static struct option bbf_options[] = {
/* help */
{"help", 0, 0, 0},
/* required parameters */
{"positive-list", 1, 0, 0},
{"background-list", 1, 0, 0},
{"working-dir", 1, 0, 0},
{"negative-count", 1, 0, 0},
{"width", 1, 0, 0},
{"height", 1, 0, 0},
/* optional parameters */
{"base-dir", 1, 0, 0},
{"layer", 1, 0, 0},
{"positive-criteria", 1, 0, 0},
{"negative-criteria", 1, 0, 0},
{"balance", 1, 0, 0},
{"feature-number", 1, 0, 0},
{0, 0, 0, 0}
};
char* positive_list = 0;
char* background_list = 0;
char* working_dir = 0;
char* base_dir = 0;
int negnum = 0;
int width = 0, height = 0;
ccv_bbf_new_param_t params = {
.pos_crit = 0.9975,
.neg_crit = 0.50,
.balance_k = 1.0,
.layer = 24,
.feature_number = 100,
.optimizer = CCV_BBF_GENETIC_OPT | CCV_BBF_FLOAT_OPT,
};
int i, k;
while (getopt_long_only(argc, argv, "", bbf_options, &k) != -1)
{
switch (k)
{
case 0:
exit_with_help();
case 1:
positive_list = optarg;
break;
case 2:
background_list = optarg;
break;
case 3:
working_dir = optarg;
break;
case 4:
negnum = atoi(optarg);
break;
case 5:
width = atoi(optarg);
break;
case 6:
height = atoi(optarg);
break;
case 7:
base_dir = optarg;
break;
case 8:
params.layer = atoi(optarg);
break;
case 9:
params.pos_crit = atof(optarg);
break;
case 10:
params.neg_crit = atof(optarg);
break;
case 11:
params.balance_k = atof(optarg);
break;
case 12:
params.feature_number = atoi(optarg);
break;
}
}
assert(positive_list != 0);
assert(background_list != 0);
assert(working_dir != 0);
assert(negnum > 0);
assert(width > 0 && height > 0);
ccv_enable_default_cache();
FILE* r0 = fopen(positive_list, "r");
assert(r0 && "positive-list doesn't exists");
FILE* r1 = fopen(background_list, "r");
assert(r1 && "background-list doesn't exists");
char* file = (char*)malloc(1024);
int dirlen = (base_dir != 0) ? strlen(base_dir) + 1 : 0;
size_t len = 1024;
ssize_t read;
int capacity = 32, size = 0;
ccv_dense_matrix_t** posimg = (ccv_dense_matrix_t**)ccmalloc(sizeof(ccv_dense_matrix_t*) * capacity);
while ((read = getline(&file, &len, r0)) != -1)
{
while(read > 1 && isspace(file[read - 1]))
read--;
file[read] = 0;
char* posfile = (char*)ccmalloc(1024);
if (base_dir != 0)
{
strncpy(posfile, base_dir, 1024);
posfile[dirlen - 1] = '/';
}
strncpy(posfile + dirlen, file, 1024 - dirlen);
posimg[size] = 0;
ccv_read(posfile, &posimg[size], CCV_IO_GRAY | CCV_IO_ANY_FILE);
if (posimg[size] != 0)
{
++size;
if (size >= capacity)
{
capacity *= 2;
posimg = (ccv_dense_matrix_t**)ccrealloc(posimg, sizeof(ccv_dense_matrix_t*) * capacity);
}
}
}
fclose(r0);
int posnum = size;
capacity = 32;
size = 0;
char** bgfiles = (char**)ccmalloc(sizeof(char*) * capacity);
while ((read = getline(&file, &len, r1)) != -1)
{
while(read > 1 && isspace(file[read - 1]))
read--;
file[read] = 0;
bgfiles[size] = (char*)ccmalloc(1024);
if (base_dir != 0)
{
strncpy(bgfiles[size], base_dir, 1024);
bgfiles[size][dirlen - 1] = '/';
}
strncpy(bgfiles[size] + dirlen, file, 1024 - dirlen);
++size;
if (size >= capacity)
{
capacity *= 2;
bgfiles = (char**)ccrealloc(bgfiles, sizeof(char*) * capacity);
}
}
fclose(r1);
int bgnum = size;
free(file);
ccv_bbf_classifier_cascade_new(posimg, posnum, bgfiles, bgnum, negnum, ccv_size(width, height), working_dir, params);
for (i = 0; i < bgnum; i++)
free(bgfiles[i]);
for (i = 0; i < posnum; i++)
ccv_matrix_free(&posimg[i]);
free(posimg);
free(bgfiles);
ccv_disable_cache();
return 0;
}
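/* Illustrative invocation (not part of the original file; the list files,
 * working directory and the 24x24 sample size are hypothetical):
 *
 *   ./bbfcreate --positive-list faces.txt --background-list backgrounds.txt \
 *       --negative-count 1000 --width 24 --height 24 --working-dir model/
 *
 * Only the required options must be supplied; the remaining parameters fall
 * back to the defaults initialized in the ccv_bbf_new_param_t above. */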
| 2,289 |
771 | {
"name": "localGfonts",
"desc": "Download and self-host Google fonts and CSS snippets",
"url": "https://labs.binaryunit.com/localgfonts/",
"tags": [
"CSS",
"Fonts"
],
"maintainers": [],
"addedAt": "2021-10-21"
}
| 101 |
13,585 | <gh_stars>1000+
package com.baomidou.mybatisplus.test.version;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.OptimisticLockerInnerInterceptor;
import com.baomidou.mybatisplus.test.BaseDbTest;
import org.apache.ibatis.plugin.Interceptor;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author miemie
* @since 2020-07-04
*/
public class VersionTest extends BaseDbTest<EntityMapper> {
@Test
void test() {
doTestAutoCommit(i -> {
int result = i.updateById(new Entity().setId(1L).setName("老张"));
assertThat(result).as("update succeeds when no version value is supplied").isEqualTo(1);
});
doTestAutoCommit(i -> {
int result = i.updateById(new Entity().setId(1L).setName("老张").setVersion(1));
assertThat(result).as("update fails when the supplied version value does not match").isEqualTo(0);
});
doTestAutoCommit(i -> {
Entity entity = new Entity().setId(1L).setName("老张").setVersion(0);
int result = i.updateById(entity);
assertThat(result).as("update succeeds when the supplied version value matches").isEqualTo(1);
assertThat(entity.getVersion()).isEqualTo(1);
});
doTestAutoCommit(i -> {
int result = i.update(new Entity().setName("老张"), Wrappers.<Entity>update().eq("id", 2));
assertThat(result).as("update succeeds when no version value is supplied").isEqualTo(1);
});
doTestAutoCommit(i -> {
int result = i.update(new Entity().setName("老张").setVersion(1), Wrappers.<Entity>update().eq("id", 2));
assertThat(result).as("update fails when the supplied version value does not match").isEqualTo(0);
});
doTestAutoCommit(i -> {
Entity entity = new Entity().setName("老张").setVersion(0);
int result = i.update(entity, Wrappers.<Entity>update().eq("id", 2));
assertThat(result).as("update succeeds when the supplied version value matches").isEqualTo(1);
assertThat(entity.getVersion()).isEqualTo(1);
});
}
@Override
protected List<Interceptor> interceptors() {
MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
interceptor.addInnerInterceptor(new OptimisticLockerInnerInterceptor());
return Collections.singletonList(interceptor);
}
@Override
protected String tableDataSql() {
return "insert into entity(id,name) values(1,'老王'),(2,'老李')";
}
@Override
protected List<String> tableSql() {
return Arrays.asList("drop table if exists entity",
"CREATE TABLE IF NOT EXISTS entity (\n" +
"id BIGINT(20) NOT NULL,\n" +
"name VARCHAR(30) NULL DEFAULT NULL,\n" +
"version integer NOT NULL DEFAULT 0,\n" +
"PRIMARY KEY (id)" +
")");
}
}
| 1,431 |
1,288 | // Copyright 2014 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#include "packager/media/base/media_sample.h"
#include <inttypes.h>
#include "packager/base/logging.h"
#include "packager/base/strings/stringprintf.h"
namespace shaka {
namespace media {
MediaSample::MediaSample(const uint8_t* data,
size_t data_size,
const uint8_t* side_data,
size_t side_data_size,
bool is_key_frame)
: is_key_frame_(is_key_frame) {
if (!data) {
CHECK_EQ(data_size, 0u);
}
SetData(data, data_size);
if (side_data) {
std::shared_ptr<uint8_t> shared_side_data(new uint8_t[side_data_size],
std::default_delete<uint8_t[]>());
memcpy(shared_side_data.get(), side_data, side_data_size);
side_data_ = std::move(shared_side_data);
side_data_size_ = side_data_size;
}
}
MediaSample::MediaSample() {}
MediaSample::~MediaSample() {}
// static
std::shared_ptr<MediaSample> MediaSample::CopyFrom(const uint8_t* data,
size_t data_size,
bool is_key_frame) {
// If you hit this CHECK you likely have a bug in a demuxer. Go fix it.
CHECK(data);
return std::shared_ptr<MediaSample>(
new MediaSample(data, data_size, nullptr, 0u, is_key_frame));
}
// static
std::shared_ptr<MediaSample> MediaSample::CopyFrom(const uint8_t* data,
size_t data_size,
const uint8_t* side_data,
size_t side_data_size,
bool is_key_frame) {
// If you hit this CHECK you likely have a bug in a demuxer. Go fix it.
CHECK(data);
return std::shared_ptr<MediaSample>(new MediaSample(
data, data_size, side_data, side_data_size, is_key_frame));
}
// static
std::shared_ptr<MediaSample> MediaSample::FromMetadata(const uint8_t* metadata,
size_t metadata_size) {
return std::shared_ptr<MediaSample>(
new MediaSample(nullptr, 0, metadata, metadata_size, false));
}
// static
std::shared_ptr<MediaSample> MediaSample::CreateEmptyMediaSample() {
return std::shared_ptr<MediaSample>(new MediaSample);
}
// static
std::shared_ptr<MediaSample> MediaSample::CreateEOSBuffer() {
return std::shared_ptr<MediaSample>(
new MediaSample(nullptr, 0, nullptr, 0, false));
}
std::shared_ptr<MediaSample> MediaSample::Clone() const {
std::shared_ptr<MediaSample> new_media_sample(new MediaSample);
new_media_sample->dts_ = dts_;
new_media_sample->pts_ = pts_;
new_media_sample->duration_ = duration_;
new_media_sample->is_key_frame_ = is_key_frame_;
new_media_sample->is_encrypted_ = is_encrypted_;
new_media_sample->data_ = data_;
new_media_sample->data_size_ = data_size_;
new_media_sample->side_data_ = side_data_;
new_media_sample->side_data_size_ = side_data_size_;
new_media_sample->config_id_ = config_id_;
if (decrypt_config_) {
new_media_sample->decrypt_config_.reset(new DecryptConfig(
decrypt_config_->key_id(), decrypt_config_->iv(),
decrypt_config_->subsamples(), decrypt_config_->protection_scheme(),
decrypt_config_->crypt_byte_block(),
decrypt_config_->skip_byte_block()));
}
return new_media_sample;
}
void MediaSample::TransferData(std::shared_ptr<uint8_t> data,
size_t data_size) {
data_ = std::move(data);
data_size_ = data_size;
}
void MediaSample::SetData(const uint8_t* data, size_t data_size) {
std::shared_ptr<uint8_t> shared_data(new uint8_t[data_size],
std::default_delete<uint8_t[]>());
memcpy(shared_data.get(), data, data_size);
TransferData(std::move(shared_data), data_size);
}
std::string MediaSample::ToString() const {
if (end_of_stream())
return "End of stream sample\n";
return base::StringPrintf(
"dts: %" PRId64 "\n pts: %" PRId64 "\n duration: %" PRId64
"\n "
"is_key_frame: %s\n size: %zu\n side_data_size: %zu\n",
dts_, pts_, duration_, is_key_frame_ ? "true" : "false", data_size_,
side_data_size_);
}
} // namespace media
} // namespace shaka
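// Illustrative usage sketch (not part of the original file; |frame| is a
// hypothetical encoded-frame buffer). CopyFrom() copies the payload, so the
// caller keeps ownership of the input buffer:
//
//   const uint8_t frame[] = {0x00, 0x01, 0x02};
//   auto sample = shaka::media::MediaSample::CopyFrom(
//       frame, sizeof(frame), /*is_key_frame=*/true);
//   auto copy = sample->Clone();
//   VLOG(1) << copy->ToString();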
| 2,078 |
3,227 | <reponame>ffteja/cgal
// Copyright (c) 2020 GeometryFactory Sarl (France).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org).
//
// $URL$
// $Id$
// SPDX-License-Identifier: GPL-3.0-or-later OR LicenseRef-Commercial
//
// Author(s): <NAME> <<EMAIL>>
#include "EnvelopeFunctions.h"
#include "ArrangementTypes.h"
#include <CGAL/envelope_2.h>
#include <CGAL/Envelope_diagram_1.h>
#include <vector>
template <typename Arr_>
auto EnvelopeFunctions<Arr_>::getXMonotoneCurves(Arrangement* arr)
-> std::vector<X_monotone_curve_2>
{
std::vector<X_monotone_curve_2> curves;
for (auto it = arr->edges_begin(); it != arr->edges_end(); ++it)
curves.push_back(it->curve());
return curves;
}
template <typename Arr_>
void EnvelopeFunctions<Arr_>::lowerEnvelope(
Arrangement* arr, Diagram_1& diagram)
{
auto curves = getXMonotoneCurves(arr);
CGAL::lower_envelope_x_monotone_2(
curves.begin(), curves.end(), diagram, *(arr->traits()));
}
template <typename Arr_>
void EnvelopeFunctions<Arr_>::upperEnvelope(
Arrangement* arr, Diagram_1& diagram)
{
auto curves = getXMonotoneCurves(arr);
CGAL::upper_envelope_x_monotone_2(
curves.begin(), curves.end(), diagram, *(arr->traits()));
}
ARRANGEMENT_DEMO_SPECIALIZE_ARR(EnvelopeFunctions)
| 515 |
1,442 | #include <poincare/norm_cdf.h>
#include <poincare/layout_helper.h>
#include <poincare/normal_distribution.h>
#include <poincare/serialization_helper.h>
#include <assert.h>
namespace Poincare {
constexpr Expression::FunctionHelper NormCDF::s_functionHelper;
int NormCDFNode::numberOfChildren() const { return NormCDF::s_functionHelper.numberOfChildren(); }
Layout NormCDFNode::createLayout(Preferences::PrintFloatMode floatDisplayMode, int numberOfSignificantDigits) const {
return LayoutHelper::Prefix(NormCDF(this), floatDisplayMode, numberOfSignificantDigits, NormCDF::s_functionHelper.name());
}
int NormCDFNode::serialize(char * buffer, int bufferSize, Preferences::PrintFloatMode floatDisplayMode, int numberOfSignificantDigits) const {
return SerializationHelper::Prefix(this, buffer, bufferSize, floatDisplayMode, numberOfSignificantDigits, NormCDF::s_functionHelper.name());
}
template<typename T>
Evaluation<T> NormCDFNode::templatedApproximate(ApproximationContext approximationContext) const {
Evaluation<T> aEvaluation = childAtIndex(0)->approximate(T(), approximationContext);
Evaluation<T> muEvaluation = childAtIndex(1)->approximate(T(), approximationContext);
Evaluation<T> sigmaEvaluation = childAtIndex(2)->approximate(T(), approximationContext);
const T a = aEvaluation.toScalar();
const T mu = muEvaluation.toScalar();
const T sigma = sigmaEvaluation.toScalar();
// CumulativeDistributiveFunctionAtAbscissa handles bad mu and var values
return Complex<T>::Builder(NormalDistribution::CumulativeDistributiveFunctionAtAbscissa(a, mu, sigma));
}
}
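// Illustrative check (not part of the original file): for the standard normal
// distribution, evaluating normcdf with a = 1, mu = 0, sigma = 1 calls
// NormalDistribution::CumulativeDistributiveFunctionAtAbscissa(1, 0, 1) and
// yields approximately 0.8413.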
| 498 |
8,027 | <filename>src/com/facebook/buck/parser/detector/TargetConfigurationDetector.java
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.parser.detector;
import com.facebook.buck.core.model.TargetConfiguration;
import com.facebook.buck.core.model.targetgraph.raw.UnconfiguredTargetNode;
import com.facebook.buck.util.types.Pair;
import com.google.common.collect.ImmutableList;
import java.util.Optional;
/** An utility to detect target configuration when it is not specified otherwise. */
public class TargetConfigurationDetector {
/** A matcher part of the rule */
interface Matcher {
boolean matches(UnconfiguredTargetNode node);
}
private final ImmutableList<Pair<Matcher, TargetConfiguration>> matchers;
TargetConfigurationDetector(ImmutableList<Pair<Matcher, TargetConfiguration>> matchers) {
this.matchers = matchers;
}
/** Find first matching configuration, or return empty if no rules match the target. */
public Optional<TargetConfiguration> detectTargetConfiguration(
UnconfiguredTargetNode unconfiguredBuildTarget) {
for (Pair<Matcher, TargetConfiguration> pair : matchers) {
if (pair.getFirst().matches(unconfiguredBuildTarget)) {
return Optional.of(pair.getSecond());
}
}
return Optional.empty();
}
/** Matcher implementation which matches the package prefix */
static class SpecMatcher implements Matcher {
static final String TYPE = "target";
private final SimplePackageSpec simplePackageSpec;
public SpecMatcher(SimplePackageSpec simplePackageSpec) {
this.simplePackageSpec = simplePackageSpec;
}
@Override
public boolean matches(UnconfiguredTargetNode node) {
return simplePackageSpec.matches(node.getBuildTarget());
}
@Override
public String toString() {
return simplePackageSpec.toString();
}
}
}
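// Illustrative usage sketch (not part of the original file; the detector and
// node variables are hypothetical, and a detector is constructed elsewhere
// with a list of matcher/configuration pairs):
//
//   Optional<TargetConfiguration> configuration =
//       detector.detectTargetConfiguration(unconfiguredTargetNode);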
| 699 |
475 | public class TVChangeCommand implements AbstractCommand
{
private Television tv;
public TVChangeCommand()
{
tv = new Television();
}
public void execute()
{
tv.changeChannel();
}
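// Illustrative usage sketch (not part of the original file): a caller treats
// this concrete command through the AbstractCommand interface, and execute()
// delegates to Television.changeChannel().
//
//   AbstractCommand command = new TVChangeCommand();
//   command.execute();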
} | 59 |
348 | <gh_stars>100-1000
{"nom":"Maulan","circ":"1ère circonscription","dpt":"Meuse","inscrits":94,"abs":42,"votants":52,"blancs":2,"nuls":0,"exp":50,"res":[{"nuance":"UDI","nom":"M. <NAME>","voix":27},{"nuance":"REM","nom":"<NAME>","voix":23}]} | 99 |
485 | <reponame>leileixiao/Macropodus
# !/usr/bin/python
# -*- coding: utf-8 -*-
# @time : 2020/1/16 22:34
# @author : Mo
# @function: dump of keras, error, no use.
from tensorflow.python.keras.models import save_model, load_model, Model
import tempfile
import types
def make_keras_picklable():
def __getstate__(self):
model_str = ""
with tempfile.NamedTemporaryFile(suffix='.hdf5', delete=True) as fd:
save_model(self, fd.name, overwrite=True)
model_str = fd.read()
d = {'model_str': model_str}
return d
def __setstate__(self, state):
with tempfile.NamedTemporaryFile(suffix='.hdf5', delete=True) as fd:
fd.write(state['model_str'])
fd.flush()
model = load_model(fd.name)
self.__dict__ = model.__dict__
cls = Model
cls.__getstate__ = __getstate__
cls.__setstate__ = __setstate__
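# Illustrative usage sketch (not part of the original file; "model" stands for
# any Keras Model instance and is hypothetical). After calling
# make_keras_picklable(), models round-trip through pickle because the patched
# __getstate__/__setstate__ serialize them via a temporary HDF5 file:
#
#   import pickle
#   make_keras_picklable()
#   restored = pickle.loads(pickle.dumps(model))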
| 424 |
1,355 | // Copyright (c) 2018 <NAME>
//
// I am making my contributions/submissions to this project solely in my
// personal capacity and am not conveying any rights to any intellectual
// property of any third parties.
#ifndef INCLUDE_JET_POINT_SIMPLE_LIST_SEARCHER3_H_
#define INCLUDE_JET_POINT_SIMPLE_LIST_SEARCHER3_H_
#include <jet/point_neighbor_searcher3.h>
#include <vector>
namespace jet {
//!
//! \brief Simple ad-hoc 3-D point searcher.
//!
//! This class implements 3-D point searcher simply by looking up every point in
//! the list. Thus, this class is not ideal for searches involing large number
//! of points, but only for small set of items.
//!
class PointSimpleListSearcher3 final : public PointNeighborSearcher3 {
public:
JET_NEIGHBOR_SEARCHER3_TYPE_NAME(PointSimpleListSearcher3)
class Builder;
//! Default constructor.
PointSimpleListSearcher3();
//! Copy constructor.
PointSimpleListSearcher3(const PointSimpleListSearcher3& other);
//!
//! \brief Builds internal structure for given points list.
//!
//! For this class, this function simply copies the given point list to the
//! internal list.
//!
//! \param[in] points The points to search.
//!
void build(const ConstArrayAccessor1<Vector3D>& points) override;
//!
//! Invokes the callback function for each nearby point around the origin
//! within given radius.
//!
//! \param[in] origin The origin position.
//! \param[in] radius The search radius.
//! \param[in] callback The callback function.
//!
void forEachNearbyPoint(
const Vector3D& origin,
double radius,
const ForEachNearbyPointFunc& callback) const override;
//!
//! Returns true if there are any nearby points for given origin within
//! radius.
//!
//! \param[in] origin The origin.
//! \param[in] radius The radius.
//!
//! \return True if has nearby point, false otherwise.
//!
bool hasNearbyPoint(
const Vector3D& origin, double radius) const override;
//!
//! \brief Creates a new instance of the object with same properties
//! than original.
//!
//! \return Copy of this object.
//!
PointNeighborSearcher3Ptr clone() const override;
//! Assignment operator.
PointSimpleListSearcher3& operator=(const PointSimpleListSearcher3& other);
//! Copy from the other instance.
void set(const PointSimpleListSearcher3& other);
//! Serializes the neighbor searcher into the buffer.
void serialize(std::vector<uint8_t>* buffer) const override;
//! Deserializes the neighbor searcher from the buffer.
void deserialize(const std::vector<uint8_t>& buffer) override;
//! Returns builder fox PointSimpleListSearcher3.
static Builder builder();
private:
std::vector<Vector3D> _points;
};
//! Shared pointer for the PointSimpleListSearcher3 type.
typedef std::shared_ptr<PointSimpleListSearcher3> PointSimpleListSearcher3Ptr;
//!
//! \brief Front-end to create PointSimpleListSearcher3 objects step by step.
//!
class PointSimpleListSearcher3::Builder final
: public PointNeighborSearcherBuilder3 {
public:
//! Builds PointSimpleListSearcher3 instance.
PointSimpleListSearcher3 build() const;
//! Builds shared pointer of PointSimpleListSearcher3 instance.
PointSimpleListSearcher3Ptr makeShared() const;
//! Returns shared pointer of PointNeighborSearcher3 type.
PointNeighborSearcher3Ptr buildPointNeighborSearcher() const override;
};
} // namespace jet
#endif // INCLUDE_JET_POINT_SIMPLE_LIST_SEARCHER3_H_
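// Illustrative usage sketch (not part of the original header; the points array
// is hypothetical and the callback signature is an assumption based on the
// base class ForEachNearbyPointFunc type):
//
//   jet::PointSimpleListSearcher3 searcher =
//       jet::PointSimpleListSearcher3::builder().build();
//   searcher.build(points.constAccessor());  // points: jet::Array1<Vector3D>
//   searcher.forEachNearbyPoint(
//       jet::Vector3D(0, 0, 0), 1.0,
//       [](size_t index, const jet::Vector3D& point) { /* nearby point */ });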
| 1,257 |
410 | <filename>Giveme5W1H/examples/datasets/news_cluster/data_raw/0cd6c1c92cf1a610cf97c97ddf9bf97fd44b9c6c177dbf6014c3bca2.json
{
"authors": [
"<NAME>"
],
"date_download": "2018-01-08T00:17:24",
"date_modify": null,
"date_publish": "2017-12-27T00:00:00",
"description": "American Airlines apologizes after G League players accused of stealing blankets, thrown off flight",
"filename": "http%3A%2F%2Fabcnews.go.com%2FSports%2Famerican-airlines-apologizes-league-players-accused-stealing-blankets%2Fstory%3Fid%3D52010243.json",
"image_url": "http://media.video-cdn.espn.com/motion/2018/0107/dm_180107_SCHEFTER_ON_ALEX_SMITH/dm_180107_SCHEFTER_ON_ALEX_SMITH_default.jpg",
"language": "en",
"localpath": null,
"source_domain": "abcnews.go.com",
"text": "American Airlines apologized to two NBA G League players who were kicked off a plane in Dallas after a flight attendant accused them of stealing blankets.\nAirline spokesman <NAME> said Tuesday that Memphis Hustle guard Marquis Teague and forward <NAME> boarded the flight bound for Sioux Falls, South Dakota, Sunday at Dallas-Fort Worth International Airport.\nThe flight was operated by Envoy Air, a subsidiary of American Airlines Group.\nTwo first-class passengers gave the players their blankets as they headed to their seats in coach. But a flight attendant accused them of theft and forced them off the plane.\nFreed said an airline manager apologized to the players and that they later flew first class to Sioux Falls.\nTeague, Burrell and the flight attendant are black. Hustle coach <NAME> and D<NAME>, one of his assistants, took to Twitter to voice their frustration after the incident.\nFreed told The Undefeated that the airline would be reaching out to Teague and Burrell and is continuing to review the matter.\nAmerican Airlines chief executive <NAME> told employees last month that the company will implement implicit-bias training.\nThe NAACP issued a \"travel advisory'' in October warning African-Americans they could face discrimination when flying on American. The alert followed several high-profile incidents including one involving an organizer of the Women's March who was booted from a flight after a dispute over her seat.\nAmerican pledged to hire an outside firm to review its diversity in hiring and promotion, train all 120,000 employees to counteract implicit bias, create a special team to review passengers' discrimination complaints, and improve resolution of employee complaints about bias.\nTeague, the brother of <NAME> of the Minnesota Timberwolves, was drafted by the Bulls in 2012, after playing one season for Kentucky. Burrell played for the University of Memphis for two seasons before joining the G League.\nThe Memphis Hustle is the G League affiliate of the Memphis Grizzlies.\nThe Associated Press contributed to this report.",
"title": "American Airlines apologizes after G League players accused of stealing blankets, thrown off flight",
"title_page": null,
"title_rss": null,
"url": "http://abcnews.go.com/Sports/american-airlines-apologizes-league-players-accused-stealing-blankets/story?id=52010243",
"dId": "0cd6c1c92cf1a610cf97c97ddf9bf97fd44b9c6c177dbf6014c3bca2",
"newsCluster": {
"CategoryId": 5,
"Category": "sports",
"TopicId": 1,
"Topic": "unspecific",
"EventId": 14,
"Event": "american_airlines_apologizes_after_g_league_players_accused_of_stealing_blankets_thrown_off_flight"
}
} | 992 |
1,093 | /*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.integration.gateway;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import org.junit.Test;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.handler.AbstractReplyProducingMessageHandler;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.messaging.Message;
/**
* @author <NAME>
* @author <NAME>
*/
public class NestedGatewayTests {
@Test
public void nestedWithinHandler() {
DirectChannel innerChannel = new DirectChannel();
DirectChannel outerChannel = new DirectChannel();
innerChannel.subscribe(new AbstractReplyProducingMessageHandler() {
@Override
protected Object handleRequestMessage(Message<?> requestMessage) {
return requestMessage.getPayload() + "-reply";
}
});
final MessagingGatewaySupport innerGateway = new MessagingGatewaySupport() { };
innerGateway.setRequestChannel(innerChannel);
innerGateway.setBeanFactory(mock(BeanFactory.class));
innerGateway.afterPropertiesSet();
outerChannel.subscribe(new AbstractReplyProducingMessageHandler() {
@Override
protected Object handleRequestMessage(Message<?> requestMessage) {
return innerGateway.sendAndReceiveMessage(
"pre-" + requestMessage.getPayload()).getPayload() + "-post";
}
});
MessagingGatewaySupport outerGateway = new MessagingGatewaySupport() { };
outerGateway.setRequestChannel(outerChannel);
outerGateway.setBeanFactory(mock(BeanFactory.class));
outerGateway.afterPropertiesSet();
Message<?> reply = outerGateway.sendAndReceiveMessage("test");
assertThat(reply.getPayload()).isEqualTo("pre-test-reply-post");
}
@Test
public void replyChannelRetained() {
DirectChannel requestChannel = new DirectChannel();
DirectChannel replyChannel = new DirectChannel();
requestChannel.subscribe(new AbstractReplyProducingMessageHandler() {
@Override
protected Object handleRequestMessage(Message<?> requestMessage) {
return requestMessage.getPayload() + "-reply";
}
});
MessagingGatewaySupport gateway = new MessagingGatewaySupport() { };
gateway.setRequestChannel(requestChannel);
gateway.setBeanFactory(mock(BeanFactory.class));
gateway.afterPropertiesSet();
Message<?> message = MessageBuilder.withPayload("test")
.setReplyChannel(replyChannel).build();
Message<?> reply = gateway.sendAndReceiveMessage(message);
assertThat(reply.getPayload()).isEqualTo("test-reply");
assertThat(reply.getHeaders().getReplyChannel()).isEqualTo(replyChannel);
}
@Test
public void errorChannelRetained() {
DirectChannel requestChannel = new DirectChannel();
DirectChannel errorChannel = new DirectChannel();
requestChannel.subscribe(new AbstractReplyProducingMessageHandler() {
@Override
protected Object handleRequestMessage(Message<?> requestMessage) {
return requestMessage.getPayload() + "-reply";
}
});
MessagingGatewaySupport gateway = new MessagingGatewaySupport() { };
gateway.setRequestChannel(requestChannel);
gateway.setBeanFactory(mock(BeanFactory.class));
gateway.afterPropertiesSet();
Message<?> message = MessageBuilder.withPayload("test")
.setErrorChannel(errorChannel).build();
Message<?> reply = gateway.sendAndReceiveMessage(message);
assertThat(reply.getPayload()).isEqualTo("test-reply");
assertThat(reply.getHeaders().getErrorChannel()).isEqualTo(errorChannel);
}
}
| 1,248 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Rivière-Saas-et-Gourby","circ":"2ème circonscription","dpt":"Landes","inscrits":913,"abs":444,"votants":469,"blancs":33,"nuls":16,"exp":420,"res":[{"nuance":"REM","nom":"<NAME>","voix":255},{"nuance":"FI","nom":"<NAME>","voix":165}]} | 118 |
473 | import numpy as np
from allrank.click_models.cascade_models import BaseCascadeModel, DiverseClicksModel
from tests.click_models import click
base_click_model = BaseCascadeModel(0.0, 1)
click_model = DiverseClicksModel(base_click_model)
def test_diverse_clicks_model_simple():
assert click(click_model, np.array([[0, 1]]), [1]) == [1]
assert click(click_model, np.array([[0, 1], [0, 1]]), [1, 1]) == [1, 0]
assert click(click_model, np.array([[0, 1], [0, 1], [1, 1]]), [1, 1, 1]) == [1, 0, 0]
assert click(click_model, np.array([[0, 1], [0, 1], [2, 2], [1, 1]]), [1, 1, 1, 1]) == [1, 0, 1, 0]
| 257 |
454 | <gh_stars>100-1000
package io.vertx.tp.plugin.excel.tpl;
import io.vertx.codegen.annotations.Fluent;
import io.vertx.tp.plugin.excel.ExTpl;
import io.vertx.up.commune.element.TypeAtom;
import io.vertx.up.eon.Values;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import java.util.Objects;
/**
* @author <a href="http://www.origin-x.cn">Lang</a>
* A style plug-in that adds Style-related information directly to the sheet
*/
public class BlueTpl implements ExTpl {
private transient BlueDye dye;
@Override
@Fluent
public ExTpl bind(final Workbook workbook) {
this.dye = BlueDye.get(workbook);
return this;
}
@Override
public void applyStyle(final Sheet sheet, final TypeAtom TypeAtom) {
/*
* Read the visible area
*/
if (Objects.nonNull(sheet)) {
/*
* Process the first row
*/
final Row first = sheet.getRow(Values.IDX);
this.applyFirst(first);
final int dataStart;
if (TypeAtom.isComplex()) {
/*
* Process the title rows
*/
final Row cnHeader = sheet.getRow(Values.ONE);
final Row enHeader = sheet.getRow(Values.THREE);
this.applyHeader(cnHeader, enHeader);
/*
* Process the second set of title rows
*/
final Row cnHeader1 = sheet.getRow(Values.TWO);
final Row enHeader1 = sheet.getRow(Values.FOUR);
this.applyHeader(cnHeader1, enHeader1);
dataStart = 5;
} else {
/*
* Process the title rows
*/
final Row cnHeader = sheet.getRow(Values.ONE);
final Row enHeader = sheet.getRow(Values.TWO);
this.applyHeader(cnHeader, enHeader);
dataStart = 3;
}
/*
* Process the data rows
*/
final int num = sheet.getPhysicalNumberOfRows();
for (int idx = dataStart; idx < num; idx++) {
final Row data = sheet.getRow(idx);
this.applyData(data, TypeAtom);
}
}
}
private void applyFirst(final Row row) {
// {Table}
final Cell table = row.getCell(Values.IDX);
this.dye.onTable(table);
// identifier
final Cell identifier = row.getCell(Values.ONE);
this.dye.onModel(identifier);
// Empty
final Cell empty = row.getCell(Values.TWO);
this.dye.onEmpty(empty);
}
private void applyHeader(final Row cnHeader, final Row enHeader) {
// Header for CN text
final int cells = cnHeader.getPhysicalNumberOfCells();
for (int idx = 0; idx < cells; idx++) {
final Cell cell = cnHeader.getCell(idx);
this.dye.onCnHeader(cell);
}
// Header for En text
final int enCells = enHeader.getPhysicalNumberOfCells();
for (int idx = 0; idx < enCells; idx++) {
final Cell cell = enHeader.getCell(idx);
this.dye.onEnHeader(cell);
}
}
private void applyData(final Row dataRow, final TypeAtom TypeAtom) {
final int enCells = dataRow.getPhysicalNumberOfCells();
for (int idx = 0; idx < enCells; idx++) {
final Cell cell = dataRow.getCell(idx);
final Class<?> type = TypeAtom.type(idx);
this.dye.onData(cell, type);
}
}
}
| 1,875 |
607 | package com.sandwich.koan.path.xmltransformation;
import com.sandwich.koan.constant.KoanConstants;
public class XmlVariableDictionary {
public static final String METHOD_NAME = wrapParam("method_name");
public static final String FILE_NAME = wrapParam("file_name");
public static final String FILE_PATH = wrapParam("file_path");
private static String wrapParam(String param) {
return new StringBuilder(KoanConstants.XML_PARAMETER_START)
.append(param).append(KoanConstants.XML_PARAMETER_END).toString();
}
}
| 175 |
621 | #!/usr/bin/env python3
import argparse
import gym
import universe
import numpy as np
import torch
import torch.nn.functional as F
from lib import wob_vnc, model_vnc
ENV_NAME = "wob.mini.ClickDialog-v0"
REMOTE_ADDR = 'vnc://localhost:5900+15900'
def step_env(env, action):
idle_count = 0
while True:
obs, reward, is_done, info = env.step([action])
if obs[0] is None:
idle_count += 1
continue
break
return obs[0], reward[0], is_done[0], info, idle_count
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--model", help="Model file to load")
parser.add_argument("--save", help="Enables screenshots and gives an images prefix")
parser.add_argument("--count", type=int, default=1, help="Count of episodes to play, default=1")
parser.add_argument("--env", default=ENV_NAME, help="Environment name to solve, default=" + ENV_NAME)
parser.add_argument("--verbose", default=False, action='store_true', help="Display every step")
args = parser.parse_args()
env_name = args.env
if not env_name.startswith('wob.mini.'):
env_name = "wob.mini." + env_name
env = gym.make(env_name)
env = universe.wrappers.experimental.SoftmaxClickMouse(env)
if args.save is not None:
env = wob_vnc.MiniWoBPeeker(env, args.save)
env = wob_vnc.MiniWoBCropper(env)
wob_vnc.configure(env, REMOTE_ADDR, fps=5)
net = model_vnc.Model(input_shape=wob_vnc.WOB_SHAPE, n_actions=env.action_space.n)
if args.model:
net.load_state_dict(torch.load(args.model))
env.reset()
steps_count = 0
reward_sum = 0
for round_idx in range(args.count):
action = env.action_space.sample()
step_idx = 0
while True:
obs, reward, done, info, idle_count = step_env(env, action)
if args.verbose:
print(step_idx, reward, done, idle_count, info)
obs_v = torch.tensor([obs])
logits_v = net(obs_v)[0]
policy = F.softmax(logits_v, dim=1).data.numpy()[0]
action = np.random.choice(len(policy), p=policy)
step_idx += 1
reward_sum += reward
steps_count += 1
if done or reward != 0:
print("Round %d done" % round_idx)
break
print("Done %d rounds, mean steps %.2f, mean reward %.3f" % (
args.count, steps_count / args.count, reward_sum / args.count
))
pass
| 1,110 |
1,144 | <reponame>dram/metasfresh<filename>backend/de.metas.manufacturing/src/main/java/de/metas/manufacturing/generatedcomponents/PP_ComponentGenerator.java
/*
* #%L
* de.metas.manufacturing
* %%
* Copyright (C) 2020 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
package de.metas.manufacturing.generatedcomponents;
import de.metas.javaclasses.JavaClassId;
import lombok.NonNull;
import org.adempiere.ad.modelvalidator.annotations.Interceptor;
import org.adempiere.ad.modelvalidator.annotations.ModelChange;
import org.compiere.model.I_PP_ComponentGenerator;
import org.compiere.model.ModelValidator;
import org.springframework.stereotype.Component;
@Interceptor(I_PP_ComponentGenerator.class)
@Component
public class PP_ComponentGenerator
{
private final ManufacturingComponentGeneratorService manufacturingComponentGeneratorService;
public PP_ComponentGenerator(
@NonNull final ManufacturingComponentGeneratorService manufacturingComponentGeneratorService)
{
this.manufacturingComponentGeneratorService = manufacturingComponentGeneratorService;
}
@ModelChange(timings = { ModelValidator.TYPE_AFTER_NEW })
void generateDefaultParams(final I_PP_ComponentGenerator po)
{
final ComponentGeneratorId generatorId = ComponentGeneratorId.ofRepoId(po.getPP_ComponentGenerator_ID());
final JavaClassId generatorClassId = JavaClassId.ofRepoId(po.getAD_JavaClass_ID());
manufacturingComponentGeneratorService.createDefaultParameters(generatorId, generatorClassId);
}
}
| 612 |
654 | /*
* Copyright (C) 2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lucasr.layoutsamples.widget;
import android.content.res.Resources;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.View;
import android.view.View.MeasureSpec;
import android.view.ViewGroup.MarginLayoutParams;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import org.lucasr.layoutsamples.adapter.Tweet;
import org.lucasr.layoutsamples.adapter.TweetPresenter;
import org.lucasr.layoutsamples.app.R;
import org.lucasr.layoutsamples.canvas.UIElementGroup;
import org.lucasr.layoutsamples.canvas.ImageElement;
import org.lucasr.layoutsamples.canvas.TextElement;
import org.lucasr.layoutsamples.canvas.UIElement;
import org.lucasr.layoutsamples.canvas.UIElementHost;
import org.lucasr.layoutsamples.canvas.UIElementInflater;
import org.lucasr.layoutsamples.util.ImageUtils;
import java.util.EnumMap;
import java.util.EnumSet;
public class TweetElement extends UIElementGroup implements TweetPresenter {
private ImageElement mProfileImage;
private TextElement mAuthorText;
private TextElement mMessageText;
private ImageElement mPostImage;
private EnumMap<Action, UIElement> mActionIcons;
private ImageElementTarget mProfileImageTarget;
private ImageElementTarget mPostImageTarget;
public TweetElement(UIElementHost host) {
this(host, null);
}
public TweetElement(UIElementHost host, AttributeSet attrs) {
super(host, attrs);
final Resources res = getResources();
int padding = res.getDimensionPixelOffset(R.dimen.tweet_padding);
setPadding(padding, padding, padding, padding);
UIElementInflater.from(getContext()).inflate(R.layout.tweet_element_view, host, this);
mProfileImage = (ImageElement) findElementById(R.id.profile_image);
mAuthorText = (TextElement) findElementById(R.id.author_text);
mMessageText = (TextElement) findElementById(R.id.message_text);
mPostImage = (ImageElement) findElementById(R.id.post_image);
mProfileImageTarget = new ImageElementTarget(res, mProfileImage);
mPostImageTarget = new ImageElementTarget(res, mPostImage);
mActionIcons = new EnumMap<>(Action.class);
for (Action action : Action.values()) {
final int elementId;
switch (action) {
case REPLY:
elementId = R.id.reply_action;
break;
case RETWEET:
elementId = R.id.retweet_action;
break;
case FAVOURITE:
elementId = R.id.favourite_action;
break;
default:
throw new IllegalArgumentException("Unrecognized tweet action");
}
mActionIcons.put(action, findElementById(elementId));
}
}
private void layoutElement(UIElement element, int left, int top, int width, int height) {
MarginLayoutParams margins = (MarginLayoutParams) element.getLayoutParams();
final int leftWithMargins = left + margins.leftMargin;
final int topWithMargins = top + margins.topMargin;
element.layout(leftWithMargins, topWithMargins,
leftWithMargins + width, topWithMargins + height);
}
private int getWidthWithMargins(UIElement element) {
final MarginLayoutParams lp = (MarginLayoutParams) element.getLayoutParams();
return element.getWidth() + lp.leftMargin + lp.rightMargin;
}
private int getHeightWithMargins(UIElement element) {
final MarginLayoutParams lp = (MarginLayoutParams) element.getLayoutParams();
return element.getMeasuredHeight() + lp.topMargin + lp.bottomMargin;
}
private int getMeasuredWidthWithMargins(UIElement element) {
final MarginLayoutParams lp = (MarginLayoutParams) element.getLayoutParams();
return element.getMeasuredWidth() + lp.leftMargin + lp.rightMargin;
}
private int getMeasuredHeightWithMargins(UIElement element) {
final MarginLayoutParams lp = (MarginLayoutParams) element.getLayoutParams();
return element.getMeasuredHeight() + lp.topMargin + lp.bottomMargin;
}
private void cancelImageRequest(Target target) {
if (!isAttachedToHost() || target == null) {
return;
}
Picasso.with(getContext()).cancelRequest(target);
}
@Override
public boolean swapHost(UIElementHost host) {
if (host == null) {
cancelImageRequest(mProfileImageTarget);
cancelImageRequest(mPostImageTarget);
}
return super.swapHost(host);
}
@Override
public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
final int widthSize = MeasureSpec.getSize(widthMeasureSpec);
int widthUsed = 0;
int heightUsed = 0;
measureElementWithMargins(mProfileImage,
widthMeasureSpec, widthUsed,
heightMeasureSpec, heightUsed);
widthUsed += getMeasuredWidthWithMargins(mProfileImage);
measureElementWithMargins(mAuthorText,
widthMeasureSpec, widthUsed,
heightMeasureSpec, heightUsed);
heightUsed += getMeasuredHeightWithMargins(mAuthorText);
measureElementWithMargins(mMessageText,
widthMeasureSpec, widthUsed,
heightMeasureSpec, heightUsed);
heightUsed += getMeasuredHeightWithMargins(mMessageText);
if (mPostImage.getVisibility() != View.GONE) {
measureElementWithMargins(mPostImage,
widthMeasureSpec, widthUsed,
heightMeasureSpec, heightUsed);
heightUsed += getMeasuredHeightWithMargins(mPostImage);
}
int maxIconHeight = 0;
for (Action action : Action.values()) {
final UIElement icon = mActionIcons.get(action);
measureElementWithMargins(icon,
widthMeasureSpec, widthUsed,
heightMeasureSpec, heightUsed);
final int height = getMeasuredHeightWithMargins(icon);
if (height > maxIconHeight) {
maxIconHeight = height;
}
widthUsed += getMeasuredWidthWithMargins(icon);
}
heightUsed += maxIconHeight;
int heightSize = heightUsed + getPaddingTop() + getPaddingBottom();
setMeasuredDimension(widthSize, heightSize);
}
@Override
public void onLayout(int l, int t, int r, int b) {
final int paddingLeft = getPaddingLeft();
final int paddingTop = getPaddingTop();
int currentTop = paddingTop;
layoutElement(mProfileImage, paddingLeft, currentTop,
mProfileImage.getMeasuredWidth(),
mProfileImage.getMeasuredHeight());
final int contentLeft = getWidthWithMargins(mProfileImage) + paddingLeft;
final int contentWidth = r - l - contentLeft - getPaddingRight();
layoutElement(mAuthorText, contentLeft, currentTop,
contentWidth, mAuthorText.getMeasuredHeight());
currentTop += getHeightWithMargins(mAuthorText);
layoutElement(mMessageText, contentLeft, currentTop,
contentWidth, mMessageText.getMeasuredHeight());
currentTop += getHeightWithMargins(mMessageText);
if (mPostImage.getVisibility() != View.GONE) {
layoutElement(mPostImage, contentLeft, currentTop,
contentWidth, mPostImage.getMeasuredHeight());
currentTop += getHeightWithMargins(mPostImage);
}
final int iconsWidth = contentWidth / mActionIcons.size();
int iconsLeft = contentLeft;
for (Action action : Action.values()) {
final UIElement icon = mActionIcons.get(action);
layoutElement(icon, iconsLeft, currentTop,
iconsWidth, icon.getMeasuredHeight());
iconsLeft += iconsWidth;
}
}
public void loadProfileImage(Tweet tweet, EnumSet<UpdateFlags> flags) {
ImageUtils.loadImage(getContext(), mProfileImage, mProfileImageTarget,
tweet.getProfileImageUrl(), flags);
}
public void loadPostImage(Tweet tweet, EnumSet<UpdateFlags> flags) {
ImageUtils.loadImage(getContext(), mPostImage, mPostImageTarget,
tweet.getPostImageUrl(), flags);
}
@Override
public void update(Tweet tweet, EnumSet<UpdateFlags> flags) {
mAuthorText.setText(tweet.getAuthorName());
mMessageText.setText(tweet.getMessage());
loadProfileImage(tweet, flags);
final boolean hasPostImage = !TextUtils.isEmpty(tweet.getPostImageUrl());
mPostImage.setVisibility(hasPostImage ? View.VISIBLE : View.GONE);
if (hasPostImage) {
loadPostImage(tweet, flags);
}
}
}
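// Illustrative usage sketch (not part of the original file; the host and tweet
// variables are hypothetical): a UIElementHost-backed view creates the element
// once and then pushes tweet data into it.
//
//   TweetElement element = new TweetElement(host);
//   element.update(tweet, EnumSet.noneOf(TweetPresenter.UpdateFlags.class));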
| 4,017 |