#!/usr/bin/python
# build-swift-cmake.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import argparse
import os
import subprocess
import sys
parser = argparse.ArgumentParser()
parser.add_argument('--lldb-extra-cmake-args', action='store',
help='extra arguments to be passed to lldb cmake')
parser.add_argument('--lldb-extra-xcodebuild-args', action='store',
help='extra arguments to be passed to lldb xcodebuild')
parser.add_argument('--update', action='store_true')
parser.add_argument('--test', action='store_true')
parser.add_argument('--curses', action='store_true',
help='test with curses test runners where available')
parser.add_argument('--release', action='store_true',
help='build in release mode')
parser.add_argument('--no-debugserver', action='store_true',
help='build without debugserver')
parser.add_argument(
'--no-system-debugserver',
action='store_false',
dest='use_system_debugserver',
help='do not copy in the system debugserver (default is to copy it in)')
parser.add_argument('--package', action='store_true',
help='build for packaging')
parser.add_argument('--foundation', action='store_true',
help='build swift foundation')
args = parser.parse_args()
def checkout_git(dir, repo, branch):
if not os.path.isdir(dir):
subprocess.call(["git", "clone", "-b", branch, repo, dir])
def update_git(dir):
if os.path.isdir(dir):
subprocess.call(["git", "fetch", "--all"], cwd=dir)
subprocess.call(["git", "merge", "--ff-only", "@{upstream}"], cwd=dir)
def use_gold_linker():
"""@return True if the gold linker should be used; False otherwise."""
return os.path.isfile("/usr/bin/ld.gold")
uname = subprocess.check_output(["uname", "-s"]).decode("utf-8").rstrip()
checkout_git(
"llvm",
"ssh://[email protected]/apple/swift-llvm.git",
"stable")
checkout_git(
"clang",
"ssh://[email protected]/apple/swift-clang.git",
"stable")
checkout_git("swift", "ssh://[email protected]/apple/swift.git", "master")
checkout_git("cmark", "ssh://[email protected]/apple/swift-cmark.git", "master")
checkout_git("ninja", "https://github.com/ninja-build/ninja.git", "master")
checkout_git(
"lldb",
"ssh://[email protected]/apple/swift-lldb.git",
"master")
if args.package:
checkout_git(
"llbuild",
"ssh://[email protected]/apple/swift-llbuild.git",
"master")
checkout_git(
"swiftpm",
"ssh://[email protected]/apple/swift-package-manager.git",
"master")
checkout_git(
"swift-corelibs-foundation",
"ssh://[email protected]/apple/swift-corelibs-foundation.git",
"master")
checkout_git(
"swift-corelibs-xctest",
"ssh://[email protected]/apple/swift-corelibs-xctest.git",
"master")
checkout_git(
"swift-integration-tests",
"ssh://[email protected]/apple/swift-integration-tests.git",
"master")
elif args.foundation:
checkout_git(
"swift-corelibs-foundation",
"ssh://[email protected]/apple/swift-corelibs-foundation.git",
"master")
if args.update:
update_git("llvm")
update_git("clang")
update_git("swift")
update_git("cmark")
update_git("ninja")
update_git("lldb")
if args.package:
update_git("llbuild")
update_git("swiftpm")
update_git("swift-corelibs-foundation")
update_git("swift-corelibs-xctest")
update_git("swift-integration-tests")
elif args.foundation:
update_git("swift-corelibs-foundation")
if not os.path.exists("install"):
os.makedirs("install")
package_darwin = args.package and (uname == "Darwin")
build_script_arguments = []
build_script_impl_arguments = []
if args.lldb_extra_xcodebuild_args:
build_script_impl_arguments.append(
"--lldb-extra-xcodebuild-args={}".format(
args.lldb_extra_xcodebuild_args))
if package_darwin:
# packaging preset
build_script_arguments += ["--preset=buildbot_osx_package"]
if not os.path.exists("symroot"):
os.makedirs("symroot")
if not os.path.exists("package"):
os.makedirs("package")
build_script_arguments += [
"install_destdir=" + os.getcwd() + "/install",
"installable_package=" + os.getcwd() + "/package/package.tar.gz",
"install_toolchain_dir=/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain",
"install_symroot=" + os.getcwd() + "/symroot",
"symbols_package=" + os.getcwd() + "/package/symbols.tar.gz"]
elif args.package:
print("--package is unsupported on non-OS X platforms")
else:
if args.release:
build_script_arguments += ["--release", "--assertions", "--lldb"]
else:
build_script_arguments += ["--debug-swift",
"--debug-lldb", "--skip-build-benchmarks",
"--extra-cmake-options=-DCMAKE_CXX_FLAGS=-fno-limit-debug-info"]
if args.foundation:
build_script_arguments += ["--foundation"]
build_script_impl_arguments += ["--build-swift-static-stdlib=1"]
if args.lldb_extra_cmake_args and len(args.lldb_extra_cmake_args) > 0:
# Add the quoted version of the command line arg.
build_script_impl_arguments.append(
"--lldb-extra-cmake-args={}".format(args.lldb_extra_cmake_args))
if uname != "Darwin":
# we don't build with Xcode, so we can actually install
# build_script_impl_arguments += [ "--install-swift", "--install-lldb", "--install-prefix", "/usr", "--install-destdir", os.getcwd() + "/install", "--swift-install-components=compiler;clang-builtin-headers;stdlib;stdlib-experimental;sdk-overlay;editor-integration;tools;testsuite-tools;dev" ]
build_script_impl_arguments += [
"--install-swift",
"--install-lldb",
"--install-destdir",
os.getcwd() +
"/install",
"--swift-install-components=compiler;clang-builtin-headers;stdlib;stdlib-experimental;sdk-overlay;editor-integration;tools;testsuite-tools;dev"]
# build_script_impl_arguments += ["--reconfigure"]
# If we're on Linux, and if the /usr/bin/ld.gold exists, indicate we
# want to use the gold linker.
if use_gold_linker():
build_script_impl_arguments.append("--use-gold-linker")
if args.test:
build_script_arguments += ["--test"]
build_script_impl_arguments += ["--skip-test-cmark",
"--skip-test-swift"]
if args.curses:
build_script_impl_arguments += ["--lldb-test-with-curses"]
if args.no_debugserver:
build_script_impl_arguments += ['--lldb-no-debugserver']
elif args.use_system_debugserver:
build_script_impl_arguments += ['--lldb-use-system-debugserver']
args = ["./swift/utils/build-script"] + \
build_script_arguments + ["--"] + build_script_impl_arguments
print(" ".join(args))
return_code = subprocess.call(args)
sys.exit(return_code)
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from dataclasses import dataclass, field
from fairseq import file_utils
from fairseq.data.encoders import register_bpe
from fairseq.dataclass import FairseqDataclass
@dataclass
class SentencepieceConfig(FairseqDataclass):
sentencepiece_model: str = field(
default="???", metadata={"help": "path to sentencepiece model"}
)
@register_bpe("sentencepiece", dataclass=SentencepieceConfig)
class SentencepieceBPE(object):
def __init__(self, cfg):
sentencepiece_model = file_utils.cached_path(cfg.sentencepiece_model)
try:
import sentencepiece as spm
self.sp = spm.SentencePieceProcessor()
self.sp.Load(sentencepiece_model)
except ImportError:
raise ImportError(
"Please install sentencepiece with: pip install sentencepiece"
)
def encode(self, x: str) -> str:
return " ".join(self.sp.EncodeAsPieces(x))
def decode(self, x: str) -> str:
return x.replace(" ", "").replace("\u2581", " ").strip()
def is_beginning_of_word(self, x: str) -> bool:
if x in ["<unk>", "<s>", "</s>", "<pad>"]:
# special elements are always considered beginnings
# HACK: this logic is already present in fairseq/tasks/masked_lm.py
# but these special tokens are also contained in the sentencepiece
# vocabulary which causes duplicate special tokens. This hack makes
# sure that they are all taken into account.
return True
return x.startswith("\u2581")
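# Illustrative usage sketch (not part of the original file); the model path
# below is an assumption and a real SentencePiece model must exist there:
#
#     cfg = SentencepieceConfig(sentencepiece_model="/path/to/spm.model")
#     bpe = SentencepieceBPE(cfg)
#     pieces = bpe.encode("Hello world")   # space-joined sentencepiece pieces
#     assert bpe.decode(pieces) == "Hello world"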
// examples/callbacks/callbacks.h
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CLIF_EXAMPLES_CALLBACKS_CALLBACKS_H_
#define CLIF_EXAMPLES_CALLBACKS_CALLBACKS_H_
#include <functional>
namespace clif_example {
namespace callbacks {
class Data {
public:
int d;
};
class Functor {
public:
int operator()(const Data& d, int a) {
return d.d + a;
}
};
inline int Get(Data d, std::function<int(Data)> func) {
return func(d);
}
inline void Set(std::function<void(Data*)> func, Data* d) {
func(d);
}
inline std::function<int(const Data&, int)> GetCallback() {
return Functor();
}
} // namespace callbacks
} // namespace clif_example
#endif // CLIF_EXAMPLES_CALLBACKS_CALLBACKS_H_
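// Illustrative usage sketch (not part of the original header), calling the
// helpers directly from C++ with a lambda as the callback:
//
//   using clif_example::callbacks::Data;
//   Data d{5};
//   int a = clif_example::callbacks::Get(d, [](Data x) { return x.d * 2; });  // 10
//   auto cb = clif_example::callbacks::GetCallback();
//   int b = cb(d, 3);                                                         // 8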
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.javascript;
import org.auraframework.def.BaseComponentDef;
import org.auraframework.system.AuraContext.Mode;
/**
 * Allows JavaScript to be inserted into the pre-init block of inline.js.
*/
public interface PreInitJavascript {
/**
* Whether to insert javascript based on current def and mode
*
* @param currentDef current application/component definition
* @param mode Aura mode
* @return whether to insert javascript based on current def and mode
*/
boolean shouldInsert(BaseComponentDef currentDef, Mode mode);
/**
* Returns javascript code based on current def and mode
*
* @param currentDef current application/component definition
* @param mode Aura mode
* @return javascript code based on current def and mode
*/
String getJavascriptCode(BaseComponentDef currentDef, Mode mode);
}
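/*
 * Illustrative implementation sketch (not part of the original file); the
 * class name and the injected script are assumptions:
 *
 *   public class DevLoggingPreInitJavascript implements PreInitJavascript {
 *       @Override
 *       public boolean shouldInsert(BaseComponentDef currentDef, Mode mode) {
 *           return mode == Mode.DEV;
 *       }
 *
 *       @Override
 *       public String getJavascriptCode(BaseComponentDef currentDef, Mode mode) {
 *           return "console.log('pre-init hook');";
 *       }
 *   }
 */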
{"nom":"Desges","circ":"2ème circonscription","dpt":"Haute-Loire","inscrits":64,"abs":17,"votants":47,"blancs":4,"nuls":1,"exp":42,"res":[{"nuance":"LR","nom":"<NAME>","voix":34},{"nuance":"REM","nom":"<NAME>","voix":8}]} | 117 |
{
"name": "console-importer",
"private": true,
"scripts": {
"dev": "norm",
"build": "norm build",
"icon": "rm app/images/icon.png; svg2png app/images/icon.svg -o app/images/icon.png",
"lint": "norm lint"
},
"devDependencies": {
"tiza": "^2.0.0"
}
}
// Copyright 2017 The Lynx Authors. All rights reserved.
#if USING_V8
#ifndef RUNTIME_V8_HELPER
#define RUNTIME_V8_HELPER
#include <string>
#include "v8.h"
#include "runtime/base/lynx_value.h"
namespace jscore {
class V8Helper {
public:
static base::ScopedPtr<LynxValue> ConvertToLynxValue(v8::Local<v8::Context> context,
v8::Local<v8::Value> value);
static base::ScopedPtr<LynxArray> ConvertToLynxArray(v8::Local<v8::Context> context,
v8::Local<v8::Array> array);
static base::ScopedPtr<LynxArray> ConvertToLynxArray(v8::Local<v8::Context> context,
const v8::FunctionCallbackInfo<v8::Value>& info);
static base::ScopedPtr<LynxMap> ConvertToLynxMap(v8::Local<v8::Context> context,
v8::Local<v8::Object> object);
static base::ScopedPtr<LynxValue> ConvertToLynxFunction(v8::Local<v8::Context> context,
v8::Local<v8::Function> func);
static v8::Local<v8::String> ConvertToV8String(v8::Isolate* isolate, const std::string &s);
static v8::Local<v8::String> ConvertToV8String(v8::Isolate* isolate, const char* s);
static v8::Local<v8::Array> ConvertToV8Array(v8::Isolate* isolate, LynxArray* array);
static v8::Local<v8::Object> ConvertToV8Object(v8::Isolate* isolate, LynxObject* object);
static v8::Local<v8::Value> ConvertToV8Value(v8::Isolate* isolate, LynxValue* value);
static v8::Local<v8::Object> ConvertToV8Object(v8::Isolate* isolate, LynxMap* map);
static std::string ConvertToString(const v8::Local <v8::String> &s);
inline static v8::Local<v8::Number> ConvertToV8Int(v8::Isolate* isolate, LynxValue* value) {
return v8::Number::New(isolate, value->data_.i);
}
inline static v8::Local<v8::Number> ConvertToV8Long(v8::Isolate* isolate, LynxValue* value) {
return v8::Number::New(isolate, value->data_.l);
}
inline static v8::Local<v8::Number> ConvertToV8Float(v8::Isolate* isolate, LynxValue* value) {
return v8::Number::New(isolate, value->data_.f);
}
inline static v8::Local<v8::Number> ConvertToV8Double(v8::Isolate* isolate, LynxValue* value) {
return v8::Number::New(isolate, value->data_.d);
}
inline static v8::Local<v8::Boolean> ConvertToV8Boolean(v8::Isolate* isolate, LynxValue* value) {
return v8::Boolean::New(isolate, value->data_.b);
}
inline static v8::Local<v8::String> ConvertToV8String(v8::Isolate* isolate, LynxValue* value) {
return ConvertToV8String(isolate, value->data_.str);
}
static v8::Local<v8::String> JsonStringify(v8::Isolate* isolate, const v8::Local<v8::Value>& arg);
};
}
#endif
#endif // RUNTIME_V8_HELPER
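// Illustrative usage sketch (not part of the original header), assuming an
// isolate and an entered context are already available at the call site:
//
//   v8::Local<v8::String> s = jscore::V8Helper::ConvertToV8String(isolate, "hello");
//   std::string round_tripped = jscore::V8Helper::ConvertToString(s);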
/*
* Copyright (c) 2008-2018 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.gui.actions.list;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.cuba.client.ClientConfig;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.Configuration;
import com.haulmont.cuba.core.global.Messages;
import com.haulmont.cuba.core.global.Security;
import com.haulmont.cuba.gui.ScreenBuilders;
import com.haulmont.cuba.gui.builders.EditorBuilder;
import com.haulmont.cuba.gui.components.Action;
import com.haulmont.cuba.gui.components.ActionType;
import com.haulmont.cuba.gui.components.Component;
import com.haulmont.cuba.gui.components.actions.ListAction;
import com.haulmont.cuba.gui.components.data.meta.EntityDataUnit;
import com.haulmont.cuba.gui.icons.CubaIcon;
import com.haulmont.cuba.gui.icons.Icons;
import com.haulmont.cuba.gui.meta.*;
import com.haulmont.cuba.gui.screen.*;
import com.haulmont.cuba.gui.sys.ActionScreenInitializer;
import com.haulmont.cuba.security.entity.EntityOp;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import static com.haulmont.cuba.gui.screen.FrameOwner.WINDOW_COMMIT_AND_CLOSE_ACTION;
/**
* Standard action for creating an entity instance using its editor screen.
* <p>
* Should be defined for a list component ({@code Table}, {@code DataGrid}, etc.) in a screen XML descriptor.
* <p>
* The action instance can be parameterized using the nested {@code properties} XML element or programmatically in the
* screen controller.
*
* @param <E> type of entity
*/
@StudioAction(category = "List Actions", description = "Creates an entity instance using its editor screen")
@ActionType(CreateAction.ID)
public class CreateAction<E extends Entity> extends ListAction implements Action.DisabledWhenScreenReadOnly,
Action.ExecutableAction {
public static final String ID = "create";
@Inject
protected ScreenBuilders screenBuilders;
@Inject
protected Security security;
protected ActionScreenInitializer screenInitializer = new ActionScreenInitializer();
protected Supplier<E> newEntitySupplier;
protected Consumer<E> initializer;
protected Consumer<E> afterCommitHandler;
protected Function<E, E> transformation;
public CreateAction() {
this(ID);
}
public CreateAction(String id) {
super(id);
this.primary = true;
}
/**
* Returns the editor screen open mode if it was set by {@link #setOpenMode(OpenMode)} or in the screen XML.
* Otherwise returns null.
*/
@Nullable
public OpenMode getOpenMode() {
return screenInitializer.getOpenMode();
}
/**
* Sets the editor screen open mode.
*/
@StudioPropertiesItem
public void setOpenMode(OpenMode openMode) {
screenInitializer.setOpenMode(openMode);
}
/**
* Returns the editor screen id if it was set by {@link #setScreenId(String)} or in the screen XML.
* Otherwise returns null.
*/
@Nullable
public String getScreenId() {
return screenInitializer.getScreenId();
}
/**
* Sets the editor screen id.
*/
@StudioPropertiesItem
public void setScreenId(String screenId) {
screenInitializer.setScreenId(screenId);
}
/**
* Returns the editor screen class if it was set by {@link #setScreenClass(Class)} or in the screen XML.
* Otherwise returns null.
*/
@Nullable
public Class getScreenClass() {
return screenInitializer.getScreenClass();
}
/**
* Sets the editor screen class.
*/
@StudioPropertiesItem
public void setScreenClass(Class screenClass) {
screenInitializer.setScreenClass(screenClass);
}
/**
* Sets the editor screen options supplier. The supplier provides {@code ScreenOptions} to the
* opened screen.
* <p>
* The preferred way to set the supplier is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "screenOptionsSupplier")
* protected ScreenOptions petsTableCreateScreenOptionsSupplier() {
* return new MapScreenOptions(ParamsMap.of("someParameter", 10));
* }
* </pre>
*/
public void setScreenOptionsSupplier(Supplier<ScreenOptions> screenOptionsSupplier) {
screenInitializer.setScreenOptionsSupplier(screenOptionsSupplier);
}
/**
* Sets the editor screen configurer. Use the configurer if you need to provide parameters to the
* opened screen through setters.
* <p>
* The preferred way to set the configurer is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "screenConfigurer")
* protected void petsTableCreateScreenConfigurer(Screen editorScreen) {
* ((PetEdit) editorScreen).setSomeParameter(someValue);
* }
* </pre>
*/
public void setScreenConfigurer(Consumer<Screen> screenConfigurer) {
screenInitializer.setScreenConfigurer(screenConfigurer);
}
/**
* Sets the handler to be invoked when the editor screen closes.
* <p>
* The preferred way to set the handler is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "afterCloseHandler")
* protected void petsTableCreateAfterCloseHandler(AfterCloseEvent event) {
* if (event.closedWith(StandardOutcome.COMMIT)) {
* System.out.println("Committed");
* }
* }
* </pre>
*/
public void setAfterCloseHandler(Consumer<Screen.AfterCloseEvent> afterCloseHandler) {
screenInitializer.setAfterCloseHandler(afterCloseHandler);
}
/**
* Sets the new entity supplier. The supplier should return a new entity instance.
* <p>
* The preferred way to set the supplier is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "newEntitySupplier")
* protected Pet petsTableCreateNewEntitySupplier() {
* Pet pet = metadata.create(Pet.class);
* pet.setName("a cat");
* return pet;
* }
* </pre>
*/
public void setNewEntitySupplier(Supplier<E> newEntitySupplier) {
this.newEntitySupplier = newEntitySupplier;
}
/**
* Sets the new entity initializer. The initializer accepts the new entity instance and can perform its
* initialization.
* <p>
* The preferred way to set the initializer is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "initializer")
* protected void petsTableCreateInitializer(Pet entity) {
* entity.setName("a cat");
* }
* </pre>
*/
public void setInitializer(Consumer<E> initializer) {
this.initializer = initializer;
}
/**
* Sets the handler to be invoked when the editor screen commits the new entity.
* <p>
* The preferred way to set the handler is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "afterCommitHandler")
* protected void petsTableCreateAfterCommitHandler(Pet entity) {
* System.out.println("Created " + entity);
* }
* </pre>
*/
public void setAfterCommitHandler(Consumer<E> afterCommitHandler) {
this.afterCommitHandler = afterCommitHandler;
}
/**
* Sets the function to transform the committed in the editor screen entity before setting it to the target data container.
* <p>
* The preferred way to set the function is using a controller method annotated with {@link Install}, e.g.:
* <pre>
* @Install(to = "petsTable.create", subject = "transformation")
* protected Pet petsTableCreateTransformation(Pet entity) {
* return doTransform(entity);
* }
* </pre>
*/
public void setTransformation(Function<E, E> transformation) {
this.transformation = transformation;
}
@Inject
protected void setMessages(Messages messages) {
this.caption = messages.getMainMessage("actions.Create");
}
@Inject
protected void setIcons(Icons icons) {
this.icon = icons.get(CubaIcon.CREATE_ACTION);
}
@Inject
protected void setConfiguration(Configuration configuration) {
ClientConfig clientConfig = configuration.getConfig(ClientConfig.class);
setShortcut(clientConfig.getTableInsertShortcut());
}
@Override
protected boolean isPermitted() {
if (target == null || !(target.getItems() instanceof EntityDataUnit)) {
return false;
}
MetaClass metaClass = ((EntityDataUnit) target.getItems()).getEntityMetaClass();
if (metaClass == null) {
return true;
}
boolean createPermitted = security.isEntityOpPermitted(metaClass, EntityOp.CREATE);
if (!createPermitted) {
return false;
}
return super.isPermitted();
}
@Override
public void actionPerform(Component component) {
// if standard behaviour
if (!hasSubscriptions(ActionPerformedEvent.class)) {
execute();
} else {
super.actionPerform(component);
}
}
/**
* Executes the action.
*/
@SuppressWarnings("unchecked")
@Override
public void execute() {
if (target == null) {
throw new IllegalStateException("CreateAction target is not set");
}
if (!(target.getItems() instanceof EntityDataUnit)) {
throw new IllegalStateException("CreateAction target items is null or does not implement EntityDataUnit");
}
MetaClass metaClass = ((EntityDataUnit) target.getItems()).getEntityMetaClass();
if (metaClass == null) {
throw new IllegalStateException("Target is not bound to entity");
}
EditorBuilder builder = screenBuilders.editor(target);
if (newEntitySupplier != null) {
E entity = newEntitySupplier.get();
builder = builder.newEntity(entity);
} else {
builder = builder.newEntity();
}
if (initializer != null) {
builder = builder.withInitializer(initializer);
}
builder = screenInitializer.initBuilder(builder);
if (transformation != null) {
builder.withTransformation(transformation);
}
Screen editor = builder.build();
if (afterCommitHandler != null) {
editor.addAfterCloseListener(afterCloseEvent -> {
CloseAction closeAction = afterCloseEvent.getCloseAction();
if (closeAction.equals(WINDOW_COMMIT_AND_CLOSE_ACTION)) {
Entity committedEntity = ((EditorScreen) editor).getEditedEntity();
afterCommitHandler.accept((E) committedEntity);
}
});
}
screenInitializer.initScreen(editor);
editor.show();
}
}
/*
* Copyright 2009-2016 Weibo, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.weibo.api.motan.protocol.restful.support.servlet;
import org.jboss.resteasy.spi.ResteasyDeployment;
import com.weibo.api.motan.exception.MotanFrameworkException;
import com.weibo.api.motan.protocol.restful.RestServer;
public class ServletRestServer implements RestServer {
private static ResteasyDeployment deployment;
public static void setResteasyDeployment(ResteasyDeployment deployment) {
ServletRestServer.deployment = deployment;
}
public void checkEnv() {
if (deployment == null) {
throw new MotanFrameworkException("please config <listener-class>"
+ RestfulServletContainerListener.class.getName() + "</listener-class> in your web.xml file");
}
}
@Override
public void start() {
}
@Override
public ResteasyDeployment getDeployment() {
return deployment;
}
@Override
public void stop() {
}
}
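/*
 * Illustrative web.xml fragment matching the checkEnv() error message above
 * (a sketch, not part of the original file; the listener's package is assumed
 * to be the same as this class):
 *
 *   <listener>
 *       <listener-class>
 *           com.weibo.api.motan.protocol.restful.support.servlet.RestfulServletContainerListener
 *       </listener-class>
 *   </listener>
 */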
/* liblouis Braille Translation and Back-Translation Library
Based on the Linux screenreader BRLTTY, copyright (C) 1999-2006 by
The BRLTTY Team
Copyright (C) 2004, 2005, 2006, 2009
ViewPlus Technologies, Inc. www.viewplus.com and
JJB Software, Inc. www.jjb-software.com
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Maintained by <NAME> <EMAIL>
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "liblouis.h"
#include "louis.h"
#include <getopt.h>
#include "progname.h"
#include "version-etc.h"
static const struct option longopts[] =
{
{ "help", no_argument, NULL, 'h' },
{ "version", no_argument, NULL, 'v' },
{ NULL, 0, NULL, 0 }
};
const char version_etc_copyright[] =
"Copyright %s %d ViewPlus Technologies, Inc. and JJB Software, Inc.";
#define AUTHORS "<NAME>"
static void
print_help (void)
{
printf ("\
Usage: %s [OPTIONS]\n", program_name);
fputs ("\
Check the accuracy of hyphenation in Braille translation for both\n\
translated and untranslated words.\n\n", stdout);
fputs ("\
-h, --help display this help and exit\n\
-v, --version display version information and exit\n", stdout);
printf ("\n");
printf ("Report bugs to %s.\n", PACKAGE_BUGREPORT);
#ifdef PACKAGE_PACKAGER_BUG_REPORTS
printf ("Report %s bugs to: %s\n", PACKAGE_PACKAGER, PACKAGE_PACKAGER_BUG_REPORTS);
#endif
#ifdef PACKAGE_URL
printf ("%s home page: <%s>\n", PACKAGE_NAME, PACKAGE_URL);
#endif
}
#define BUFSIZE 256
static char inputBuffer[BUFSIZE];
static const TranslationTableHeader *validTable = NULL;
static unsigned int mode;
static char table[BUFSIZE];
static int
getInput (void)
{
int inputLength;
inputBuffer[0] = 0;
fgets (inputBuffer, sizeof (inputBuffer), stdin);
inputLength = strlen (inputBuffer) - 1;
if (inputLength < 0) /*EOF on script */
exit (0);
inputBuffer[inputLength] = 0;
return inputLength;
}
static void
paramLetters (void)
{
printf ("Press one of the letters in parentheses, then enter.\n");
printf ("(t)able, tr(a)nslated, (u)ntranslated, (r)un, (h)elp, (q)uit\n");
}
static int
getCommands (void)
{
paramLetters ();
do
{
printf ("Command: ");
getInput ();
switch (inputBuffer[0])
{
case 0:
break;
case 't':
do
{
printf ("Enter the name of a table or a list: ");
getInput ();
strcpy (table, inputBuffer);
validTable = lou_getTable (table);
if (validTable != NULL && validTable->hyphenStatesArray == 0)
{
printf ("No hyphenation table.\n");
validTable = NULL;
}
}
while (validTable == NULL);
break;
case 'a':
mode = 1;
break;
case 'u':
mode = 0;
break;
case 'r':
if (validTable == NULL)
{
printf ("You must enter a valid table name or list.\n");
inputBuffer[0] = 0;
}
break;
case 'h':
printf ("Commands: action\n");
printf ("(t)able: Enter a table name or list\n");
printf ("(r)un: run the hyphenation test loop\n");
printf ("tr(a)nslated: translated input\n");
printf ("(u)ntranslated: untranslated input\n");
printf ("(h)elp: print this page\n");
printf ("(q)uit: leave the program\n");
printf ("\n");
paramLetters ();
break;
case 'q':
exit (0);
default:
printf ("Bad choice.\n");
break;
}
}
while (inputBuffer[0] != 'r');
return 1;
}
int
main (int argc, char **argv)
{
widechar inbuf[BUFSIZE];
char hyphens[BUFSIZE];
int inlen;
int k;
int optc;
set_program_name (argv[0]);
while ((optc = getopt_long (argc, argv, "hv", longopts, NULL)) != -1)
switch (optc)
{
/* --help and --version exit immediately, per GNU coding standards. */
case 'v':
version_etc (stdout, program_name, PACKAGE_NAME, VERSION, AUTHORS, (char *) NULL);
exit (EXIT_SUCCESS);
break;
case 'h':
print_help ();
exit (EXIT_SUCCESS);
break;
default:
fprintf (stderr, "Try `%s --help' for more information.\n",
program_name);
exit (EXIT_FAILURE);
break;
}
if (optind < argc)
{
/* Print error message and exit. */
fprintf (stderr, "%s: extra operand: %s\n",
program_name, argv[optind]);
fprintf (stderr, "Try `%s --help' for more information.\n",
program_name);
exit (EXIT_FAILURE);
}
validTable = NULL;
mode = 0;
while (1)
{
getCommands ();
printf ("Type something, press enter, and view the results.\n");
printf ("A blank line returns to command entry.\n");
while (1)
{
inlen = getInput ();
if (inlen == 0)
break;
for (k = 0; k < inlen; k++)
inbuf[k] = inputBuffer[k];
if (!lou_hyphenate (table, inbuf, inlen, hyphens, mode))
{
printf ("Hyphenation error\n");
continue;
}
printf ("Hyphenation mask: %s\n", hyphens);
printf ("Hyphenated word: ");
for (k = 0; k < inlen; k++)
{
if (hyphens[k] == '1')
printf ("-");
printf ("%c", inbuf[k]);
}
printf ("\n");
}
}
lou_free ();
return 0;
}
/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package ai.djl.modality.cv.transform;
import ai.djl.modality.cv.util.NDImageUtils;
import ai.djl.ndarray.NDArray;
import ai.djl.translate.Transform;
/** A {@link Transform} that normalizes an image {@link NDArray} of shape CHW or NCHW. */
public class Normalize implements Transform {
private float[] mean;
private float[] std;
/**
* Creates a {@code Normalize} {@link Transform} that normalizes.
*
* @param mean the mean to normalize with for each channel
* @param std the standard deviation to normalize with for each channel
* @see NDImageUtils#normalize(NDArray, float[], float[])
*/
public Normalize(float[] mean, float[] std) {
this.mean = mean;
this.std = std;
}
/** {@inheritDoc} */
@Override
public NDArray transform(NDArray array) {
return NDImageUtils.normalize(array, mean, std);
}
}
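/*
 * Illustrative usage sketch (not part of the original file); the mean/std
 * values below are the commonly used ImageNet statistics, given here only as
 * an example:
 *
 *   Transform normalize = new Normalize(
 *           new float[] {0.485f, 0.456f, 0.406f},
 *           new float[] {0.229f, 0.224f, 0.225f});
 *   NDArray normalized = normalize.transform(imageChw);  // imageChw: CHW float NDArray
 */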
#ifndef PX_OBJECT_LABEL_H
#define PX_OBJECT_LABEL_H
#include "PX_Object.h"
typedef enum
{
PX_OBJECT_LABEL_STYLE_RECT,
PX_OBJECT_LABEL_STYLE_ROUNDRECT,
}PX_OBJECT_LABEL_STYLE;
typedef struct
{
PX_ALIGN Align;
px_color borderColor;
px_color TextColor;
px_color BackgroundColor;
px_bool bBorder;
PX_OBJECT_LABEL_STYLE style;
PX_FontModule *fontModule;
px_char *Text;
}PX_Object_Label;
PX_Object * PX_Object_LabelCreate(px_memorypool *mp,PX_Object *Parent,px_int x,px_int y,px_int Width,px_int Height,const px_char *Text,PX_FontModule *fm,px_color Color);
PX_Object_Label * PX_Object_GetLabel(PX_Object *Object);
px_char * PX_Object_LabelGetText(PX_Object *Label);
px_void PX_Object_LabelSetText(PX_Object *pLabel,const px_char *Text);
px_void PX_Object_LabelSetTextColor(PX_Object *pLabel,px_color Color);
px_void PX_Object_LabelSetBackgroundColor(PX_Object *pLabel,px_color Color);
px_void PX_Object_LabelSetAlign(PX_Object *pLabel,PX_ALIGN Align);
px_void PX_Object_LabelSetBorder(PX_Object *pLabel,px_bool b);
px_void PX_Object_LabelSetBorderColor(PX_Object *pLabel,px_color color);
px_void PX_Object_LabelSetStyle(PX_Object *pLabel,PX_OBJECT_LABEL_STYLE style);
#endif
// services/datastore/src/test/java/com/dremio/datastore/indexed/AbstractTestIndexedStore.java
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.datastore.indexed;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.dremio.datastore.SearchQueryUtils;
import com.dremio.datastore.SearchTypes;
import com.dremio.datastore.SearchTypes.SearchQuery;
import com.dremio.datastore.api.Document;
import com.dremio.datastore.api.FindByCondition;
import com.dremio.datastore.api.ImmutableFindByCondition;
import com.dremio.datastore.api.IndexedStore;
import com.dremio.datastore.api.KVStoreProvider;
import com.dremio.datastore.indexed.doughnut.Doughnut;
import com.dremio.datastore.indexed.doughnut.DoughnutIndexKeys;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
/**
* Test the indexed store implementation
*/
public abstract class AbstractTestIndexedStore {
private Writer writer;
private KVStoreProvider provider;
private IndexedStore<String, Doughnut> kvStore;
private final Doughnut d1 = new Doughnut("original", "glazed", 1.29);
private final Doughnut d2 = new Doughnut("custard", "bavarian creme with chocolate icing", 1.39);
private final Doughnut d3 = new Doughnut("sourdough", "cake with glaze", 1.10);
private List<Integer> getCounts(String... filters) {
List<SearchQuery> queries = Lists.transform(Arrays.asList(filters), new Function<String, SearchQuery>() {
@Override
public SearchQuery apply(String input) {
return SearchFilterToQueryConverter.toQuery(input, DoughnutIndexKeys.MAPPING);
}
});
return kvStore.getCounts(queries.toArray(new SearchQuery[queries.size()]));
}
protected abstract KVStoreProvider createKVStoreProvider() throws Exception;
protected abstract IndexedStore<String, Doughnut> createKVStore();
protected KVStoreProvider getProvider(){
return provider;
}
protected IndexedStore<String, Doughnut> getKvStore() {
return kvStore;
}
protected void closeResources() throws Exception {
}
protected Doughnut getD1() {
return d1;
}
protected Doughnut getD2() {
return d2;
}
protected Doughnut getD3() {
return d3;
}
@Before
public void before() throws Exception {
provider = createKVStoreProvider();
kvStore = createKVStore();
}
private class Writer extends Thread implements Runnable {
@Override
public void run() {
for (int i = 0; i < 5000; ++i) {
String name = "pwriter_" + Integer.toString(i);
kvStore.put(name, new Doughnut(name, "bad_flavor_" + (i % 10), i));
}
}
}
@After
public final void after() throws Exception {
closeResources();
if (writer != null) {
writer.join();
}
provider.close();
}
@Test
public void put(){
kvStore.put("a", d1);
checkFindByName(d1);
checkFindByPrice(d1);
}
@Test
public void counts(){
addDoughnutsToStore();
assertEquals(
ImmutableList.of(1, 2, 0),
getCounts("n==original", "p=gt=1.10;p=lt=1.40", "p=lt=1.11;n=lt=custard"));
}
@Test
public void term() {
addDoughnutsToStore();
final SearchQuery termQuery = SearchQueryUtils.newTermQuery("flavor", d2.getFlavor());
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(termQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d2), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void termInt() {
final Doughnut d1 = new Doughnut("special", "dream", 2.1, 1, 2L);
final Doughnut d2 = new Doughnut("regular", "blueberry", 1.8, 2, 3L);
kvStore.put("a", d1);
kvStore.put("b", d2);
final SearchQuery termQuery = SearchQueryUtils.newTermQuery("thickness", d1.getThickness());
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(termQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void termDouble() {
addDoughnutsToStore();
final SearchQuery termDoubleQuery = SearchQueryUtils.newTermQuery("price", d2.getPrice());
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(termDoubleQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d2), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void termLong() {
final Doughnut d1 = new Doughnut("special", "dream", 2.1, 1, 2L);
final Doughnut d2 = new Doughnut("regular", "blueberry", 1.8, 2, 3L);
kvStore.put("a", d1);
kvStore.put("b", d2);
final SearchQuery termQuery = SearchQueryUtils.newTermQuery("diameter", d2.getDiameter());
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(termQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d2), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void rangeTerm() {
addDoughnutsToStore();
final SearchQuery termRangeQuery = SearchQueryUtils.newRangeTerm("name", "custard", "original", true, true);
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(termRangeQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1, d2), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void rangeDouble() {
addDoughnutsToStore();
final SearchQuery rangeDoubleQuery = SearchQueryUtils.newRangeDouble("price", 1.10, 1.29, true, false);
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(rangeDoubleQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d3), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void rangeInt() {
final Doughnut d1 = new Doughnut("special", "dream", 2.1, 1, 2L);
final Doughnut d2 = new Doughnut("regular", "blueberry", 1.8,2, 3L);
kvStore.put("a", d1);
kvStore.put("b", d2);
final SearchQuery rangeIntQuery = SearchQueryUtils.newRangeInt("thickness", 0, 1, true, true);
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(rangeIntQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void rangeLong() {
final Doughnut d1 = new Doughnut("special", "dream", 2.1, 1, 2L);
final Doughnut d2 = new Doughnut("regular", "blueberry", 1.8,2, 3L);
kvStore.put("a", d1);
kvStore.put("b", d2);
final SearchQuery rangeLongQuery = SearchQueryUtils.newRangeLong("diameter", 0L, 2L, false, true);
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(rangeLongQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void exists() {
addDoughnutsToStore();
final SearchQuery containsQuery = SearchQueryUtils.newExistsQuery("name");
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(containsQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1, d2, d3), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void notExists() {
addDoughnutsToStore();
final SearchQuery containsQuery = SearchQueryUtils.newDoesNotExistQuery("randomfield");
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(containsQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1, d2, d3), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void contains() {
addDoughnutsToStore();
final SearchQuery containsQuery = SearchQueryUtils.newContainsTerm("name", "rigi");
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(containsQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void prefix() {
addDoughnutsToStore();
final SearchQuery containsQuery = SearchQueryUtils.newPrefixQuery("name", "cus");
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(containsQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d2), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void and() {
addDoughnutsToStore();
final SearchQuery firstQuery = SearchQueryUtils.newTermQuery("name", "custard");
final SearchQuery secondQuery = SearchQueryUtils.newTermQuery("price", 1.29);
final SearchQuery andQuery = SearchQueryUtils.and(firstQuery, secondQuery);
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(andQuery).build();
assertEquals(0, Iterables.size(kvStore.find(condition)));
}
@Test
public void or() {
addDoughnutsToStore();
final SearchQuery firstQuery = SearchQueryUtils.newTermQuery("name", "custard");
final SearchQuery secondQuery = SearchQueryUtils.newTermQuery("price", 1.29);
final SearchQuery andQuery = SearchQueryUtils.or(firstQuery, secondQuery);
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(andQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(d1, d2), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void not() {
addDoughnutsToStore();
final SearchQuery query = SearchQueryUtils.not(SearchQueryUtils.newTermQuery("name", "original"));
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(query).build();
assertEquals(2, Iterables.size(kvStore.find(condition)));
}
@Test
public void containsSpecialChars() {
final Doughnut special = new Doughnut("spe*\\?ial", "Dulce De Leche", 0.25);
kvStore.put("special", special);
addDoughnutsToStore();
final SearchQuery containsQuery = SearchQueryUtils.newContainsTerm("name", "spe*\\?ial");
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(containsQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(special), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void delete() {
kvStore.put("a", d1);
checkFindByName(d1);
kvStore.delete("a");
assertEquals(
0,
Iterables.size(kvStore.find(newCondition("n==" + d1.getName(), DoughnutIndexKeys.MAPPING).build())));
}
@Test
public void paginatedSearch() {
final int numDoughnuts = 4000;
addData(numDoughnuts);
List<Doughnut> found = findByFlavor(new Doughnut("", "good_flavor_0", 0));
assertEquals(numDoughnuts / 4, found.size());
}
@Test
public void limit() {
addData(100);
final int limit = 2;
final Iterable<Document<String, Doughnut>> result = kvStore.find(new ImmutableFindByCondition.Builder()
.setCondition(SearchQueryUtils.newMatchAllQuery())
.setLimit(limit)
.build());
assertEquals(limit, Iterables.size(result));
}
@Test
public void skip() {
kvStore.put("a", d1);
kvStore.put("b", d2);
kvStore.put("c", d3);
final int offset = 2;
final Iterable<Document<String, Doughnut>> result = kvStore.find(new ImmutableFindByCondition.Builder()
.setCondition(SearchQueryUtils.newMatchAllQuery())
.setOffset(offset)
.build());
final List<Doughnut> doughnuts = toListOfDoughnuts(result);
assertEquals(d3, doughnuts.get(0));
}
@Test
public void order() {
// Test sorting by name ascending, then price descending.
final Doughnut firstDonut = new Doughnut("2name", "1flavor", 10);
final Doughnut secondDonut = new Doughnut("2name", "2flavor", 1);
final Doughnut thirdDonut = new Doughnut("3name", "3flavor", 0);
kvStore.put("doughnut1", thirdDonut); // should be third.
kvStore.put("doughnut2", secondDonut); // should be second.
kvStore.put("doughnut3", firstDonut); // should be first;
final Iterable<Document<String, Doughnut>> result = kvStore.find(new ImmutableFindByCondition.Builder()
.setCondition(SearchQueryUtils.newMatchAllQuery())
.setSort(
ImmutableList.of(
SearchTypes.SearchFieldSorting.newBuilder()
.setField("name").setType(SearchTypes.SearchFieldSorting.FieldType.STRING).setOrder(SearchTypes.SortOrder.ASCENDING).build(),
SearchTypes.SearchFieldSorting.newBuilder()
.setField("price").setType(SearchTypes.SearchFieldSorting.FieldType.DOUBLE).setOrder(SearchTypes.SortOrder.DESCENDING).build()))
.build());
final List<Doughnut> doughnuts = toListOfDoughnuts(result);
assertEquals(firstDonut, doughnuts.get(0));
assertEquals(secondDonut, doughnuts.get(1));
assertEquals(thirdDonut, doughnuts.get(2));
}
@Test
public void searchAfterWithParallelUpdates() throws InterruptedException {
int numDoughnuts = 10000;
// Populate store with good_flavor_*.
List<String> fetchKeys = new ArrayList<>();
for (int i = 0; i < numDoughnuts; i++) {
String name = Integer.toString(i);
kvStore.put(name, new Doughnut(name, "good_flavor_" + (i % 10), i));
if (i % 10 == 0) {
fetchKeys.add(name);
}
}
// Lookup entries matching flavor=goodflavor_0, while there are updates in progress.
writer = new Writer();
writer.start();
List<Doughnut> found = findByFlavor(new Doughnut("", "good_flavor_0", 0));
assertEquals(numDoughnuts / 10, found.size());
writer.join();
writer = null;
}
@Test
public void emptyAnd() {
addDoughnutsToStore();
final SearchQuery andQuery = SearchQueryUtils.and();
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(andQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(), toListOfDoughnuts(kvStore.find(condition)));
}
@Test
public void emptyOr() {
addDoughnutsToStore();
final SearchQuery orQuery = SearchQueryUtils.or();
final FindByCondition condition = new ImmutableFindByCondition.Builder().setCondition(orQuery).build();
verifyDoughnutsRetrieved(ImmutableList.of(), toListOfDoughnuts(kvStore.find(condition)));
}
private void addData(int numDoughnuts) {
for (int i = 0; i < numDoughnuts; i++) {
final String name = Integer.toString(i);
kvStore.put(name, new Doughnut(name, "good_flavor_" + (i % 4), i));
}
}
protected void verifyDoughnutsRetrieved(List<Doughnut> expected, List<Doughnut> retrieved) {
assertEquals(expected.size(), retrieved.size());
for (int i = 0; i < expected.size(); i++) {
assertEquals(expected.get(i), retrieved.get(i));
}
}
protected void addDoughnutsToStore() {
kvStore.put("a", d1);
kvStore.put("b", d2);
kvStore.put("c", d3);
}
private void checkFindByName(Doughnut d) {
final Iterable<Document<String, Doughnut>> iter =
kvStore.find(newCondition("n==" + d.getName(), DoughnutIndexKeys.MAPPING).build());
final List<Doughnut> doughnuts = toListOfDoughnuts(iter);
Assert.assertEquals(1, doughnuts.size());
Assert.assertEquals(d, doughnuts.get(0));
}
private void assertNoResult(String filterStr) {
assertEquals(
0, Iterables.size(kvStore.find(newCondition(filterStr, DoughnutIndexKeys.MAPPING).build())));
}
private void checkFindByPrice(Doughnut d) {
final Iterable<Document<String, Doughnut>> iter =
kvStore.find(newCondition("p==" + d.getPrice(), DoughnutIndexKeys.MAPPING).build());
final List<Doughnut> doughnuts = toListOfDoughnuts(iter);
Assert.assertEquals(1, doughnuts.size());
Assert.assertEquals(d, doughnuts.get(0));
}
private List<Doughnut> findByFlavor(Doughnut d){
Iterable<Document<String, Doughnut>> iter = kvStore.find(
newCondition("f==" + d.getFlavor(), DoughnutIndexKeys.MAPPING)
.setPageSize(100)
.build());
return toListOfDoughnuts(iter);
}
private ImmutableFindByCondition.Builder newCondition(String conditionStr, FilterIndexMapping mapping) {
return new ImmutableFindByCondition.Builder()
.setCondition(SearchFilterToQueryConverter.toQuery(conditionStr, mapping));
}
protected List<Doughnut> toListOfDoughnuts(Iterable<Document<String, Doughnut>> docs) {
return StreamSupport.stream(docs.spliterator(), false)
.map(doc -> doc.getValue())
.collect(Collectors.toList());
}
}
/*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.coll;
import com.graphhopper.util.BitUtil;
import java.util.Comparator;
import java.util.TreeMap;
/**
 * A priority queue for integer-float key-value pairs implemented by a TreeMap. As the tree map does not allow multiple
 * values for the same key, we store the value inside the composed key: the float bits of the value occupy the high 32
 * bits of a long and the integer key occupies the low 32 bits.
* <p>
*
* @author <NAME>
*/
public class GHTreeMapComposed {
private static final Integer NOT_EMPTY = -3;
private final BitUtil bitUtil = BitUtil.BIG;
private final TreeMap<Long, Integer> map;
public GHTreeMapComposed() {
map = new TreeMap<>(new Comparator<Long>() {
// we cannot just use the long sorting because the values are floats
@Override
public int compare(Long o1, Long o2) {
// for two entries to be equal both value and key must be equal
if (o1.equals(o2)) return 0;
int value1 = bitUtil.getIntHigh(o1);
int value2 = bitUtil.getIntHigh(o2);
if (value1 == value2) {
// we enforce a deterministic order by looking at the size of the key (although there is no real
// reason to prefer one entry over the other)
int key1 = bitUtil.getIntLow(o1);
int key2 = bitUtil.getIntLow(o2);
if (key1 == key2) return 0;
return key1 < key2 ? -1 : 1;
}
float f1 = Float.intBitsToFloat(value1);
float f2 = Float.intBitsToFloat(value2);
return Float.compare(f1, f2);
}
});
}
public void clear() {
map.clear();
}
void remove(int key, float value) {
long v = bitUtil.toLong(Float.floatToRawIntBits(value), key);
Integer prev = map.remove(v);
if (prev == null) {
throw new IllegalStateException("cannot remove key " + key + " with value " + value
+ " - did you insert this key with this value before ?");
}
}
public void update(int key, float oldValue, float value) {
remove(key, oldValue);
insert(key, value);
}
public void insert(int key, float value) {
long v = bitUtil.toLong(Float.floatToRawIntBits(value), key);
map.put(v, NOT_EMPTY);
}
public float peekValue() {
long key = map.firstEntry().getKey();
return Float.intBitsToFloat(bitUtil.getIntHigh(key));
}
public int peekKey() {
long key = map.firstEntry().getKey();
return bitUtil.getIntLow(key);
}
/**
* @return removes the smallest entry (key and value) from this collection
*/
public int pollKey() {
if (map.isEmpty())
throw new IllegalStateException("Cannot poll collection is empty!");
long key = map.pollFirstEntry().getKey();
return bitUtil.getIntLow(key);
}
public int getSize() {
return map.size();
}
public boolean isEmpty() {
return map.isEmpty();
}
@Override
public String toString() {
return map.toString();
}
}
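/*
 * Illustrative usage sketch (not part of the original file): keys are ints,
 * priorities are floats, and polling yields the key with the smallest value.
 *
 *   GHTreeMapComposed queue = new GHTreeMapComposed();
 *   queue.insert(7, 3.5f);
 *   queue.insert(2, 1.25f);
 *   queue.update(7, 3.5f, 0.5f);   // re-prioritize key 7
 *   int first = queue.pollKey();   // 7, because 0.5 < 1.25
 */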
package org.batfish.datamodel.answers;
public enum AnswerStatus {
FAILURE,
NOTFOUND,
STALE,
SUCCESS
}
#ifndef PQCLEAN_NTRULPR1277_CLEAN_CRYPTO_SORT_INT32_H
#define PQCLEAN_NTRULPR1277_CLEAN_CRYPTO_SORT_INT32_H
#include <stdint.h>
#define PQCLEAN_NTRULPR1277_CLEAN_crypto_sort_int32_BYTES 4
void PQCLEAN_NTRULPR1277_CLEAN_crypto_sort_int32(int32_t *x, long long n);
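/*
 * Illustrative usage sketch (not part of the original header): sorts a small
 * int32 array in place in ascending order.
 *
 *   int32_t values[4] = {3, -1, 7, 0};
 *   PQCLEAN_NTRULPR1277_CLEAN_crypto_sort_int32(values, 4);   // -> {-1, 0, 3, 7}
 */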
#endif
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.datetime.database.config;
import lombok.Getter;
import org.apache.shardingsphere.infra.database.type.DatabaseType;
import org.apache.shardingsphere.infra.database.type.DatabaseTypeEngine;
import org.apache.shardingsphere.infra.datasource.pool.creator.DataSourcePoolCreator;
import org.apache.shardingsphere.infra.exception.ShardingSphereException;
import org.apache.shardingsphere.infra.yaml.config.swapper.YamlDataSourceConfigurationSwapper;
import org.yaml.snakeyaml.Yaml;
import javax.sql.DataSource;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Map;
/**
* Database datetime service configuration.
*/
@Getter
public final class DatabaseDatetimeServiceConfiguration {
private static final DatabaseDatetimeServiceConfiguration INSTANCE = new DatabaseDatetimeServiceConfiguration();
private static final String CONFIG_FILE = "datetime-database-config.yaml";
private final DataSource dataSource;
private final DatabaseType databaseType;
private DatabaseDatetimeServiceConfiguration() {
dataSource = DataSourcePoolCreator.create(new YamlDataSourceConfigurationSwapper().swapToDataSourceProperties(loadDataSourceConfiguration()));
databaseType = DatabaseTypeEngine.getDatabaseType(Collections.singleton(dataSource));
}
@SuppressWarnings("unchecked")
private Map<String, Object> loadDataSourceConfiguration() {
try (InputStream inputStream = getClass().getClassLoader().getResourceAsStream(CONFIG_FILE)) {
return new Yaml().loadAs(inputStream, Map.class);
} catch (final IOException ex) {
throw new ShardingSphereException("Cannot load " + CONFIG_FILE + "file.", ex);
}
}
/**
* Get time service configuration instance.
*
* @return time service configuration
*/
public static DatabaseDatetimeServiceConfiguration getInstance() {
return INSTANCE;
}
}
/* dlls/d3dx11_43/tests/d3dx11.c */
/*
* Copyright 2016 <NAME> for CodeWeavers
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
#define COBJMACROS
#include "initguid.h"
#include "d3d11.h"
#include "d3dx11.h"
#include "wine/test.h"
#include "wine/heap.h"
static WCHAR temp_dir[MAX_PATH];
static BOOL create_file(const WCHAR *filename, const char *data, unsigned int size, WCHAR *out_path)
{
WCHAR path[MAX_PATH];
DWORD written;
HANDLE file;
if (!temp_dir[0])
GetTempPathW(ARRAY_SIZE(temp_dir), temp_dir);
lstrcpyW(path, temp_dir);
lstrcatW(path, filename);
file = CreateFileW(path, GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, 0);
if (file == INVALID_HANDLE_VALUE)
return FALSE;
if (WriteFile(file, data, size, &written, NULL))
{
CloseHandle(file);
if (out_path)
lstrcpyW(out_path, path);
return TRUE;
}
CloseHandle(file);
return FALSE;
}
static void delete_file(const WCHAR *filename)
{
WCHAR path[MAX_PATH];
lstrcpyW(path, temp_dir);
lstrcatW(path, filename);
DeleteFileW(path);
}
static BOOL create_directory(const WCHAR *dir)
{
WCHAR path[MAX_PATH];
lstrcpyW(path, temp_dir);
lstrcatW(path, dir);
return CreateDirectoryW(path, NULL);
}
static void delete_directory(const WCHAR *dir)
{
WCHAR path[MAX_PATH];
lstrcpyW(path, temp_dir);
lstrcatW(path, dir);
RemoveDirectoryW(path);
}
static void test_D3DX11CreateAsyncMemoryLoader(void)
{
ID3DX11DataLoader *loader;
SIZE_T size;
DWORD data;
HRESULT hr;
void *ptr;
hr = D3DX11CreateAsyncMemoryLoader(NULL, 0, NULL);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncMemoryLoader(NULL, 0, &loader);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncMemoryLoader(&data, 0, &loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
size = 100;
hr = ID3DX11DataLoader_Decompress(loader, &ptr, &size);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
ok(ptr == &data, "Got data pointer %p, original %p.\n", ptr, &data);
ok(!size, "Got unexpected data size.\n");
/* Load() is no-op. */
hr = ID3DX11DataLoader_Load(loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
hr = ID3DX11DataLoader_Destroy(loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
data = 0;
hr = D3DX11CreateAsyncMemoryLoader(&data, sizeof(data), &loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
/* Load() is no-op. */
hr = ID3DX11DataLoader_Load(loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
hr = ID3DX11DataLoader_Decompress(loader, &ptr, &size);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
ok(ptr == &data, "Got data pointer %p, original %p.\n", ptr, &data);
ok(size == sizeof(data), "Got unexpected data size.\n");
hr = ID3DX11DataLoader_Destroy(loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
}
static void create_testfile(WCHAR *path, const void *data, int data_len)
{
DWORD written;
HANDLE file;
BOOL ret;
GetTempPathW(MAX_PATH, path);
lstrcatW(path, L"asyncloader.data");
file = CreateFileW(path, GENERIC_READ | GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, 0, 0);
    ok(file != INVALID_HANDLE_VALUE, "Test file creation failed at %s, error %d.\n",
wine_dbgstr_w(path), GetLastError());
ret = WriteFile(file, data, data_len, &written, NULL);
ok(ret, "Write to test file failed.\n");
CloseHandle(file);
}
static void test_D3DX11CreateAsyncFileLoader(void)
{
static const char test_data1[] = "test data";
static const char test_data2[] = "more test data";
ID3DX11DataLoader *loader;
WCHAR path[MAX_PATH];
SIZE_T size;
HRESULT hr;
void *ptr;
BOOL ret;
hr = D3DX11CreateAsyncFileLoaderA(NULL, NULL);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncFileLoaderA(NULL, &loader);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncFileLoaderA("nonexistentfilename", &loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
hr = ID3DX11DataLoader_Decompress(loader, &ptr, &size);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = ID3DX11DataLoader_Load(loader);
ok(hr == D3D11_ERROR_FILE_NOT_FOUND, "Got unexpected hr %#x.\n", hr);
hr = ID3DX11DataLoader_Decompress(loader, &ptr, &size);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = ID3DX11DataLoader_Destroy(loader);
ok(hr == S_OK, "Got unexpected hr %#x.\n", hr);
    /* Test file sharing behavior using a dummy test file. */
create_testfile(path, test_data1, sizeof(test_data1));
hr = D3DX11CreateAsyncFileLoaderW(path, &loader);
ok(SUCCEEDED(hr), "Failed to create file loader, hr %#x.\n", hr);
ret = DeleteFileW(path);
ok(ret, "DeleteFile() failed, ret %d, error %d.\n", ret, GetLastError());
/* File was removed before Load(). */
hr = ID3DX11DataLoader_Load(loader);
ok(hr == D3D11_ERROR_FILE_NOT_FOUND, "Load() returned unexpected result, hr %#x.\n", hr);
/* Create it again. */
create_testfile(path, test_data1, sizeof(test_data1));
hr = ID3DX11DataLoader_Load(loader);
ok(SUCCEEDED(hr), "Load() failed, hr %#x.\n", hr);
/* Already loaded. */
hr = ID3DX11DataLoader_Load(loader);
ok(SUCCEEDED(hr), "Load() failed, hr %#x.\n", hr);
ret = DeleteFileW(path);
ok(ret, "DeleteFile() failed, ret %d, error %d.\n", ret, GetLastError());
/* Already loaded, file removed. */
hr = ID3DX11DataLoader_Load(loader);
ok(hr == D3D11_ERROR_FILE_NOT_FOUND, "Load() returned unexpected result, hr %#x.\n", hr);
/* Decompress still works. */
ptr = NULL;
hr = ID3DX11DataLoader_Decompress(loader, &ptr, &size);
ok(SUCCEEDED(hr), "Decompress() failed, hr %#x.\n", hr);
ok(ptr != NULL, "Got unexpected ptr %p.\n", ptr);
ok(size == sizeof(test_data1), "Got unexpected decompressed size.\n");
if (size == sizeof(test_data1))
ok(!memcmp(ptr, test_data1, size), "Got unexpected file data.\n");
/* Create it again, with different data. */
create_testfile(path, test_data2, sizeof(test_data2));
hr = ID3DX11DataLoader_Load(loader);
ok(SUCCEEDED(hr), "Load() failed, hr %#x.\n", hr);
ptr = NULL;
hr = ID3DX11DataLoader_Decompress(loader, &ptr, &size);
ok(SUCCEEDED(hr), "Decompress() failed, hr %#x.\n", hr);
ok(ptr != NULL, "Got unexpected ptr %p.\n", ptr);
ok(size == sizeof(test_data2), "Got unexpected decompressed size.\n");
if (size == sizeof(test_data2))
ok(!memcmp(ptr, test_data2, size), "Got unexpected file data.\n");
hr = ID3DX11DataLoader_Destroy(loader);
ok(SUCCEEDED(hr), "Destroy() failed, hr %#x.\n", hr);
ret = DeleteFileW(path);
ok(ret, "DeleteFile() failed, ret %d, error %d.\n", ret, GetLastError());
}
static void test_D3DX11CreateAsyncResourceLoader(void)
{
ID3DX11DataLoader *loader;
HRESULT hr;
hr = D3DX11CreateAsyncResourceLoaderA(NULL, NULL, NULL);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncResourceLoaderA(NULL, NULL, &loader);
ok(hr == D3DX11_ERR_INVALID_DATA, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncResourceLoaderA(NULL, "noname", &loader);
ok(hr == D3DX11_ERR_INVALID_DATA, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncResourceLoaderW(NULL, NULL, NULL);
ok(hr == E_FAIL, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncResourceLoaderW(NULL, NULL, &loader);
ok(hr == D3DX11_ERR_INVALID_DATA, "Got unexpected hr %#x.\n", hr);
hr = D3DX11CreateAsyncResourceLoaderW(NULL, L"noname", &loader);
ok(hr == D3DX11_ERR_INVALID_DATA, "Got unexpected hr %#x.\n", hr);
}
static HRESULT WINAPI test_d3dinclude_open(ID3DInclude *iface, D3D_INCLUDE_TYPE include_type,
const char *filename, const void *parent_data, const void **data, UINT *bytes)
{
static const char include1[] =
"#define LIGHT float4(0.0f, 0.2f, 0.5f, 1.0f)\n";
static const char include2[] =
"#include \"include1.h\"\n"
"float4 light_color = LIGHT;\n";
char *buffer;
trace("filename %s.\n", filename);
trace("parent_data %p: %s.\n", parent_data, parent_data ? (char *)parent_data : "(null)");
if (!strcmp(filename, "include1.h"))
{
buffer = heap_alloc(strlen(include1));
CopyMemory(buffer, include1, strlen(include1));
*bytes = strlen(include1);
ok(include_type == D3D_INCLUDE_LOCAL, "Unexpected include type %d.\n", include_type);
ok(!strncmp(include2, parent_data, strlen(include2)),
"Unexpected parent_data value.\n");
}
else if (!strcmp(filename, "include\\include2.h"))
{
buffer = heap_alloc(strlen(include2));
CopyMemory(buffer, include2, strlen(include2));
*bytes = strlen(include2);
ok(!parent_data, "Unexpected parent_data value.\n");
ok(include_type == D3D_INCLUDE_LOCAL, "Unexpected include type %d.\n", include_type);
}
else
{
ok(0, "Unexpected #include for file %s.\n", filename);
return E_INVALIDARG;
}
*data = buffer;
return S_OK;
}
static HRESULT WINAPI test_d3dinclude_close(ID3DInclude *iface, const void *data)
{
heap_free((void *)data);
return S_OK;
}
static const struct ID3DIncludeVtbl test_d3dinclude_vtbl =
{
test_d3dinclude_open,
test_d3dinclude_close
};
struct test_d3dinclude
{
ID3DInclude ID3DInclude_iface;
};
static void test_D3DX11CompileFromFile(void)
{
struct test_d3dinclude include = {{&test_d3dinclude_vtbl}};
WCHAR filename[MAX_PATH], directory[MAX_PATH];
ID3D10Blob *blob = NULL, *errors = NULL;
CHAR filename_a[MAX_PATH];
HRESULT hr, result;
DWORD len;
static const char ps_code[] =
"#include \"include\\include2.h\"\n"
"\n"
"float4 main() : COLOR\n"
"{\n"
" return light_color;\n"
"}";
static const char include1[] =
"#define LIGHT float4(0.0f, 0.2f, 0.5f, 1.0f)\n";
static const char include1_wrong[] =
"#define LIGHT nope\n";
static const char include2[] =
"#include \"include1.h\"\n"
"float4 light_color = LIGHT;\n";
create_file(L"source.ps", ps_code, strlen(ps_code), filename);
create_directory(L"include");
create_file(L"include\\include1.h", include1_wrong, strlen(include1_wrong), NULL);
create_file(L"include1.h", include1, strlen(include1), NULL);
create_file(L"include\\include2.h", include2, strlen(include2), NULL);
hr = D3DX11CompileFromFileW(filename, NULL, &include.ID3DInclude_iface, "main", "ps_2_0", 0, 0, NULL, &blob, &errors, &result);
todo_wine ok(hr == S_OK && hr == result, "Got hr %#x, result %#x.\n", hr, result);
todo_wine ok(!!blob, "Got unexpected blob.\n");
ok(!errors, "Got unexpected errors.\n");
if (blob)
{
ID3D10Blob_Release(blob);
blob = NULL;
}
    /* Windows always seems to resolve includes from the initial file location
     * instead of using the immediate parent, as would be the case for
     * standard C preprocessor includes. */
hr = D3DX11CompileFromFileW(filename, NULL, NULL, "main", "ps_2_0", 0, 0, NULL, &blob, &errors, &result);
todo_wine ok(hr == S_OK && hr == result, "Got hr %#x, result %#x.\n", hr, result);
todo_wine ok(!!blob, "Got unexpected blob.\n");
ok(!errors, "Got unexpected errors.\n");
if (blob)
{
ID3D10Blob_Release(blob);
blob = NULL;
}
len = WideCharToMultiByte(CP_ACP, 0, filename, -1, NULL, 0, NULL, NULL);
WideCharToMultiByte(CP_ACP, 0, filename, -1, filename_a, len, NULL, NULL);
hr = D3DX11CompileFromFileA(filename_a, NULL, NULL, "main", "ps_2_0", 0, 0, NULL, &blob, &errors, &result);
todo_wine ok(hr == S_OK && hr == result, "Got hr %#x, result %#x.\n", hr, result);
todo_wine ok(!!blob, "Got unexpected blob.\n");
ok(!errors, "Got unexpected errors.\n");
if (blob)
{
ID3D10Blob_Release(blob);
blob = NULL;
}
GetCurrentDirectoryW(MAX_PATH, directory);
SetCurrentDirectoryW(temp_dir);
hr = D3DX11CompileFromFileW(L"source.ps", NULL, NULL, "main", "ps_2_0", 0, 0, NULL, &blob, &errors, &result);
todo_wine ok(hr == S_OK && hr == result, "Got hr %#x, result %#x.\n", hr, result);
todo_wine ok(!!blob, "Got unexpected blob.\n");
ok(!errors, "Got unexpected errors.\n");
if (blob)
{
ID3D10Blob_Release(blob);
blob = NULL;
}
SetCurrentDirectoryW(directory);
delete_file(L"source.ps");
delete_file(L"include\\include1.h");
delete_file(L"include1.h");
delete_file(L"include\\include2.h");
delete_directory(L"include");
}
START_TEST(d3dx11)
{
test_D3DX11CreateAsyncMemoryLoader();
test_D3DX11CreateAsyncFileLoader();
test_D3DX11CreateAsyncResourceLoader();
test_D3DX11CompileFromFile();
}
| 5,785 |
638 | # defines custom classes
class Thingy(object):
def __init__(self, num):
self.number = num
def __str__(self):
return "<Thingy @" + hex(id(self)) + ", number=" + str(self.number) + ">"
class OtherThingy(object):
def __init__(self, num):
self.number = num
def __str__(self):
return "<OtherThingy @" + hex(id(self)) + ", number=" + str(self.number) + ">"
| 170 |
1,192 | <gh_stars>1000+
//==- llvm/Support/ArrayRecycler.h - Recycling of Arrays ---------*- C++ -*-==//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the ArrayRecycler class template which can recycle small
// arrays allocated from one of the allocators in Allocator.h
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_SUPPORT_ARRAYRECYCLER_H
#define LLVM_SUPPORT_ARRAYRECYCLER_H
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/MathExtras.h"
namespace llvm {
/// Recycle small arrays allocated from a BumpPtrAllocator.
///
/// Arrays are allocated in a small number of fixed sizes. For each supported
/// array size, the ArrayRecycler keeps a free list of available arrays.
///
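/// A minimal usage sketch (Node is a hypothetical element type that
/// satisfies the size/alignment static_asserts below):
///
/// \code
///   BumpPtrAllocator Allocator;
///   ArrayRecycler<Node> Recycler;
///   ArrayRecycler<Node>::Capacity Cap = ArrayRecycler<Node>::Capacity::get(16);
///   Node *Array = Recycler.allocate(Cap, Allocator);
///   // ... use Array[0 .. Cap.getSize() - 1] ...
///   Recycler.deallocate(Cap, Array);
///   Recycler.clear(Allocator); // return everything before destruction
/// \endcode
///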
template<class T, size_t Align = AlignOf<T>::Alignment>
class ArrayRecycler {
// The free list for a given array size is a simple singly linked list.
// We can't use iplist or Recycler here since those classes can't be copied.
struct FreeList {
FreeList *Next;
};
static_assert(Align >= AlignOf<FreeList>::Alignment, "Object underaligned");
static_assert(sizeof(T) >= sizeof(FreeList), "Objects are too small");
// Keep a free list for each array size.
SmallVector<FreeList*, 8> Bucket;
// Remove an entry from the free list in Bucket[Idx] and return it.
// Return NULL if no entries are available.
T *pop(unsigned Idx) {
if (Idx >= Bucket.size())
return nullptr;
FreeList *Entry = Bucket[Idx];
if (!Entry)
return nullptr;
Bucket[Idx] = Entry->Next;
return reinterpret_cast<T*>(Entry);
}
// Add an entry to the free list at Bucket[Idx].
void push(unsigned Idx, T *Ptr) {
assert(Ptr && "Cannot recycle NULL pointer");
FreeList *Entry = reinterpret_cast<FreeList*>(Ptr);
if (Idx >= Bucket.size())
Bucket.resize(size_t(Idx) + 1);
Entry->Next = Bucket[Idx];
Bucket[Idx] = Entry;
}
public:
/// The size of an allocated array is represented by a Capacity instance.
///
/// This class is much smaller than a size_t, and it provides methods to work
/// with the set of legal array capacities.
class Capacity {
uint8_t Index;
explicit Capacity(uint8_t idx) : Index(idx) {}
public:
Capacity() : Index(0) {}
/// Get the capacity of an array that can hold at least N elements.
static Capacity get(size_t N) {
return Capacity(N ? Log2_64_Ceil(N) : 0);
}
/// Get the number of elements in an array with this capacity.
size_t getSize() const { return size_t(1u) << Index; }
/// Get the bucket number for this capacity.
unsigned getBucket() const { return Index; }
/// Get the next larger capacity. Large capacities grow exponentially, so
/// this function can be used to reallocate incrementally growing vectors
/// in amortized linear time.
Capacity getNext() const { return Capacity(Index + 1); }
};
~ArrayRecycler() {
// The client should always call clear() so recycled arrays can be returned
// to the allocator.
assert(Bucket.empty() && "Non-empty ArrayRecycler deleted!");
}
/// Release all the tracked allocations to the allocator. The recycler must
/// be free of any tracked allocations before being deleted.
template<class AllocatorType>
void clear(AllocatorType &Allocator) {
for (; !Bucket.empty(); Bucket.pop_back())
while (T *Ptr = pop(Bucket.size() - 1))
Allocator.Deallocate(Ptr);
}
/// Special case for BumpPtrAllocator which has an empty Deallocate()
/// function.
///
/// There is no need to traverse the free lists, pulling all the objects into
/// cache.
void clear(BumpPtrAllocator&) {
Bucket.clear();
}
/// Allocate an array of at least the requested capacity.
///
/// Return an existing recycled array, or allocate one from Allocator if
/// none are available for recycling.
///
template<class AllocatorType>
T *allocate(Capacity Cap, AllocatorType &Allocator) {
// Try to recycle an existing array.
if (T *Ptr = pop(Cap.getBucket()))
return Ptr;
// Nope, get more memory.
return static_cast<T*>(Allocator.Allocate(sizeof(T)*Cap.getSize(), Align));
}
/// Deallocate an array with the specified Capacity.
///
/// Cap must be the same capacity that was given to allocate().
///
void deallocate(Capacity Cap, T *Ptr) {
push(Cap.getBucket(), Ptr);
}
};
} // end llvm namespace
#endif
| 1,522 |
512 | import responses
from binance.spot import Spot as Client
from tests.util import random_str
from urllib.parse import urlencode
from tests.util import mock_http_response
mock_item = {"key_1": "value_1", "key_2": "value_2"}
mock_exception = {"code": -1, "msg": "error message"}
key = random_str()
secret = random_str()
params = {
"asset": "BNB",
"startTime": "1590969041003",
"endTime": "1590969041003",
"size": 10,
"recvWindow": 1000,
}
@mock_http_response(
responses.GET,
"/sapi/v1/margin/interestHistory\\?" + urlencode(params),
mock_item,
200,
)
def test_margin_interest_history():
"""Tests the API endpoint to query margin interest history"""
client = Client(key, secret)
response = client.margin_interest_history(**params)
response.should.equal(mock_item)
| 306 |
2,360 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class Mvdefaults(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/mvdefaults-1.0.tar.gz"
version('1.0', '<KEY>')
variant('foo', values=('a', 'b', 'c'), default=('a', 'b', 'c'),
multi=True, description='')
| 179 |
4,067 | import java.util.ArrayList;
import java.util.Random;
import java.io.*;
class list_java {
public static void main(String[] args) {
ArrayList<Integer> x = new ArrayList<Integer>();
Random rand = new Random();
int n = 10000;
for (int i = 0; i < n; i++) {
x.add(rand.nextInt());
}
for (int i = 0; i < n; i++) {
x.add(rand.nextInt(n), rand.nextInt());
}
for (int i = 0; i < n; i++) {
x.remove(rand.nextInt(n));
}
}
}
| 233 |
1,738 | <reponame>jeikabu/lumberyard<filename>dev/Gems/LmbrCentral/Code/Source/Animation/SimpleAnimationComponent.h
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#pragma once
#include <ICryAnimation.h>
#include <AzCore/Component/TickBus.h>
#include <AzCore/Component/Component.h>
#include <AzCore/Component/TransformBus.h>
#include <AzCore/Math/Transform.h>
#include <AzCore/std/containers/map.h>
#include <AzCore/std/smart_ptr/weak_ptr.h>
#include <LmbrCentral/Rendering/MeshComponentBus.h>
#include <LmbrCentral/Animation/SimpleAnimationComponentBus.h>
namespace LmbrCentral
{
/*!
* \class SimpleAnimator
* \brief Provides animation facilities to both Editor Simple Animation Component and Simple Animation Component
*/
class SimpleAnimator
: public MeshComponentNotificationBus::Handler
, public AZ::TickBus::Handler
, public AZ::TransformNotificationBus::Handler
{
public:
SimpleAnimator()
: m_activeAnimatedLayerManager(nullptr)
{
}
/**
* Activates the Animator to be used for a particular character Instance
* @param characterInstance The character instance for the mesh assigned to the component controlling this Animator
* @param entityId The Entity id of the Component this Animator is servicing
*/
void Activate(ICharacterInstance* characterInstance, AZ::EntityId entityId);
/**
* Starts the indicated animation
* @param animatedLayer An AnimatedLayer that indicates parameters for the animation to be started
* @return A Result indicating whether or not the animation was successfully started
*/
SimpleAnimationComponentRequests::Result StartAnimation(const AnimatedLayer& animatedLayer);
/**
         * Starts animations as configured by the indicated set of AnimatedLayers
         * @param animatedLayer A set of AnimatedLayers that indicates parameters for the animations to be started (keyed on the layer id)
         * @return A Result indicating whether or not the animations were successfully started
*/
SimpleAnimationComponentRequests::Result StartAnimations(const AnimatedLayer::AnimatedLayerSet& animatedLayer);
/**
* Stops animations on all Active animated layers
* @param animatedLayerIds Bitset indicating all layers that have animations that should be stopped
* @param blendOutTime Time that the animations take to blend out
* @return A Result indicating whether the animations were stopped properly or not
*/
SimpleAnimationComponentRequests::Result StopAnimations(const SimpleAnimationComponentRequests::LayerMask& animatedLayerIds, float blendOutTime);
/**
* Stops animations on the indicated layer
* @param animatedLayerId Id of the layer on which animations are to be stopped
* @param blendOutTime Time that the animations take to blend out
* @return A Result indicating whether the animations were stopped properly or not
*/
SimpleAnimationComponentRequests::Result StopAnimation(const AnimatedLayer::LayerId animatedLayerId, float blendOutTime);
/**
* Sets the playback speed for a layer.
* @param animatedLayerId Id of the layer to change.
* @param playbackSpeed (1.0 = normal speed) to set for the specified layer.
* @return A Result indicating whether or not the speed was changed.
*/
SimpleAnimationComponentRequests::Result SetPlaybackSpeed(AnimatedLayer::LayerId animatedLayerId, float playbackSpeed);
/**
         * Sets the playback weight for a layer.
         * @param layerId Id of the layer to change.
         * @param weight Weight [0..1] to set on the specified layer.
         * @return A Result indicating whether or not the weight was changed.
*/
SimpleAnimationComponentRequests::Result SetPlaybackWeight(AnimatedLayer::LayerId layerId, float weight);
/**
* Stops all currently active animations
* @return Result indicating whether animations were stopped properly or not
*/
SimpleAnimationComponentRequests::Result StopAllAnimations();
/**
* Deactivates this animator, decouples from the character instance but not from the entity id
* Disconnects from the Mesh component events bus
*/
void Deactivate();
//////////////////////////////////////////////////////////////////////////
// MeshcomponentEvents Bus Handler implementation
/**
* Bus event received when the mesh attached to the mesh component on this entity is created or changes.
*/
void OnMeshCreated(const AZ::Data::Asset<AZ::Data::AssetData>& asset) override;
/**
* Bus event received when the mesh attached to the mesh component on this entity is destroyed
*/
void OnMeshDestroyed() override;
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
// TickBus
void OnTick(float deltaTime, AZ::ScriptTimePoint time) override;
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////
// Transform notification bus handler
/// Called when the local transform of the entity has changed.
void OnTransformChanged(const AZ::Transform& /*local*/, const AZ::Transform& /*world*/) override;
//////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
/*!
* \class AnimatedLayerManager
* \brief Maintains the set of Active animated layers, is responsible for actually activating animations on the character instance
*/
class AnimatedLayerManager
{
public:
friend void SimpleAnimator::OnTick(float, AZ::ScriptTimePoint);
/**
* Constructor for ActiveAnimatedLayers
* @param characterInstance character instance for the mesh being managed
* @param entityId The Entity id of the Animator this AnimatedLayerManager is servicing
*/
AnimatedLayerManager(ICharacterInstance* characterInstance, AZ::EntityId entityId);
/**
* Adds an animated layer to the active set
* @param animatedLayer The animated layer to be activated
* @return A Result indicating whether the layer was activated or not
*/
SimpleAnimationComponentRequests::Result ActivateAnimatedLayer(const AnimatedLayer& animatedLayer);
/**
* Deactivates the indicated Animated layer
* @param layerId Layer id of layer to be deactivated
* @param blendOutTime Time that the animations take to blend out
* @return A Result indicating whether the layer was deactivated or not
*/
            SimpleAnimationComponentRequests::Result DeactivateAnimatedLayer(AnimatedLayer::LayerId layerId, float blendOutTime);
/**
* Stops animations on all layers for this character
* @return A result indicating whether the animations were stopped or not
*/
SimpleAnimationComponentRequests::Result DeactivateAllAnimatedLayers();
/**
* @param layerId
* @return true if the layer is active.
*/
bool IsLayerActive(AnimatedLayer::LayerId layerId) const;
/**
* Retrieve associated character instance.
*/
ICharacterInstance* GetCharacter() { return m_characterInstance; }
~AnimatedLayerManager();
private:
/**
* Gets an active animated layer for the indicated layer id
* @param layerId Layer id to be fetched
* @return AnimatedLayer if one is active at the provided layer id, null otherwise
*/
const AnimatedLayer* GetActiveAnimatedLayer(AnimatedLayer::LayerId layerId) const;
//! The character instance for the mesh being Animated
ICharacterInstance* m_characterInstance = nullptr;
// The Entity id of the Component this Animator is servicing
AZ::EntityId m_attachedEntityId;
//! Stores the currently active animations
AZStd::map<AnimatedLayer::LayerId, AnimatedLayer> m_activeAnimatedLayers;
};
//////////////////////////////////////////////////////////////////////////
private:
//! Manages and animates active Animated Layers
std::unique_ptr<AnimatedLayerManager> m_activeAnimatedLayerManager = nullptr;
// The Entity id of the Component this Animator is servicing
AZ::EntityId m_attachedEntityId;
//! Stores the location of this entity, is updated whenever the entity moves
AZ::Transform m_currentEntityLocation;
//! Character instance of the mesh being animated
ICharacterInstance* m_meshCharacterInstance = nullptr;
/*!
* Updates the character instance when a new mesh gets attached to the mesh component
* or the attached mesh is removed.
* Is responsible for stopping all currently active animations on the old character instance
* and updating the Animator to refer to a character instance attached to the new mesh.
*/
void UpdateCharacterInstance();
};
class SimpleAnimationComponent
: public AZ::Component
, public SimpleAnimationComponentRequestBus::Handler
, public MeshComponentNotificationBus::Handler
{
public:
friend class EditorSimpleAnimationComponent;
AZ_COMPONENT(SimpleAnimationComponent, SimpleAnimationComponentTypeId);
SimpleAnimationComponent() = default;
//////////////////////////////////////////////////////////////////////////
// AZ::Component interface implementation
void Init() override;
void Activate() override;
void Deactivate() override;
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
// MeshComponentNotificationBus interface implementation
void OnMeshCreated(const AZ::Data::Asset<AZ::Data::AssetData>& asset) override;
void OnMeshDestroyed() override;
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
// SimpleAnimationComponentRequestBus interface implementation
SimpleAnimationComponentRequests::Result StartDefaultAnimations() override;
SimpleAnimationComponentRequests::Result StartAnimation(const AnimatedLayer& animatedLayer) override;
SimpleAnimationComponentRequests::Result StartAnimationSet(const AnimatedLayer::AnimatedLayerSet& animationSet) override;
SimpleAnimationComponentRequests::Result StopAllAnimations() override;
SimpleAnimationComponentRequests::Result StopAnimationsOnLayer(AnimatedLayer::LayerId layerId, float blendOutTime) override;
SimpleAnimationComponentRequests::Result SetPlaybackSpeed(AnimatedLayer::LayerId layerId, float playbackSpeed) override;
SimpleAnimationComponentRequests::Result SetPlaybackWeight(AnimatedLayer::LayerId layerId, float weight) override;
SimpleAnimationComponentRequests::Result StopAnimationsOnLayers(LayerMask layerIds, float blendOutTime) override;
//////////////////////////////////////////////////////////////////////////
protected:
void LinkToCharacterInstance(ICharacterInstance* characterInstance);
static void GetProvidedServices(AZ::ComponentDescriptor::DependencyArrayType& provided)
{
provided.push_back(AZ_CRC("AnimationService", 0x553f5760));
provided.push_back(AZ_CRC("SimpleAnimationService", 0x8710444f));
}
static void GetRequiredServices(AZ::ComponentDescriptor::DependencyArrayType& required)
{
required.push_back(AZ_CRC("SkinnedMeshService", 0xac7cea96));
required.push_back(AZ_CRC("TransformService", 0x8ee22c50));
}
static void GetIncompatibleServices(AZ::ComponentDescriptor::DependencyArrayType& incompatible)
{
incompatible.push_back(AZ_CRC("AnimationService", 0x553f5760));
}
static void Reflect(AZ::ReflectContext* context);
private:
SimpleAnimationComponent(const SimpleAnimationComponent&) = delete;
/*
* Reflects default animation settings for multiple layers as configured in the editor
         * This will be EMPTY post load; only used for reflection
*/
AZStd::list<AnimatedLayer> m_defaultAnimationSettings;
//! Set that stores default animation settings for multiple layers as configured in the editor
AnimatedLayer::AnimatedLayerSet m_defaultAnimLayerSet;
//! Provides animation services to the component
SimpleAnimator m_animator;
//! Tracks whether the mesh asset attached to the skinned mesh is ready for animation. We will queue animation requests until it is
bool m_isMeshAssetReady = false;
//! Stores the animations requested while the skinned mesh asset was unavailable
AZStd::vector<AnimatedLayer> m_animationsQueuedBeforeAssetReady;
};
} // namespace LmbrCentral
| 4,570 |
390 | #
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import time
import tensorflow as tf
import tensorflow.contrib.keras as keras
from tensorflow.python.framework.errors_impl import UnavailableError
#========== MULTI NODE CONFIG ===============#
# Define input flags to identify the job and task
tf.app.flags.DEFINE_string('data_dir', './dataset/imagenet', 'training data directory.')
tf.app.flags.DEFINE_string("job_name", "", "Either 'ps' or 'worker'")
tf.app.flags.DEFINE_integer("task_index", 0, "Index of task within the job")
tf.app.flags.DEFINE_string("ps_hosts", "",
"Comma-separated list of hostname:port pairs")
tf.app.flags.DEFINE_string("worker_hosts", "",
"Comma-separated list of hostname:port pairs")
FLAGS = tf.app.flags.FLAGS
ps_hosts = FLAGS.ps_hosts.split(",")
worker_hosts = FLAGS.worker_hosts.split(",")
# Create a tensorflow cluster
# Replace localhost with the host names if you are running on multiple hosts
# cluster = tf.train.ClusterSpec({"ps": ["localhost:2222"],
# "worker": [ "localhost:2223",
# "localhost:2224",
# "localhost:2225"]})
cluster = tf.train.ClusterSpec({"ps": ps_hosts, "worker": worker_hosts})
# Start the server
server = tf.train.Server(cluster,
job_name=FLAGS.job_name,
task_index=FLAGS.task_index)
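# Example launch (hypothetical script name, hosts and ports; run one process per role):
#   python train_alexnet.py --job_name=ps --task_index=0 \
#       --ps_hosts=host0:2222 --worker_hosts=host1:2223,host2:2224
#   python train_alexnet.py --job_name=worker --task_index=0 \
#       --ps_hosts=host0:2222 --worker_hosts=host1:2223,host2:2224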
N_CATEGORY = 1000
def load_data():
global train_generator
global validation_generator
datagen = keras.preprocessing.image.ImageDataGenerator(data_format='channels_last')
train_generator = datagen.flow_from_directory(os.path.join(FLAGS.data_dir, 'i1k-extracted/train'),
target_size=(227, 227),
batch_size=128,
class_mode='categorical')
validation_generator = datagen.flow_from_directory(os.path.join(FLAGS.data_dir, 'i1k-extracted/val'),
target_size=(227, 227),
batch_size=128,
class_mode='categorical',
shuffle=False)
def create_model():
DROPOUT = 0.5
model_input = keras.layers.Input(shape=(227, 227, 3))
# First convolutional Layer (96x11x11)
z = keras.layers.Conv2D(filters=96, kernel_size=(11, 11), strides=(4, 4), activation="relu")(model_input)
z = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(z)
z = keras.layers.BatchNormalization()(z)
# Second convolutional Layer (256x5x5)
z = keras.layers.ZeroPadding2D(padding=(2, 2))(z)
z = keras.layers.Convolution2D(filters=256, kernel_size=(5, 5), strides=(1, 1), activation="relu")(z)
z = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(z)
z = keras.layers.BatchNormalization()(z)
# Rest 3 convolutional layers
z = keras.layers.ZeroPadding2D(padding=(1, 1))(z)
z = keras.layers.Convolution2D(filters=384, kernel_size=(3, 3), strides=(1, 1), activation="relu")(z)
z = keras.layers.ZeroPadding2D(padding=(1, 1))(z)
z = keras.layers.Convolution2D(filters=384, kernel_size=(3, 3), strides=(1, 1), activation="relu")(z)
z = keras.layers.ZeroPadding2D(padding=(1, 1))(z)
z = keras.layers.Convolution2D(filters=256, kernel_size=(3, 3), strides=(1, 1), activation="relu")(z)
z = keras.layers.MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(z)
z = keras.layers.Flatten()(z)
z = keras.layers.Dense(4096, activation="relu")(z)
z = keras.layers.Dropout(DROPOUT)(z)
z = keras.layers.Dense(4096, activation="relu")(z)
z = keras.layers.Dropout(DROPOUT)(z)
final_dim = 1 if N_CATEGORY == 2 else N_CATEGORY
final_act = "sigmoid" if N_CATEGORY == 2 else "softmax"
model_output = keras.layers.Dense(final_dim, activation=final_act)(z)
model = keras.models.Model(model_input, model_output)
model.summary()
return model
def create_optimizer(model, targets):
WEIGHT_DECAY = 0.0005
MOMENTUM = 0.9
LEARNING_RATE = 0.01
predictions = model.output
loss = tf.reduce_mean(
keras.losses.categorical_crossentropy(targets, predictions))
    # Keras-like learning rate decay function
learning_rate = tf.constant(LEARNING_RATE, dtype=tf.float32)
weight_decay = tf.constant(WEIGHT_DECAY, dtype=tf.float32)
lr_compute_decay = tf.multiply(tf.cast(global_step, dtype=tf.float32), weight_decay)
lr_compute_denominator = tf.add(lr_compute_decay, tf.constant(1, dtype=tf.float32))
lr_compute_multiplier = tf.div(tf.constant(1, dtype=tf.float32), lr_compute_denominator)
lr_operation = tf.multiply(learning_rate, lr_compute_multiplier)
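    # Closed form of the above: lr = LEARNING_RATE / (1 + global_step * WEIGHT_DECAY),
    # i.e. the 1 / (1 + decay * iterations) schedule used by Keras optimizers.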
optimizer = tf.train.MomentumOptimizer(learning_rate=lr_operation, momentum=MOMENTUM)
# Barrier to compute gradients after updating moving avg of batch norm
with tf.control_dependencies(model.updates):
barrier = tf.no_op(name="update_barrier")
with tf.control_dependencies([barrier]):
grads = optimizer.compute_gradients(
loss,
model.trainable_weights)
grad_updates = optimizer.apply_gradients(grads, global_step=global_step)
with tf.control_dependencies([grad_updates]):
train_op = tf.identity(loss, name="train")
accuracy = tf.contrib.metrics.accuracy(labels=tf.argmax(targets, 1),
predictions=tf.argmax(predictions, 1))
return (train_op, loss, predictions, accuracy, optimizer._learning_rate)
# Train the model (a single step)
def train(train_op, global_step, step, accuracy, learning_rate):
log_frequency = 20
start_time = time.time()
batch_x, batch_y = train_generator.next()
    # Perform the operations we defined earlier on this batch
loss_value, step_value = sess.run(
[train_op, global_step],
feed_dict={
model.inputs[0]: batch_x,
targets: batch_y})
if step % log_frequency == 0:
elapsed_time = time.time() - start_time
acc = sess.run(accuracy, feed_dict={model.inputs[0]: batch_x,
targets: batch_y})
lr_val = sess.run(learning_rate)
print("{},".format(time.strftime('%X %x %Z')),
"Step: %d," % step_value,
"Iteration: %2d," % step,
"Cost: %.4f," % loss_value,
"Accuracy: %.16f" % acc,
"AvgTime: %3.2fms" % float(elapsed_time * 1000 / log_frequency),
"Learning rate: %.16f" % lr_val)
def validate(epoch, total_loss, accuracy):
    test_accuracies = []
    test_losses = []
    test_batch_index = 0
    while test_batch_index * 128 < 50000:
        # Fetch a fresh validation batch every step instead of reusing one batch.
        batch_test_x, batch_test_y = validation_generator.next()
        test_batch_loss, test_batch_acc = sess.run([total_loss, accuracy],
feed_dict={
model.inputs[0]: batch_test_x,
targets: batch_test_y})
test_accuracies.append(test_batch_acc)
test_losses.append(test_batch_loss)
test_batch_index += 1
mean_test_accuracy = sum(test_accuracies) / test_batch_index
mean_test_loss = sum(test_losses) / test_batch_index
print('{} Epoch {} ended. '
'Validation loss: {}'.format(time.strftime('%X %x %Z'), epoch, mean_test_loss))
print('Validation accuracy: {}%'.format(mean_test_accuracy))
if FLAGS.job_name == "ps":
server.join()
elif FLAGS.job_name == "worker":
load_data()
# Assign operations to local server
with tf.device(tf.train.replica_device_setter(
worker_device="/job:worker/task:%d" % FLAGS.task_index,
cluster=cluster)):
keras.backend.set_learning_phase(True)
keras.backend.manual_variable_initialization(True)
model = create_model()
targets = tf.placeholder(tf.float32, shape=[None, 1000], name="y-input")
global_step = tf.get_variable('global_step', [],
initializer=tf.constant_initializer(0),
trainable=False)
train_op, total_loss, predictions, accuracy, learning_rate = create_optimizer(model, targets)
init_op = tf.global_variables_initializer()
saver = tf.train.Saver()
sv = tf.train.Supervisor(is_chief=(FLAGS.task_index == 0),
global_step=global_step,
logdir="./output/train_logs",
saver=saver,
save_model_secs=600,
init_op=init_op)
print("Waiting for other servers")
with sv.managed_session(server.target) as sess:
keras.backend.set_session(sess)
step = 0
epoch = 1
while not sv.should_stop() and step < 1000900:
try:
train(train_op, global_step, step, accuracy=accuracy, learning_rate=learning_rate)
step += 1
if step % 10009 == 0:
keras.backend.set_learning_phase(False)
validate(epoch, total_loss, accuracy=accuracy)
epoch += 1
keras.backend.set_learning_phase(True)
except UnavailableError as e:
print('WARNING: {}'.format(e))
sv.stop()
print("done")
| 4,728 |
549 | #include "../data-structures/segment_tree.cpp"
const int ID = 0;
int f(int a, int b) { return a + b; }
struct HLD {
int n, curhead, curloc;
vi sz, head, parent, loc;
vvi adj; segment_tree values;
HLD(int _n) : n(_n), sz(n, 1), head(n),
parent(n, -1), loc(n), adj(n) {
vector<ll> tmp(n, ID); values = segment_tree(tmp); }
void add_edge(int u, int v) {
adj[u].push_back(v); adj[v].push_back(u); }
void update_cost(int u, int v, int c) {
if (parent[v] == u) swap(u, v); assert(parent[u] == v);
values.update(loc[u], c); }
int csz(int u) {
rep(i,0,size(adj[u])) if (adj[u][i] != parent[u])
sz[u] += csz(adj[parent[adj[u][i]] = u][i]);
return sz[u]; }
void part(int u) {
head[u] = curhead; loc[u] = curloc++;
int best = -1;
rep(i,0,size(adj[u]))
if (adj[u][i] != parent[u] &&
(best == -1 || sz[adj[u][i]] > sz[best]))
best = adj[u][i];
if (best != -1) part(best);
rep(i,0,size(adj[u]))
if (adj[u][i] != parent[u] && adj[u][i] != best)
part(curhead = adj[u][i]); }
void build(int r = 0) {
curloc = 0, csz(curhead = r), part(r); }
int lca(int u, int v) {
vi uat, vat; int res = -1;
while (u != -1) uat.push_back(u), u = parent[head[u]];
while (v != -1) vat.push_back(v), v = parent[head[v]];
u = (int)size(uat) - 1, v = (int)size(vat) - 1;
while (u >= 0 && v >= 0 && head[uat[u]] == head[vat[v]])
res = (loc[uat[u]] < loc[vat[v]] ? uat[u] : vat[v]),
u--, v--;
return res; }
int query_upto(int u, int v) { int res = ID;
while (head[u] != head[v])
res = f(res, values.query(loc[head[u]], loc[u]).x),
u = parent[head[u]];
return f(res, values.query(loc[v] + 1, loc[u]).x); }
int query(int u, int v) { int l = lca(u, v);
return f(query_upto(u, l), query_upto(v, l)); } };
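// Usage sketch (hypothetical tree; edge costs are stored on the child
// vertex and combined with f, which is sum here):
//   HLD h(n);
//   // h.add_edge(u, v) for every tree edge, then:
//   h.build(root);
//   h.update_cost(u, v, c);    // set cost of existing edge u-v
//   int best = h.query(u, v);  // combined edge costs on the u-v path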
// vim: cc=60 ts=2 sts=2 sw=2:
| 954 |
335 | {
"word": "Nettle",
"definitions": [
"Irritate or annoy (someone)",
"Sting with nettles."
],
"parts-of-speech": "Verb"
} | 76 |
11,351 | <reponame>LearnJavaByus/eureka
package com.netflix.discovery;
/**
* constants pertaining to property based client configs
*
* @author <NAME>
*/
final class PropertyBasedClientConfigConstants {
static final String CLIENT_REGION_FALLBACK_KEY = "eureka.region";
    // NOTE: all keys are defined before any prefixes are applied
static final String CLIENT_REGION_KEY = "region";
static final String REGISTRATION_ENABLED_KEY = "registration.enabled";
static final String FETCH_REGISTRY_ENABLED_KEY = "shouldFetchRegistry";
static final String SHOULD_ENFORCE_FETCH_REGISTRY_AT_INIT_KEY = "shouldEnforceFetchRegistryAtInit";
static final String REGISTRY_REFRESH_INTERVAL_KEY = "client.refresh.interval";
static final String REGISTRATION_REPLICATION_INTERVAL_KEY = "appinfo.replicate.interval";
static final String INITIAL_REGISTRATION_REPLICATION_DELAY_KEY = "appinfo.initial.replicate.time";
static final String HEARTBEAT_THREADPOOL_SIZE_KEY = "client.heartbeat.threadPoolSize";
static final String HEARTBEAT_BACKOFF_BOUND_KEY = "client.heartbeat.exponentialBackOffBound";
static final String CACHEREFRESH_THREADPOOL_SIZE_KEY = "client.cacheRefresh.threadPoolSize";
static final String CACHEREFRESH_BACKOFF_BOUND_KEY = "client.cacheRefresh.exponentialBackOffBound";
static final String SHOULD_UNREGISTER_ON_SHUTDOWN_KEY = "shouldUnregisterOnShutdown";
static final String SHOULD_ONDEMAND_UPDATE_STATUS_KEY = "shouldOnDemandUpdateStatusChange";
static final String SHOULD_ENFORCE_REGISTRATION_AT_INIT = "shouldEnforceRegistrationAtInit";
static final String SHOULD_DISABLE_DELTA_KEY = "disableDelta";
static final String SHOULD_FETCH_REMOTE_REGION_KEY = "fetchRemoteRegionsRegistry";
static final String SHOULD_FILTER_ONLY_UP_INSTANCES_KEY = "shouldFilterOnlyUpInstances";
static final String FETCH_SINGLE_VIP_ONLY_KEY = "registryRefreshSingleVipAddress";
static final String CLIENT_ENCODER_NAME_KEY = "encoderName";
static final String CLIENT_DECODER_NAME_KEY = "decoderName";
static final String CLIENT_DATA_ACCEPT_KEY = "clientDataAccept";
static final String BACKUP_REGISTRY_CLASSNAME_KEY = "backupregistry";
static final String SHOULD_PREFER_SAME_ZONE_SERVER_KEY = "preferSameZone";
static final String SHOULD_ALLOW_REDIRECTS_KEY = "allowRedirects";
static final String SHOULD_USE_DNS_KEY = "shouldUseDns";
static final String EUREKA_SERVER_URL_POLL_INTERVAL_KEY = "serviceUrlPollIntervalMs";
static final String EUREKA_SERVER_URL_CONTEXT_KEY = "eurekaServer.context";
static final String EUREKA_SERVER_FALLBACK_URL_CONTEXT_KEY = "context";
static final String EUREKA_SERVER_PORT_KEY = "eurekaServer.port";
static final String EUREKA_SERVER_FALLBACK_PORT_KEY = "port";
static final String EUREKA_SERVER_DNS_NAME_KEY = "eurekaServer.domainName";
static final String EUREKA_SERVER_FALLBACK_DNS_NAME_KEY = "domainName";
static final String EUREKA_SERVER_PROXY_HOST_KEY = "eurekaServer.proxyHost";
static final String EUREKA_SERVER_PROXY_PORT_KEY = "eurekaServer.proxyPort";
static final String EUREKA_SERVER_PROXY_USERNAME_KEY = "eurekaServer.proxyUserName";
static final String EUREKA_SERVER_PROXY_PASSWORD_KEY = "eurekaServer.proxyPassword";
static final String EUREKA_SERVER_GZIP_CONTENT_KEY = "eurekaServer.gzipContent";
static final String EUREKA_SERVER_READ_TIMEOUT_KEY = "eurekaServer.readTimeout";
static final String EUREKA_SERVER_CONNECT_TIMEOUT_KEY = "eurekaServer.connectTimeout";
static final String EUREKA_SERVER_MAX_CONNECTIONS_KEY = "eurekaServer.maxTotalConnections";
static final String EUREKA_SERVER_MAX_CONNECTIONS_PER_HOST_KEY = "eurekaServer.maxConnectionsPerHost";
// yeah the case on eurekaserver is different, backwards compatibility requirements :(
static final String EUREKA_SERVER_CONNECTION_IDLE_TIMEOUT_KEY = "eurekaserver.connectionIdleTimeoutInSeconds";
static final String SHOULD_LOG_DELTA_DIFF_KEY = "printDeltaFullDiff";
static final String CONFIG_DOLLAR_REPLACEMENT_KEY = "dollarReplacement";
static final String CONFIG_ESCAPE_CHAR_REPLACEMENT_KEY = "escapeCharReplacement";
// additional namespaces
static final String CONFIG_EXPERIMENTAL_PREFIX = "experimental";
static final String CONFIG_AVAILABILITY_ZONE_PREFIX = "availabilityZones";
static final String CONFIG_EUREKA_SERVER_SERVICE_URL_PREFIX = "serviceUrl";
static class Values {
static final String CONFIG_DOLLAR_REPLACEMENT = "_-";
static final String CONFIG_ESCAPE_CHAR_REPLACEMENT = "__";
static final String DEFAULT_CLIENT_REGION = "us-east-1";
static final int DEFAULT_EXECUTOR_THREAD_POOL_SIZE = 5;
static final int DEFAULT_EXECUTOR_THREAD_POOL_BACKOFF_BOUND = 10;
}
}
| 1,678 |
2,151 | <reponame>zipated/src<gh_stars>1000+
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/renderer_host/media/audio_input_delegate_impl.h"
#include <utility>
#include "base/bind.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/memory/weak_ptr.h"
#include "base/sequence_checker.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_piece.h"
#include "base/strings/stringprintf.h"
#include "content/browser/media/capture/desktop_capture_device_uma_types.h"
#include "content/browser/media/capture/web_contents_audio_input_stream.h"
#include "content/browser/media/media_internals.h"
#include "content/browser/renderer_host/media/audio_input_device_manager.h"
#include "content/browser/renderer_host/media/media_stream_manager.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/web_contents_media_capture_id.h"
#include "media/audio/audio_input_controller.h"
#include "media/audio/audio_input_sync_writer.h"
#include "media/audio/audio_logging.h"
#include "media/audio/audio_manager.h"
#include "media/base/media_switches.h"
#include "media/base/user_input_monitor.h"
namespace content {
namespace {
void NotifyProcessHostStreamAdded(int render_process_id) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
auto* process_host = RenderProcessHost::FromID(render_process_id);
if (process_host)
process_host->OnMediaStreamAdded();
}
void NotifyProcessHostStreamRemoved(int render_process_id) {
DCHECK_CURRENTLY_ON(BrowserThread::UI);
auto* process_host = RenderProcessHost::FromID(render_process_id);
if (process_host)
process_host->OnMediaStreamRemoved();
}
// Safe to call from any thread.
void LogMessage(int stream_id, const std::string& message) {
const std::string out_message =
base::StringPrintf("[stream_id=%d] %s", stream_id, message.c_str());
MediaStreamManager::SendMessageToNativeLog(out_message);
DVLOG(1) << out_message;
}
} // namespace
class AudioInputDelegateImpl::ControllerEventHandler
: public media::AudioInputController::EventHandler {
public:
ControllerEventHandler(int stream_id,
base::WeakPtr<AudioInputDelegateImpl> weak_delegate)
: stream_id_(stream_id), weak_delegate_(std::move(weak_delegate)) {}
void OnCreated(bool initially_muted) override {
BrowserThread::PostTask(
BrowserThread::IO, FROM_HERE,
base::BindOnce(&AudioInputDelegateImpl::SendCreatedNotification,
weak_delegate_, initially_muted));
}
void OnError(media::AudioInputController::ErrorCode error_code) override {
// To ensure that the error is logged even during the destruction sequence,
// we log it here.
LogMessage(stream_id_,
base::StringPrintf("AIC reports error_code=%d", error_code));
BrowserThread::PostTask(
BrowserThread::IO, FROM_HERE,
base::BindOnce(&AudioInputDelegateImpl::OnError, weak_delegate_));
}
void OnLog(base::StringPiece message) override {
LogMessage(stream_id_, message.as_string());
}
void OnMuted(bool is_muted) override {
LogMessage(stream_id_, is_muted ? "OnMuted: State changed to muted"
: "OnMuted: State changed to not muted");
BrowserThread::PostTask(BrowserThread::IO, FROM_HERE,
base::BindOnce(&AudioInputDelegateImpl::OnMuted,
weak_delegate_, is_muted));
}
private:
const int stream_id_;
// Bound to the IO thread.
const base::WeakPtr<AudioInputDelegateImpl> weak_delegate_;
};
std::unique_ptr<media::AudioInputDelegate> AudioInputDelegateImpl::Create(
media::AudioManager* audio_manager,
AudioMirroringManager* mirroring_manager,
media::UserInputMonitor* user_input_monitor,
int render_process_id,
int render_frame_id,
AudioInputDeviceManager* audio_input_device_manager,
media::mojom::AudioLogPtr audio_log,
AudioInputDeviceManager::KeyboardMicRegistration keyboard_mic_registration,
uint32_t shared_memory_count,
int stream_id,
int session_id,
bool automatic_gain_control,
const media::AudioParameters& audio_parameters,
EventHandler* subscriber) {
// Check if we have the permission to open the device and which device to use.
const MediaStreamDevice* device =
audio_input_device_manager->GetOpenedDeviceById(session_id);
if (!device) {
LogMessage(stream_id, "Permission for stream not granted.");
DLOG(WARNING) << "No permission has been granted to input stream with "
<< "session_id=" << session_id;
return nullptr;
}
media::AudioParameters possibly_modified_parameters = audio_parameters;
if (base::CommandLine::ForCurrentProcess()->HasSwitch(
switches::kUseFakeDeviceForMediaStream)) {
possibly_modified_parameters.set_format(media::AudioParameters::AUDIO_FAKE);
}
auto foreign_socket = std::make_unique<base::CancelableSyncSocket>();
std::unique_ptr<media::AudioInputSyncWriter> writer =
media::AudioInputSyncWriter::Create(
base::BindRepeating(&LogMessage, stream_id), shared_memory_count,
possibly_modified_parameters, foreign_socket.get());
if (!writer) {
LogMessage(stream_id, "Failed to set up sync writer.");
return nullptr;
}
LogMessage(
stream_id,
base::StringPrintf("OnCreateStream(render_frame_id=%d, session_id=%d): "
"device_name=%s, AGC=%d",
render_frame_id, session_id, device->name.c_str(),
automatic_gain_control));
return base::WrapUnique(new AudioInputDelegateImpl(
audio_manager, mirroring_manager, user_input_monitor,
possibly_modified_parameters, render_process_id, std::move(audio_log),
std::move(keyboard_mic_registration), stream_id, automatic_gain_control,
subscriber, device, std::move(writer), std::move(foreign_socket)));
}
AudioInputDelegateImpl::AudioInputDelegateImpl(
media::AudioManager* audio_manager,
AudioMirroringManager* mirroring_manager,
media::UserInputMonitor* user_input_monitor,
const media::AudioParameters& audio_parameters,
int render_process_id,
media::mojom::AudioLogPtr audio_log,
AudioInputDeviceManager::KeyboardMicRegistration keyboard_mic_registration,
int stream_id,
bool automatic_gain_control,
EventHandler* subscriber,
const MediaStreamDevice* device,
std::unique_ptr<media::AudioInputSyncWriter> writer,
std::unique_ptr<base::CancelableSyncSocket> foreign_socket)
: subscriber_(subscriber),
controller_event_handler_(),
writer_(std::move(writer)),
foreign_socket_(std::move(foreign_socket)),
audio_log_(std::move(audio_log)),
controller_(),
keyboard_mic_registration_(std::move(keyboard_mic_registration)),
stream_id_(stream_id),
render_process_id_(render_process_id),
weak_factory_(this) {
// Prevent process backgrounding while audio input is active:
BrowserThread::PostTask(
BrowserThread::UI, FROM_HERE,
base::BindOnce(&NotifyProcessHostStreamAdded, render_process_id_));
controller_event_handler_ = std::make_unique<ControllerEventHandler>(
stream_id_, weak_factory_.GetWeakPtr());
const std::string& device_id = device->id;
if (WebContentsMediaCaptureId::Parse(device_id, nullptr)) {
// For MEDIA_DESKTOP_AUDIO_CAPTURE, the source is selected from picker
// window, we do not mute the source audio.
// For MEDIA_TAB_AUDIO_CAPTURE, the probable use case is Cast, we mute
// the source audio.
// TODO(qiangchen): Analyze audio constraints to make a duplicating or
// diverting decision. It would give web developer more flexibility.
controller_ = media::AudioInputController::CreateForStream(
audio_manager->GetTaskRunner(), controller_event_handler_.get(),
WebContentsAudioInputStream::Create(
device_id, audio_parameters, audio_manager->GetWorkerTaskRunner(),
mirroring_manager),
writer_.get(), user_input_monitor);
DCHECK(controller_);
// Only count for captures from desktop media picker dialog.
if (device->type == MEDIA_DESKTOP_AUDIO_CAPTURE)
IncrementDesktopCaptureCounter(TAB_AUDIO_CAPTURER_CREATED);
} else {
controller_ = media::AudioInputController::Create(
audio_manager, controller_event_handler_.get(), writer_.get(),
user_input_monitor, audio_parameters, device_id,
automatic_gain_control);
// Only count for captures from desktop media picker dialog and system loop
// back audio.
if (device->type == MEDIA_DESKTOP_AUDIO_CAPTURE &&
(media::AudioDeviceDescription::IsLoopbackDevice(device_id))) {
IncrementDesktopCaptureCounter(SYSTEM_LOOPBACK_AUDIO_CAPTURER_CREATED);
}
}
DCHECK(controller_);
audio_log_->OnCreated(audio_parameters, device_id);
}
AudioInputDelegateImpl::~AudioInputDelegateImpl() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
audio_log_->OnClosed();
LogMessage(stream_id_, "Closing stream");
BrowserThread::PostTask(
BrowserThread::UI, FROM_HERE,
base::BindOnce(&NotifyProcessHostStreamRemoved, render_process_id_));
// We pass |controller_event_handler_| and |writer_| in here to make sure they
// stay alive until |controller_| has finished closing.
controller_->Close(base::BindOnce(
[](int stream_id, std::unique_ptr<ControllerEventHandler>,
std::unique_ptr<media::AudioInputSyncWriter>) {
LogMessage(stream_id, "Stream is now closed");
},
stream_id_, std::move(controller_event_handler_), std::move(writer_)));
}
int AudioInputDelegateImpl::GetStreamId() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
return stream_id_;
}
void AudioInputDelegateImpl::OnRecordStream() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
LogMessage(stream_id_, "OnRecordStream");
controller_->Record();
audio_log_->OnStarted();
}
void AudioInputDelegateImpl::OnSetVolume(double volume) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK_GE(volume, 0);
DCHECK_LE(volume, 1);
controller_->SetVolume(volume);
audio_log_->OnSetVolume(volume);
}
void AudioInputDelegateImpl::OnSetOutputDeviceForAec(
const std::string& raw_output_device_id) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
controller_->SetOutputDeviceForAec(raw_output_device_id);
audio_log_->OnLogMessage("SetOutputDeviceForAec");
}
void AudioInputDelegateImpl::SendCreatedNotification(bool initially_muted) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(foreign_socket_);
subscriber_->OnStreamCreated(stream_id_, writer_->TakeSharedMemoryRegion(),
std::move(foreign_socket_), initially_muted);
}
void AudioInputDelegateImpl::OnMuted(bool is_muted) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
subscriber_->OnMuted(stream_id_, is_muted);
}
void AudioInputDelegateImpl::OnError() {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
audio_log_->OnError();
subscriber_->OnStreamError(stream_id_);
}
} // namespace content
| 4,131 |
2,151 | <gh_stars>1000+
import sys
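# Bchr(value) mirrors what indexing a byte string yields on the running
# interpreter: a 1-character str on Python 2, the int value itself on Python 3.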
if sys.version_info[0] == 2:
def Bchr(value):
return chr(value)
else:
def Bchr(value):
return value
| 75 |
2,113 | //-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/input/razerHydra/razerHydraData.h"
#include "platform/input/razerHydra/razerHydraUtil.h"
RazerHyrdaControllerData::RazerHyrdaControllerData()
{
reset();
}
void RazerHyrdaControllerData::reset()
{
mDataSet = false;
mShoulder = false;
mThumb = false;
mStart = false;
mButton1 = false;
mButton2 = false;
mButton3 = false;
mButton4 = false;
mIsDocked = false;
}
void RazerHyrdaControllerData::setData(const sixenseControllerData& data, const F32& maxAxisRadius)
{
// Controller position
RazerHydraUtil::convertPosition(data.pos, mRawPos[0], mRawPos[1], mRawPos[2]);
mPos[0] = (S32)mFloor(mRawPos[0]);
mPos[1] = (S32)mFloor(mRawPos[1]);
mPos[2] = (S32)mFloor(mRawPos[2]);
mPosPoint.set(mPos[0], mPos[1], mPos[2]);
// Controller rotation
RazerHydraUtil::convertRotation(data.rot_mat, mRot);
mRotQuat.set(mRot);
// Controller rotation as axis, but only if not docked
if(!data.is_docked)
{
RazerHydraUtil::calculateAxisRotation(mRot, maxAxisRadius, mRotAxis);
}
else
{
mRotAxis.x = 0.0f;
mRotAxis.y = 0.0f;
}
// Thumb stick
mThumbStick[0] = data.joystick_x;
mThumbStick[1] = data.joystick_y;
// Trigger
mTrigger = data.trigger;
   // Buttons
mShoulder = data.buttons & SIXENSE_BUTTON_BUMPER;
mThumb = data.buttons & SIXENSE_BUTTON_JOYSTICK;
mStart = data.buttons & SIXENSE_BUTTON_START;
mButton1 = data.buttons & SIXENSE_BUTTON_1;
mButton2 = data.buttons & SIXENSE_BUTTON_2;
mButton3 = data.buttons & SIXENSE_BUTTON_3;
mButton4 = data.buttons & SIXENSE_BUTTON_4;
// Other data
mIsDocked = data.is_docked;
// Store the current sequence number
mSequenceNum = data.sequence_number;
mDataSet = true;
}
U32 RazerHyrdaControllerData::compare(RazerHyrdaControllerData* other)
{
S32 result = DIFF_NONE;
// Check position
if(mDataSet)
{
if(mPos[0] != other->mPos[0])
result |= DIFF_POSX;
if(mPos[1] != other->mPos[1])
result |= DIFF_POSY;
if(mPos[2] != other->mPos[2])
result |= DIFF_POSZ;
}
else
{
result |= DIFF_POS;
}
// Check rotation
if(mRotQuat != other->mRotQuat || !mDataSet)
{
result |= DIFF_ROT;
}
// Check rotation as axis
if(mRotAxis.x != other->mRotAxis.x || !mDataSet)
{
result |= DIFF_ROTAXISX;
}
if(mRotAxis.y != other->mRotAxis.y || !mDataSet)
{
result |= DIFF_ROTAXISY;
}
// Check thumb stick
if(mThumbStick[0] != other->mThumbStick[0] || !mDataSet)
{
result |= DIFF_AXISX;
}
if(mThumbStick[1] != other->mThumbStick[1] || !mDataSet)
{
result |= DIFF_AXISY;
}
// Check trigger
if(mTrigger != other->mTrigger || !mDataSet)
{
result |= DIFF_TRIGGER;
}
// Check buttons
if(mShoulder != other->mShoulder)
{
result |= DIFF_BUTTON_SHOULDER;
}
if(mThumb != other->mThumb)
{
result |= DIFF_BUTTON_THUMB;
}
if(mStart != other->mStart)
{
result |= DIFF_BUTTON_START;
}
if(mButton1 != other->mButton1)
{
result |= DIFF_BUTTON1;
}
if(mButton2 != other->mButton2)
{
result |= DIFF_BUTTON2;
}
if(mButton3 != other->mButton3)
{
result |= DIFF_BUTTON3;
}
if(mButton4 != other->mButton4)
{
result |= DIFF_BUTTON4;
}
return result;
}
U32 RazerHyrdaControllerData::compareMeta(RazerHyrdaControllerData* other)
{
S32 result = DIFF_NONE;
if(mIsDocked != other->mIsDocked || !mDataSet)
{
result |= METADIFF_DOCKED;
}
return result;
}
| 1,978 |
318 | # encoding: utf-8
def main_hrun():
""" API test: parse command line options and run commands.
"""
import argparse
from httprunner import logger
from httprunner.__about__ import __description__, __version__
from httprunner.api import HttpRunner
from httprunner.compat import is_py2
from httprunner.utils import (create_scaffold, get_python2_retire_msg,
prettify_json_file, validate_json_file)
parser = argparse.ArgumentParser(description=__description__)
parser.add_argument(
'-V', '--version', dest='version', action='store_true',
help="show version")
parser.add_argument(
'testcase_paths', nargs='*',
help="testcase file path")
parser.add_argument(
'--log-level', default='INFO',
help="Specify logging level, default is INFO.")
parser.add_argument(
'--log-file',
help="Write logs to specified file path.")
parser.add_argument(
'--dot-env-path',
help="Specify .env file path, which is useful for keeping sensitive data.")
parser.add_argument(
'--report-template',
help="specify report template path.")
parser.add_argument(
'--report-dir',
help="specify report save directory.")
parser.add_argument(
'--failfast', action='store_true', default=False,
help="Stop the test run on the first error or failure.")
parser.add_argument(
'--save-tests', action='store_true', default=False,
help="Save loaded tests and parsed tests to JSON file.")
parser.add_argument(
'--startproject',
help="Specify new project name.")
parser.add_argument(
'--validate', nargs='*',
help="Validate JSON testcase format.")
parser.add_argument(
'--prettify', nargs='*',
help="Prettify JSON testcase format.")
args = parser.parse_args()
logger.setup_logger(args.log_level, args.log_file)
if is_py2:
logger.log_warning(get_python2_retire_msg())
if args.version:
logger.color_print("{}".format(__version__), "GREEN")
exit(0)
if args.validate:
validate_json_file(args.validate)
exit(0)
if args.prettify:
prettify_json_file(args.prettify)
exit(0)
project_name = args.startproject
if project_name:
create_scaffold(project_name)
exit(0)
runner = HttpRunner(
failfast=args.failfast,
save_tests=args.save_tests,
report_template=args.report_template,
report_dir=args.report_dir
)
try:
for path in args.testcase_paths:
runner.run(path, dot_env_path=args.dot_env_path)
except Exception:
logger.log_error("!!!!!!!!!! exception stage: {} !!!!!!!!!!".format(runner.exception_stage))
raise
return 0
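# Illustrative invocations of this entry point (assuming it is exposed as the
# `hrun` console script; the file paths below are placeholders, and each flag
# maps to an argparse option defined above):
#
#   hrun testcases/demo.yml --log-level DEBUG --report-dir reports/
#   hrun --validate testcases/demo.yml
#   hrun --startproject demo_project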
def main_locust():
""" Performance test with locust: parse command line options and run commands.
"""
# monkey patch ssl at beginning to avoid RecursionError when running locust.
from gevent import monkey; monkey.patch_ssl()
import multiprocessing
import sys
from httprunner import logger
try:
from httprunner import locusts
except ImportError:
msg = "Locust is not installed, install first and try again.\n"
msg += "install command: pip install locustio"
print(msg)
exit(1)
sys.argv[0] = 'locust'
if len(sys.argv) == 1:
sys.argv.extend(["-h"])
if sys.argv[1] in ["-h", "--help", "-V", "--version"]:
locusts.start_locust_main()
sys.exit(0)
# set logging level
if "-L" in sys.argv:
loglevel_index = sys.argv.index('-L') + 1
elif "--loglevel" in sys.argv:
loglevel_index = sys.argv.index('--loglevel') + 1
else:
loglevel_index = None
if loglevel_index and loglevel_index < len(sys.argv):
loglevel = sys.argv[loglevel_index]
else:
# default
loglevel = "WARNING"
logger.setup_logger(loglevel)
# get testcase file path
try:
if "-f" in sys.argv:
testcase_index = sys.argv.index('-f') + 1
elif "--locustfile" in sys.argv:
testcase_index = sys.argv.index('--locustfile') + 1
else:
testcase_index = None
assert testcase_index and testcase_index < len(sys.argv)
except AssertionError:
print("Testcase file is not specified, exit.")
sys.exit(1)
testcase_file_path = sys.argv[testcase_index]
sys.argv[testcase_index] = locusts.parse_locustfile(testcase_file_path)
if "--processes" in sys.argv:
""" locusts -f locustfile.py --processes 4
"""
if "--no-web" in sys.argv:
logger.log_error("conflict parameter args: --processes & --no-web. \nexit.")
sys.exit(1)
processes_index = sys.argv.index('--processes')
processes_count_index = processes_index + 1
if processes_count_index >= len(sys.argv):
""" do not specify processes count explicitly
locusts -f locustfile.py --processes
"""
processes_count = multiprocessing.cpu_count()
logger.log_warning("processes count not specified, use {} by default.".format(processes_count))
else:
try:
""" locusts -f locustfile.py --processes 4 """
processes_count = int(sys.argv[processes_count_index])
sys.argv.pop(processes_count_index)
except ValueError:
""" locusts -f locustfile.py --processes -P 8888 """
processes_count = multiprocessing.cpu_count()
logger.log_warning("processes count not specified, use {} by default.".format(processes_count))
sys.argv.pop(processes_index)
locusts.run_locusts_with_processes(sys.argv, processes_count)
else:
locusts.start_locust_main()
| 2,613 |
841 | <filename>resteasy-core-spi/src/main/java/org/jboss/resteasy/spi/MarshalledEntity.java
package org.jboss.resteasy.spi;
/**
* Allows you to access the entity's raw bytes as well as the marshalled object.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @version $Revision: 1 $
*/
public interface MarshalledEntity<T>
{
byte[] getMarshalledBytes();
T getEntity();
}
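// Usage sketch (illustrative, not part of this file): a JAX-RS resource method can
// declare a MarshalledEntity parameter to receive both the raw request bytes and the
// unmarshalled object; the Customer type and annotations below are assumed examples.
//
//   @POST
//   @Consumes("application/json")
//   public void create(MarshalledEntity<Customer> entity) {
//       byte[] raw = entity.getMarshalledBytes(); // e.g. to verify a signature
//       Customer customer = entity.getEntity();
//   }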
| 134 |
765 | <reponame>hyu-iot/gem5<filename>src/dev/arm/VExpressFastmodel.py
# Copyright 2019 Google, Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from m5.objects.FastModelGIC import FastModelGIC, SCFastModelGIC
from m5.objects.Gic import ArmSPI
from m5.objects.RealView import VExpress_GEM5_Base, HDLcd
from m5.objects.SubSystem import SubSystem
class VExpressFastmodel(VExpress_GEM5_Base):
gic = FastModelGIC(
sc_gic=SCFastModelGIC(
reg_base=0x2c000000,
reg_base_per_redistributor="0.0.0.0=0x2c010000",
spi_count=988,
))
hdlcd = HDLcd(
pxl_clk=VExpress_GEM5_Base.dcc.osc_pxl, pio_addr=0x2b000000,
interrupt=ArmSPI(num=95))
# Remove original timer to prevent from possible conflict with Fastmodel
# timer.
generic_timer = SubSystem()
generic_timer_mem = SubSystem()
sys_counter = SubSystem()
def _on_chip_devices(self):
return [
self.gic,
self.hdlcd,
self.system_watchdog,
self.trusted_watchdog,
]
def setupBootLoader(self, cur_sys, loc, boot_loader=None):
if boot_loader is None:
boot_loader = [ loc('boot_v2.arm64') ]
super().setupBootLoader(cur_sys, boot_loader)
| 955 |
1,720 | #pragma once
#include <QAbstractItemModel>
#include <QModelIndex>
#include <QVariant>
#include <QWidget>
#include "FrameDump.h"
#include <QMetaObject>
class GsPacketData;
class PacketTreeModel : public QAbstractItemModel
{
Q_OBJECT
public:
explicit PacketTreeModel(QWidget* = nullptr);
~PacketTreeModel();
struct DrawKickIndexInfo
{
uint32 parentIndex;
uint32 childIndex;
uint32 cmdIndex;
};
QVariant data(const QModelIndex&, int) const override;
Qt::ItemFlags flags(const QModelIndex&) const override;
QVariant headerData(int, Qt::Orientation, int = Qt::DisplayRole) const override;
QModelIndex index(int, int, const QModelIndex& = QModelIndex()) const override;
QModelIndex parent(const QModelIndex&) const override;
int rowCount(const QModelIndex& = QModelIndex()) const override;
int columnCount(const QModelIndex& = QModelIndex()) const override;
void setupModelData(CFrameDump&);
const std::vector<DrawKickIndexInfo>& GetDrawKickIndexes();
private:
GsPacketData* m_rootItem;
std::vector<DrawKickIndexInfo> m_drawKickIndexInfo;
};
| 360 |
16,989 | // Copyright 2021 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.bazel.bzlmod;
import com.google.auto.value.AutoValue;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.packages.Attribute;
import net.starlark.java.annot.StarlarkBuiltin;
import net.starlark.java.eval.StarlarkValue;
import net.starlark.java.syntax.Location;
/**
* Represents a tag class, which is a "class" of {@link Tag}s that share the same attribute schema.
*/
@StarlarkBuiltin(name = "tag_class", doc = "Defines a schema of attributes for a tag.")
@AutoValue
public abstract class TagClass implements StarlarkValue {
/** The list of attributes of this tag class. */
public abstract ImmutableList<Attribute> getAttributes();
/** Documentation about this tag class. */
public abstract String getDoc();
/** The Starlark code location where this tag class was defined. */
public abstract Location getLocation();
/**
* A mapping from the <em>public</em> name of an attribute to the position of said attribute in
* {@link #getAttributes}.
*/
public abstract ImmutableMap<String, Integer> getAttributeIndices();
public static TagClass create(
ImmutableList<Attribute> attributes, String doc, Location location) {
ImmutableMap.Builder<String, Integer> attributeIndicesBuilder =
ImmutableMap.builderWithExpectedSize(attributes.size());
for (int i = 0; i < attributes.size(); i++) {
attributeIndicesBuilder.put(attributes.get(i).getPublicName(), i);
}
return new AutoValue_TagClass(attributes, doc, location, attributeIndicesBuilder.build());
}
}
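// Starlark-side sketch of what this class models (illustrative names, not taken from
// this file): a tag class is declared with tag_class() and attached to a module
// extension, after which MODULE.bazel files can emit tags of that class.
//
//   download = tag_class(
//       attrs = {"name": attr.string(), "sha256": attr.string()},
//       doc = "Declares one archive to download.",
//   )
//   my_ext = module_extension(
//       implementation = _impl,
//       tag_classes = {"download": download},
//   )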
| 644 |
5,102 | package com.nepxion.discovery.common.entity;
/**
* <p>Title: Nepxion Discovery</p>
* <p>Description: Nepxion Discovery</p>
* <p>Copyright: Copyright (c) 2017-2050</p>
* <p>Company: Nepxion</p>
* @author <NAME>
* @version 1.0
*/
import com.nepxion.discovery.common.constant.DiscoveryConstant;
public enum DiscoveryType {
NACOS(DiscoveryConstant.NACOS),
CONSUL(DiscoveryConstant.CONSUL),
EUREKA(DiscoveryConstant.EUREKA),
ZOOKEEPER(DiscoveryConstant.ZOOKEEPER);
private String value;
private DiscoveryType(String value) {
this.value = value;
}
public String getValue() {
return value;
}
public static DiscoveryType fromString(String value) {
for (DiscoveryType type : DiscoveryType.values()) {
if (type.getValue().equalsIgnoreCase(value)) {
return type;
}
}
throw new IllegalArgumentException("No matched type with value=" + value);
}
@Override
public String toString() {
return value;
}
} | 425 |
5,169 | {
"name": "KRSFakeNavigationBar",
"version": "1.0.0",
"summary": "A Fake Navigation Bar For Each View Controller.",
"description": "A fake navigation bar for each view controller, so that you can customlize nav bar style on different screen.",
"homepage": "https://github.com/karosLi/KRSFakeNavigationBar",
"license": "MIT",
"authors": {
"karosLi": "<EMAIL>"
},
"platforms": {
"ios": "7.0"
},
"source": {
"git": "https://github.com/karosLi/KRSFakeNavigationBar.git",
"tag": "1.0.0"
},
"source_files": [
"KRSFakeNavigationBar",
"KRSFakeNavigationBar/**/*.{h,m}"
],
"exclude_files": "Classes/Exclude",
"frameworks": [
"Foundation",
"UIKit"
],
"libraries": "objc"
}
| 292 |
956 | from trex_stl_lib.api import *
import argparse
import os
class STLS1(object):
def create_stream (self):
# Teredo Ipv6 over Ipv4
pkt = Ether()/IP(src="192.168.127.12",dst="172.16.31.10")/UDP(dport=3797,sport=3544)/IPv6(dst="2001:0:4137:9350:8000:f12a:b9c8:2815",src="fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b")/UDP(dport=12,sport=1025)/ICMPv6Unknown()
vm = STLScVmRaw( [
# tuple gen for inner Ipv6
STLVmTupleGen ( ip_min="192.168.127.12", ip_max="172.16.31.10",
port_min=1025, port_max=65535,
name="tuple"), # define tuple gen
STLVmWrFlowVar (fv_name="tuple.ip", pkt_offset= "IPv6.src",offset_fixup=12 ), # write ip to packet IPv6.src to LSB
STLVmWrFlowVar (fv_name="tuple.port", pkt_offset= "UDP:1.sport" ) #write udp.port (after ipv6)
]
)
# burst of 100 packets
return STLStream(packet = STLPktBuilder(pkt = pkt ,vm = vm),
mode = STLTXSingleBurst( pps = 1, total_pkts = 17) )
def get_streams (self, tunables, **kwargs):
parser = argparse.ArgumentParser(description='Argparser for {}'.format(os.path.basename(__file__)),
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
args = parser.parse_args(tunables)
# create 1 stream
return [ self.create_stream() ]
def register():
return STLS1()
| 898 |
483 | <reponame>shridharsahil/cosbench_3
/**
Copyright 2013 Intel Corporation, All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.intel.cosbench.driver.util;
import static com.intel.cosbench.driver.util.Defaults.*;
import static com.intel.cosbench.driver.util.Division.*;
import java.util.Random;
import com.intel.cosbench.config.Config;
import com.intel.cosbench.config.ConfigException;
import com.intel.cosbench.driver.generator.*;
/**
* This class encapsulates logic to pick up objects.
*
* @author ywang19, qzheng7
*
*/
public class ObjectPicker {
private Division division;
private NameGenerator conNmGen;
private NameGenerator objNmGen;
public ObjectPicker() {
/* empty */
}
public void init(String division, Config config) {
conNmGen = getConNmGen(config, Boolean.FALSE);
objNmGen = getObjNmGen(config, Boolean.FALSE);
this.division = Division.getDivision(division);
}
public void init4Lister(String division, Config config) {
conNmGen = getConNmGen(config, Boolean.TRUE);
objNmGen = getObjNmGen(config, Boolean.TRUE);
this.division = Division.getDivision(division);
}
private static NameGenerator getConNmGen(Config config, boolean isLister) {
String pattern = isLister ? config.get("containers", null)
: config.get("containers");
if (pattern == null)
return null;
String prefix = config.get("cprefix", CONTAINER_PREFIX);
String suffix = config.get("csuffix", CONTAINER_SUFFIX);
return Generators.getNameGenerator(pattern, prefix, suffix);
}
private static NameGenerator getObjNmGen(Config config, boolean isLister) {
String pattern = isLister ? config.get("objects", null)
: config.get("objects");
if (pattern == null)
return null;
String prefix = config.get("oprefix", OBJECT_PREFIX);
String suffix = config.get("osuffix", OBJECT_SUFFIX);
return Generators.getNameGenerator(pattern, prefix, suffix);
}
public String[] pickObjPath(Random random, int idx, int all) {
synchronized(this) {
if (division.equals(OBJECT))
return new String[] { conNmGen.next(random),
objNmGen.next(random, idx, all) };
if (division.equals(CONTAINER))
return new String[] { conNmGen.next(random, idx, all),
objNmGen.next(random) };
return new String[] { conNmGen.next(random), objNmGen.next(random) };
}
}
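    /*
     * Example (sketch): with division "object", worker idx 2 of 4 draws object names
     * only from its quarter of the object range while container names stay random;
     * with division "container" the split applies to container names instead; any
     * other division leaves both generators fully random.
     */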
/* a path picker for Lister */
public String[] pickTargetPath(Random random, int idx, int all) {
synchronized (this) {
if (conNmGen == null && objNmGen != null) {
throw new ConfigException("no such key defined: " + "containers");
} else if (conNmGen == null && objNmGen == null) {
return new String[] { "", "" };
} else if (objNmGen == null) {
return new String[] { conNmGen.next(random, idx, all), "" };
} else {
return new String[] { conNmGen.next(random), objNmGen.next(random) };
}
}
}
}
| 1,359 |
14,668 | <filename>third_party/blink/renderer/modules/webaudio/audio_basic_processor_handler.cc
/*
* Copyright (C) 2010, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
#include <memory>
#include "third_party/blink/renderer/modules/webaudio/audio_basic_processor_handler.h"
#include "third_party/blink/renderer/modules/webaudio/audio_node_input.h"
#include "third_party/blink/renderer/modules/webaudio/audio_node_output.h"
#include "third_party/blink/renderer/platform/audio/audio_bus.h"
#include "third_party/blink/renderer/platform/audio/audio_processor.h"
namespace blink {
AudioBasicProcessorHandler::AudioBasicProcessorHandler(
NodeType node_type,
AudioNode& node,
float sample_rate,
std::unique_ptr<AudioProcessor> processor)
: AudioHandler(node_type, node, sample_rate),
processor_(std::move(processor)) {
AddInput();
AddOutput(1);
}
AudioBasicProcessorHandler::~AudioBasicProcessorHandler() {
// Safe to call the uninitialize() because it's final.
Uninitialize();
}
void AudioBasicProcessorHandler::Initialize() {
if (IsInitialized())
return;
DCHECK(Processor());
Processor()->Initialize();
AudioHandler::Initialize();
}
void AudioBasicProcessorHandler::Uninitialize() {
if (!IsInitialized())
return;
DCHECK(Processor());
Processor()->Uninitialize();
AudioHandler::Uninitialize();
}
void AudioBasicProcessorHandler::Process(uint32_t frames_to_process) {
AudioBus* destination_bus = Output(0).Bus();
if (!IsInitialized() || !Processor() ||
Processor()->NumberOfChannels() != NumberOfChannels()) {
destination_bus->Zero();
} else {
scoped_refptr<AudioBus> source_bus = Input(0).Bus();
// FIXME: if we take "tail time" into account, then we can avoid calling
// processor()->process() once the tail dies down.
if (!Input(0).IsConnected())
source_bus->Zero();
Processor()->Process(source_bus.get(), destination_bus, frames_to_process);
}
}
void AudioBasicProcessorHandler::ProcessOnlyAudioParams(
uint32_t frames_to_process) {
if (!IsInitialized() || !Processor())
return;
Processor()->ProcessOnlyAudioParams(frames_to_process);
}
// Nice optimization in the very common case allowing for "in-place" processing
void AudioBasicProcessorHandler::PullInputs(uint32_t frames_to_process) {
// Render input stream - suggest to the input to render directly into output
// bus for in-place processing in process() if possible.
Input(0).Pull(Output(0).Bus(), frames_to_process);
}
// As soon as we know the channel count of our input, we can lazily initialize.
// Sometimes this may be called more than once with different channel counts, in
// which case we must safely uninitialize and then re-initialize with the new
// channel count.
void AudioBasicProcessorHandler::CheckNumberOfChannelsForInput(
AudioNodeInput* input) {
DCHECK(Context()->IsAudioThread());
Context()->AssertGraphOwner();
DCHECK_EQ(input, &Input(0));
DCHECK(Processor());
unsigned number_of_channels = input->NumberOfChannels();
if (IsInitialized() && number_of_channels != Output(0).NumberOfChannels()) {
// We're already initialized but the channel count has changed.
Uninitialize();
}
if (!IsInitialized()) {
// This will propagate the channel count to any nodes connected further down
// the chain...
Output(0).SetNumberOfChannels(number_of_channels);
// Re-initialize the processor with the new channel count.
Processor()->SetNumberOfChannels(number_of_channels);
Initialize();
}
AudioHandler::CheckNumberOfChannelsForInput(input);
}
unsigned AudioBasicProcessorHandler::NumberOfChannels() {
return Output(0).NumberOfChannels();
}
bool AudioBasicProcessorHandler::RequiresTailProcessing() const {
return processor_->RequiresTailProcessing();
}
double AudioBasicProcessorHandler::TailTime() const {
return processor_->TailTime();
}
double AudioBasicProcessorHandler::LatencyTime() const {
return processor_->LatencyTime();
}
bool AudioBasicProcessorHandler::HasNonFiniteOutput() const {
AudioBus* output_bus = Output(0).Bus();
for (wtf_size_t k = 0; k < output_bus->NumberOfChannels(); ++k) {
AudioChannel* channel = output_bus->Channel(k);
if (channel->length() > 0 && !std::isfinite(channel->Data()[0])) {
return true;
}
}
return false;
}
} // namespace blink
| 1,746 |
532 | <reponame>Deiv99/cinolib<gh_stars>100-1000
/********************************************************************************
* This file is part of CinoLib *
* Copyright(C) 2016: <NAME> *
* *
* The MIT License *
* *
* Permission is hereby granted, free of charge, to any person obtaining a *
* copy of this software and associated documentation files (the "Software"), *
* to deal in the Software without restriction, including without limitation *
* the rights to use, copy, modify, merge, publish, distribute, sublicense, *
* and/or sell copies of the Software, and to permit persons to whom the *
* Software is furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS *
* IN THE SOFTWARE. *
* *
* Author(s): *
* *
* <NAME> (<EMAIL>) *
* http://pers.ge.imati.cnr.it/livesu/ *
* *
* Italian National Research Council (CNR) *
* Institute for Applied Mathematics and Information Technologies (IMATI) *
* Via de Marini, 6 *
* 16149 Genoa, *
* Italy *
*********************************************************************************/
#ifndef CINO_SMOOTHER_H
#define CINO_SMOOTHER_H
#include <cinolib/meshes/meshes.h>
namespace cinolib
{
/* Smooths a given polygonal mesh minimizing the same fairing energy
* described in:
*
* Practical Hex-Mesh Optimization via Edge-Cone Rectification
* <NAME>, <NAME>, <NAME>, <NAME>
* ACM Transactions on Graphics (SIGGRAPH 2015)
*
* Mesh vertices are grouped in 3 categories:
*
* - R: regular vertices, smoothed in the tangent space
* - F: feature vertices, smoothed along the sharp crease they belong to
 * - C: corner vertices, at the intersection of multiple features, held in place
*
* NOTE: feature lines are detected as chains of consecutive edges marked in the mesh.
* While the classification is done internally, proper edge marking/unmarking must be
 * done prior to calling the smoother.
*
* The energy being minimized is the following :
*
* E_smooth = w_regular * E_regular +
* w_feature * E_feature +
* w_corner * E_corner +
* w_laplacian * E_laplacian
*
* E_regular = \sum_{\forall i \in R} (n*v_i + d)^2,
* where <n,d> is the plane tangent to the mesh at v_i
*
* E_feature = \sum_{\forall i \in F} (v_i - (v_i + t*d))^2 + t^2,
* where <t,d> is the line L::= v_i + t*d tangent to the crease at v_i,
 * parameterized by the extra variable t
*
* E_corner = \sum_{\forall i \in C} (v_i - v_i*)^2,
* where v_i* is the current position of v_i
*
* E_laplacian = \sum_{\forall i} \sum_{\forall j \in N(i)} (v_i - v_j)^2
*
 * NOTE: for E_regular, E_feature and E_corner v_i is meant to be the ith
* vertex of the mesh to be smoothed, projected on the target surface. For
* each category type we use a dedicated spatial data structure, ensuring
* that surface vertices map to surface vertices, feature lines to feature
* lines, and corners to corners.
*
* TODO: iterate until convergence
* TODO: optionally use ray casting instead of closest point for projection
*/
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
typedef struct
{
uint n_iters = 1; // # of smoothing iterations
double w_regular = 10.0; // attraction to tangent space for regular vertices
double w_feature = 100.0; // attraction to tangent curve for feature vertices
double w_corner = 100.0; // attraction to closest corner for features corner
double w_laplace = 0.001; // weight of laplacian energy terms
int laplacian_mode = UNIFORM; // laplacian mode (UNIFORM or COTANGENT)
bool reproject_on_target = true; // reproject to target surface after each smoothing iteration
//bool with_ray_casting = false; // reproject via ray casting if true, via closest point if false
}
SmootherOptions;
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
template<class M1, class V1, class E1, class P1,
class M2, class V2, class E2, class P2>
CINO_INLINE
void mesh_smoother( AbstractPolygonMesh<M1,V1,E1,P1> & m,
const AbstractPolygonMesh<M2,V2,E2,P2> & target,
const SmootherOptions & opt = SmootherOptions());
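/* Usage sketch (illustrative, not part of the library): smooth a mesh towards a copy
 * of itself after marking its crease edges. Polygonmesh<> and the file name are
 * assumptions made for the example.
 *
 *   Polygonmesh<> m("bunny.obj");
 *   Polygonmesh<> target = m;   // reprojection target
 *   // ... mark feature edges on m (e.g. by dihedral angle) before smoothing ...
 *   SmootherOptions opt;
 *   opt.n_iters   = 5;
 *   opt.w_laplace = 0.01;
 *   mesh_smoother(m, target, opt);
 */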
}
#ifndef CINO_STATIC_LIB
#include "smoother.cpp"
#endif
#endif // CINO_SMOOTHER_H
| 2,927 |
702 | <gh_stars>100-1000
/*
* Copyright 2016 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.spark;
import com.mongodb.spark.config.ReadConfig;
import com.mongodb.spark.config.WriteConfig;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;
import com.mongodb.spark.sql.CharacterBean;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;
import org.junit.Test;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.util.Arrays.asList;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public final class NoSparkConfTest extends JavaRequiresMongoDB {
private final SparkConf sparkConf = new SparkConf().setMaster("local").setAppName("MongoSparkConnector");
@Test
public void shouldBeAbleToUseConfigsWithRDDs() {
WriteConfig writeConfig = WriteConfig.create(getOptions());
ReadConfig readConfig = ReadConfig.create(getOptions());
JavaSparkContext jsc = getJavaSparkContext(sparkConf);
List<Document> documents = asList(Document.parse("{test: 0}"), Document.parse("{test: 1}"), Document.parse("{test: 2}"));
MongoSpark.save(jsc.parallelize(documents), writeConfig);
JavaMongoRDD<Document> mongoRDD = MongoSpark.load(jsc, readConfig);
assertEquals(mongoRDD.count(), 3);
List<Integer> counters = mongoRDD.map(new Function<Document, Integer>() {
@Override
public Integer call(final Document x) throws Exception {
return x.getInteger("test");
}
}).collect();
assertEquals(counters, asList(0,1,2));
}
@Test
public void shouldBeAbleToUseConfigsWithDataFrames() {
JavaSparkContext jsc = getJavaSparkContext(sparkConf);
SparkSession sparkSession = SparkSession.builder().getOrCreate();
List<CharacterBean> characters = asList(new CharacterBean("Gandalf", 1000), new CharacterBean("<NAME>", 50));
MongoSpark.write(sparkSession.createDataFrame(jsc.parallelize(characters), CharacterBean.class))
.options(getOptions())
.save();
List<CharacterBean> ds = MongoSpark.read(sparkSession)
.options(getOptions())
.load()
.as(Encoders.bean(CharacterBean.class))
.collectAsList();
assertThat(ds, is(characters));
}
private Map<String, String> getOptions() {
Map<String, String> options = new HashMap<String, String>();
options.put("uri", getMongoClientURI());
options.put("database", getDatabaseName());
options.put("collection", getCollectionName());
options.put("partitioner", "TestPartitioner$");
return options;
}
}
| 1,315 |
12,278 | <filename>tests/Futures/Promise-test.cpp
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2020 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author <NAME>
////////////////////////////////////////////////////////////////////////////////
#include "Futures/Promise.h"
#include "gtest/gtest.h"
using namespace arangodb::futures;
namespace {
auto makeValid() {
auto valid = Promise<int>();
EXPECT_TRUE(valid.valid());
return valid;
}
auto makeInvalid() {
auto invalid = Promise<int>::makeEmpty();
EXPECT_FALSE(invalid.valid());
return invalid;
}
template <typename T>
constexpr typename std::decay<T>::type copy(T&& value) noexcept(
noexcept(typename std::decay<T>::type(std::forward<T>(value)))) {
return std::forward<T>(value);
}
typedef std::domain_error eggs_t;
static eggs_t eggs("eggs");
} // namespace
// -----------------------------------------------------------------------------
// --SECTION-- test suite
// -----------------------------------------------------------------------------
TEST(PromiseTest, makeEmpty) {
auto p = Promise<int>::makeEmpty();
ASSERT_TRUE(p.isFulfilled());
}
TEST(PromiseTest, special) {
ASSERT_FALSE(std::is_copy_constructible<Promise<int>>::value);
ASSERT_FALSE(std::is_copy_assignable<Promise<int>>::value);
ASSERT_TRUE(std::is_move_constructible<Promise<int>>::value);
ASSERT_TRUE(std::is_move_assignable<Promise<int>>::value);
}
TEST(PromiseTest, getFuture) {
Promise<int> p;
Future<int> f = p.getFuture();
ASSERT_FALSE(f.isReady());
}
TEST(PromiseTest, setValueUnit) {
Promise<Unit> p;
p.setValue();
}
TEST(PromiseTest, ctorPostconditionValid) {
// Ctors/factories that promise valid -- postcondition: valid()
#define DOIT(CREATION_EXPR) \
do { \
auto p1 = (CREATION_EXPR); \
ASSERT_TRUE(p1.valid()); \
auto p2 = std::move(p1); \
ASSERT_FALSE(p1.valid()); \
ASSERT_TRUE(p2.valid()); \
} while (false)
DOIT(makeValid());
DOIT(Promise<int>());
DOIT(Promise<int>{});
DOIT(Promise<Unit>());
DOIT(Promise<Unit>{});
#undef DOIT
}
TEST(PromiseTest, ctorPostconditionInvali) {
// Ctors/factories that promise invalid -- postcondition: !valid()
#define DOIT(CREATION_EXPR) \
do { \
auto p1 = (CREATION_EXPR); \
ASSERT_FALSE(p1.valid()); \
auto p2 = std::move(p1); \
ASSERT_FALSE(p1.valid()); \
ASSERT_FALSE(p2.valid()); \
} while (false)
DOIT(makeInvalid());
DOIT(Promise<int>::makeEmpty());
#undef DOIT
}
TEST(PromiseTest, lacksPreconditionValid) {
// Ops that don't throw PromiseInvalid if !valid() --
// without precondition: valid()
#define DOIT(STMT) \
do { \
auto p = makeValid(); \
{ STMT; } \
copy(std::move(p)); \
STMT; \
} while (false)
// misc methods that don't require isValid()
DOIT(p.valid());
DOIT(p.isFulfilled());
// move-ctor - move-copy to local, copy(), pass-by-move-value
DOIT(auto other = std::move(p));
DOIT(copy(std::move(p)));
DOIT(([](auto) {})(std::move(p)));
// move-assignment into either {valid | invalid}
DOIT({
auto other = makeValid();
other = std::move(p);
});
DOIT({
auto other = makeInvalid();
other = std::move(p);
});
#undef DOIT
}
TEST(PromiseTest, hasPreconditionValid) {
// Ops that require validity; precondition: valid();
// throw PromiseInvalid if !valid()
#define DOIT(STMT) \
do { \
auto p = makeValid(); \
STMT; \
::copy(std::move(p)); \
EXPECT_ANY_THROW(STMT); \
} while (false)
auto const except = std::logic_error("foo");
auto const ewrap = std::make_exception_ptr(except);
DOIT(p.getFuture());
DOIT(p.setException(except));
DOIT(p.setException(ewrap));
// DOIT(p.setInterruptHandler([](auto&) {}));
DOIT(p.setValue(42));
DOIT(p.setTry(Try<int>(42)));
DOIT(p.setTry(Try<int>(ewrap)));
DOIT(p.setWith([] { return 42; }));
#undef DOIT
}
TEST(PromiseTest, hasPostconditionValid) {
// Ops that preserve validity -- postcondition: valid()
#define DOIT(STMT) \
do { \
auto p = makeValid(); \
STMT; \
ASSERT_TRUE(p.valid()); \
} while (false)
auto const swallow = [](auto) {};
DOIT(swallow(p.valid())); // p.valid() itself preserves validity
DOIT(swallow(p.isFulfilled()));
#undef DOIT
}
TEST(PromiseTest, hasPostconditionInvalid) {
// Ops that consume *this -- postcondition: !valid()
#define DOIT(CTOR, STMT) \
do { \
auto p = (CTOR); \
STMT; \
ASSERT_FALSE(p.valid()); \
} while (false)
// move-ctor of {valid|invalid}
DOIT(makeValid(), { auto other{std::move(p)}; });
DOIT(makeInvalid(), { auto other{std::move(p)}; });
// move-assignment of {valid|invalid} into {valid|invalid}
DOIT(makeValid(), {
auto other = makeValid();
other = std::move(p);
});
DOIT(makeValid(), {
auto other = makeInvalid();
other = std::move(p);
});
DOIT(makeInvalid(), {
auto other = makeValid();
other = std::move(p);
});
DOIT(makeInvalid(), {
auto other = makeInvalid();
other = std::move(p);
});
// pass-by-value of {valid|invalid}
DOIT(makeValid(), {
auto const byval = [](auto) {};
byval(std::move(p));
});
DOIT(makeInvalid(), {
auto const byval = [](auto) {};
byval(std::move(p));
});
#undef DOIT
}
TEST(PromiseTest, setValue) {
Promise<int> fund;
auto ffund = fund.getFuture();
fund.setValue(42);
ASSERT_TRUE(42 == ffund.get());
struct Foo {
std::string name;
int value;
};
Promise<Foo> pod;
auto fpod = pod.getFuture();
Foo f = {"the answer", 42};
pod.setValue(f);
Foo f2 = fpod.get();
ASSERT_TRUE(f.name == f2.name);
ASSERT_TRUE(f.value == f2.value);
pod = Promise<Foo>();
fpod = pod.getFuture();
pod.setValue(std::move(f2));
Foo f3 = fpod.get();
ASSERT_TRUE(f.name == f3.name);
ASSERT_TRUE(f.value == f3.value);
Promise<std::unique_ptr<int>> mov;
auto fmov = mov.getFuture();
mov.setValue(std::make_unique<int>(42));
std::unique_ptr<int> ptr = std::move(fmov).get();
ASSERT_TRUE(42 == *ptr);
Promise<Unit> v;
auto fv = v.getFuture();
v.setValue();
ASSERT_TRUE(fv.isReady());
}
TEST(PromiseTest, setException) {
{
Promise<int> p;
auto f = p.getFuture();
p.setException(eggs);
EXPECT_THROW(f.get(), eggs_t);
}
{
Promise<int> p;
auto f = p.getFuture();
p.setException(std::make_exception_ptr(eggs));
EXPECT_THROW(f.get(), eggs_t);
}
}
TEST(PromiseTest, setWith) {
{
Promise<int> p;
auto f = p.getFuture();
p.setWith([] { return 42; });
ASSERT_TRUE(42 == f.get());
}
{
Promise<int> p;
auto f = p.getFuture();
p.setWith([]() -> int { throw eggs; });
EXPECT_THROW(f.get(), eggs_t);
}
}
TEST(PromiseTest, isFulfilled) {
Promise<int> p;
ASSERT_FALSE(p.isFulfilled());
p.setValue(42);
ASSERT_TRUE(p.isFulfilled());
}
TEST(PromiseTest, isFulfilledWithFuture) {
Promise<int> p;
auto f = p.getFuture(); // so core_ will become null
ASSERT_FALSE(p.isFulfilled());
p.setValue(42); // after here
ASSERT_TRUE(p.isFulfilled());
}
TEST(PromiseTest, brokenOnDelete) {
auto p = std::make_unique<Promise<int>>();
auto f = p->getFuture();
ASSERT_FALSE(f.isReady());
p.reset();
ASSERT_TRUE(f.isReady());
auto t = f.getTry();
ASSERT_TRUE(t.hasException());
EXPECT_THROW(t.throwIfFailed(), FutureException);
// ASSERT_TRUE(t.hasException<BrokenPromise>());
}
/*TEST(PromiseTest, brokenPromiseHasTypeInfo) {
auto pInt = std::make_unique<Promise<int>>();
auto fInt = pInt->getFuture();
auto pFloat = std::make_unique<Promise<float>>();
auto fFloat = pFloat->getFuture();
pInt.reset();
pFloat.reset();
try {
auto whatInt = fInt.getTry().exception().what();
} catch(e) {
}
auto whatFloat = fFloat.getTry().exception().what();
ASSERT_TRUE(whatInt != whatFloat);
}*/
| 3,625 |
521 | <reponame>infinitio/elle
#include <sstream>
#include <string>
#include <elle/UUID.hh>
#include <elle/meta.hh>
#include <elle/test.hh>
#include <elle/das/Symbol.hh>
#include <elle/das/flatten.hh>
#include <elle/das/model.hh>
#include <elle/das/printer.hh>
ELLE_DAS_SYMBOL(name);
ELLE_DAS_SYMBOL(model);
ELLE_DAS_SYMBOL(id);
namespace das
{
// template <typename T, typename Fields_>
// class Model
// {
// public:
// using Fields = Fields_;
// };
// template <typename T>
// struct DefaultModel
// {};
// namespace
// {
// template <typename O>
// struct stringify_object
// {
// template <typename S>
// struct stringify
// {
// using type = std::string;
// static
// type
// value(O const& o)
// {
// return elle::sprintf("%s = %s", S::name(), S::attr_get(o));
// }
// };
// };
// }
// template <typename T>
// std::enable_if_exists_t<
// typename DefaultModel<T>, std::ostream&>::type
// operator <<(std::ostream& s, T const& o)
// {
// using Fields = typename DefaultModel<T>::type::Fields;
// s << elle::type_info(o) <<
// Fields::template map<stringify_object<T>::template stringify>::value(o);
// return s;
// }
// namespace
// {
// template <typename O>
// struct flatten_object
// {
// template <typename S>
// struct flatten
// {
// using type = typename S::template attr_type<O>::type;
// static
// type
// value(O const& o)
// {
// return S::attr_get(o);
// }
// };
// };
// }
// // Flatten
// template <typename Model, typename T>
// typename Model::Fields::template map<flatten_object<T>::template flatten>
// ::type::template apply<std::tuple>::type
// flatten(T const& o)
// {
// using Fields = typename Model::Fields;
// return Fields::template map<flatten_object<T>::template flatten>::value(o);
// }
// template <typename T>
// decltype(flatten<typename DefaultModel<T>::type, T>(std::declval<T>()))
// flatten(T const& o)
// {
// return flatten<typename DefaultModel<T>::type, T>(o);
// }
}
class Device
{
public:
std::string name;
boost::optional<std::string> model;
elle::UUID id;
Device(std::string name_,
boost::optional<std::string> model_ = boost::none,
elle::UUID id_ = elle::UUID::random())
: name(std::move(name_))
, model(std::move(model_))
, id(std::move(id_))
{}
// Device()
// : name()
// , model()
// , id()
// {}
bool
operator ==(Device const& other) const
{
return this->id == other.id && this->name == other.name;
}
};
using DasDevice = elle::das::Model<
Device, elle::meta::List<Symbol_name, Symbol_model, Symbol_id>>;
namespace das
{
template <>
struct DefaultModel<Device>
{
public:
using type = DasDevice;
};
}
static
void
printer()
{
using elle::das::operator <<;
Device d("name", boost::none, elle::UUID());
std::stringstream s;
s << d;
BOOST_CHECK_EQUAL(
s.str(),
"Device(name = name, model = null, id = 00000000-0000-0000-0000-000000000000)");
}
ELLE_TEST_SUITE()
{
auto& suite = boost::unit_test::framework::master_test_suite();
suite.add(BOOST_TEST_CASE(printer), 0, valgrind(1));
suite.add(BOOST_TEST_CASE(flatten), 0, valgrind(1));
}
| 1,458 |
1,020 | <filename>framework/src/test/java/org/robobinding/customviewbinding/KeepFirstAttributesTest.java
package org.robobinding.customviewbinding;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.robobinding.viewattribute.property.OneWayPropertyViewAttribute;
import org.robobinding.viewattribute.property.OneWayPropertyViewAttributeFactory;
import org.robobinding.viewbinding.BindingAttributeMappings;
/**
* @since 1.0
* @author <NAME>
*
*/
@RunWith(MockitoJUnitRunner.class)
public class KeepFirstAttributesTest {
private String attributeName = "attributeName";
@Mock
private BindingAttributeMappings<ViewType> bindingAttributeMappings;
private KeepFirstAttributes<ViewType> keepFirstAttributes;
@Before
public void setUp() {
keepFirstAttributes = new KeepFirstAttributes<ViewType>(bindingAttributeMappings);
}
@Test
public void whenMapSameOneWayPropertyAgain_thenMappedOnceOnly() {
Class<OneWayPropertyViewAttribute1> viewAttributeClass = null;
keepFirstAttributes.mapOneWayProperty(viewAttributeClass, attributeName);
keepFirstAttributes.mapOneWayProperty(viewAttributeClass, attributeName);
verify(bindingAttributeMappings, times(1)).mapOneWayProperty(viewAttributeClass, attributeName);
}
@Test
public void whenMapSameOneWayPropertyByFactoryAgain_thenMappedOnceOnly() {
Class<OneWayPropertyViewAttribute1> viewAttributeClass = null;
OneWayPropertyViewAttributeFactory<ViewType> factory = null;
keepFirstAttributes.mapOneWayProperty(viewAttributeClass, attributeName);
keepFirstAttributes.mapOneWayProperty(factory, attributeName);
verify(bindingAttributeMappings, times(1)).mapOneWayProperty(viewAttributeClass, attributeName);
}
public static interface ViewType {}
public static interface OneWayPropertyViewAttribute1 extends OneWayPropertyViewAttribute<ViewType, Object>{}
}
| 656 |
348 | {"nom":"Combrit","circ":"7ème circonscription","dpt":"Finistère","inscrits":3462,"abs":1665,"votants":1797,"blancs":170,"nuls":45,"exp":1582,"res":[{"nuance":"REM","nom":"<NAME>","voix":1106},{"nuance":"LR","nom":"<NAME>","voix":476}]} | 93 |
6,098 | <filename>h2o-extensions/steam/src/main/java/hex/steam/SteamHelloMessenger.java
package hex.steam;
import org.apache.log4j.Logger;
import water.H2O;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class SteamHelloMessenger implements SteamMessenger {
Logger LOG = Logger.getLogger(SteamHelloMessenger.class);
private SteamMessageSender sender;
@Override
public void onConnectionStateChange(SteamMessageSender sender) {
this.sender = sender;
}
@Override
public void onMessage(Map<String, String> message) {
if ("hello".equals(message.get(TYPE))) {
assert sender != null : "Received message but sender is null";
Map<String, String> response = new HashMap<>();
response.put(TYPE, "hello_response");
response.put(ID, message.get(ID) + "_response");
response.put("version", H2O.ABV.projectVersion());
response.put("branch", H2O.ABV.branchName());
response.put("hash", H2O.ABV.lastCommitHash());
response.put("cloud_size", String.valueOf(H2O.CLOUD.size()));
try {
sender.sendMessage(response);
} catch (IOException e) {
LOG.error("Failed to send response to hello.", e);
}
}
}
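    // Example exchange (sketch; the literal TYPE/ID key strings are defined by
    // SteamMessenger, so symbolic names are used here):
    //   in : {TYPE: "hello", ID: "42"}
    //   out: {TYPE: "hello_response", ID: "42_response",
    //         "version": ..., "branch": ..., "hash": ..., "cloud_size": ...}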
}
| 569 |
434 | <filename>parallaxbacklayout/src/main/java/com/github/anzewei/parallaxbacklayout/widget/ParallaxBackLayout.java
package com.github.anzewei.parallaxbacklayout.widget;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.GradientDrawable;
import android.os.Build;
import android.support.annotation.IntDef;
import android.support.v4.view.ViewCompat;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowInsets;
import android.widget.FrameLayout;
import com.github.anzewei.parallaxbacklayout.ViewDragHelper;
import com.github.anzewei.parallaxbacklayout.transform.CoverTransform;
import com.github.anzewei.parallaxbacklayout.transform.ITransform;
import com.github.anzewei.parallaxbacklayout.transform.ParallaxTransform;
import com.github.anzewei.parallaxbacklayout.transform.SlideTransform;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import static com.github.anzewei.parallaxbacklayout.ViewDragHelper.EDGE_BOTTOM;
import static com.github.anzewei.parallaxbacklayout.ViewDragHelper.EDGE_RIGHT;
import static com.github.anzewei.parallaxbacklayout.ViewDragHelper.EDGE_TOP;
/**
* The type Parallax back layout.
*/
public class ParallaxBackLayout extends FrameLayout {
//region cont
@IntDef({LAYOUT_COVER, LAYOUT_PARALLAX, LAYOUT_SLIDE, LAYOUT_CUSTOM})
@Retention(RetentionPolicy.SOURCE)
public @interface LayoutType {
}
@IntDef({ViewDragHelper.EDGE_LEFT, EDGE_RIGHT, EDGE_TOP, EDGE_BOTTOM})
@Retention(RetentionPolicy.SOURCE)
public @interface Edge {
}
@IntDef({EDGE_MODE_DEFAULT, EDGE_MODE_FULL})
@Retention(RetentionPolicy.SOURCE)
public @interface EdgeMode {
}
private static final int DEFAULT_SCRIM_COLOR = 0x99000000;
private static final int FULL_ALPHA = 255;
/**
* Default threshold of scroll
*/
private static final float DEFAULT_SCROLL_THRESHOLD = 0.5f;
private static final int OVERSCROLL_DISTANCE = 0;
private static final int EDGE_LEFT = ViewDragHelper.EDGE_LEFT;
/**
* The constant LAYOUT_PARALLAX.
*/
public static final int LAYOUT_PARALLAX = 1;
/**
* The constant LAYOUT_COVER.
*/
public static final int LAYOUT_COVER = 0;
/**
* The constant LAYOUT_SLIDE.
*/
public static final int LAYOUT_SLIDE = 2;
public static final int LAYOUT_CUSTOM = -1;
public static final int EDGE_MODE_FULL = 0;
public static final int EDGE_MODE_DEFAULT = 1;
//endregion
//region field
/**
     * Threshold of scroll; we will close the activity when scrollPercent goes
     * over this value.
*/
private float mScrollThreshold = DEFAULT_SCROLL_THRESHOLD;
private Activity mSwipeHelper;
private Rect mInsets = new Rect();
private boolean mEnable = true;
private View mContentView;
private ViewDragHelper mDragHelper;
private ParallaxSlideCallback mSlideCallback;
private ITransform mTransform;
private int mContentLeft;
private int mEdgeMode = EDGE_MODE_DEFAULT;
private int mContentTop;
private int mLayoutType = LAYOUT_PARALLAX;
private IBackgroundView mBackgroundView;
// private String mThumbFile;
private Drawable mShadowLeft;
// private Bitmap mSecondBitmap;
// private Paint mPaintCache;
private boolean mInLayout;
/**
* Edge being dragged
*/
private int mTrackingEdge;
private int mFlingVelocity = 30;
private
@Edge
int mEdgeFlag = -1;
//endregion
//region super method
/**
* Instantiates a new Parallax back layout.
*
* @param context the context
*/
public ParallaxBackLayout(Context context) {
super(context);
mDragHelper = ViewDragHelper.create(this, new ViewDragCallback());
setEdgeFlag(EDGE_LEFT);
}
@TargetApi(Build.VERSION_CODES.KITKAT_WATCH)
@Override
public WindowInsets onApplyWindowInsets(WindowInsets insets) {
int top = insets.getSystemWindowInsetTop();
if (mContentView.getLayoutParams() instanceof MarginLayoutParams) {
MarginLayoutParams params = (MarginLayoutParams) mContentView.getLayoutParams();
mInsets.set(params.leftMargin, params.topMargin + top, params.rightMargin, params.bottomMargin);
}
applyWindowInset();
return super.onApplyWindowInsets(insets);
}
@Override
public boolean onInterceptTouchEvent(MotionEvent event) {
if (!mEnable || !mBackgroundView.canGoBack()) {
return false;
}
try {
return mDragHelper.shouldInterceptTouchEvent(event);
} catch (ArrayIndexOutOfBoundsException e) {
// FIXME: handle exception
// issues #9
return false;
} catch (IllegalArgumentException iae){
return false;
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
if (!mEnable || !mBackgroundView.canGoBack()) {
return false;
}
mDragHelper.processTouchEvent(event);
return true;
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
mInLayout = true;
applyWindowInset();
if (mContentView != null) {
int cleft = mContentLeft;
int ctop = mContentTop;
Log.d(View.VIEW_LOG_TAG, "left = " + left + " top = " + top);
ViewGroup.LayoutParams params = mContentView.getLayoutParams();
if (params instanceof MarginLayoutParams) {
cleft += ((MarginLayoutParams) params).leftMargin;
ctop += ((MarginLayoutParams) params).topMargin;
}
mContentView.layout(cleft, ctop,
cleft + mContentView.getMeasuredWidth(),
ctop + mContentView.getMeasuredHeight());
}
mInLayout = false;
}
@Override
public void requestLayout() {
if (!mInLayout) {
super.requestLayout();
}
}
@Override
public void computeScroll() {
if (mDragHelper.continueSettling(true)) {
ViewCompat.postInvalidateOnAnimation(this);
}
}
@Override
protected boolean drawChild(Canvas canvas, View child, long drawingTime) {
Log.d(VIEW_LOG_TAG, "drawChild");
final boolean drawContent = child == mContentView;
if (mEnable)
drawThumb(canvas, child);
boolean ret = super.drawChild(canvas, child, drawingTime);
if (mEnable && drawContent
&& mDragHelper.getViewDragState() != ViewDragHelper.STATE_IDLE) {
drawShadow(canvas, child);
}
return ret;
}
//endregion
//region private method
/**
* Set up contentView which will be moved by user gesture
*
* @param view
*/
private void setContentView(View view) {
mContentView = view;
}
private void applyWindowInset() {
if (mInsets == null)
return;
if (mEdgeMode == EDGE_MODE_FULL) {
mDragHelper.setEdgeSize(Math.max(getWidth(), getHeight()));
} else if (mEdgeFlag == EDGE_TOP)
mDragHelper.setEdgeSize(mInsets.top + mDragHelper.getEdgeSizeDefault());
else if (mEdgeFlag == EDGE_BOTTOM) {
mDragHelper.setEdgeSize(mInsets.bottom + mDragHelper.getEdgeSizeDefault());
} else if (mEdgeFlag == ViewDragHelper.EDGE_LEFT) {
mDragHelper.setEdgeSize(mDragHelper.getEdgeSizeDefault() + mInsets.left);
} else
mDragHelper.setEdgeSize(mDragHelper.getEdgeSizeDefault() + mInsets.right);
}
/**
     * Draw the background page (previous screen) thumbnail, transformed by the
     * current ITransform, behind the sliding content view.
*/
private void drawThumb(Canvas canvas, View child) {
if (mContentLeft == 0 && mContentTop == 0)
return;
int store = canvas.save();
mTransform.transform(canvas, this, child);
mBackgroundView.draw(canvas);
canvas.restoreToCount(store);
}
/**
* draw shadow
*/
private void drawShadow(Canvas canvas, View child) {
if (mContentLeft == 0 && mContentTop == 0)
return;
if(mShadowLeft == null)
return;
if (mEdgeFlag == EDGE_LEFT) {
mShadowLeft.setBounds(child.getLeft() - mShadowLeft.getIntrinsicWidth(), child.getTop(),
child.getLeft(), child.getBottom());
mShadowLeft.setAlpha((getWidth()-child.getLeft())*255/getWidth());
} else if (mEdgeFlag == EDGE_RIGHT) {
mShadowLeft.setBounds(child.getRight(), child.getTop(),
child.getRight() + mShadowLeft.getIntrinsicWidth(), child.getBottom());
mShadowLeft.setAlpha(child.getRight()*255/getWidth());
} else if (mEdgeFlag == EDGE_BOTTOM) {
mShadowLeft.setBounds(child.getLeft(), child.getBottom(),
child.getRight(), child.getBottom() + mShadowLeft.getIntrinsicHeight());
mShadowLeft.setAlpha(child.getBottom()*255/getHeight());
} else if (mEdgeFlag == EDGE_TOP) {
mShadowLeft.setBounds(child.getLeft(), child.getTop() - mShadowLeft.getIntrinsicHeight() + getSystemTop(),
child.getRight(), child.getTop() + getSystemTop());
mShadowLeft.setAlpha((getHeight()-child.getTop())*255/getHeight());
}
mShadowLeft.draw(canvas);
}
//endregion
//region Public Method
/**
* Sets enable gesture.
*
* @param enable the enable
*/
public void setEnableGesture(boolean enable) {
mEnable = enable;
}
/**
* set slide callback
*
* @param slideCallback callback
*/
public void setSlideCallback(ParallaxSlideCallback slideCallback) {
mSlideCallback = slideCallback;
}
/**
     * Set the scroll threshold; we will close the activity when scrollPercent
     * goes over this value.
*
* @param threshold the threshold
*/
public void setScrollThresHold(float threshold) {
if (threshold >= 1.0f || threshold <= 0) {
throw new IllegalArgumentException("Threshold value should be between 0 and 1.0");
}
mScrollThreshold = threshold;
}
/**
     * Set the fling velocity threshold used by the swipe-back gesture.
*
* @param velocity the fling velocity
*/
public void setVelocity(int velocity) {
mFlingVelocity = velocity;
}
/**
* attach to activity
*
* @param activity the activity
*/
public void attachToActivity(Activity activity) {
mSwipeHelper = activity;
ViewGroup decor = (ViewGroup) activity.getWindow().getDecorView();
ViewGroup decorChild = (ViewGroup) decor.getChildAt(0);
decor.removeView(decorChild);
addView(decorChild, ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
setContentView(decorChild);
decor.addView(this);
}
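    // Wiring sketch (illustrative; `backgroundView` is an assumed IBackgroundView
    // implementation, and the library normally performs this setup via a helper):
    //
    //   ParallaxBackLayout layout = new ParallaxBackLayout(activity);
    //   layout.setBackgroundView(backgroundView);
    //   layout.setEdgeFlag(ViewDragHelper.EDGE_LEFT);
    //   layout.setLayoutType(ParallaxBackLayout.LAYOUT_PARALLAX, null);
    //   layout.attachToActivity(activity); // re-parents the decor content into this layout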
/**
* set the slide mode fullscreen or default
*
* @param mode
*/
public void setEdgeMode(@EdgeMode int mode) {
mEdgeMode = mode;
applyWindowInset();
}
/**
* Scroll out contentView and finish the activity
*
* @param duration default 0
*/
public boolean scrollToFinishActivity(int duration) {
if (!mEnable || !mBackgroundView.canGoBack()) {
return false;
}
final int childWidth = getWidth();
int left = 0, top = 0;
mTrackingEdge = mEdgeFlag;
switch (mTrackingEdge) {
case EDGE_LEFT:
left = childWidth;
break;
case EDGE_BOTTOM:
top = -getHeight();
break;
case EDGE_RIGHT:
left = -getWidth();
break;
case EDGE_TOP:
top = getHeight();
break;
}
if (mDragHelper.smoothSlideViewTo(mContentView, left, top, duration)) {
ViewCompat.postInvalidateOnAnimation(this);
postInvalidate();
return true;
}
return false;
}
    /**
     * Sets the shadow drawable.
     *
     * @param drawable the shadow drawable
     */
public void setShadowDrawable(Drawable drawable) {
mShadowLeft = drawable;
}
/**
* Sets background view.
*
* @param backgroundView the background view
*/
public void setBackgroundView(IBackgroundView backgroundView) {
mBackgroundView = backgroundView;
}
public int getEdgeFlag() {
return mEdgeFlag;
}
/**
* Sets edge flag.
*
* @param edgeFlag the edge flag
*/
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
public void setEdgeFlag(@Edge int edgeFlag) {
if (mEdgeFlag == edgeFlag)
return;
mEdgeFlag = edgeFlag;
mDragHelper.setEdgeTrackingEnabled(edgeFlag);
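        // Re-orient the default shadow gradient so its darkest band stays against the
        // content view's edge for the selected drag edge.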
GradientDrawable.Orientation orientation = GradientDrawable.Orientation.LEFT_RIGHT;
if (edgeFlag == EDGE_LEFT)
orientation = GradientDrawable.Orientation.RIGHT_LEFT;
else if (edgeFlag == EDGE_TOP) {
orientation = GradientDrawable.Orientation.BOTTOM_TOP;
} else if (edgeFlag == EDGE_RIGHT)
orientation = GradientDrawable.Orientation.LEFT_RIGHT;
else if (edgeFlag == EDGE_BOTTOM)
orientation = GradientDrawable.Orientation.TOP_BOTTOM;
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
mShadowLeft = null;
}
if (mShadowLeft == null) {
            int[] colors = {0x66000000, 0x11000000, 0x00000000};
ShadowDrawable drawable = new ShadowDrawable(orientation, colors);
drawable.setGradientRadius(90);
drawable.setSize(50, 50);
mShadowLeft = drawable;
} else if (mShadowLeft instanceof ShadowDrawable) {
((ShadowDrawable) mShadowLeft).setOrientation(orientation);
}
applyWindowInset();
}
public int getSystemTop() {
return mInsets.top;
}
public int getSystemLeft() {
return mInsets.left;
}
public int getLayoutType() {
return mLayoutType;
}
    /**
     * Sets layout type.
     *
     * @param layoutType the layout type
     * @param transform  the custom transform, required when layoutType is LAYOUT_CUSTOM
     */
public void setLayoutType(@LayoutType int layoutType, ITransform transform) {
mLayoutType = layoutType;
switch (layoutType) {
case LAYOUT_CUSTOM:
assert transform != null;
mTransform = transform;
break;
case LAYOUT_COVER:
mTransform = new CoverTransform();
break;
case LAYOUT_PARALLAX:
mTransform = new ParallaxTransform();
break;
case LAYOUT_SLIDE:
mTransform = new SlideTransform();
break;
}
}
//endregion
//region class
private class ViewDragCallback extends ViewDragHelper.Callback {
private float mScrollPercent;
@Override
public boolean tryCaptureView(View view, int pointerId) {
boolean ret = mDragHelper.isEdgeTouched(mEdgeFlag, pointerId);
if (ret) {
mTrackingEdge = mEdgeFlag;
}
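            // Only capture when the gesture's dominant axis matches the tracked edge:
            // for horizontal edges the vertical touch slop must not be exceeded first,
            // and vice versa for vertical edges.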
boolean directionCheck = false;
if (mEdgeFlag == EDGE_LEFT || mEdgeFlag == EDGE_RIGHT) {
directionCheck = !mDragHelper.checkTouchSlop(ViewDragHelper.DIRECTION_VERTICAL, pointerId);
} else if (mEdgeFlag == EDGE_BOTTOM || mEdgeFlag == EDGE_TOP) {
directionCheck = !mDragHelper
.checkTouchSlop(ViewDragHelper.DIRECTION_HORIZONTAL, pointerId);
}
            return ret && directionCheck;
}
@Override
public int getViewHorizontalDragRange(View child) {
return mEdgeFlag & (EDGE_LEFT | EDGE_RIGHT);
}
@Override
public int getViewVerticalDragRange(View child) {
return mEdgeFlag & (EDGE_BOTTOM | EDGE_TOP);
}
@Override
public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
super.onViewPositionChanged(changedView, left, top, dx, dy);
if ((mTrackingEdge & EDGE_LEFT) != 0) {
mScrollPercent = Math.abs((float) (left - mInsets.left)
/ mContentView.getWidth());
}
if ((mTrackingEdge & EDGE_RIGHT) != 0) {
mScrollPercent = Math.abs((float) (left - mInsets.left)
/ mContentView.getWidth());
}
if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
mScrollPercent = Math.abs((float) (top - getSystemTop())
/ mContentView.getHeight());
}
if ((mTrackingEdge & EDGE_TOP) != 0) {
mScrollPercent = Math.abs((float) top
/ mContentView.getHeight());
}
mContentLeft = left;
mContentTop = top;
invalidate();
if (mSlideCallback != null)
mSlideCallback.onPositionChanged(mScrollPercent);
if (mScrollPercent >= 0.999f) {
if (!mSwipeHelper.isFinishing()) {
mSwipeHelper.finish();
mSwipeHelper.overridePendingTransition(0, 0);
}
}
}
@Override
public void onViewReleased(View releasedChild, float xvel, float yvel) {
final int childWidth = releasedChild.getWidth();
final int childHeight = releasedChild.getHeight();
boolean fling = false;
int left = mInsets.left, top = 0;
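            // Decide where to settle: if the release velocity exceeds the fling threshold
            // or the drag passed the scroll threshold, settle fully off-screen along the
            // tracked edge; otherwise snap back to the resting position.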
if ((mTrackingEdge & EDGE_LEFT) != 0) {
if (Math.abs(xvel) > mFlingVelocity) {
fling = true;
}
left = xvel >= 0 && (fling || mScrollPercent > mScrollThreshold)
? childWidth + mInsets.left : mInsets.left;
}
if ((mTrackingEdge & EDGE_RIGHT) != 0) {
if (Math.abs(xvel) > mFlingVelocity) {
fling = true;
}
left = xvel <= 0 && (fling || mScrollPercent > mScrollThreshold)
? -childWidth + mInsets.left : mInsets.left;
}
if ((mTrackingEdge & EDGE_TOP) != 0) {
if (Math.abs(yvel) > mFlingVelocity) {
fling = true;
}
top = yvel >= 0 && (fling || mScrollPercent > mScrollThreshold)
? childHeight : 0;
}
if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
if (Math.abs(yvel) > mFlingVelocity) {
fling = true;
}
top = yvel <= 0 && (fling || mScrollPercent > mScrollThreshold)
? -childHeight + getSystemTop() : 0;
}
mDragHelper.settleCapturedViewAt(left, top);
invalidate();
}
@Override
public void onViewDragStateChanged(int state) {
super.onViewDragStateChanged(state);
if (mSlideCallback != null)
mSlideCallback.onStateChanged(state);
}
@Override
public int clampViewPositionHorizontal(View child, int left, int dx) {
int ret = mInsets.left;
if ((mTrackingEdge & EDGE_LEFT) != 0) {
ret = Math.min(child.getWidth(), Math.max(left, 0));
} else if ((mTrackingEdge & EDGE_RIGHT) != 0) {
ret = Math.min(mInsets.left, Math.max(left, -child.getWidth()));
            }
return ret;
}
@Override
public int clampViewPositionVertical(View child, int top, int dy) {
int ret = mContentView.getTop();
if ((mTrackingEdge & EDGE_BOTTOM) != 0) {
ret = Math.min(0, Math.max(top, -child.getHeight()));
} else if ((mTrackingEdge & EDGE_TOP) != 0) {
ret = Math.min(child.getHeight(), Math.max(top, 0));
}
return ret;
}
}
/**
* The interface Background view.
*/
public interface IBackgroundView {
/**
* Draw.
*
* @param canvas the canvas
*/
void draw(Canvas canvas);
/**
* Can go back boolean.
*
* @return the boolean
*/
boolean canGoBack();
}
public interface ParallaxSlideCallback {
void onStateChanged(int state);
void onPositionChanged(float percent);
}
//endregion
}
| 9,586 |
1,018 | /*
* Copyright 2016-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.ui;
import java.util.Hashtable;
import java.util.List;
import java.util.Set;
import javax.naming.Context;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.naming.ldap.InitialLdapContext;
import javax.naming.ldap.LdapContext;
import com.google.common.collect.Sets;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.immutables.value.Value;
import org.glowroot.agent.api.Instrumentation;
import org.glowroot.common2.config.LdapConfig;
import org.glowroot.common2.repo.util.Encryption;
import org.glowroot.common2.repo.util.LazySecretKey;
import static com.google.common.base.Preconditions.checkNotNull;
class LdapAuthentication {
static Set<String> getGlowrootRoles(Set<String> ldapGroupDns, LdapConfig ldapConfig) {
Set<String> glowrootRoles = Sets.newHashSet();
for (String ldapGroupDn : ldapGroupDns) {
List<String> roles = ldapConfig.roleMappings().get(ldapGroupDn);
if (roles != null) {
glowrootRoles.addAll(roles);
}
}
return glowrootRoles;
}
// optional newPlainPassword can be passed in to test LDAP from
// AdminJsonService.testLdapConnection() without possibility of throwing
// org.glowroot.common.repo.util.LazySecretKey.SymmetricEncryptionKeyMissingException
static Set<String> authenticateAndGetLdapGroupDns(String username, String password,
LdapConfig ldapConfig, @Nullable String passwordOverride, LazySecretKey lazySecretKey)
throws Exception {
String systemUsername = ldapConfig.username();
String systemPassword = getPassword(ldapConfig, passwordOverride, lazySecretKey);
LdapContext ldapContext;
try {
ldapContext = createLdapContext(systemUsername, systemPassword, ldapConfig);
} catch (NamingException e) {
throw new AuthenticationException("System LDAP authentication failed", e);
}
String userDn;
try {
userDn = getUserDn(ldapContext, username, ldapConfig);
} catch (NamingException e) {
throw new AuthenticationException(e);
}
if (userDn == null) {
throw new AuthenticationException("User not found: " + username);
}
try {
createLdapContext(userDn, password, ldapConfig);
return getGroupDnsForUserDn(ldapContext, userDn, ldapConfig);
} catch (NamingException e) {
throw new AuthenticationException(e);
}
}
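    // Illustrative caller (hypothetical, not part of this class): the two static methods
    // above are typically chained to turn an LDAP login into Glowroot roles, e.g.
    //   Set<String> groupDns = LdapAuthentication.authenticateAndGetLdapGroupDns(
    //           username, password, ldapConfig, null, lazySecretKey);
    //   Set<String> roles = LdapAuthentication.getGlowrootRoles(groupDns, ldapConfig);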
@Instrumentation.TraceEntry(message = "create ldap context", timer = "ldap")
private static LdapContext createLdapContext(String username, String password,
LdapConfig ldapConfig) throws NamingException {
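        // Simple (username/password) bind against the configured LDAP URL using the
        // JDK's built-in LDAP provider.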
Hashtable<String, Object> env = new Hashtable<String, Object>();
env.put(Context.SECURITY_AUTHENTICATION, "simple");
env.put(Context.SECURITY_PRINCIPAL, username);
env.put(Context.SECURITY_CREDENTIALS, password);
env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
env.put(Context.PROVIDER_URL, ldapConfig.url());
return new InitialLdapContext(env, null);
}
private static String getPassword(LdapConfig ldapConfig, @Nullable String passwordOverride,
LazySecretKey lazySecretKey) throws Exception {
if (passwordOverride != null) {
return passwordOverride;
}
String password = ldapConfig.encryptedPassword();
if (password.isEmpty()) {
return "";
}
return Encryption.decrypt(password, lazySecretKey);
}
@Instrumentation.TraceEntry(message = "get ldap user DN for username: {{1}}", timer = "ldap")
private static @Nullable String getUserDn(LdapContext ldapContext, String username,
LdapConfig ldapConfig) throws NamingException {
SearchControls searchCtls = new SearchControls();
searchCtls.setSearchScope(SearchControls.SUBTREE_SCOPE);
NamingEnumeration<?> namingEnum = ldapContext.search(ldapConfig.userBaseDn(),
ldapConfig.userSearchFilter(), new String[] {username}, searchCtls);
try {
if (!namingEnum.hasMore()) {
return null;
}
SearchResult result = (SearchResult) checkNotNull(namingEnum.next());
String userDn = result.getNameInNamespace();
if (namingEnum.hasMore()) {
throw new IllegalStateException("More than matching user: " + username);
}
return userDn;
} finally {
namingEnum.close();
}
}
@Instrumentation.TraceEntry(message = "get ldap group DNs for user DN: {{1}}", timer = "ldap")
private static Set<String> getGroupDnsForUserDn(LdapContext ldapContext, String userDn,
LdapConfig ldapConfig) throws NamingException {
SearchControls searchCtls = new SearchControls();
searchCtls.setSearchScope(SearchControls.SUBTREE_SCOPE);
NamingEnumeration<?> namingEnum = ldapContext.search(ldapConfig.groupBaseDn(),
ldapConfig.groupSearchFilter(), new String[] {userDn}, searchCtls);
try {
Set<String> ldapGroups = Sets.newHashSet();
while (namingEnum.hasMore()) {
SearchResult result = (SearchResult) checkNotNull(namingEnum.next());
ldapGroups.add(result.getNameInNamespace());
}
return ldapGroups;
} finally {
namingEnum.close();
}
}
@Value.Immutable
interface AuthenticationResult {
String userDn();
Set<String> ldapGroupDns();
}
@SuppressWarnings("serial")
static class AuthenticationException extends Exception {
AuthenticationException(String message) {
super(message);
}
private AuthenticationException(Throwable cause) {
super(cause);
}
private AuthenticationException(String message, @Nullable Throwable cause) {
super(message, cause);
}
}
}
| 2,773 |
339 | package org.wso2.carbon.esb.scenarios.test.healthcare;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.wso2.carbon.aarservices.stub.ExceptionException;
import org.wso2.carbon.authenticator.stub.LoginAuthenticationExceptionException;
import org.wso2.carbon.automation.engine.annotations.ExecutionEnvironment;
import org.wso2.carbon.automation.engine.annotations.SetEnvironment;
import org.wso2.esb.integration.common.utils.servers.axis2.SampleAxis2Server;
import java.io.IOException;
import java.net.MalformedURLException;
import java.rmi.RemoteException;
public class HealthCareScenarioServerStartupTestCase {
private SampleAxis2Server axis2Server1 = null;
private String[] serviceNames = {"geows", "hcfacilitylocator", "hcinformationservice"};
@SetEnvironment(executionEnvironments = {ExecutionEnvironment.STANDALONE})
@BeforeTest(alwaysRun = true)
public void deployServices()
throws IOException, LoginAuthenticationExceptionException, ExceptionException {
axis2Server1 = new SampleAxis2Server("test_axis2_server_9009.xml");
axis2Server1.start();
axis2Server1.deployService(serviceNames[0]);
axis2Server1.deployService(serviceNames[1]);
axis2Server1.deployService(serviceNames[2]);
}
@SetEnvironment(executionEnvironments = {ExecutionEnvironment.STANDALONE})
@AfterTest(alwaysRun = true)
public void unDeployServices()
throws MalformedURLException, LoginAuthenticationExceptionException, ExceptionException,
RemoteException {
if (axis2Server1 != null && axis2Server1.isStarted()) {
axis2Server1.stop();
}
}
}
| 610 |
3,631 | <reponame>kostola/drools<filename>kie-pmml-trusty/kie-pmml-models/kie-pmml-models-drools/kie-pmml-models-drools-common/src/main/java/org/kie/pmml/models/drools/tuples/KiePMMLReasonCodeAndValue.java
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.pmml.models.drools.tuples;
import java.io.Serializable;
import java.util.Objects;
import java.util.StringJoiner;
/**
 * Tuple representing the <b>Reason Code</b> and its computed <b>value</b> as used inside <b>Scorecard</b>
*
* @see <a href=http://dmg.org/pmml/v4-4/Scorecard.html#rankinReasongCodes>Ranking Reason Codes</a>
*/
public class KiePMMLReasonCodeAndValue implements Serializable {
private static final long serialVersionUID = 5978972455322748898L;
private final String reasonCode;
private final double value;
public KiePMMLReasonCodeAndValue(String reasonCode, double value) {
this.reasonCode = reasonCode;
this.value = value;
}
public String getReasonCode() {
return reasonCode;
}
public double getValue() {
return value;
}
@Override
public String toString() {
return new StringJoiner(", ", KiePMMLReasonCodeAndValue.class.getSimpleName() + "[", "]")
.add("reasonCode='" + reasonCode + "'")
.add("value=" + value)
.toString();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
KiePMMLReasonCodeAndValue that = (KiePMMLReasonCodeAndValue) o;
return Double.compare(that.value, value) == 0 &&
Objects.equals(reasonCode, that.reasonCode);
}
@Override
public int hashCode() {
return Objects.hash(reasonCode, value);
}
}
| 914 |
1,273 | {
"recordings": [
{
"method": "PUT",
"url": "https://fakestorageaccount.queue.core.windows.net/queue156816848946303887",
"query": {
"timeout": "30"
},
"requestBody": null,
"status": 201,
"response": "",
"responseHeaders": {
"date": "Wed, 11 Sep 2019 02:21:28 GMT",
"server": "Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0",
"x-ms-request-id": "5019a475-f003-0027-1e47-68fcb5000000",
"x-ms-version": "2019-02-02",
"x-ms-client-request-id": "ef18483d-e154-4699-8f68-a570d6fbdb71",
"content-length": "0"
}
},
{
"method": "POST",
"url": "https://fakestorageaccount.queue.core.windows.net/queue156816848946303887/messages",
"query": {
"messagettl": "40",
"timeout": "30",
"visibilitytimeout": "0"
},
"requestBody": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><QueueMessage><MessageText/></QueueMessage>",
"status": 201,
"response": "<?xml version=\"1.0\" encoding=\"utf-8\"?><QueueMessagesList><QueueMessage><MessageId>917274f0-d51e-4768-9892-e6fdb9663621</MessageId><InsertionTime>Wed, 11 Sep 2019 02:21:29 GMT</InsertionTime><ExpirationTime>Wed, 11 Sep 2019 02:22:09 GMT</ExpirationTime><PopReceipt>AgAAAAMAAAAAAAAAdUgTn0do1QE=</PopReceipt><TimeNextVisible>Wed, 11 Sep 2019 02:21:29 GMT</TimeNextVisible></QueueMessage></QueueMessagesList>",
"responseHeaders": {
"date": "Wed, 11 Sep 2019 02:21:29 GMT",
"server": "Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0",
"transfer-encoding": "chunked",
"content-type": "application/xml",
"x-ms-request-id": "5019a4ef-f003-0027-6f47-68fcb5000000",
"x-ms-version": "2019-02-02",
"x-ms-client-request-id": "e32cb822-b8b2-440d-b250-f0add3eb277f"
}
},
{
"method": "GET",
"url": "https://fakestorageaccount.queue.core.windows.net/queue156816848946303887/messages",
"query": {
"numofmessages": "2",
"peekonly": "true",
"timeout": "30"
},
"requestBody": null,
"status": 200,
"response": "<?xml version=\"1.0\" encoding=\"utf-8\"?><QueueMessagesList><QueueMessage><MessageId>917274f0-d51e-4768-9892-e6fdb9663621</MessageId><InsertionTime>Wed, 11 Sep 2019 02:21:29 GMT</InsertionTime><ExpirationTime>Wed, 11 Sep 2019 02:22:09 GMT</ExpirationTime><DequeueCount>0</DequeueCount><MessageText /></QueueMessage></QueueMessagesList>",
"responseHeaders": {
"date": "Wed, 11 Sep 2019 02:21:29 GMT",
"server": "Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0",
"transfer-encoding": "chunked",
"content-type": "application/xml",
"x-ms-request-id": "5019a564-f003-0027-4047-68fcb5000000",
"cache-control": "no-cache",
"x-ms-version": "2019-02-02",
"x-ms-client-request-id": "11a4e0c8-c23a-4bc8-bfbd-b138b8f8347a"
}
},
{
"method": "GET",
"url": "https://fakestorageaccount.queue.core.windows.net/queue156816848946303887/messages",
"query": {
"numofmessages": "2",
"timeout": "30",
"visibilitytimeout": "10"
},
"requestBody": null,
"status": 200,
"response": "<?xml version=\"1.0\" encoding=\"utf-8\"?><QueueMessagesList><QueueMessage><MessageId>917274f0-d51e-4768-9892-e6fdb9663621</MessageId><InsertionTime>Wed, 11 Sep 2019 02:21:29 GMT</InsertionTime><ExpirationTime>Wed, 11 Sep 2019 02:22:09 GMT</ExpirationTime><PopReceipt>AgAAAAMAAAAAAAAAeXVHpUdo1QE=</PopReceipt><TimeNextVisible>Wed, 11 Sep 2019 02:21:40 GMT</TimeNextVisible><DequeueCount>1</DequeueCount><MessageText /></QueueMessage></QueueMessagesList>",
"responseHeaders": {
"date": "Wed, 11 Sep 2019 02:21:29 GMT",
"server": "Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0",
"transfer-encoding": "chunked",
"content-type": "application/xml",
"x-ms-request-id": "5019a5d6-f003-0027-0947-68fcb5000000",
"cache-control": "no-cache",
"x-ms-version": "2019-02-02",
"x-ms-client-request-id": "848c67c4-b758-45fe-9963-24cb5cf0f4c6"
}
},
{
"method": "DELETE",
"url": "https://fakestorageaccount.queue.core.windows.net/queue156816848946303887",
"query": {
"timeout": "30"
},
"requestBody": null,
"status": 204,
"response": "",
"responseHeaders": {
"date": "Wed, 11 Sep 2019 02:21:29 GMT",
"server": "Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0",
"x-ms-request-id": "5019a665-f003-0027-6547-68fcb5000000",
"x-ms-version": "2019-02-02",
"x-ms-client-request-id": "2bc65755-684b-42a6-a7f8-ed26908e0396",
"content-length": "0"
}
}
],
"uniqueTestInfo": {
"queue": "queue156816848946303887"
}
} | 1,967 |
1,699 | <gh_stars>1000+
//
// AKMorphingOscillatorDSP.hpp
// AudioKit
//
// Created by <NAME>, revision history on Github.
// Copyright © 2018 AudioKit. All rights reserved.
//
#pragma once
#import <AVFoundation/AVFoundation.h>
typedef NS_ENUM(AUParameterAddress, AKMorphingOscillatorParameter) {
AKMorphingOscillatorParameterFrequency,
AKMorphingOscillatorParameterAmplitude,
AKMorphingOscillatorParameterIndex,
AKMorphingOscillatorParameterDetuningOffset,
AKMorphingOscillatorParameterDetuningMultiplier,
AKMorphingOscillatorParameterRampDuration
};
#ifndef __cplusplus
AKDSPRef createMorphingOscillatorDSP(int channelCount, double sampleRate);
#else
#import "AKSoundpipeDSPBase.hpp"
class AKMorphingOscillatorDSP : public AKSoundpipeDSPBase {
private:
struct InternalData;
std::unique_ptr<InternalData> data;
public:
AKMorphingOscillatorDSP();
float frequencyLowerBound = 0.0;
float frequencyUpperBound = 22050.0;
float amplitudeLowerBound = 0.0;
float amplitudeUpperBound = 1.0;
float indexLowerBound = 0.0;
float indexUpperBound = 1000.0;
float detuningOffsetLowerBound = -1000.0;
float detuningOffsetUpperBound = 1000.0;
float detuningMultiplierLowerBound = 0.9;
float detuningMultiplierUpperBound = 1.11;
float defaultFrequency = 440;
float defaultAmplitude = 0.5;
float defaultIndex = 0.0;
float defaultDetuningOffset = 0;
float defaultDetuningMultiplier = 1;
int defaultRampDurationSamples = 10000;
// Uses the ParameterAddress as a key
void setParameter(AUParameterAddress address, float value, bool immediate) override;
// Uses the ParameterAddress as a key
float getParameter(AUParameterAddress address) override;
void init(int channelCount, double sampleRate) override;
void deinit() override;
void reset() override;
void process(AUAudioFrameCount frameCount, AUAudioFrameCount bufferOffset) override;
void setupIndividualWaveform(uint32_t waveform, uint32_t size) override;
void setIndividualWaveformValue(uint32_t waveform, uint32_t index, float value) override;
};
#endif
| 739 |
3,189 | <reponame>muha0039/dexcount-gradle-plugin
package com.getkeepsafe.dexcount.integration;
public class Lib {
public static void foo() {
android.util.Log.d("Lib", "I'm in an AAR!");
}
}
| 84 |
599 | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import random
import string
class BasicAuthGenerator:
alphabet = string.ascii_letters + string.digits
pw_length = 16
username_random_length = 4
max_retries = 5
def _generate_random_string(self, length):
"""
随机生成字符串 生成 大小写数字, 且包含至少一位数字
"""
password_chars = [random.choice(self.alphabet) for _ in range(length - 1)]
password_chars.append(random.choice(string.digits))
random.shuffle(password_chars)
return ''.join(password_chars)
def generate_username(self):
return self._generate_random_string(self.username_random_length)
def generate_password(self):
return self._generate_random_string(self.pw_length)
def _generate_multiple_random_string(self, number, length):
"""
产生一组不相同的随机串 预留
"""
random_string_list = set()
        retried = 0
        while retried < self.max_retries:
            for _ in range(number - len(random_string_list)):
                random_string_list.add(self._generate_random_string(length))
            # if any random string is duplicated, the length of the set will be lower than number
            if len(random_string_list) == number:
                return random_string_list
            retried += 1
        raise RuntimeError("Can not generate %s unique strings after %s retries!" % (number, self.max_retries))
def generate_basic_auth_by_role(self, role):
return {'username': '%s-%s' % (role, self.generate_username()), 'password': self.generate_password()}
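# Hypothetical usage sketch (not part of the original module; output shown is illustrative):
#   generator = BasicAuthGenerator()
#   credentials = generator.generate_basic_auth_by_role("admin")
#   # -> {'username': 'admin-a3Xk', 'password': '<16-character random string>'}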
| 865 |
308 | #ifndef __GLABEL_H__
#define __GLABEL_H__
#include "cocos2d.h"
#include "FairyGUIMacros.h"
#include "GComponent.h"
NS_FGUI_BEGIN
class GTextField;
class GLabel : public GComponent
{
public:
GLabel();
virtual ~GLabel();
CREATE_FUNC(GLabel);
const std::string& getTitle() { return getText(); }
void setTitle(const std::string& value) { setText(value); };
virtual const std::string& getText() const override;
virtual void setText(const std::string& value) override;
virtual const std::string& getIcon() const override;
virtual void setIcon(const std::string& value) override;
cocos2d::Color3B getTitleColor() const;
void setTitleColor(const cocos2d::Color3B& value);
int getTitleFontSize() const;
void setTitleFontSize(int value);
GTextField* getTextField() const;
virtual cocos2d::Value getProp(ObjectPropID propId) override;
virtual void setProp(ObjectPropID propId, const cocos2d::Value& value) override;
protected:
virtual void constructExtension(ByteBuffer* buffer) override;
virtual void setup_afterAdd(ByteBuffer* buffer, int beginPos) override;
private:
GObject* _titleObject;
GObject* _iconObject;
};
NS_FGUI_END
#endif
| 491 |
816 | <gh_stars>100-1000
# --coding:utf-8--
#
# Copyright (c) 2020 vesoft inc. All rights reserved.
#
# This source code is licensed under Apache 2.0 License,
# attached with Common Clause Condition 1.0, found in the LICENSES directory.
from tests.common.nebula_test_suite import NebulaTestSuite
class TestFetchQuery(NebulaTestSuite):
@classmethod
def prepare(self):
self.use_student_space()
def test_fetch_vertex(self):
# fetch *
cmd = 'FETCH PROP ON * 2001;'
resp = self.execute(cmd)
self.check_resp_succeeded(resp)
expect_result = [[2001, 'Mary', 25, 'female', 5, 'Math']]
self.check_out_of_order_result(resp.rows, expect_result)
# fetch with specified tag
cmd = 'FETCH PROP ON teacher 2001;'
resp = self.execute(cmd)
expect_result = [[2001, 5, 'Math']]
self.check_resp_succeeded(resp)
self.check_out_of_order_result(resp.rows, expect_result)
# fetch with yield
cmd = 'FETCH PROP ON teacher 2001 YIELD teacher.grade AS Grade, teacher.subject AS Subject;'
resp = self.execute(cmd)
expect_result = [[2001, 5, 'Math']]
self.check_resp_succeeded(resp)
self.check_out_of_order_result(resp.rows, expect_result)
def test_fetch_edge(self):
# fetch edge
cmd = 'FETCH PROP ON is_colleagues 2001 -> 2002'
resp = self.execute(cmd)
self.check_resp_succeeded(resp)
expect_result = [[2001, 2002, 0, 2015, 0]]
self.check_out_of_order_result(resp.rows, expect_result)
| 671 |
2,100 | /*
* Copyright (C) 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <jni.h>
#include <GL/glx.h>
#include <stdio.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* Native JNI helper function to create an OpenGL 3.2 Core context.
* This is done in native code to catch the X11 error, when creating
* the context, to prevent it from taking down the whole process.
*/
JNIEXPORT jlong JNICALL Java_com_google_gapid_glcanvas_GlCanvas_createContext0(
JNIEnv* env, jlong clazz, Display* display, GLXFBConfig config) {
PFNGLXCREATECONTEXTATTRIBSARBPROC glXCreateContextAttribsARB =
(PFNGLXCREATECONTEXTATTRIBSARBPROC)glXGetProcAddress(
(const GLubyte*)"glXCreateContextAttribsARB");
if (glXCreateContextAttribsARB == nullptr) {
// This shouldn't really happen, as we check this Java side.
return 0;
}
const int attr[] = {
GLX_RENDER_TYPE, GLX_RGBA_TYPE,
GLX_CONTEXT_MAJOR_VERSION_ARB, 3,
GLX_CONTEXT_MINOR_VERSION_ARB, 2,
GLX_CONTEXT_PROFILE_MASK_ARB, GLX_CONTEXT_CORE_PROFILE_BIT_ARB,
None,
};
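    // Temporarily install a no-op X error handler: if the requested 3.2 core context is
    // not supported, glXCreateContextAttribsARB reports it via an X error, which would
    // otherwise terminate the whole process.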
auto oldHandler = XSetErrorHandler([](Display*, XErrorEvent*)->int{ return 0; });
auto context = glXCreateContextAttribsARB(display, config, 0, true, attr);
XSetErrorHandler(oldHandler);
return (jlong)context;
}
#ifdef __cplusplus
}
#endif
| 657 |
1,104 | {
"agi": "AGI",
"agility": "Agilidade",
"armor": "Armadura",
"astralcombat": "Combate Astral",
"athletics": "Atletismo",
"attribute": "Atributo",
"attributes": "Atributos",
"awakened": "Awakened",
"biotech": "Biotech",
"cha": "CAR",
"charisma": "Carisma",
"close": "Perto",
"closecombat": "Combate Engajado ",
"con": "Con",
"condition": "Monitor de Condição",
"conjuring": "Conjurando",
"contacts": "Contatos",
"cues": "Cues",
"d6s": "D6s",
"damage": "Dano",
"defense": "Defesa",
"disguise": "Disfarce",
"dispositions": "Dispositions",
"edg": "EDG",
"edge": "Moral",
"electronics": "Eletronicos",
"emerged": "Emerged",
"engineering": "Engenharia",
"escapeartist": "Arte da Fuga",
"essence": "Essência",
"exploit": "EXPLOIT!",
"far": "Distânte",
"firearms": "Armas de fogo",
"gear": "Gear",
"glitch": "GLITCH!",
"glitchdie": "Glitch Die",
"hacking": "Hackear",
"heavyweapons": "Armas Pesadas",
"intimidation": "Intimidação",
"karmabalance": "Balanço de Karma",
"keywords": "Palavra-Chave",
"knowledge": "Conhecimento",
"log": "LOG",
"logic": "Lógica",
"metatype": "Metatipo",
"modifier": "Modificador",
"name": "Nome",
"near": "Próximo",
"negotiation": "Negociação",
"nocons": "sem consequência...",
"none": "Nenhum",
"NPC": "PdM",
"npcoptions": "Opções de PdM",
"npctoggle": "Npc Toggle",
"null": "Atributo nulo",
"perception": "Percepção",
"physical": "Físico",
"physicalbonus": "Bônus Físico",
"pilotground": "Piloting (Ground)",
"pilotother": "Piloting (Other)",
"plot": "Plot",
"projectileweapons": "Armas de projétil",
"public": "Público",
"qualities": "Qualidades",
"shadowamps": "Shadow Amps",
"sheettype": "Tipo da Ficha",
"skills": "Perícias",
"sorcery": "Feitiçaria",
"spelldefense": "Defesa Magica",
"stealth": "Furtividade",
"str": "FOR",
"strength": "Força",
"stun": "Stun",
"stunbonus": "Stun Bonus",
"survival": "Sobrevivência",
"tasking": "Tasking",
"topps": "The Topps Company, Inc. has sole ownership of the names, logo, artwork, marks, photographs, sounds, audio, video and/or any proprietary material used in connection with the game Shadowrun. The Topps Company, Inc. has granted permission to use such names, logos, artwork, marks and/or any proprietary materials for promotional and informational purposes on its website but does not endorse, and is not affiliated with any official capacity whatsoever.",
"totalkarma": "Carma Total",
"tracking": "Rastrear",
"unarmed": "Desarmado",
"useroptions-english": "Inglês",
"useroptions-french": "Francês",
"useroptions-npc": "Use the NPC sheet by default.",
"useroptions-template": "Modelo de ficha",
"useroptions-template-description": "Config. o modelo de estilo padrão. Você pode usar o modelo Inglês original ou o modelo francês",
"useroptions-whisper": "Ative para rolagem pública ou para gm.",
"vehicleweapons": "Armas de Veículo",
"weapons": "Armas",
"whisper": "Sussurrar",
"wil": "WIL",
"willpower": "Força de Vontade",
"wounds": "Ferimentos",
"+S": "+S",
"+A": "+A",
"+W": "+W",
"+L": "+L",
"+C": "+C",
"+E": "+E",
"(K)": "(K)"
} | 1,477 |
435 | package datawave.ingest.input.reader;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import java.io.IOException;
/**
* Empty class that is used to process shard index stats bulk ingest files. The class is only referenced in order to allow the bulk input operation to succeed
* and to enable the shardStats to be recognized as a type.
*
 * @param <K> the key type
 * @param <V> the value type
*/
public class ShardStatsRecordReader<K,V> extends RecordReader<K,V> {
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
return false;
}
@Override
public K getCurrentKey() throws IOException, InterruptedException {
return null;
}
@Override
public V getCurrentValue() throws IOException, InterruptedException {
return null;
}
@Override
public float getProgress() throws IOException, InterruptedException {
return 0;
}
@Override
public void close() throws IOException {
}
}
| 433 |
745 | <gh_stars>100-1000
#pragma once
#include <unistd.h>
#include "device.hpp"
#include "host_tensor.hpp"
#include "transform_forward_convolution_into_gemm_v6r1_nchw_kcyx_nkhw.hpp"
#include "driver_contraction_dlops_v1r2.hpp"
template <typename TInWei,
typename TAcc,
typename TOut,
typename InLengths,
typename WeiLengths,
typename OutLengths,
typename ConvStrides,
typename ConvDilations,
typename InLeftPads,
typename InRightPads>
void device_convolution_forward_implicit_gemm_v6r1_dlops_nchw_kcyx_nkhw(
const InLengths& in_n_c_hi_wi_lengths,
const WeiLengths& wei_k_c_y_x_lengths,
const OutLengths& out_n_k_ho_wo_lengths,
const ConvStrides& conv_strides,
const ConvDilations& conv_dilations,
const InLeftPads& in_left_pads,
const InRightPads& in_right_pads,
const Tensor<TInWei>& in_n_c_hi_wi,
const Tensor<TInWei>& wei_k_c_y_x,
Tensor<TOut>& out_n_k_ho_wo,
ck::index_t nrepeat)
{
using namespace ck;
std::cout << __func__ << std::endl;
constexpr auto I0 = Number<0>{};
constexpr auto I1 = Number<1>{};
constexpr auto I2 = Number<2>{};
DeviceMem in_n_c_hi_wi_device_buf(sizeof(TInWei) * in_n_c_hi_wi.mDesc.GetElementSpace());
DeviceMem wei_k_c_y_x_device_buf(sizeof(TInWei) * wei_k_c_y_x.mDesc.GetElementSpace());
DeviceMem out_n_k_ho_wo_device_buf(sizeof(TOut) * out_n_k_ho_wo.mDesc.GetElementSpace());
in_n_c_hi_wi_device_buf.ToDevice(in_n_c_hi_wi.mData.data());
wei_k_c_y_x_device_buf.ToDevice(wei_k_c_y_x.mData.data());
out_n_k_ho_wo_device_buf.ToDevice(out_n_k_ho_wo.mData.data());
const auto in_desc_n_c_hi_wi = make_naive_tensor_descriptor_packed(in_n_c_hi_wi_lengths);
const auto wei_desc_k_c_y_x = make_naive_tensor_descriptor_packed(wei_k_c_y_x_lengths);
const auto out_desc_n_k_ho_wo = make_naive_tensor_descriptor_packed(out_n_k_ho_wo_lengths);
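    // The preprocessor branches below select one set of compile-time tiling/tuning
    // parameters (block size, per-block tile sizes, thread-cluster shapes and vector
    // transfer lengths); the first branch targets fp32, the second fp16.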
#if 1
// [8, 1, 128, 1] * [8, 4, 32, 1] = [1, 128, 4, 32] for fp32
// cdata = 64, BlockSize = 256
constexpr index_t BlockSize = 256;
constexpr index_t GN0 = 4;
constexpr index_t GK1 = 1;
constexpr index_t GM1PerBlockGM11 = 128;
constexpr index_t GN1PerBlockGN11 = 32;
constexpr index_t GK0PerBlock = 8;
constexpr index_t BM1PerThreadBM11 = 4;
constexpr index_t BN1PerThreadBN11 = 4;
constexpr index_t BK0PerThread = 1;
using BM10BN10ThreadClusterBM10Xs = Sequence<8, 2>;
using BM10BN10ThreadClusterBN10Xs = Sequence<8, 2>;
using ABlockTransferThreadSliceLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<4, 1, 1, 1, 1>;
using ABlockTransferThreadClusterLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<2, 1, 1, 128, 1>;
using ABlockTransferSrcVectorTensorLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<4, 1, 1, 1, 1>;
using ABlockTransferDstVectorTensorLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<1, 1, 1, 1, 1>;
using BBlockTransferThreadSliceLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<1, 4, 1, 1, 1>;
using BBlockTransferThreadClusterLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<8, 1, 1, 32, 1>;
using BBlockTransferSrcVectorTensorLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<1, 1, 1, 1, 1>;
using BBlockTransferDstVectorTensorLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<1, 1, 1, 1, 1>;
constexpr index_t CThreadTransferDstScalarPerVector_BN1 = 1;
#elif 1
// [8, 1, 128, 2] * [8, 4, 32, 2] = [1, 128, 4, 32] for fp16
// cdata = 64, BlockSize = 256
constexpr index_t BlockSize = 256;
constexpr index_t GN0 = 4;
constexpr index_t GK1 = 2;
constexpr index_t GM1PerBlockGM11 = 128;
constexpr index_t GN1PerBlockGN11 = 32;
constexpr index_t GK0PerBlock = 8;
constexpr index_t BM1PerThreadBM11 = 4;
constexpr index_t BN1PerThreadBN11 = 4;
constexpr index_t BK0PerThread = 1;
using BM10BN10ThreadClusterBM10Xs = Sequence<8, 2>;
using BM10BN10ThreadClusterBN10Xs = Sequence<8, 2>;
using ABlockTransferThreadSliceLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<4, 1, 1, 1, 2>;
using ABlockTransferThreadClusterLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<2, 1, 1, 128, 1>;
using ABlockTransferSrcVectorTensorLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<4, 1, 1, 1, 1>;
using ABlockTransferDstVectorTensorLengths_GK0_GM0_GM10_GM11_GK1 = Sequence<1, 1, 1, 1, 2>;
using BBlockTransferThreadSliceLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<1, 4, 1, 1, 2>;
using BBlockTransferThreadClusterLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<8, 1, 1, 32, 1>;
using BBlockTransferSrcVectorTensorLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<1, 1, 1, 1, 1>;
using BBlockTransferDstVectorTensorLengths_GK0_GN0_GN10_GN11_GK1 = Sequence<1, 1, 1, 1, 2>;
constexpr index_t CThreadTransferDstScalarPerVector_BN1 = 1;
#endif
const auto descs =
transform_forward_convolution_into_contraction_v6r1_nchw_kcyx_nkhw_pad(wei_desc_k_c_y_x,
in_desc_n_c_hi_wi,
out_desc_n_k_ho_wo,
conv_strides,
conv_dilations,
in_left_pads,
in_right_pads,
Number<GN0>{},
Number<GK1>{});
const auto wei_grid_desc_gk0_gm0_gm1_gk1 = descs[I0];
const auto in_grid_desc_gk0_gn0_gn1_gk1 = descs[I1];
const auto out_grid_desc_gm0_gm1_gn0_gn1 = descs[I2];
// HACK: hacks that control index calculation when iterating over A, B, C matrix
constexpr auto wei_grid_step_hacks =
make_tuple(make_tuple(Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 0+: GK0
Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 1+: GM0
Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 2+: GM10
Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 3+: GM11
Sequence<0, 0, 0, 0, 0, 0, 0>{}), // 4+: GK1
make_tuple(Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 0-: GK0
Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 1-: GM0
Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 2-: GM10
Sequence<0, 0, 0, 0, 0, 0, 0>{}, // 3-: GM11
Sequence<0, 0, 0, 0, 0, 0, 0>{})); // 4-: GK1
constexpr auto in_grid_step_hacks = make_tuple(
make_tuple(Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0>{}, // 0+: GK0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0>{}, // 1+: GN0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0>{}, // 2+: GN10
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0>{}, // 3+: GN11
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>{}), // 4+: GK1
make_tuple(Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0>{}, // 0-: GK0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0>{}, // 1-: GN0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0>{}, // 2-: GN10
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0>{}, // 3-: GN11
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>{})); // 4-: GK1
constexpr auto out_grid_step_hacks = make_tuple(
make_tuple(
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>{}, // 0+: GM10
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0>{}, // 1+: BM0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0>{}, // 2+: BM1
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>{}, // 3+: GN10
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0>{}, // 4+: BN0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0>{}), // 5+: GN1
make_tuple(
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>{}, // 0-: GM10
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0>{}, // 1-: BM0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0>{}, // 2-: BM1
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>{}, // 3-: GN10
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0>{}, // 4-: BN0
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0>{})); // 5-: GN1
constexpr auto wei_grid_move_slice_window_step_hacks = Sequence<0, 0, 0, 0, 0, 0, 0>{};
constexpr auto in_grid_move_slice_window_step_hacks =
Sequence<0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 2, 0, 0, 0, 0, 0>{};
for(index_t i = 0; i < 5; ++i)
{
float ave_time = driver_contraction_dlops_v1r2<
BlockSize,
TInWei,
TAcc,
TOut,
InMemoryDataOperationEnum_t::Set,
decltype(wei_grid_desc_gk0_gm0_gm1_gk1),
decltype(in_grid_desc_gk0_gn0_gn1_gk1),
decltype(out_grid_desc_gm0_gm1_gn0_gn1),
GM1PerBlockGM11,
GN1PerBlockGN11,
GK0PerBlock,
BM1PerThreadBM11,
BN1PerThreadBN11,
BK0PerThread,
BM10BN10ThreadClusterBM10Xs,
BM10BN10ThreadClusterBN10Xs,
ABlockTransferThreadSliceLengths_GK0_GM0_GM10_GM11_GK1,
ABlockTransferThreadClusterLengths_GK0_GM0_GM10_GM11_GK1,
Sequence<1, 2, 3, 0, 4>, // ABlockTransferThreadClusterArrangeOrder
Sequence<3, 2, 1, 0, 4>, // ABlockTransferSrcAccessOrder
ABlockTransferSrcVectorTensorLengths_GK0_GM0_GM10_GM11_GK1,
ABlockTransferDstVectorTensorLengths_GK0_GM0_GM10_GM11_GK1,
Sequence<0, 1, 2, 3, 4>, // ABlockTransferSrcVectorTensorContiguousDimOrder
BBlockTransferThreadSliceLengths_GK0_GN0_GN10_GN11_GK1,
BBlockTransferThreadClusterLengths_GK0_GN0_GN10_GN11_GK1,
Sequence<0, 4, 1, 2, 3>, // BBlockTransferThreadClusterArrangeOrder
Sequence<4, 3, 2, 0, 1>, // BBlockTransferSrcAccessOrder
BBlockTransferSrcVectorTensorLengths_GK0_GN0_GN10_GN11_GK1,
BBlockTransferDstVectorTensorLengths_GK0_GN0_GN10_GN11_GK1,
Sequence<0, 1, 2, 3, 4>, // BBlockTransferSrcVectorTensorContiguousDimOrder
Sequence<3, 4, 5, 0, 1, 2>, // CThreadTransferSrcDstAccessOrder
5, // CThreadTransferSrcDstVectorDim
CThreadTransferDstScalarPerVector_BN1,
decltype(wei_grid_step_hacks),
decltype(in_grid_step_hacks),
decltype(out_grid_step_hacks),
decltype(wei_grid_move_slice_window_step_hacks),
decltype(in_grid_move_slice_window_step_hacks)>(
static_cast<TInWei*>(wei_k_c_y_x_device_buf.GetDeviceBuffer()),
static_cast<TInWei*>(in_n_c_hi_wi_device_buf.GetDeviceBuffer()),
static_cast<TOut*>(out_n_k_ho_wo_device_buf.GetDeviceBuffer()),
wei_grid_desc_gk0_gm0_gm1_gk1,
in_grid_desc_gk0_gn0_gn1_gk1,
out_grid_desc_gm0_gm1_gn0_gn1,
wei_grid_step_hacks,
in_grid_step_hacks,
out_grid_step_hacks,
wei_grid_move_slice_window_step_hacks,
in_grid_move_slice_window_step_hacks,
nrepeat);
float perf = static_cast<float>(calculate_convolution_flops(
in_desc_n_c_hi_wi, wei_desc_k_c_y_x, out_desc_n_k_ho_wo)) /
(std::size_t(1000) * 1000 * 1000) / ave_time;
std::cout << "Average time : " << ave_time << " ms, " << perf << " TFlop/s" << std::endl;
}
// copy result back to host
out_n_k_ho_wo_device_buf.FromDevice(out_n_k_ho_wo.mData.data());
}
| 7,044 |
922 | package com.redhat.developer.demos.recommendation;
import java.io.IOException;
import java.net.URL;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.logging.HttpLoggingInterceptor;
import org.arquillian.cube.istio.api.IstioResource;
import org.arquillian.cube.istio.api.RestoreIstioResource;
import org.arquillian.cube.istio.impl.IstioAssistant;
import org.arquillian.cube.openshift.impl.enricher.AwaitRoute;
import org.arquillian.cube.openshift.impl.enricher.RouteURL;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.assertj.core.api.Assertions.assertThat;
@RunWith(Arquillian.class)
@IstioResource("classpath:dark-launch-redirect-traffic-to-new-version.yml")
@RestoreIstioResource("classpath:virtual-service-recommendation-v1.yml")
public class DarkLaunchIT {
@RouteURL("customer")
@AwaitRoute
private URL url;
@ArquillianResource
IstioAssistant istioAssistant;
/**
     * Istio resources take some time until all Envoy proxies of the cluster receive the update.
     * Local installations might take about one second; real clusters can take tens of seconds.
*/
@Before
public void waitUntilIstioResourcesArePopulated() {
istioAssistant.await(createRequestForRecommendationV2(), response -> {
try {
return response.body().string().contains("v2");
} catch (IOException e) {
return false;
}
});
}
@Test
public void should_return_accessing_v2_message() throws IOException {
// Given
final Request request = createRequestForRecommendationV2();
// When
final String content = makeRequest(request);
// Then
assertThat(content)
.startsWith("customer => preference => recommendation v2 from");
}
private String makeRequest(Request request) throws IOException {
final HttpLoggingInterceptor interceptor = new HttpLoggingInterceptor();
interceptor.setLevel(HttpLoggingInterceptor.Level.HEADERS);
OkHttpClient client = new OkHttpClient.Builder()
.addNetworkInterceptor(interceptor)
.build();
try(Response response = client.newCall(request).execute()) {
return response.body().string();
}
}
private Request createRequestForRecommendationV2() {
return new Request.Builder()
.url(url.toString())
.addHeader("User-Agent", "Recommendation-v2-DarkLaunch-Test")
.build();
}
}
| 1,050 |
542 | <filename>monk/keras_tests.py
import os
import sys
import time
from monk.pip_functionality_tests.keras.test_default_train import test_default_train
from monk.pip_functionality_tests.keras.test_default_eval_infer import test_default_eval_infer
from monk.pip_functionality_tests.keras.test_update_copy_from import test_update_copy_from
from monk.pip_functionality_tests.keras.test_update_normal import test_update_normal
from monk.pip_functionality_tests.keras.test_update_eval_infer import test_update_eval_infer
from monk.pip_functionality_tests.keras.test_expert_train import test_expert_train
from monk.pip_functionality_tests.keras.test_expert_eval_infer import test_expert_eval_infer
from monk.pip_functionality_tests.keras.test_switch_default import test_switch_default
from monk.pip_functionality_tests.keras.test_switch_expert import test_switch_expert
from monk.pip_functionality_tests.keras.test_compare import test_compare
from monk.pip_functionality_tests.keras.test_analyse import test_analyse
def run_functionality_tests():
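    # stdout is redirected to test_logs.txt while each test module runs and restored
    # between modules, so only the progress and summary lines reach the console.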
origstdout = sys.stdout
print("Running Tests...");
sys.stdout = open("test_logs.txt", 'w');
system_dict = {};
system_dict["total_tests"] = 0;
system_dict["successful_tests"] = 0;
system_dict["failed_tests_lists"] = [];
system_dict["failed_tests_exceptions"] = [];
system_dict["skipped_tests_lists"] = [];
start = time.time()
print("Running 1/11");
system_dict = test_default_train(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 2/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_default_eval_infer(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 3/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_update_copy_from(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 4/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_update_normal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 5/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_update_eval_infer(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 6/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_expert_train(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("Running 7/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_expert_eval_infer(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 8/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_switch_default(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 9/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_switch_expert(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 10/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_compare(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running 11/11");
sys.stdout = open("test_logs.txt", 'a');
system_dict = test_analyse(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
sys.stdout = open("test_logs.txt", 'a');
end = time.time();
print("Total Tests - {}".format(system_dict["total_tests"]));
print("Time Taken - {} sec".format(end-start));
print("Num Successful Tests - {}".format(system_dict["successful_tests"]));
print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])));
print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])));
print("");
for i in range(len(system_dict["failed_tests_lists"])):
print("{}. Failed Test:".format(i+1));
print("Name - {}".format(system_dict["failed_tests_lists"][i]));
print("Error - {}".format(system_dict["failed_tests_exceptions"][i]));
print("");
print("Skipped Tests List - {}".format(system_dict["skipped_tests_lists"]));
print("");
sys.stdout = origstdout;
print("Total Tests - {}".format(system_dict["total_tests"]));
print("Time Taken - {} sec".format(end-start));
print("Num Successful Tests - {}".format(system_dict["successful_tests"]));
print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])));
print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])));
print("See test_logs.txt for errors");
print("");
os.system("rm -r workspace");
def run_unit_tests():
from monk.pip_unit_tests.keras.test_optimizer_sgd import test_optimizer_sgd
from monk.pip_unit_tests.keras.test_optimizer_nesterov_sgd import test_optimizer_nesterov_sgd
from monk.pip_unit_tests.keras.test_optimizer_rmsprop import test_optimizer_rmsprop
from monk.pip_unit_tests.keras.test_optimizer_adam import test_optimizer_adam
from monk.pip_unit_tests.keras.test_optimizer_nadam import test_optimizer_nadam
from monk.pip_unit_tests.keras.test_optimizer_adamax import test_optimizer_adamax
from monk.pip_unit_tests.keras.test_optimizer_adadelta import test_optimizer_adadelta
from monk.pip_unit_tests.keras.test_optimizer_adagrad import test_optimizer_adagrad
from monk.pip_unit_tests.keras.test_loss_l1 import test_loss_l1
from monk.pip_unit_tests.keras.test_loss_l2 import test_loss_l2
from monk.pip_unit_tests.keras.test_loss_crossentropy import test_loss_crossentropy
from monk.pip_unit_tests.keras.test_loss_binary_crossentropy import test_loss_binary_crossentropy
from monk.pip_unit_tests.keras.test_loss_kldiv import test_loss_kldiv
from monk.pip_unit_tests.keras.test_loss_hinge import test_loss_hinge
from monk.pip_unit_tests.keras.test_loss_squared_hinge import test_loss_squared_hinge
origstdout = sys.stdout
print("Running Tests...");
sys.stdout = open("test_logs.txt", 'w');
system_dict = {};
system_dict["total_tests"] = 0;
system_dict["successful_tests"] = 0;
system_dict["failed_tests_lists"] = [];
system_dict["failed_tests_exceptions"] = [];
system_dict["skipped_tests_lists"] = [];
start = time.time()
exp_num = 1;
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_sgd(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_nesterov_sgd(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_rmsprop(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adam(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_nadam(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adamax(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adadelta(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adagrad(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_l1(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_l2(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_crossentropy(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_binary_crossentropy(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_kldiv(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_hinge(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_squared_hinge(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
from monk.pip_unit_tests.keras.test_layer_convolution1d import test_layer_convolution1d
from monk.pip_unit_tests.keras.test_layer_convolution2d import test_layer_convolution2d
from monk.pip_unit_tests.keras.test_layer_convolution3d import test_layer_convolution3d
from monk.pip_unit_tests.keras.test_layer_transposed_convolution2d import test_layer_transposed_convolution2d
from monk.pip_unit_tests.keras.test_layer_transposed_convolution3d import test_layer_transposed_convolution3d
from monk.pip_unit_tests.keras.test_layer_max_pooling1d import test_layer_max_pooling1d
from monk.pip_unit_tests.keras.test_layer_max_pooling2d import test_layer_max_pooling2d
from monk.pip_unit_tests.keras.test_layer_max_pooling3d import test_layer_max_pooling3d
from monk.pip_unit_tests.keras.test_layer_average_pooling1d import test_layer_average_pooling1d
from monk.pip_unit_tests.keras.test_layer_average_pooling2d import test_layer_average_pooling2d
from monk.pip_unit_tests.keras.test_layer_average_pooling3d import test_layer_average_pooling3d
from monk.pip_unit_tests.keras.test_layer_global_max_pooling1d import test_layer_global_max_pooling1d
from monk.pip_unit_tests.keras.test_layer_global_max_pooling2d import test_layer_global_max_pooling2d
from monk.pip_unit_tests.keras.test_layer_global_max_pooling3d import test_layer_global_max_pooling3d
from monk.pip_unit_tests.keras.test_layer_global_average_pooling1d import test_layer_global_average_pooling1d
from monk.pip_unit_tests.keras.test_layer_global_average_pooling2d import test_layer_global_average_pooling2d
from monk.pip_unit_tests.keras.test_layer_global_average_pooling3d import test_layer_global_average_pooling3d
from monk.pip_unit_tests.keras.test_layer_batch_normalization import test_layer_batch_normalization
from monk.pip_unit_tests.keras.test_layer_identity import test_layer_identity
from monk.pip_unit_tests.keras.test_layer_fully_connected import test_layer_fully_connected
from monk.pip_unit_tests.keras.test_layer_dropout import test_layer_dropout
from monk.pip_unit_tests.keras.test_layer_flatten import test_layer_flatten
from monk.pip_unit_tests.keras.test_layer_concatenate import test_layer_concatenate
from monk.pip_unit_tests.keras.test_layer_add import test_layer_add
from monk.pip_unit_tests.keras.test_activation_relu import test_activation_relu
from monk.pip_unit_tests.keras.test_activation_softmax import test_activation_softmax
from monk.pip_unit_tests.keras.test_activation_thresholded_relu import test_activation_thresholded_relu
from monk.pip_unit_tests.keras.test_activation_elu import test_activation_elu
from monk.pip_unit_tests.keras.test_activation_prelu import test_activation_prelu
from monk.pip_unit_tests.keras.test_activation_leaky_relu import test_activation_leaky_relu
from monk.pip_unit_tests.keras.test_activation_selu import test_activation_selu
from monk.pip_unit_tests.keras.test_activation_softplus import test_activation_softplus
from monk.pip_unit_tests.keras.test_activation_softsign import test_activation_softsign
from monk.pip_unit_tests.keras.test_activation_tanh import test_activation_tanh
from monk.pip_unit_tests.keras.test_activation_sigmoid import test_activation_sigmoid
from monk.pip_unit_tests.keras.test_activation_hard_sigmoid import test_activation_hard_sigmoid
from monk.pip_unit_tests.keras.test_initializer_xavier_normal import test_initializer_xavier_normal
from monk.pip_unit_tests.keras.test_initializer_xavier_uniform import test_initializer_xavier_uniform
from monk.pip_unit_tests.keras.test_initializer_random_normal import test_initializer_random_normal
from monk.pip_unit_tests.keras.test_initializer_random_uniform import test_initializer_random_uniform
from monk.pip_unit_tests.keras.test_initializer_lecun_normal import test_initializer_lecun_normal
from monk.pip_unit_tests.keras.test_initializer_lecun_uniform import test_initializer_lecun_uniform
from monk.pip_unit_tests.keras.test_initializer_he_normal import test_initializer_he_normal
from monk.pip_unit_tests.keras.test_initializer_he_uniform import test_initializer_he_uniform
from monk.pip_unit_tests.keras.test_initializer_truncated_normal import test_initializer_truncated_normal
from monk.pip_unit_tests.keras.test_initializer_orthogonal import test_initializer_orthogonal
from monk.pip_unit_tests.keras.test_initializer_variance_scaling import test_initializer_variance_scaling
from monk.pip_unit_tests.keras.test_block_resnet_v1 import test_block_resnet_v1
from monk.pip_unit_tests.keras.test_block_resnet_v2 import test_block_resnet_v2
from monk.pip_unit_tests.keras.test_block_resnet_v1_bottleneck import test_block_resnet_v1_bottleneck
from monk.pip_unit_tests.keras.test_block_resnet_v2_bottleneck import test_block_resnet_v2_bottleneck
from monk.pip_unit_tests.keras.test_block_resnext import test_block_resnext
from monk.pip_unit_tests.keras.test_block_mobilenet_v2_linear_bottleneck import test_block_mobilenet_v2_linear_bottleneck
from monk.pip_unit_tests.keras.test_block_mobilenet_v2_inverted_linear_bottleneck import test_block_mobilenet_v2_inverted_linear_bottleneck
from monk.pip_unit_tests.keras.test_block_squeezenet_fire import test_block_squeezenet_fire
from monk.pip_unit_tests.keras.test_block_densenet import test_block_densenet
from monk.pip_unit_tests.keras.test_block_conv_bn_relu import test_block_conv_bn_relu
from monk.pip_unit_tests.keras.test_block_inception_a import test_block_inception_a
from monk.pip_unit_tests.keras.test_block_inception_b import test_block_inception_b
from monk.pip_unit_tests.keras.test_block_inception_c import test_block_inception_c
from monk.pip_unit_tests.keras.test_block_inception_d import test_block_inception_d
from monk.pip_unit_tests.keras.test_block_inception_e import test_block_inception_e
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_convolution1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_convolution2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_convolution3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_transposed_convolution2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_transposed_convolution3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_max_pooling1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_max_pooling2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_max_pooling3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_average_pooling1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_average_pooling2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_average_pooling3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_max_pooling1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_max_pooling2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_max_pooling3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_average_pooling1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_average_pooling2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_average_pooling3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_batch_normalization(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_identity(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_fully_connected(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_dropout(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_flatten(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_relu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_softmax(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_thresholded_relu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_elu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_prelu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_leaky_relu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_selu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_softplus(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_softsign(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_tanh(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_sigmoid(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_activation_hard_sigmoid(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_concatenate(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_add(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_xavier_normal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_xavier_uniform(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_random_normal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_random_uniform(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_lecun_normal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_lecun_uniform(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_he_normal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_he_uniform(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_truncated_normal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_orthogonal(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_initializer_variance_scaling(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_resnet_v1(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_resnet_v2(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_resnet_v1_bottleneck(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_resnet_v2_bottleneck(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_resnext(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_mobilenet_v2_linear_bottleneck(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_mobilenet_v2_inverted_linear_bottleneck(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_squeezenet_fire(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_densenet(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_conv_bn_relu(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_inception_a(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_inception_b(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_inception_c(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_inception_d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_block_inception_e(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
sys.stdout = open("test_logs.txt", 'a');
end = time.time();
print("Total Tests - {}".format(system_dict["total_tests"]));
print("Time Taken - {} sec".format(end-start));
print("Num Successful Tests - {}".format(system_dict["successful_tests"]));
print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])));
print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])));
print("");
for i in range(len(system_dict["failed_tests_lists"])):
print("{}. Failed Test:".format(i+1));
print("Name - {}".format(system_dict["failed_tests_lists"][i]));
print("Error - {}".format(system_dict["failed_tests_exceptions"][i]));
print("");
print("Skipped Tests List - {}".format(system_dict["skipped_tests_lists"]));
print("");
sys.stdout = origstdout;
print("Total Tests - {}".format(system_dict["total_tests"]));
print("Time Taken - {} sec".format(end-start));
print("Num Successful Tests - {}".format(system_dict["successful_tests"]));
print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])));
print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])));
print("See test_logs.txt for errors");
print("");
os.system("rm -r workspace"); | 15,657 |
648 | {"resourceType":"ValueSet","id":"v2-0506","meta":{"profile":["http://hl7.org/fhir/StructureDefinition/shareablevalueset"]},"text":{"status":"additional","div":"<div xmlns=\"http://www.w3.org/1999/xhtml\">\n <p>Service Request Relationship</p>\n\n <table class=\"grid\">\n <tr>\n <td>\n <b>Code</b>\n </td>\n <td>\n <b>Description</b>\n </td>\n <td>\n <b>Comment</b>\n </td>\n <td>\n <b>Version</b>\n </td>\n </tr>\n <tr>\n <td>C\n <a name=\"C\"> </a>\n </td>\n <td>Compound</td>\n <td>A compound is an extempo order which may be made up of multiple drugs. For example, many hospitals have a standard item called "Magic Mouthwash". The item is ordered that way by the physician. The extempo items will contain multiple products, such as Ma</td>\n <td>added v2.5</td>\n </tr>\n <tr>\n <td>E\n <a name=\"E\"> </a>\n </td>\n <td>Exclusive</td>\n <td>An exclusive order is an order where only one of the multiple items should be administered at any one dosage time. The nurse may chose between the alternatives, but should only give ONE of them. An example would be: Phenergan 25 mg PO, IM or R q6h prn (</td>\n <td>added v2.5</td>\n </tr>\n <tr>\n <td>N\n <a name=\"N\"> </a>\n </td>\n <td>Nurse prerogative</td>\n <td>Where a set of two or more orders exist and the Nurse, or other caregiver, has the prerogative to choose which order will be administered at a particular point in time. For example,<p> Milk of Magnesia PO 30 ml qhs (at bedtime)<p> Dulcolax Supp R @ hs pr</td>\n <td>added v2.5</td>\n </tr>\n <tr>\n <td>S\n <a name=\"S\"> </a>\n </td>\n <td>Simultaneous</td>\n <td>A simultaneous order is 2 or more drugs which are ordered to be given at the same time. A common example of this would be Demerol and Phenergan (Phenergan is given with the Demerol to control the nausea that Demerol can cause). The order could be: Demer</td>\n <td>added v2.5</td>\n </tr>\n <tr>\n <td>T\n <a name=\"T\"> </a>\n </td>\n <td>Tapering</td>\n <td>A tapering order is one in which the same drug is used, but it has a declining dosage over a number of days.<p>For example, Decadron 0.5 mg is often ordered this way. The order would look like this:<p> Decadron 0.5 mg qid (four times a day) for 2 days, t</td>\n <td>added v2.5</td>\n </tr>\n </table>\n\n </div>"},"extension":[{"url":"http://hl7.org/fhir/StructureDefinition/structuredefinition-ballot-status","valueString":"External"},{"url":"http://hl7.org/fhir/StructureDefinition/structuredefinition-fmm","valueInteger":0}],"url":"http://hl7.org/fhir/ValueSet/v2-0506","version":"2.8.2","name":"v2 Service Request Relationship","status":"active","experimental":false,"publisher":"HL7, Inc","contact":[{"telecom":[{"system":"url","value":"http://hl7.org"}]}],"description":"FHIR Value set/code system definition for HL7 v2 table 0506 ( Service Request Relationship)","immutable":true,"compose":{"include":[{"system":"http://hl7.org/fhir/v2/0506"}]}} | 1,901 |
5,169 | <filename>Specs/1/a/e/NetworkResponseSpoofer/10.2.0/NetworkResponseSpoofer.podspec.json<gh_stars>1000+
{
"name": "NetworkResponseSpoofer",
"version": "10.2.0",
"swift_versions": "5.0.1",
"summary": "Network response record and replay library for iOS, watchOS, tvOS and macOS.",
"homepage": "https://github.com/HotwireDotCom/NetworkResponseSpoofer.git",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"description": "NetworkResponseSpoofer is a network response record and replay library for iOS, watchOS, tvOS and macOS.\nIt’s built on top of the Foundation URL Loading System to make recording and replaying network requests really simple.",
"source": {
"git": "https://github.com/HotwireDotCom/NetworkResponseSpoofer.git",
"tag": "10.2.0"
},
"requires_arc": true,
"platforms": {
"ios": "10.0"
},
"dependencies": {
"RealmSwift": [
]
},
"default_subspecs": "Core",
"swift_version": "5.0.1",
"subspecs": [
{
"name": "Core",
"source_files": "Source/Core/**/*.swift",
"frameworks": "Foundation",
"platforms": {
"ios": "10.0"
}
},
{
"name": "SpooferUI",
"source_files": "Source/iOS_UI/**/*.swift",
"resources": [
"Source/iOS_UI/View/**/*.storyboard",
"Source/iOS_UI/View/**/*.xcassets"
],
"dependencies": {
"NetworkResponseSpoofer/Core": [
]
},
"frameworks": [
"Foundation",
"UIKit"
],
"platforms": {
"ios": "10.0"
}
}
]
}
| 687 |
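A consumer would pull this spec in through a Podfile. The sketch below is hypothetical (the target name is invented); it shows the default Core subspec plus the optional SpooferUI subspec declared above.

# Hypothetical Podfile entry; names other than the pod itself are assumptions.
target 'MyApp' do
  use_frameworks!
  pod 'NetworkResponseSpoofer', '~> 10.2'             # Core subspec is the default
  pod 'NetworkResponseSpoofer/SpooferUI', '~> 10.2'   # optional in-app inspection UI
end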
1,858 | <reponame>aarogyaswamy/100daysofpython
import random
MAX_GUESSES = 5
START, END = 1, 20
def get_random_number():
"""Get a random number between START and END, returns int"""
return random.randint(START, END)
class Game:
"""Number guess class, make it callable to initiate game"""
def __init__(self):
"""Init _guesses, _answer, _win to set(), get_random_number(), False"""
self._guesses = set()
self._answer = get_random_number()
self._win = False
def guess(self):
"""Ask user for input, convert to int, raise ValueError outputting
the following errors when applicable:
'Please enter a number'
'Should be a number'
'Number not in range'
'Already guessed'
If all good, return the int"""
guess = input(f'Guess a number between {START} and {END}: ')
if not guess:
raise ValueError('Please enter a number')
try:
guess = int(guess)
except ValueError:
raise ValueError('Should be a number')
if guess not in range(START, END+1):
raise ValueError('Number not in range')
if guess in self._guesses:
raise ValueError('Already guessed')
self._guesses.add(guess)
return guess
def _validate_guess(self, guess):
"""Verify if guess is correct, print the following when applicable:
{guess} is correct!
{guess} is too high
{guess} is too low
Return a boolean"""
if guess == self._answer:
print(f'{guess} is correct!')
return True
else:
high_or_low = 'low' if guess < self._answer else 'high'
print(f'{guess} is too {high_or_low}')
return False
@property
def num_guesses(self):
return len(self._guesses)
def __call__(self):
"""Entry point / game loop, use a loop break/continue,
see the tests for the exact win/lose messaging"""
while len(self._guesses) < MAX_GUESSES:
try:
guess = self.guess()
except ValueError as ve:
print(ve)
continue
win = self._validate_guess(guess)
if win:
guess_str = self.num_guesses == 1 and "guess" or "guesses"
print(f'It took you {self.num_guesses} {guess_str}')
self._win = True
break
else:
# else on while/for = anti-pattern? do find it useful in this case!
print(f'Guessed {MAX_GUESSES} times, answer was {self._answer}')
if __name__ == '__main__':
game = Game()
game()
| 1,258 |
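The docstrings above pin down the exact messages _validate_guess prints. A small, non-interactive check of that behaviour could look like the sketch below; the pinned answer is an assumption made so the output is deterministic, and the snippet is not part of the module.

# Hypothetical smoke test for the validation logic.
game = Game()
game._answer = 10                          # pin the answer for deterministic output
assert game._validate_guess(10) is True    # prints "10 is correct!"
assert game._validate_guess(3) is False    # prints "3 is too low"
assert game._validate_guess(15) is False   # prints "15 is too high"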
488 |
#include <iostream>
#include <sstream>
#include <string>
#include <map>
#include "abstract_handle.h"
#include "loopAdapter.h"
#include <assert.h>
using namespace std;
using namespace AbstractHandle;
// an internal map to avoid duplicated nodes
map<string, abstract_node*> file_node_map;
map<MyLoop*, abstract_node*> loop_node_map;
/* Only handle for loops
* */
string loopNode::getConstructTypeName() const
{
return string("ForStatement");
}
/* source position is the required info. for MyLoop */
bool loopNode::hasSourcePos() const
{
return true;
}
string loopNode::getFileName() const
{
return getNode()->sourceFileName;
}
/*
* */
abstract_node* loopNode::getParent() const
{
if (getNode()->parent!=NULL)
{
abstract_node* result = loop_node_map[getNode()->parent];
if (result == NULL)
return new loopNode(getNode()->parent);
else
return result;
}
else
return getFileNode();
}
/*
* Create the fileNode on the fly when needed
*/
abstract_node* loopNode::getFileNode() const
{
abstract_node* filenode = file_node_map[getNode()->sourceFileName];
if (filenode==NULL)
{
filenode = new fileNode (getNode()->sourceFileName);
file_node_map[getNode()->sourceFileName]=filenode;
}
return filenode;
}
/* MyLoop only keeps line number of loops */
source_position loopNode::getStartPos() const
{
source_position result;
result.line=getNode()->line_number;
result.column=0;
return result;
}
// return the numbering within a scope (file)
// MyLoop can be attached to a file node or another loop node.
// The parameter must be either of a fileNode or a loopNode
size_t loopNode::getNumbering(const abstract_node * another_node) const
{
int number=1;
const fileNode* file_node = dynamic_cast<const fileNode*> (another_node);
const loopNode* p_loop_node = dynamic_cast<const loopNode*> (another_node);
vector<MyLoop*> loops;
if (file_node)
loops = file_node->getMLoops();
else if (p_loop_node)
loops = p_loop_node->getChildren();
else
ROSE_ABORT();
for (vector<MyLoop*>::iterator i=loops.begin();
i!=loops.end(); i++)
{
if ((*i)==getNode())
break;
else
number++;
}
return number;
}
AbstractHandle::abstract_node* loopNode::findNode(std::string construct_type_str, AbstractHandle::specifier mspecifier) const
{
abstract_node* result=NULL;
//Get all matched nodes according to node type
vector<MyLoop*> loops = getChildren();
for (vector<MyLoop *>::iterator i=loops.begin();i!=loops.end();i++)
{
abstract_node* cnode = new loopNode(*i);
if (mspecifier.get_type()==e_position)
{
if (isEqual(mspecifier.get_value().positions, cnode->getSourcePos()))
{
result = cnode;
break;
}
}
else if (mspecifier.get_type()==e_numbering)
{
if (mspecifier.get_value().int_v == cnode->getNumbering(this))
{
result = cnode;
break;
}
}
else
{
cerr<<"error: unhandled specifier type in loopNode::findNode()"<<endl;
ROSE_ABORT();
}
}//end for
return result;
}
std::string loopNode::toString() const
{
std::string result;
//We ignore this for simplicity
//result= getNode()->loop_code;
return result;
}
// A simplest implementation here, for now
bool loopNode::operator==(const abstract_node & x) const
{
MyLoop* other_node = (dynamic_cast<const loopNode&> (x)).getNode();
return (mNode ==other_node);
}
//-----------------fileNode -----------------------------
std::string fileNode::getConstructTypeName() const
{
return string("SourceFile");
}
bool fileNode::hasName() const
{
return true;
}
std::string fileNode::getName() const
{
return mfileName;
}
AbstractHandle::abstract_node* fileNode::findNode(std::string construct_type_str, AbstractHandle::specifier mspecifier) const
{
abstract_node* result=NULL;
//Get all matched nodes according to node type
vector<MyLoop*> loops = mLoops;
for (vector<MyLoop *>::iterator i=loops.begin();i!=loops.end();i++)
{
abstract_node* cnode = new loopNode(*i);
if (mspecifier.get_type()==e_position)
{
if (isEqual(mspecifier.get_value().positions, cnode->getSourcePos()))
{
result = cnode;
break;
}
} else if (mspecifier.get_type()==e_numbering)
{
if (mspecifier.get_value().int_v == cnode->getNumbering(this))
{
result = cnode;
break;
}
}
else
{
cerr<<"error: unhandled specifier type in loopNode::findNode()"<<endl;
ROSE_ABORT();
}
}//end for
return result;
}
std::string fileNode::getFileName() const
{
return mfileName;
}
std::string fileNode::toString() const
{
return mfileName;
}
bool fileNode::operator == (const abstract_node & x) const
{
return (toString()==x.toString());
}
| 1,837 |
335 | package examples;
import com.github.kokorin.jaffree.ffprobe.FFprobe;
import com.github.kokorin.jaffree.ffprobe.FFprobeResult;
import com.github.kokorin.jaffree.ffprobe.Stream;
public class ShowStreamsExample {
public static void main(String[] args) {
if (args.length != 1) {
System.err.println("Exactly 1 argument expected: path to media file");
System.exit(1);
}
String pathToVideo = args[0];
FFprobeResult result = FFprobe.atPath()
.setShowStreams(true)
.setInput(pathToVideo)
.execute();
for (Stream stream : result.getStreams()) {
System.out.println("Stream #" + stream.getIndex()
+ " type: " + stream.getCodecType()
+ " duration: " + stream.getDuration() + " seconds");
}
}
}
| 401 |
371 | <filename>tests/data/test_load_statsbomb.py
import os
import socceraction.data.statsbomb as sb
from socceraction.data.statsbomb import (
StatsBombCompetitionSchema,
StatsBombEventSchema,
StatsBombGameSchema,
StatsBombPlayerSchema,
StatsBombTeamSchema,
)
class TestStatsBombLoader:
def setup_method(self) -> None:
data_dir = os.path.join(
os.path.dirname(__file__), os.pardir, 'datasets', 'statsbomb', 'raw'
)
self.SBL = sb.StatsBombLoader(root=data_dir, getter='local')
def test_default_remote(self) -> None:
SBL = sb.StatsBombLoader()
assert SBL.root == 'https://raw.githubusercontent.com/statsbomb/open-data/master/data/'
def test_competitions(self) -> None:
df_competitions = self.SBL.competitions()
assert len(df_competitions) > 0
StatsBombCompetitionSchema.validate(df_competitions)
def test_games(self) -> None:
df_games = self.SBL.games(43, 3) # World Cup, 2018
assert len(df_games) == 64
StatsBombGameSchema.validate(df_games)
def test_teams(self) -> None:
df_teams = self.SBL.teams(7584)
assert len(df_teams) == 2
StatsBombTeamSchema.validate(df_teams)
def test_players(self) -> None:
df_players = self.SBL.players(7584)
assert len(df_players) == 26
StatsBombPlayerSchema.validate(df_players)
def test_events(self) -> None:
df_events = self.SBL.events(7584)
assert len(df_events) > 0
StatsBombEventSchema.validate(df_events)
def test_extract_player_games(self) -> None:
df_events = self.SBL.events(7584)
df_player_games = sb.extract_player_games(df_events)
assert len(df_player_games) == 26
assert len(df_player_games.player_name.unique()) == 26
assert set(df_player_games.team_name) == {'Belgium', 'Japan'}
assert df_player_games.minutes_played.sum() == 22 * 95
| 847 |
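Outside of the test harness, the same loader calls can be strung together to pull one match. The sketch below reuses only the calls exercised above; the local data path is an assumption.

# Minimal usage sketch based on the calls exercised in the tests above.
import socceraction.data.statsbomb as sb

SBL = sb.StatsBombLoader(root="tests/datasets/statsbomb/raw", getter="local")
competitions = SBL.competitions()
games = SBL.games(43, 3)                  # World Cup 2018, as in test_games
events = SBL.events(7584)                 # Belgium v Japan, as in test_events
player_games = sb.extract_player_games(events)
print(len(games), len(player_games))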
918 | <reponame>returnnullptr/uplink
# Local imports
from uplink.converters import interfaces, register_default_converter_factory
class StringConverter(interfaces.Converter):
def convert(self, value):
return str(value)
@register_default_converter_factory
class StandardConverter(interfaces.Factory):
"""
The default converter, this class seeks to provide sane alternatives
for (de)serialization when all else fails -- e.g., no other
converters could handle a particular type.
"""
def create_request_body_converter(self, cls, *args, **kwargs):
if isinstance(cls, interfaces.Converter):
return cls
def create_response_body_converter(self, cls, *args, **kwargs):
if isinstance(cls, interfaces.Converter):
return cls
def create_string_converter(self, cls, *args, **kwargs):
if isinstance(cls, interfaces.Converter):
return cls
return StringConverter()
| 361 |
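Because create_request_body_converter and friends simply hand back any value that is already an interfaces.Converter, a caller can plug in its own converter. The sketch below is a hypothetical example built only on the interfaces imported in this module, following the StringConverter pattern above.

# Hypothetical custom converter; not part of the uplink package.
import json

from uplink.converters import interfaces


class JsonStringConverter(interfaces.Converter):
    def convert(self, value):
        # Serialize the value as JSON rather than falling back to str().
        return json.dumps(value)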
348 | {"nom":"Saint-Junien-les-Combes","dpt":"Haute-Vienne","inscrits":147,"abs":17,"votants":130,"blancs":15,"nuls":5,"exp":110,"res":[{"panneau":"1","voix":77},{"panneau":"2","voix":33}]} | 76 |
783 | void stage_cu()
{
// Writing instruction memory
Xil_Out32(0x50001004, 0);
Xil_Out32(0x50001000, 0xBF000707);
Xil_Out32(0x50001004, 4);
Xil_Out32(0x50001000, 0xBF810000);
// Writing SGPRs for wavefront 1
Xil_Out32(0x50002004, 0);
Xil_Out32(0x50002008, 0x1F);
Xil_Out32(0x5000200C, 0x21);
Xil_Out32(0x50002010, 0xF);
Xil_Out32(0x50002014, 0x2C);
Xil_Out32(0x50002000, 1);
Xil_Out32(0x50002004, 16);
Xil_Out32(0x50002008, 0x16);
Xil_Out32(0x5000200C, 0x29);
Xil_Out32(0x50002010, 0x15);
Xil_Out32(0x50002014, 0x1);
Xil_Out32(0x50002000, 1);
Xil_Out32(0x50002004, 32);
Xil_Out32(0x50002008, 0x14);
Xil_Out32(0x5000200C, 0x2D);
Xil_Out32(0x50002010, 0x0);
Xil_Out32(0x50002014, 0x0);
Xil_Out32(0x50002000, 1);
}
| 423 |
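Each wavefront above is programmed with the same five data writes followed by a commit write to offset 0x0. Below is a hedged sketch of a helper that factors out that sequence; the base/offset interpretation and the u32 type (from Xilinx's xil_io.h) are assumptions inferred from the write pattern, not taken from the original file.

#include "xil_io.h"

/* Hypothetical helper mirroring the repeated SGPR-write pattern in stage_cu(). */
static void write_sgpr_group(u32 base, u32 start, u32 r0, u32 r1, u32 r2, u32 r3)
{
    Xil_Out32(base + 0x04, start); /* SGPR start index for this wavefront */
    Xil_Out32(base + 0x08, r0);
    Xil_Out32(base + 0x0C, r1);
    Xil_Out32(base + 0x10, r2);
    Xil_Out32(base + 0x14, r3);
    Xil_Out32(base + 0x00, 1);     /* commit this group */
}

/* e.g. write_sgpr_group(0x50002000, 0, 0x1F, 0x21, 0xF, 0x2C); */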
19,438 | /*
* Copyright (c) 2021, <NAME> <<EMAIL>>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibGUI/Label.h>
#include <LibGUI/SettingsWindow.h>
namespace DisplaySettings {
class FontSettingsWidget final : public GUI::SettingsWindow::Tab {
C_OBJECT(FontSettingsWidget);
public:
virtual ~FontSettingsWidget() override;
virtual void apply_settings() override;
private:
FontSettingsWidget();
RefPtr<GUI::Label> m_default_font_label;
RefPtr<GUI::Label> m_fixed_width_font_label;
};
}
| 190 |
534 | <filename>src/main/resources/assets/mekanism/models/block/bin/advanced.json<gh_stars>100-1000
{
"parent": "mekanism:block/machine",
"textures": {
"sides": "mekanism:block/bin/advanced_side",
"front": "mekanism:block/bin/advanced_front",
"west": "mekanism:block/bin/advanced_side",
"east": "mekanism:block/bin/advanced_side",
"south": "mekanism:block/bin/advanced_side",
"up": "mekanism:block/bin/advanced_top",
"down": "mekanism:block/bin/bottom"
}
} | 206 |
777 | <filename>device/serial/serial_device_enumerator.h
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef DEVICE_SERIAL_SERIAL_DEVICE_ENUMERATOR_H_
#define DEVICE_SERIAL_SERIAL_DEVICE_ENUMERATOR_H_
#include <memory>
#include "device/serial/serial.mojom.h"
namespace device {
// Discovers and enumerates serial devices available to the host.
class SerialDeviceEnumerator {
public:
static std::unique_ptr<SerialDeviceEnumerator> Create();
SerialDeviceEnumerator();
virtual ~SerialDeviceEnumerator();
virtual std::vector<serial::DeviceInfoPtr> GetDevices() = 0;
};
} // namespace device
#endif // DEVICE_SERIAL_SERIAL_DEVICE_ENUMERATOR_H_
| 250 |
3,380 | <reponame>jvishnuvardhan/datasets<filename>tensorflow_datasets/d4rl/d4rl_mujoco_ant/d4rl_mujoco_ant_test.py
# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""D4Rl Ant dataset from Mujoco."""
from tensorflow_datasets.d4rl.d4rl_mujoco_ant import d4rl_mujoco_ant
import tensorflow_datasets.public_api as tfds
class D4rlMujocoAntTest(tfds.testing.DatasetBuilderTestCase):
"""Tests for ant datasets."""
DATASET_CLASS = d4rl_mujoco_ant.D4rlMujocoAnt
SPLITS = {
'train': 2, # Number of fake train example
}
SKIP_TF1_GRAPH_MODE = True
DL_EXTRACT_RESULT = {'file_path': 'ant_medium.hdf5'}
DL_DOWNLOAD_RESULT = {'file_path': 'ant_medium.hdf5'}
BUILDER_CONFIG_NAMES_TO_TEST = ['v0-medium']
class D4rlMujocoAntInfosTest(tfds.testing.DatasetBuilderTestCase):
"""Tests for ant datasets with step metadata."""
DATASET_CLASS = d4rl_mujoco_ant.D4rlMujocoAnt
SPLITS = {
'train': 2, # Number of fake train example
}
SKIP_TF1_GRAPH_MODE = True
DL_EXTRACT_RESULT = {'file_path': 'ant_random-v1.hdf5'}
DL_DOWNLOAD_RESULT = {'file_path': 'ant_random-v1.hdf5'}
BUILDER_CONFIG_NAMES_TO_TEST = ['v1-random']
class D4rlMujocoAntReplayTest(tfds.testing.DatasetBuilderTestCase):
"""Tests for ant datasets with replay.
These datasets have the following special features:
* Contain step metadata.
* Contain two fields of episode metadata.
* Use float64 types (instead of float32)
* Rewards are stored with shape (1,) instead of scalar
"""
DATASET_CLASS = d4rl_mujoco_ant.D4rlMujocoAnt
SPLITS = {
'train': 2, # Number of fake train example
}
SKIP_TF1_GRAPH_MODE = True
DL_EXTRACT_RESULT = {'file_path': 'ant_medium_replay-v1.hdf5'}
DL_DOWNLOAD_RESULT = {'file_path': 'ant_medium_replay-v1.hdf5'}
BUILDER_CONFIG_NAMES_TO_TEST = ['v1-medium-replay']
class D4rlMujocoAntMetadataTest(tfds.testing.DatasetBuilderTestCase):
"""Tests for ant datasets with all the metadata fields."""
DATASET_CLASS = d4rl_mujoco_ant.D4rlMujocoAnt
SPLITS = {
'train': 2, # Number of fake train example
}
SKIP_TF1_GRAPH_MODE = True
DL_EXTRACT_RESULT = {'file_path': 'ant_medium-v2.hdf5'}
DL_DOWNLOAD_RESULT = {'file_path': 'ant_medium-v2.hdf5'}
BUILDER_CONFIG_NAMES_TO_TEST = ['v2-medium']
if __name__ == '__main__':
tfds.testing.test_main()
| 1,122 |
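Adding coverage for another builder config follows the same recipe as the classes above. The sketch below is hypothetical: the expert config name and fake-data file name are assumptions, not taken from the original file.

# Hypothetical extra test case following the pattern of the classes above.
class D4rlMujocoAntExpertTest(tfds.testing.DatasetBuilderTestCase):
  """Tests for the expert ant dataset."""
  DATASET_CLASS = d4rl_mujoco_ant.D4rlMujocoAnt
  SPLITS = {
      'train': 2,  # Number of fake train example
  }
  SKIP_TF1_GRAPH_MODE = True
  DL_EXTRACT_RESULT = {'file_path': 'ant_expert-v1.hdf5'}
  DL_DOWNLOAD_RESULT = {'file_path': 'ant_expert-v1.hdf5'}
  BUILDER_CONFIG_NAMES_TO_TEST = ['v1-expert']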
679 | <gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
package com.sun.star.wizards.report;
//import com.sun.star.ucb.CommandAbortedException;
import com.sun.star.ucb.XSimpleFileAccess;
import com.sun.star.uno.Exception;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import com.sun.star.wizards.common.Desktop;
import com.sun.star.wizards.common.*;
import com.sun.star.wizards.ui.*;
import com.sun.star.awt.VclWindowPeerAttribute;
import com.sun.star.awt.XTextComponent;
//import com.sun.star.container.XHierarchicalNameAccess;
//import com.sun.star.container.XNameAccess;
//import com.sun.star.lang.EventObject;
//import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.wizards.db.RecordParser;
//import com.sun.star.wizards.document.OfficeDocument;
public class ReportFinalizer
{
WizardDialog CurUnoDialog;
XTextComponent xTitleTextBox;
XTextComponent[] xSaveTextBox = new XTextComponent[2];
Object chkTemplate;
String CHANGEREPORTTITLE_FUNCNAME = "changeReportTitle";
String TOGGLESUBTEMPLATECONTROLS_FUNCNAME = "toggleSubTemplateControls";
// String slblHowProceed;
// String slblChooseReportKind;
String TemplatePath;
String StoreName;
boolean bfinalaskbeforeOverwrite;
String DefaultName;
String OldDefaultName;
// ReportTextDocument CurReportDocument;
IReportDocument CurReportDocument;
// Desktop.OfficePathRetriever curofficepath;
// short curtabindex;
// String sMsgReportDocumentNameDuplicate;
public static final int SOCREATEDOCUMENT = 1;
public static final int SOCREATETEMPLATE = 2;
public static final int SOUSETEMPLATE = 3;
private XMultiServiceFactory m_xMSF;
// public Finalizer(ReportTextDocument _CurReportDocument, WizardDialog _CurUnoDialog) {
public ReportFinalizer(XMultiServiceFactory _xMSF, IReportDocument _CurReportDocument, WizardDialog _CurUnoDialog)
{
m_xMSF = _xMSF;
this.CurUnoDialog = _CurUnoDialog;
this.CurReportDocument = _CurReportDocument;
short curtabindex = (short) (ReportWizard.SOSTOREPAGE * 100);
Desktop odesktop = new Desktop();
// curofficepath = odesktop.new OfficePathRetriever(m_xMSF);
String sSaveAsTemplate = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 40);
String sUseTemplate = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 41);
String sEditTemplate = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 42);
String sSaveAsDocument = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 43);
// String sSaveAs = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 44);
String sReportTitle = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 33);
String slblHowProceed = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 78);
String slblChooseReportKind = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 79);
CurUnoDialog.insertControlModel("com.sun.star.awt.UnoControlFixedTextModel", "lblTitle",
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
8, sReportTitle, 95, 27, new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 68
});
xTitleTextBox = CurUnoDialog.insertTextField("txtTitle", CHANGEREPORTTITLE_FUNCNAME, this,
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
12, "HID:WIZARDS_HID_DLGREPORT_4_TITLE", 95, 37, new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 209
});
CurUnoDialog.insertControlModel("com.sun.star.awt.UnoControlFixedTextModel", "lblChooseReportKind",
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
8, slblChooseReportKind, 95, 57, new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 209
});
CurUnoDialog.insertRadioButton("optCreateDocument", TOGGLESUBTEMPLATECONTROLS_FUNCNAME, this,
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
10, "HID:WIZARDS_HID_DLGREPORT_5_OPTSTATDOCUMENT", sSaveAsDocument, 95, 69, new Short((short) 0), new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 138
});
CurUnoDialog.insertRadioButton("optCreateReportTemplate", TOGGLESUBTEMPLATECONTROLS_FUNCNAME, this,
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
8, "HID:WIZARDS_HID_DLGREPORT_5_OPTDYNTEMPLATE", sSaveAsTemplate, 95, 81, new Short((short) 1), new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 209
});
CurUnoDialog.insertControlModel("com.sun.star.awt.UnoControlFixedTextModel", "lblHowProceed",
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
8, slblHowProceed, 105, 93, new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 209
});
CurUnoDialog.insertRadioButton("optEditTemplate", TOGGLESUBTEMPLATECONTROLS_FUNCNAME, this,
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
10, "HID:WIZARDS_HID_DLGREPORT_5_OPTEDITTEMPLATE", sEditTemplate, 111, 105, 6, new Short(curtabindex++), 138
});
CurUnoDialog.insertRadioButton("optUseTemplate", TOGGLESUBTEMPLATECONTROLS_FUNCNAME, this,
new String[]
{
PropertyNames.PROPERTY_HEIGHT, PropertyNames.PROPERTY_HELPURL, PropertyNames.PROPERTY_LABEL, PropertyNames.PROPERTY_POSITION_X, PropertyNames.PROPERTY_POSITION_Y, PropertyNames.PROPERTY_STATE, PropertyNames.PROPERTY_STEP, PropertyNames.PROPERTY_TABINDEX, PropertyNames.PROPERTY_WIDTH
},
new Object[]
{
10, "HID:WIZARDS_HID_DLGREPORT_5_OPTUSETEMPLATE", sUseTemplate, 111, 115, new Short((short) 1), new Integer(ReportWizard.SOSTOREPAGE), new Short(curtabindex++), 138
});
}
/*
* This function is called if one of the radio buttons is pressed
*/
public void toggleSubTemplateControls()
{
// String sStorePath = PropertyNames.EMPTY_STRING;
Short iState = (Short) CurUnoDialog.getControlProperty("optCreateReportTemplate", PropertyNames.PROPERTY_STATE);
boolean bDoTemplateEnable = iState.shortValue() == 1;
CurUnoDialog.setControlProperty("optEditTemplate", PropertyNames.PROPERTY_ENABLED, bDoTemplateEnable);
CurUnoDialog.setControlProperty("optUseTemplate", PropertyNames.PROPERTY_ENABLED, bDoTemplateEnable);
CurUnoDialog.setControlProperty("lblHowProceed", PropertyNames.PROPERTY_ENABLED, bDoTemplateEnable);
String sTitle = xTitleTextBox.getText();
boolean bDoEnable = sTitle.equals(PropertyNames.EMPTY_STRING);
CurUnoDialog.enableFinishButton(!bDoEnable);
}
// private boolean fileexists(XMultiServiceFactory _xMSF, String _spath){
// try {
// XInterface xUcbInterface = (XInterface) _xMSF.createInstance("com.sun.star.ucb.SimpleFileAccess");
// XSimpleFileAccess xSimpleFileAccess = (XSimpleFileAccess) com.sun.star.uno.UnoRuntime.queryInterface(XSimpleFileAccess.class, xUcbInterface);
// return xSimpleFileAccess.exists(_spath);
// } catch (Exception exception) {
// exception.printStackTrace(System.out);
// return false;
// }}
public void initialize(RecordParser _CurDBMetaData)
{
String FirstCommandName = (_CurDBMetaData.getIncludedCommandNames())[0];
DefaultName = Desktop.getUniqueName(_CurDBMetaData.getReportDocuments(), FirstCommandName);
if (!DefaultName.equals(OldDefaultName))
{
OldDefaultName = DefaultName;
}
xTitleTextBox.setText(DefaultName);
}
public String getStoreName()
{
if (CurUnoDialog != null)
{
String LocStoreName = xTitleTextBox.getText();
if (!LocStoreName.equals(PropertyNames.EMPTY_STRING))
{
StoreName = LocStoreName;
}
}
return (StoreName);
}
public String getStorePath()
{
try
{
StoreName = getStoreName();
String StorePath;
XInterface xInterface = (XInterface) m_xMSF.createInstance("com.sun.star.ucb.SimpleFileAccess");
XSimpleFileAccess xSimpleFileAccess = UnoRuntime.queryInterface(XSimpleFileAccess.class, xInterface);
StorePath = FileAccess.getOfficePath(m_xMSF, "Temp", xSimpleFileAccess) + "/" + StoreName;
return StorePath;
}
catch (Exception e)
{
e.printStackTrace(System.out);
return PropertyNames.EMPTY_STRING;
}
}
public void changeReportTitle()
{
final String TitleName = xTitleTextBox.getText();
CurReportDocument.liveupdate_updateReportTitle(TitleName);
CurUnoDialog.enableFinishButton(!PropertyNames.EMPTY_STRING.equals(TitleName));
}
public int getReportOpenMode()
{
int ReportMode = SOCREATEDOCUMENT;
boolean bcreateTemplate = ((Short) CurUnoDialog.getControlProperty("optCreateReportTemplate", PropertyNames.PROPERTY_STATE)).shortValue() == (short) 1;
if (bcreateTemplate)
{
ReportMode = SOCREATETEMPLATE;
}
boolean buseTemplate = ((Short) CurUnoDialog.getControlProperty("optUseTemplate", PropertyNames.PROPERTY_STATE)).shortValue() == (short) 1;
if (buseTemplate)
{
ReportMode = SOUSETEMPLATE;
}
boolean buseDocument = ((Short) CurUnoDialog.getControlProperty("optCreateDocument", PropertyNames.PROPERTY_STATE)).shortValue() == (short) 1;
if (buseDocument)
{
ReportMode = SOCREATEDOCUMENT;
}
return ReportMode;
}
public boolean finish()
{
StoreName = getStoreName();
if (!CurReportDocument.getRecordParser().getReportDocuments().hasByHierarchicalName(StoreName))
{
try
{
CurReportDocument.store(StoreName, getReportOpenMode());
ReportWizard.bCloseDocument = false;
return true;
}
catch(Exception e)
{
CurUnoDialog.showMessageBox("ErrorBox", VclWindowPeerAttribute.OK,e.getLocalizedMessage() );
CurUnoDialog.enableFinishButton(false);
return false;
}
}
String sMsgReportDocumentNameDuplicate = CurUnoDialog.m_oResource.getResText(UIConsts.RID_REPORT + 76);
String sShowMsgReportNameisDuplicate = JavaTools.replaceSubString(sMsgReportDocumentNameDuplicate, StoreName, "%REPORTNAME");
/* int iMsg = */ CurUnoDialog.showMessageBox("ErrorBox", VclWindowPeerAttribute.OK, sShowMsgReportNameisDuplicate);
CurUnoDialog.enableFinishButton(false);
return false;
}
}
| 5,987 |
2,392 | // Copyright (c) 2012 INRIA Sophia-Antipolis (France).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org).
//
// $URL: https://github.com/CGAL/cgal/blob/v5.1/AABB_tree/include/CGAL/AABB_face_graph_triangle_primitive.h $
// $Id: AABB_face_graph_triangle_primitive.h 18f4e44 2020-07-07T11:44:40+02:00 Laurent Rineau
// SPDX-License-Identifier: GPL-3.0-or-later OR LicenseRef-Commercial
//
//
// Author(s) : <NAME>
//
#ifndef CGAL_AABB_FACE_GRAPH_TRIANGLE_PRIMITIVE_H
#define CGAL_AABB_FACE_GRAPH_TRIANGLE_PRIMITIVE_H
#include <CGAL/license/AABB_tree.h>
#include <CGAL/disable_warnings.h>
#include <CGAL/AABB_primitive.h>
#include <CGAL/boost/graph/property_maps.h>
#include <CGAL/Default.h>
#include <boost/mpl/if.hpp>
namespace CGAL {
/*!
* \ingroup PkgAABBTreeRef
* Primitive type for a facet of a polyhedral surface.
 * It wraps a handle to a facet of a polyhedron into a 3D triangle.
* The polyhedron from which the primitive is built should not be deleted
* while the AABB tree holding the primitive is in use.
* The triangle type of the primitive (`Datum`) is `CGAL::Kernel_traits< boost::property_traits< VertexPointPMap >::%value_type >::%Kernel::Triangle_3`.
*
* \cgalModels `AABBPrimitiveWithSharedData`
*
*\tparam FaceGraph is a model of the face graph concept.
*\tparam VertexPointPMap is a property map with `boost::graph_traits<FaceGraph>::%vertex_descriptor`
* as key type and a \cgal Kernel `Point_3` as value type.
* The default is `typename boost::property_map< FaceGraph,vertex_point_t>::%const_type`.
*\tparam OneFaceGraphPerTree is either `CGAL::Tag_true` or `CGAL::Tag_false`.
* In the former case, we guarantee that all the primitives will be from a
* common `FaceGraph` and some data will be factorized so that the size of
* the primitive is reduced. In the latter case, the primitives can be from
* different graphs and extra storage is required in the primitives. The default is `CGAL::Tag_true`.
*\tparam CacheDatum is either `CGAL::Tag_true` or `CGAL::Tag_false`. In the former case, the datum is stored
* in the primitive, while in the latter it is constructed on the fly to reduce the memory footprint.
* The default is `CGAL::Tag_false` (datum is not stored).
*\sa `AABBPrimitive`
*\sa `AABB_primitive<Id,ObjectPropertyMap,PointPropertyMapPolyhedron,ExternalPropertyMaps,CacheDatum>`
*\sa `AABB_halfedge_graph_segment_primitive<HalfedgeGraph,OneHalfedgeGraphPerTree,CacheDatum>`
*/
template < class FaceGraph,
class VertexPointPMap = Default,
class OneFaceGraphPerTree = Tag_true,
class CacheDatum=Tag_false >
class AABB_face_graph_triangle_primitive
#ifndef DOXYGEN_RUNNING
: public AABB_primitive<typename boost::mpl::if_<OneFaceGraphPerTree,
typename boost::graph_traits<FaceGraph>::face_descriptor,
std::pair<typename boost::graph_traits<FaceGraph>::face_descriptor, const FaceGraph*> >::type,
Triangle_from_face_descriptor_map<
FaceGraph,
typename Default::Get<VertexPointPMap,
typename boost::property_map< FaceGraph,
vertex_point_t>::const_type >::type>,
One_point_from_face_descriptor_map<
FaceGraph,
typename Default::Get<VertexPointPMap,
typename boost::property_map< FaceGraph,
vertex_point_t>::const_type >::type>,
OneFaceGraphPerTree,
CacheDatum >
#endif
{
typedef typename Default::Get<VertexPointPMap, typename boost::property_map< FaceGraph, vertex_point_t>::const_type >::type VertexPointPMap_;
typedef typename boost::graph_traits<FaceGraph>::face_descriptor FD;
typedef typename boost::mpl::if_<OneFaceGraphPerTree, FD, std::pair<FD, const FaceGraph*> >::type Id_;
typedef Triangle_from_face_descriptor_map<FaceGraph,VertexPointPMap_> Triangle_property_map;
typedef One_point_from_face_descriptor_map<FaceGraph,VertexPointPMap_> Point_property_map;
typedef AABB_primitive< Id_,
Triangle_property_map,
Point_property_map,
OneFaceGraphPerTree,
CacheDatum > Base;
FD make_id(FD fd, const FaceGraph&, Tag_true)
{
return fd;
}
std::pair<FD, const FaceGraph*> make_id(FD fd, const FaceGraph& fg, Tag_false)
{
return std::make_pair(fd, &fg);
}
public:
#ifdef DOXYGEN_RUNNING
/// \name Types
/// @{
/*!
The point type.
*/
typedef boost::property_traits<VertexPointPMap>::value_type Point;
/*!
Geometric data type.
*/
typedef Kernel_traits<Point>::Kernel::Triangle_3 Datum;
/*!
Id type:
- `boost::graph_traits<FaceGraph>::%face_descriptor` if `OneFaceGraphPerTree` is `CGAL::Tag_true`
- `std::pair<boost::graph_traits<FaceGraph>::%face_descriptor, const FaceGraph*>` if `OneFaceGraphPerTree` is `CGAL::Tag_false`
*/
unspecified_type Id;
/// @}
/*!
If `OneFaceGraphPerTree` is CGAL::Tag_true, constructs a `Shared_data` object from a reference to the polyhedron `graph`.
*/
static unspecified_type construct_shared_data( FaceGraph& graph );
#else
typedef typename Base::Id Id;
#endif
typedef typename boost::graph_traits<FaceGraph>::face_descriptor face_descriptor;
// constructors
/*!
\tparam Iterator an input iterator with `Id` as value type.
constructs a primitive.
If `VertexPointPMap` is the default of the class, an additional constructor
is available with `vppm` set to `get(vertex_point, graph)`.
*/
template <class Iterator>
AABB_face_graph_triangle_primitive(Iterator it, const FaceGraph& graph, VertexPointPMap_ vppm)
: Base( Id_(make_id(*it, graph, OneFaceGraphPerTree())),
Triangle_property_map(const_cast<FaceGraph*>(&graph),vppm),
Point_property_map(const_cast<FaceGraph*>(&graph),vppm) )
{}
/*!
constructs a primitive.
If `VertexPointPMap` is the default of the class, an additional constructor
is available with `vppm` set to `get(vertex_point, graph)`.
*/
AABB_face_graph_triangle_primitive(face_descriptor fd, const FaceGraph& graph, VertexPointPMap_ vppm)
: Base( Id_(make_id(fd, graph, OneFaceGraphPerTree())),
Triangle_property_map(const_cast<FaceGraph*>(&graph),vppm),
Point_property_map(const_cast<FaceGraph*>(&graph),vppm) )
{}
#ifndef DOXYGEN_RUNNING
template <class Iterator>
AABB_face_graph_triangle_primitive(Iterator it, const FaceGraph& graph)
: Base( Id_(make_id(*it, graph, OneFaceGraphPerTree())),
Triangle_property_map(const_cast<FaceGraph*>(&graph)),
Point_property_map(const_cast<FaceGraph*>(&graph)) )
{}
AABB_face_graph_triangle_primitive(face_descriptor fd, const FaceGraph& graph)
: Base( Id_(make_id(fd, graph, OneFaceGraphPerTree())),
Triangle_property_map(const_cast<FaceGraph*>(&graph)),
Point_property_map(const_cast<FaceGraph*>(&graph)) )
{}
#endif
/// \internal
typedef internal::Cstr_shared_data<FaceGraph, Base, Triangle_property_map, Point_property_map, OneFaceGraphPerTree> Cstr_shared_data;
/// \internal
static
typename Cstr_shared_data::Shared_data
construct_shared_data(const FaceGraph& graph)
{
return Cstr_shared_data::construct_shared_data(const_cast<FaceGraph&>(graph));
}
static
typename Cstr_shared_data::Shared_data
construct_shared_data(const FaceGraph& graph, const VertexPointPMap_& vpm)
{
return Cstr_shared_data::construct_shared_data(const_cast<FaceGraph&>(graph), vpm);
}
};
} // end namespace CGAL
#include <CGAL/enable_warnings.h>
#endif // CGAL_AABB_FACE_GRAPH_TRIANGLE_PRIMITIVE_H
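// Example usage (a minimal sketch; assumes CGAL::Surface_mesh, AABB_traits and
// AABB_tree headers are available):
//
// typedef CGAL::Exact_predicates_inexact_constructions_kernel K;
// typedef CGAL::Surface_mesh<K::Point_3> Mesh;
// typedef CGAL::AABB_face_graph_triangle_primitive<Mesh> Primitive;
// typedef CGAL::AABB_traits<K, Primitive> Traits;
// typedef CGAL::AABB_tree<Traits> Tree;
//
// Mesh mesh; // fill the mesh before building the tree
// Tree tree(faces(mesh).first, faces(mesh).second, mesh);
// tree.accelerate_distance_queries();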
| 3,398 |
3,710 | <gh_stars>1000+
#include "igs_resource_thread.h"
#include "igs_resource_multithread.h"
namespace {
#if defined _WIN32 // vc compile_type
unsigned __stdcall
#else
void *
#endif
function_(void *param) {
igs::resource::thread_execute_interface *pp =
static_cast<igs::resource::thread_execute_interface *>(param);
pp->run();
return 0;
};
}
void igs::resource::multithread::add(void *thread_execute_instance) {
this->thre_exec_.push_back(thread_execute_instance);
}
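/* Runs every registered thread_execute_interface: each instance is started on
   its own thread via thread_run() and then joined. A single registered instance
   is simply executed inline on the calling thread (see the check below). */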
void igs::resource::multithread::run(void) {
if (1 == this->thre_exec_.size()) {
/* If only one entry is registered, do not run it on a thread; just execute it directly */
igs::resource::thread_execute_interface *pp =
static_cast<igs::resource::thread_execute_interface *>(
this->thre_exec_.at(0));
pp->run();
return;
}
// pthread_t = unsigned long int(rhel4)
// HANDLE = unsigned long(vc6.0) = void *(vc2005)
#if defined _WIN32 // vc compile_type
std::vector<HANDLE> id;
#else
std::vector<pthread_t> id;
#endif
{
std::vector<void *>::iterator it;
for (it = this->thre_exec_.begin(); it != this->thre_exec_.end(); ++it) {
id.push_back(igs::resource::thread_run(function_, *it));
}
}
{
#if defined _WIN32 // vc compile_type
std::vector<HANDLE>::iterator it;
#else
std::vector<pthread_t>::iterator it;
#endif
for (it = id.begin(); it != id.end(); ++it) {
igs::resource::thread_join(*it);
}
}
id.clear();
}
void igs::resource::multithread::clear(void) { this->thre_exec_.clear(); }
| 631 |
631 | <reponame>QianJianhua1/QPanda-2
#ifndef CHEMIQ_H
#define CHEMIQ_H
#include "Components/ChemiQ/Psi4Wrapper.h"
#include "Components/ChemiQ/ChemiqUtil.h"
#include "Core/QuantumMachine/QVec.h"
#include "Core/Utilities/Tools/OriginCollection.h"
#include "Core/Variational/Optimizer.h"
QPANDA_BEGIN
class QuantumMachine;
/**
* @brief ChemiQ Algorithm class
* @ingroup QAlgChemiQ
*/
class DLLEXPORT ChemiQ
{
public:
/**
* @brief Constructor of ChemiQ
*/
ChemiQ();
~ChemiQ();
/**
* @brief Initialize the quantum chemistry calculation
* @param[in] std::string The dir of the psi4 chemistry calculation package
*/
void initialize(const std::string& dir);
/**
* @brief Finalize the quantum chemistry calculation
*/
void finalize();
/**
* @brief Set the molecular model to calculate
* with atoms separated by newlines ('\n')
* @param[in] std::string molecule model
*/
void setMolecule(const std::string& molecule)
{
m_molecules.clear();
m_molecules.push_back(molecule);
}
/**
* @brief Setup molecular models, separated by semicolons(';')
* and separated by commas(',') between atoms
* @param[in] std::string molecule model
*/
void setMoleculesStr(const std::string &molecule)
{
m_molecules.clear();
auto str = QString(molecule);
auto vec = str.split(";", QString::SkipEmptyParts);
for (auto& i : vec)
{
auto atoms = QString(i).split(",", QString::SkipEmptyParts);
std::string molecule;
for (int i = 0; i < atoms.size(); i++)
{
if (i != 0)
{
molecule += "\n";
}
molecule += atoms[i].data();
}
m_molecules.push_back(molecule);
}
}
/**
* @brief Set the molecular model to calculate
* @param[in] vector_s molecule model
* @see vector_s
*/
void setMolecules(const vector_s& molecules)
{
m_molecules = molecules;
}
/**
* @brief Set the multiplicity of the molecular model
* @param[in] int multiplicity
*/
void setMultiplicity(int multiplicity)
{
m_psi4_wapper.setMultiplicity(multiplicity);
}
/**
* @brief Set the charge of the molecular model
* @param[in] int charge
*/
void setCharge(int charge)
{
m_psi4_wapper.setCharge(charge);
}
/**
* @brief Set the calculation basis
* @param[in] std::string basis
*/
void setBasis(const std::string &basis)
{
m_psi4_wapper.setBasis(basis);
}
/**
* @brief set Eq Tolerance
* @param[in] double the val of Tolerance
*/
void setEqTolerance(const double val)
{
m_psi4_wapper.setEqTolerance(val);
}
/**
* @brief Set the transform type from Fermion operator to Pauli operator
* @param[in] TransFormType transform type
* @see TransFormType
*/
void setTransformType(TransFormType type)
{
m_transform_type = type;
}
/**
* @brief Set the ucc type to construct the Fermion operator
* @param[in] UccType ucc type
* @see UccType
*/
void setUccType(UccType ucc_type)
{
m_ucc_type = ucc_type;
}
/**
* @brief Set the optimizer type
* @param[in] OptimizerType optimizer type
* @see OptimizerType
*/
void setOptimizerType(OptimizerType optimizer_type)
{
m_optimizer_type = optimizer_type;
}
/**
* @brief Set the optimizer iteration number
* @param[in] size_t iteration number
*/
void setOptimizerIterNum(size_t iter_num)
{
m_optimizer_iter_num = iter_num;
}
/**
* @brief Set the optimizer function callback number
* @param[in] size_t function callback number
*/
void setOptimizerFuncCallNum(size_t num)
{
m_optimizer_func_call_num = num;
}
/**
* @brief Set the optimizer xatol. It is the absolute error in xopt between
* iterations that is acceptable for convergence.
* @param[in] double absolute error between iterations
*/
void setOptimizerXatol(double value)
{
m_xatol = value;
}
/**
* @brief Set the optimizer fatol. It is the absolute error in func(xopt)
* between iterations that is acceptable for convergence.
* @param[in] double absolute error between func(xopt)
*/
void setOptimizerFatol(double value)
{
m_fatol = value;
}
/**
* @brief Whether to print the optimization log to the terminal.
* @param[in] bool enable
*/
void setOptimizerDisp(bool enable)
{
m_disp = enable;
}
/**
* @brief Set the learning rate when using a gradient optimizer
* @param[in] double learning rate
*/
void setLearningRate(double learning_rate)
{
m_learning_rate = learning_rate;
}
/**
* @brief Set the evolution time when doing hamiltonian simulation
* @param[in] double evolution time
*/
void setEvolutionTime(double t)
{
m_evolutionTime = t;
}
/**
* @brief Set the hamiltonian simulation slices
* (e^iAt/n*e^iBt/n)^n, n is the slices
* @param[in] double hamiltonian simulation slices
*/
void setHamiltonianSimulationSlices(size_t slices)
{
m_hamiltonian_simulation_slices = slices;
}
/**
* @brief Set the directory to save the calculated data.
* If the directory does not exist, the data will not be saved.
* @param[in] std::string dir
*/
void setSaveDataDir(const std::string dir)
{
m_save_data_dir = dir;
}
/**
* @brief Set the quantum machine type
* @param[in] QMachineType quantum machine type
* @see QMachineType
*/
void setQuantumMachineType(QMachineType type)
{
m_quantum_machine_type = type;
}
/**
* @brief Set whether to use random default optimizer parameters
* @param[in] bool enable
*/
void setRandomPara(bool enable)
{
m_random_para = enable;
}
/**
* @brief Set the default optimizer parameters to the given parameters
* @param[in] vector_d default parameters
* @see vector_d
*/
void setDefaultOptimizedPara(const vector_d &para)
{
m_default_optimized_para = para;
}
/**
* @brief Set to get hamiltonian from file
* @param[in] bool enable
*/
void setToGetHamiltonianFromFile(bool enable)
{
m_hamiltonian_in_file = enable;
}
/**
* @brief Set hamiltonian generation only
* @param[in] bool enable
*/
void setHamiltonianGenerationOnly(bool enable)
{
m_hamiltonian_gen_only = enable;
}
/**
* @brief get qubits num with the above config.
* @return int -1:means failed.
*/
int getQubitsNum();
/**
* @brief Execute the molecule calculation.
* @return bool true:success; false:failed
*/
bool exec();
/**
* @brief get last error.
* @return std::string last error
*/
std::string getLastError() const
{
return m_last_err;
}
/**
* @brief get calculated energies of the molecules.
* @return vector_d energies
*/
vector_d getEnergies() const
{
return m_energies;
}
private:
void initOptimizedPara(size_t size);
size_t getMoleculerElectronNum(const std::string &moleculer) const;
QCircuit prepareInitialState(QVec &qlist, size_t en);
QOptimizationResult optimizeByGradient();
QOptimizationResult optimizeByNoGradient();
/*
state is a quantum state; paulis is a map like '1:X 2:Y 3:Z'.
Performs a parity check on the elements of state at the qubit
positions involved in paulis.
*/
bool ParityCheck(size_t state, const QTerm &paulis) const;
std::shared_ptr<Variational::Optimizer>
genGradientOptimizer(std::vector<var> &para);
QResultPair callVQE(
const vector_d &para,
const QHamiltonian &hamiltonian);
/*
Get expectation of one PauliOpComplex.
*/
double getExpectation(
const QHamiltonian &unitary_cc,
const QHamiltonianItem &component);
bool testTermination(
const vector_d& p1,
const vector_d& p2,
double e1,
double e2) const;
bool saveResult() const;
bool saveMoleculeOptimizedResult(
size_t index,
const std::string &molecule,
const std::string &pauli,
const QOptimizationResult& result) const;
bool writeBaseData() const;
bool writeExecLog(bool exec_flag) const;
bool writeProgress(size_t iter_num) const;
bool updateBaseData();
bool getLastIthMoleculeResult(size_t index);
bool getLastIthMoleculeOptimizedPara(const std::string& filename);
bool getDataFromPsi4(size_t index);
bool saveGradientOptimizerCacheFile(
const vector_d& best_paras,
const vector_d& cur_paras,
const vector_d& last_paras,
double b_value,
double c_value,
double l_value,
size_t cur_iter
) const;
bool restoreGradientOptimizerParaFromCache(
vector_d& best_paras,
vector_d& cur_paras,
vector_d& last_paras,
double &b_value,
double &c_value,
double &l_value,
size_t &cur_iter
);
vector_d getVectorFromString(const std::string& para) const;
private:
QMachineType m_quantum_machine_type{CPU};
Psi4Wrapper m_psi4_wapper;
std::string m_chemiq_dir;
std::string m_last_err;
vector_s m_molecules;
TransFormType m_transform_type{TransFormType::Jordan_Wigner};
UccType m_ucc_type{UccType::UCCS};
OptimizerType m_optimizer_type{OptimizerType::NELDER_MEAD};
size_t m_optimizer_iter_num{1000};
size_t m_optimizer_func_call_num{1000};
double m_xatol{1e-4};
double m_fatol{1e-4};
double m_learning_rate{ 0.2 };
double m_evolutionTime{ 1.0 };
size_t m_hamiltonian_simulation_slices{ 3 };
std::string m_save_data_dir;
vector_d m_energies;
std::vector<Eigen::MatrixXi> m_BK;
std::unique_ptr<QuantumMachine> m_machine;
QVec m_qubit_vec;
size_t m_qn{0};
size_t m_electron_num{0};
size_t m_para_num{0};
PauliOperator m_pauli;
size_t m_func_calls{ 0 };
int m_process_i{ 0 };
OriginCollection m_optimizer_data_db;
bool m_disp{ false };
bool m_random_para{ false };
vector_d m_default_optimized_para;
vector_d m_optimized_para;
bool m_break_restoration{false};
size_t m_last_iters{0};
size_t m_last_fcalls{0};
int m_last_process_i{ -1 };
bool m_hamiltonian_in_file{ false };
bool m_hamiltonian_gen_only{ false };
};
QPANDA_END
#endif // CHEMIQ_H
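// Typical calling sequence (sketch based on the interface above; the psi4 path,
// molecule string and basis are placeholder values):
//
// ChemiQ chemiq;
// chemiq.initialize("/path/to/psi4");
// chemiq.setMolecule("H 0 0 0\nH 0 0 0.74"); // atoms separated by '\n'
// chemiq.setCharge(0);
// chemiq.setMultiplicity(1);
// chemiq.setBasis("sto-3g");
// chemiq.setTransformType(TransFormType::Jordan_Wigner);
// chemiq.setUccType(UccType::UCCS);
// if (chemiq.exec()) {
//     vector_d energies = chemiq.getEnergies();
// }
// chemiq.finalize();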
| 4,766 |
3,459 | /* Mednafen - Multi-system Emulator
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
namespace MDFN_IEN_WSWAN
{
void Comm_Init(const char *wfence_path) MDFN_COLD;
void Comm_Kill(void) MDFN_COLD;
void Comm_Reset(void);
void Comm_StateAction(StateMem *sm, const unsigned load, const bool data_only);
void Comm_Process(void);
uint8 Comm_Read(uint8 A);
void Comm_Write(uint8 A, uint8 V);
}
| 313 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Saint-Jean-de-Touslas","circ":"11ème circonscription","dpt":"Rhône","inscrits":632,"abs":298,"votants":334,"blancs":33,"nuls":5,"exp":296,"res":[{"nuance":"REM","nom":"<NAME>","voix":171},{"nuance":"LR","nom":"<NAME>","voix":125}]} | 115 |
381 | # Generated by pypy/tool/import_cffi.py
import cffi
ffi = cffi.FFI()
ffi.embedding_api("""
int add3(int, int, int, int);
""")
ffi.embedding_init_code(r"""
from _add3_cffi import ffi
import sys
@ffi.def_extern()
def add3(x, y, z, t):
sys.stdout.write("adding %d, %d, %d, %d\n" % (x, y, z, t))
sys.stdout.flush()
return x + y + z + t
""")
ffi.set_source("_add3_cffi", """
""")
fn = ffi.compile(verbose=True)
print('FILENAME: %s' % (fn,))
| 254 |
947 | /*-------------------------------------------------------------------------
*
* key_bitmap.c
* Routines for bitmap scan of orioledb table
*
* Copyright (c) 2021-2022, Oriole DB Inc.
*
* IDENTIFICATION
* contrib/orioledb/src/tableam/key_bitmap.c
*-------------------------------------------------------------------------
*/
#include "postgres.h"
#include "orioledb.h"
#include "btree/io.h"
#include "btree/iterator.h"
#include "btree/page_chunks.h"
#include "tableam/bitmap_scan.h"
#include "tableam/index_scan.h"
#include "tuple/slot.h"
#include "lib/rbtree.h"
static int bm_rbt_comparator(const RBTNode *a, const RBTNode *b, void *arg);
static void bm_rbt_combiner(RBTNode *existing, const RBTNode *newdata, void *arg);
static RBTNode *bm_rbt_allocfunc(void *arg);
static void bm_rbt_freefunc(RBTNode *x, void *arg);
#define HIGH_PART_MASK (0xFFFFFFFFFFFFFC00)
#define LOW_PART_MASK (0x00000000000003FF)
#define BITMAP_SIZE 0x80
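/*
 * Keys are split into a high part (key & HIGH_PART_MASK), which selects the
 * red-black tree node (see bm_rbt_comparator), and a 10-bit low part
 * (key & LOW_PART_MASK), which indexes a bit in the node's 128-byte (1024-bit)
 * bitmap once the node has been converted to bitmap form by the combiner.
 * Example: key 0x1234 maps to the node for 0x1000 with offset 0x234 = 564,
 * i.e. bit 4 of bitmap byte 70 (564 >> 3 == 70, 564 & 7 == 4).
 */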
typedef struct
{
RBTNode rbtnode;
uint64 key;
uint8 *bitmap;
} OKeyBitmapRBTNode;
RBTree *
o_keybitmap_create(void)
{
return rbt_create(sizeof(OKeyBitmapRBTNode),
bm_rbt_comparator,
bm_rbt_combiner,
bm_rbt_allocfunc,
bm_rbt_freefunc,
NULL);
}
void
o_keybitmap_insert(RBTree *rbtree, uint64 value)
{
OKeyBitmapRBTNode node;
bool is_new;
node.key = value;
node.bitmap = NULL;
(void) rbt_insert(rbtree, &node.rbtnode, &is_new);
}
bool
o_keybitmap_test(RBTree *rbtree, uint64 value)
{
OKeyBitmapRBTNode node;
OKeyBitmapRBTNode *found;
node.key = value;
node.bitmap = NULL;
found = (OKeyBitmapRBTNode *) rbt_find(rbtree, &node.rbtnode);
if (!found)
return false;
if (found->bitmap)
{
int offset = (value & LOW_PART_MASK);
if (found->bitmap[offset >> 3] & (1 << (offset & 7)))
return true;
else
return false;
}
else
{
return (found->key == value);
}
}
bool
o_keybitmap_range_is_valid(RBTree *rbtree, uint64 low, uint64 high)
{
OKeyBitmapRBTNode lowNode;
OKeyBitmapRBTNode *node;
int i,
iStart,
iEnd;
uint8 startMask,
endMask;
bool valid = false;
bool first = true;
while (!valid && ((low & HIGH_PART_MASK) <= (high & HIGH_PART_MASK)))
{
bool skip_step = false;
lowNode.key = low;
lowNode.bitmap = NULL;
node = (OKeyBitmapRBTNode *) rbt_find_great_equal(rbtree,
&lowNode.rbtnode);
if (!node)
break;
if (!node->bitmap)
{
if (node->key >= low && node->key < high)
valid = true;
skip_step = true;
}
if (!skip_step)
{
if ((low & HIGH_PART_MASK) == (node->key & HIGH_PART_MASK))
{
iStart = (low & LOW_PART_MASK) >> 3;
startMask = 0xFF << ((low & LOW_PART_MASK) & 7);
}
else
{
iStart = 0;
startMask = 0xFF;
}
if ((high & HIGH_PART_MASK) == (node->key & HIGH_PART_MASK))
{
iEnd = ((high - 1) & LOW_PART_MASK) >> 3;
endMask = 0xFF >> (7 - (((high - 1) & LOW_PART_MASK) & 7));
}
else
{
iEnd = BITMAP_SIZE - 1;
endMask = 0xFF;
}
for (i = iStart; i <= iEnd; i++)
{
uint8 mask;
mask = (i == iStart) ? startMask : 0xFF;
if (i == iEnd)
mask &= endMask;
if (node->bitmap[i] & mask)
valid = true;
}
}
if (!valid)
{
low = node->key;
if (!first)
low += (1L << 10);
}
first = false;
}
return valid;
}
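/* Returns the index of the first bit set in bitmap at or after minOffset,
 * or -1 if no bit is set in the remaining range. */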
static int
find_next_offset(uint8 *bitmap, int minOffset)
{
int i;
uint8 mask;
i = minOffset >> 3;
mask = 0xFF << (minOffset & 7);
while (i < BITMAP_SIZE)
{
mask &= bitmap[i];
if (mask)
{
int result;
result = i << 3;
while (!(mask & 1))
{
result++;
mask >>= 1;
}
return result;
}
mask = 0xFF;
i++;
}
return -1;
}
uint64
o_keybitmap_get_next(RBTree *rbtree, uint64 prev, bool *found)
{
OKeyBitmapRBTNode lowNode;
OKeyBitmapRBTNode *node;
RBTreeIterator iter;
lowNode.key = prev;
lowNode.bitmap = NULL;
node = (OKeyBitmapRBTNode *) rbt_find_great_equal(rbtree,
&lowNode.rbtnode);
if (!node)
{
*found = false;
return 0;
}
if (!node->bitmap)
{
if (node->key >= prev)
{
*found = true;
return node->key;
}
}
else if ((prev & HIGH_PART_MASK) == (node->key & HIGH_PART_MASK))
{
int nextOffset;
nextOffset = find_next_offset(node->bitmap, prev & LOW_PART_MASK);
if (nextOffset >= 0)
{
*found = true;
return node->key + nextOffset;
}
}
if ((prev & HIGH_PART_MASK) == (node->key & HIGH_PART_MASK))
{
rbt_begin_iterate(rbtree, LeftRightWalk, &iter);
iter.last_visited = (RBTNode *) node;
node = (OKeyBitmapRBTNode *) rbt_iterate(&iter);
}
if (!node)
{
*found = false;
return 0;
}
if (!node->bitmap)
{
*found = true;
return node->key;
}
else
{
int nextOffset = find_next_offset(node->bitmap, 0);
Assert(nextOffset >= 0);
*found = true;
return node->key + nextOffset;
}
}
static void
free_tree_node(RBTNode *node)
{
OKeyBitmapRBTNode *keyNode = (OKeyBitmapRBTNode *) node;
if (node->left == node)
{
Assert(node->right == node);
return;
}
if (keyNode->bitmap)
pfree(keyNode->bitmap);
free_tree_node(node->left);
free_tree_node(node->right);
pfree(node);
}
void
o_keybitmap_free(RBTree *tree)
{
free_tree_node(*((RBTNode **) tree));
pfree(tree);
}
bool
o_keybitmap_is_empty(RBTree *rbtree)
{
return rbt_leftmost(rbtree) == NULL;
}
void
o_keybitmap_intersect(RBTree *a, RBTree *b)
{
RBTreeIterator iterA;
RBTreeIterator iterB;
OKeyBitmapRBTNode *nodeA;
OKeyBitmapRBTNode *nodeB;
List *removing = NIL;
ListCell *lc;
rbt_begin_iterate(a, LeftRightWalk, &iterA);
rbt_begin_iterate(b, LeftRightWalk, &iterB);
nodeB = (OKeyBitmapRBTNode *) rbt_iterate(&iterB);
while ((nodeA = (OKeyBitmapRBTNode *) rbt_iterate(&iterA)) != NULL)
{
while (nodeB &&
(nodeB->key & HIGH_PART_MASK) < (nodeA->key & HIGH_PART_MASK))
{
nodeB = (OKeyBitmapRBTNode *) rbt_iterate(&iterB);
}
if (!nodeB ||
(nodeB->key & HIGH_PART_MASK) > (nodeA->key & HIGH_PART_MASK))
{
OKeyBitmapRBTNode *removed_node;
removed_node = palloc0(sizeof(OKeyBitmapRBTNode));
memcpy(removed_node, nodeA, sizeof(OKeyBitmapRBTNode));
removing = lappend(removing, removed_node);
continue;
}
Assert((nodeA->key & HIGH_PART_MASK) == (nodeB->key & HIGH_PART_MASK));
if (!nodeA->bitmap)
{
if (!nodeB->bitmap)
{
if (nodeA->key != nodeB->key)
{
OKeyBitmapRBTNode *removed_node;
removed_node = palloc0(sizeof(OKeyBitmapRBTNode));
memcpy(removed_node, nodeA, sizeof(OKeyBitmapRBTNode));
removing = lappend(removing, removed_node);
continue;
}
}
else
{
int offset = (nodeA->key & LOW_PART_MASK);
if (!(nodeB->bitmap[offset >> 3] & (1 << (offset & 7))))
{
OKeyBitmapRBTNode *removed_node;
removed_node = palloc0(sizeof(OKeyBitmapRBTNode));
memcpy(removed_node, nodeA, sizeof(OKeyBitmapRBTNode));
removing = lappend(removing, removed_node);
continue;
}
}
}
else
{
if (!nodeB->bitmap)
{
int offset = (nodeB->key & LOW_PART_MASK);
if (!(nodeA->bitmap[offset >> 3] & (1 << (offset & 7))))
{
OKeyBitmapRBTNode *removed_node;
removed_node = palloc0(sizeof(OKeyBitmapRBTNode));
memcpy(removed_node, nodeA, sizeof(OKeyBitmapRBTNode));
removing = lappend(removing, removed_node);
continue;
}
pfree(nodeA->bitmap);
nodeA->bitmap = NULL;
nodeA->key = nodeB->key;
}
else
{
int i;
bool empty = true;
for (i = 0; i < BITMAP_SIZE; i++)
{
nodeA->bitmap[i] &= nodeB->bitmap[i];
if (nodeA->bitmap[i] != 0)
empty = false;
}
if (empty)
{
OKeyBitmapRBTNode *removed_node;
removed_node = palloc0(sizeof(OKeyBitmapRBTNode));
memcpy(removed_node, nodeA, sizeof(OKeyBitmapRBTNode));
removing = lappend(removing, removed_node);
continue;
}
}
}
}
foreach(lc, removing)
{
OKeyBitmapRBTNode *search_node;
OKeyBitmapRBTNode *removing_node;
search_node = (OKeyBitmapRBTNode *) lfirst(lc);
if (search_node->bitmap)
pfree(search_node->bitmap);
removing_node = (OKeyBitmapRBTNode *) rbt_find(a,
(RBTNode *) search_node);
rbt_delete(a, (RBTNode *) removing_node);
pfree(search_node);
}
list_free(removing);
}
void
o_keybitmap_union(RBTree *a, RBTree *b)
{
RBTreeIterator iterB;
OKeyBitmapRBTNode *nodeB;
rbt_begin_iterate(b, LeftRightWalk, &iterB);
while ((nodeB = (OKeyBitmapRBTNode *) rbt_iterate(&iterB)) != NULL)
{
bool is_new;
rbt_insert(a, &nodeB->rbtnode, &is_new);
}
}
static int
bm_rbt_comparator(const RBTNode *a, const RBTNode *b, void *arg)
{
const OKeyBitmapRBTNode *keyA = (OKeyBitmapRBTNode *) a;
const OKeyBitmapRBTNode *keyB = (OKeyBitmapRBTNode *) b;
uint64 va = keyA->key & HIGH_PART_MASK;
uint64 vb = keyB->key & HIGH_PART_MASK;
return va > vb ? 1 : va < vb ? -1 : 0;
}
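/* Converts a node holding a single exact key into bitmap form: allocates the
 * 1024-bit bitmap, sets the bit for the key's low 10 bits and keeps only the
 * high part in node->key. */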
static void
node_make_bitmap(OKeyBitmapRBTNode *node)
{
int offset;
node->bitmap = palloc0(BITMAP_SIZE);
offset = node->key & LOW_PART_MASK;
node->bitmap[offset >> 3] |= 1 << (offset & 7);
node->key &= HIGH_PART_MASK;
}
static void
bm_rbt_combiner(RBTNode *existing,
const RBTNode *newdata,
void *arg)
{
OKeyBitmapRBTNode *old = (OKeyBitmapRBTNode *) existing;
OKeyBitmapRBTNode *new = (OKeyBitmapRBTNode *) newdata;
if (!old->bitmap)
{
if (!new->bitmap && new->key == old->key)
return;
node_make_bitmap(old);
}
if (!new->bitmap)
{
int offset = new->key & LOW_PART_MASK;
old->bitmap[offset >> 3] |= 1 << (offset & 7);
}
else
{
int i;
for (i = 0; i < BITMAP_SIZE; i++)
old->bitmap[i] |= new->bitmap[i];
}
}
static RBTNode *
bm_rbt_allocfunc(void *arg)
{
RBTNode *result = palloc0(sizeof(OKeyBitmapRBTNode));
return result;
}
static void
bm_rbt_freefunc(RBTNode *x, void *arg)
{
pfree(x);
}
| 4,665 |
578 | <reponame>JesusGonfer/syndesis
/*
* Copyright (C) 2016 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.syndesis.server.jsondb.dao;
import java.nio.charset.StandardCharsets;
import java.util.Set;
import java.util.stream.Stream;
import io.syndesis.common.model.Kind;
import io.syndesis.common.model.WithId;
import io.syndesis.common.model.WithUsage;
import io.syndesis.common.model.connection.ConnectionOverview;
import io.syndesis.common.model.validation.TargetWithDomain;
import io.syndesis.server.jsondb.JsonDB;
import org.immutables.value.Value;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.reflections.Reflections;
import org.reflections.util.ConfigurationBuilder;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class JsonDbDaoTest<T extends WithId<T> & WithUsage> {
private static final byte[] JSON_BYTES = "{\"uses\":14}".getBytes(StandardCharsets.UTF_8);
static final JsonDB JSONDB = mock(JsonDB.class);
@ParameterizedTest
@MethodSource("parameters")
public void shouldDeserializeUsage(final JsonDbDao<T> dao) {
final String path = "/" + Kind.from(dao.getType()).getPluralModelName() + "/:id";
when(JSONDB.getAsByteArray(path)).thenReturn(JSON_BYTES);
final T fetched = dao.fetch("id");
assertThat(fetched.getUses()).isEqualTo(14);
}
static Stream<JsonDbDao<?>> parameters() {
final Reflections reflections = new Reflections(new ConfigurationBuilder()
.forPackages("io.syndesis")
.filterInputsBy(r -> !r.contains("Immutable")));
final Set<Class<? extends WithUsage>> withUsageSubtypes = reflections.getSubTypesOf(WithUsage.class);
final Set<Class<?>> immutables = reflections.getTypesAnnotatedWith(Value.Immutable.class);
@SuppressWarnings("rawtypes")
final Set<Class<? extends TargetWithDomain>> withDomainClasses = reflections.getSubTypesOf(TargetWithDomain.class);
withUsageSubtypes.retainAll(immutables);
withUsageSubtypes.removeAll(withDomainClasses);
withUsageSubtypes.remove(ConnectionOverview.class); // not sure why this
// is a DAO type
return withUsageSubtypes.stream()
.map(JsonDbDaoTest::stubDao);
}
static <T extends WithId<T>> JsonDbDao<?> stubDao(Class<?> type) {
return new JsonDbDao<T>(JSONDB) {
@SuppressWarnings("unchecked")
@Override
public Class<T> getType() {
return (Class<T>) type;
}
@Override
public String toString() {
return type.getSimpleName();
}
};
}
}
| 1,306 |
333 | package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
* Binding of a default mini program to an Alipay store
*
* @author auto create
* @since 1.0, 2020-12-04 15:16:03
*/
public class AlipayMerchantStoreMiniappBindModel extends AlipayObject {
private static final long serialVersionUID = 3524591173757821869L;
/**
* Type of the store/mini-program binding operation. One of the following enum values:
- STORE_DEFAULT_MINIAPP: the store's default mini program
*/
@ApiField("operation")
private String operation;
/**
* Mapping between stores and their bound mini programs
*/
@ApiListField("shop_app_relation")
@ApiField("shop_app_relation")
private List<ShopAppRelation> shopAppRelation;
public String getOperation() {
return this.operation;
}
public void setOperation(String operation) {
this.operation = operation;
}
public List<ShopAppRelation> getShopAppRelation() {
return this.shopAppRelation;
}
public void setShopAppRelation(List<ShopAppRelation> shopAppRelation) {
this.shopAppRelation = shopAppRelation;
}
}
| 534 |
17,104 | // boost/io/quoted_manip.hpp ---------------------------------------------------------//
// Copyright <NAME> 2010
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
// Library home page http://www.boost.org/libs/io
//--------------------------------------------------------------------------------------//
#ifndef BOOST_IO_QUOTED_MANIP
#define BOOST_IO_QUOTED_MANIP
#include <iosfwd>
#include <ios>
#include <string>
#include <iterator>
#include <boost/io/ios_state.hpp>
namespace mars_boost {} namespace boost = mars_boost; namespace mars_boost
{
namespace io
{
namespace detail { template <class String, class Char> struct quoted_proxy; }
// ------------ public interface ------------------------------------------------//
// manipulator for const std::basic_string&
template <class Char, class Traits, class Alloc>
detail::quoted_proxy<std::basic_string<Char, Traits, Alloc> const &, Char>
quoted(const std::basic_string<Char, Traits, Alloc>& s,
Char escape='\\', Char delim='\"');
// manipulator for non-const std::basic_string&
template <class Char, class Traits, class Alloc>
detail::quoted_proxy<std::basic_string<Char, Traits, Alloc> &, Char>
quoted(std::basic_string<Char, Traits, Alloc>& s,
Char escape='\\', Char delim='\"');
// manipulator for const C-string*
template <class Char>
detail::quoted_proxy<const Char*, Char>
quoted(const Char* s, Char escape='\\', Char delim='\"');
// ----------- implementation details -------------------------------------------//
namespace detail
{
// proxy used as an argument pack
template <class String, class Char>
struct quoted_proxy
{
String string;
Char escape;
Char delim;
quoted_proxy(String s_, Char escape_, Char delim_)
: string(s_), escape(escape_), delim(delim_) {}
private:
// String may be a const type, so disable the assignment operator
quoted_proxy& operator=(const quoted_proxy&); // = deleted
};
// abstract away difference between proxies with const or non-const basic_strings
template <class Char, class Traits, class Alloc>
std::basic_ostream<Char, Traits>&
basic_string_inserter_imp(std::basic_ostream<Char, Traits>& os,
std::basic_string<Char, Traits, Alloc> const & string, Char escape, Char delim)
{
os << delim;
typename std::basic_string<Char, Traits, Alloc>::const_iterator
end_it = string.end();
for (typename std::basic_string<Char, Traits, Alloc>::const_iterator
it = string.begin();
it != end_it;
++it )
{
if (*it == delim || *it == escape)
os << escape;
os << *it;
}
os << delim;
return os;
}
// inserter for const std::basic_string& proxies
template <class Char, class Traits, class Alloc>
inline
std::basic_ostream<Char, Traits>& operator<<(std::basic_ostream<Char, Traits>& os,
const quoted_proxy<std::basic_string<Char, Traits, Alloc> const &, Char>& proxy)
{
return basic_string_inserter_imp(os, proxy.string, proxy.escape, proxy.delim);
}
// inserter for non-const std::basic_string& proxies
template <class Char, class Traits, class Alloc>
inline
std::basic_ostream<Char, Traits>& operator<<(std::basic_ostream<Char, Traits>& os,
const quoted_proxy<std::basic_string<Char, Traits, Alloc>&, Char>& proxy)
{
return basic_string_inserter_imp(os, proxy.string, proxy.escape, proxy.delim);
}
// inserter for const C-string* proxies
template <class Char, class Traits>
std::basic_ostream<Char, Traits>& operator<<(std::basic_ostream<Char, Traits>& os,
const quoted_proxy<const Char*, Char>& proxy)
{
os << proxy.delim;
for (const Char* it = proxy.string;
*it;
++it )
{
if (*it == proxy.delim || *it == proxy.escape)
os << proxy.escape;
os << *it;
}
os << proxy.delim;
return os;
}
// extractor for non-const std::basic_string& proxies
template <class Char, class Traits, class Alloc>
std::basic_istream<Char, Traits>& operator>>(std::basic_istream<Char, Traits>& is,
const quoted_proxy<std::basic_string<Char, Traits, Alloc>&, Char>& proxy)
{
proxy.string.clear();
Char c;
is >> c;
if (c != proxy.delim)
{
is.unget();
is >> proxy.string;
return is;
}
{
mars_boost::io::ios_flags_saver ifs(is);
is >> std::noskipws;
for (;;)
{
is >> c;
if (!is.good()) // cope with I/O errors or end-of-file
break;
if (c == proxy.escape)
{
is >> c;
if (!is.good()) // cope with I/O errors or end-of-file
break;
}
else if (c == proxy.delim)
break;
proxy.string += c;
}
}
return is;
}
} // namespace detail
// manipulator implementation for const std::basic_string&
template <class Char, class Traits, class Alloc>
inline detail::quoted_proxy<std::basic_string<Char, Traits, Alloc> const &, Char>
quoted(const std::basic_string<Char, Traits, Alloc>& s, Char escape, Char delim)
{
return detail::quoted_proxy<std::basic_string<Char, Traits, Alloc> const &, Char>
(s, escape, delim);
}
// manipulator implementation for non-const std::basic_string&
template <class Char, class Traits, class Alloc>
inline detail::quoted_proxy<std::basic_string<Char, Traits, Alloc> &, Char>
quoted(std::basic_string<Char, Traits, Alloc>& s, Char escape, Char delim)
{
return detail::quoted_proxy<std::basic_string<Char, Traits, Alloc>&, Char>
(s, escape, delim);
}
// manipulator implementation for const C-string*
template <class Char>
inline detail::quoted_proxy<const Char*, Char>
quoted(const Char* s, Char escape, Char delim)
{
return detail::quoted_proxy<const Char*, Char> (s, escape, delim);
}
} // namespace io
} // namespace mars_boost
#endif // BOOST_IO_QUOTED_MANIP
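// Round-trip usage sketch (assumes <sstream>; either the boost or the mars_boost
// namespace alias declared above can be used):
//
// std::string original = "say \"hi\"", restored;
// std::stringstream ss;
// ss << mars_boost::io::quoted(original); // writes "say \"hi\"" with escapes
// ss >> mars_boost::io::quoted(restored); // restored == original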
| 2,966 |
461 | <filename>系统综合能力—虚拟机/ics2019/nexus-am/am/src/native/cte.c
#include <am.h>
#include <klib.h>
static _Context* (*user_handler)(_Event, _Context*) = NULL;
void __am_asm_trap();
void __am_ret_from_trap();
void __am_get_example_uc(_Context *c);
void __am_get_cur_as(_Context *c);
void __am_switch(_Context *c);
void __am_irq_handle(_Context *c) {
getcontext(&c->uc);
__am_get_cur_as(c);
_Event e;
e.event = ((uint32_t)c->rax == -1 ? _EVENT_YIELD : _EVENT_SYSCALL);
_Context *ret = user_handler(e, c);
if (ret != NULL) {
c = ret;
}
__am_switch(c);
c->uc.uc_mcontext.gregs[REG_RIP] = (uintptr_t)__am_ret_from_trap;
c->uc.uc_mcontext.gregs[REG_RSP] = (uintptr_t)c;
setcontext(&c->uc);
}
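/* _cte_init() installs the address of __am_asm_trap at 0x100000; _yield() calls
 * through that slot with rax = -1, which is how __am_irq_handle() above tells a
 * yield apart from a system call. */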
int _cte_init(_Context*(*handler)(_Event, _Context*)) {
void *start = (void *)0x100000;
*(uintptr_t *)start = (uintptr_t)__am_asm_trap;
user_handler = handler;
return 0;
}
_Context *_kcontext(_Area stack, void (*entry)(void *), void *arg) {
_Context *c = (_Context*)stack.end - 1;
__am_get_example_uc(c);
c->rip = (uintptr_t)entry;
return c;
}
void _yield() {
asm volatile("call *0x100000": : "a"(-1));
}
int _intr_read() {
return 0;
}
void _intr_write(int enable) {
}
| 552 |
965 | // The command handler for ID_SHAPE_COLOR (menu command to change
// the color of the currently selected shape) was added to the message
// map of CMyShape (note, not CMyView) using the Properties window.
// The menu item will be automatically enabled or disabled, depending
// on whether a CMyShape is currently selected in the view, that is,
// depending on whether CMyView::m_pActiveView is NULL. It is not
// necessary to implement an ON_UPDATE_COMMAND_UI handler to enable
// or disable the menu item.
BEGIN_MESSAGE_MAP(CMyShape, CCmdTarget)
ON_COMMAND(ID_SHAPE_COLOR, &CMyShape::OnShapeColor)
END_MESSAGE_MAP() | 181 |
364 | <reponame>akawalsky/hapi-fhir
package ca.uhn.fhir.rest.server.interceptor;
/*-
* #%L
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.util.ClasspathUtil;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseConformance;
/**
* This interceptor replaces the auto-generated CapabilityStatement that is generated
* by the HAPI FHIR Server with a static hard-coded resource.
*/
@Interceptor
public class StaticCapabilityStatementInterceptor {
private String myCapabilityStatementResource;
private volatile IBaseConformance myCapabilityStatement;
/**
* Sets the CapabilityStatement to use
*
* @see #setCapabilityStatementResource(String) #setCapabilityStatementResource(String) is an alternate way to supply the CapabilityStatement
*/
public void setCapabilityStatement(IBaseConformance theCapabilityStatement) {
myCapabilityStatement = theCapabilityStatement;
}
/**
* Sets the classpath location of the CapabilityStatement to use. If this method is used to supply
* the CapabilityStatement, then the given classpath resource will be read and parsed as a FHIR
* CapabilityStatement.
*
* @see #setCapabilityStatement(IBaseConformance) #setCapabilityStatement(IBaseConformance) is an alternate way to supply the CapabilityStatement
*/
public void setCapabilityStatementResource(String theClasspath) {
myCapabilityStatementResource = theClasspath;
myCapabilityStatement = null;
}
@Hook(Pointcut.SERVER_CAPABILITY_STATEMENT_GENERATED)
public IBaseConformance hook(RequestDetails theRequestDetails) {
IBaseConformance retVal = myCapabilityStatement;
if (retVal == null) {
Validate.notBlank(myCapabilityStatementResource, "No CapabilityStatement defined");
String output = ClasspathUtil.loadResource(myCapabilityStatementResource);
FhirContext ctx = theRequestDetails.getFhirContext();
EncodingEnum encoding = EncodingEnum.detectEncodingNoDefault(output);
Validate.notNull(encoding, "Could not determine FHIR encoding for resource: %s", myCapabilityStatementResource);
retVal = (IBaseConformance) encoding
.newParser(ctx)
.parseResource(output);
myCapabilityStatement = retVal;
}
return retVal;
}
}
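// Registration sketch (the classpath resource name and the RestfulServer instance
// are placeholders, not defined by this class):
//
// StaticCapabilityStatementInterceptor interceptor = new StaticCapabilityStatementInterceptor();
// interceptor.setCapabilityStatementResource("/static-capabilitystatement.json");
// restfulServer.registerInterceptor(interceptor);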
| 953 |
435 | {
"description": "<NAME>\u010dius \u201eFabric\"\n\nPyCon LT 2012 Prezentacija",
"duration": 1591,
"language": "lit",
"recorded": "2012-04-28",
"speakers": [
"<NAME>\u010dius"
],
"thumbnail_url": "https://i.ytimg.com/vi/A6sFU1unpZs/hqdefault.jpg",
"title": "Fabric",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=A6sFU1unpZs"
}
]
}
| 197 |
4,772 | <reponame>Balkerm/spring-data-examples
package example.service;
import example.repo.Customer1297Repository;
import org.springframework.stereotype.Service;
@Service
public class Customer1297Service {
public Customer1297Service(Customer1297Repository repo) {}
}
| 80 |
307 | <filename>halyard-cli/src/main/java/com/netflix/spinnaker/halyard/cli/command/v1/config/security/ui/UiSecurityEditCommand.java
/*
* Copyright 2017 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.spinnaker.halyard.cli.command.v1.config.security.ui;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
import com.netflix.spinnaker.halyard.cli.command.v1.config.AbstractConfigCommand;
import com.netflix.spinnaker.halyard.cli.services.v1.Daemon;
import com.netflix.spinnaker.halyard.cli.services.v1.OperationHandler;
import com.netflix.spinnaker.halyard.cli.ui.v1.AnsiUi;
import com.netflix.spinnaker.halyard.config.model.v1.security.UiSecurity;
import lombok.Data;
import lombok.EqualsAndHashCode;
@EqualsAndHashCode(callSuper = true)
@Data
@Parameters(separators = "=")
public class UiSecurityEditCommand extends AbstractConfigCommand {
private String commandName = "edit";
private String shortDescription = "Configure access policies specific to Spinnaker's UI server.";
private String longDescription =
String.join(
" ",
"When Spinnaker is deployed to a remote host, the UI server may be configured to",
"do SSL termination, or sit behind an externally configured proxy server or load balancer.");
@Parameter(
names = "--override-base-url",
description =
"If you are accessing the UI server remotely, provide the full base URL of whatever proxy or "
+ "load balancer is fronting the UI requests.")
String overrideBaseUrl;
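// Example invocation from the hal CLI (a sketch; the URL is a placeholder):
// hal config security ui edit --override-base-url=https://spinnaker.example.com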
@Override
protected void executeThis() {
String currentDeployment = getCurrentDeployment();
UiSecurity uiSecurity =
new OperationHandler<UiSecurity>()
.setOperation(Daemon.getUiSecurity(currentDeployment, false))
.setFailureMesssage("Failed to load UI security settings.")
.get();
int originalHash = uiSecurity.hashCode();
uiSecurity.setOverrideBaseUrl(
isSet(overrideBaseUrl) ? overrideBaseUrl : uiSecurity.getOverrideBaseUrl());
if (originalHash == uiSecurity.hashCode()) {
AnsiUi.failure("No changes supplied.");
return;
}
new OperationHandler<Void>()
.setOperation(Daemon.setUiSecurity(currentDeployment, !noValidate, uiSecurity))
.setFailureMesssage("Failed to edit UI security settings.")
.setSuccessMessage("Successfully updated UI security settings.")
.get();
}
}
| 995 |
404 | package io.github.memfis19.cadar.event;
import java.util.Calendar;
public interface OnDayChangeListener {
void onDayChanged(Calendar calendar);
} | 50 |
14,668 | <reponame>chromium/chromium
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/enterprise/connectors/device_trust/navigation_throttle.h"
#include <memory>
#include "base/run_loop.h"
#include "base/test/scoped_feature_list.h"
#include "base/values.h"
#include "build/build_config.h"
#include "chrome/browser/enterprise/connectors/connectors_prefs.h"
#include "chrome/browser/enterprise/connectors/device_trust/device_trust_connector_service.h"
#include "chrome/browser/enterprise/connectors/device_trust/device_trust_features.h"
#include "chrome/browser/enterprise/connectors/device_trust/fake_device_trust_connector_service.h"
#include "chrome/browser/enterprise/connectors/device_trust/mock_device_trust_service.h"
#include "chrome/test/base/testing_browser_process.h"
#include "chrome/test/base/testing_profile.h"
#include "components/policy/core/common/policy_pref_names.h"
#include "components/sync_preferences/testing_pref_service_syncable.h"
#include "content/public/browser/navigation_throttle.h"
#include "content/public/browser/web_contents.h"
#include "content/public/test/browser_task_environment.h"
#include "content/public/test/mock_navigation_handle.h"
#include "content/public/test/test_renderer_host.h"
#include "content/public/test/web_contents_tester.h"
#include "net/http/http_response_headers.h"
#include "net/http/http_util.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
using content::NavigationThrottle;
using ::testing::_;
using ::testing::Invoke;
using ::testing::Return;
namespace {
base::Value::ListStorage GetTrustedUrls() {
base::Value::ListStorage trusted_urls;
trusted_urls.push_back(base::Value("https://www.example.com"));
trusted_urls.push_back(base::Value("example2.example.com"));
return trusted_urls;
}
constexpr char kChallenge[] = R"({"challenge": "encrypted_challenge_string"})";
scoped_refptr<net::HttpResponseHeaders> GetHeaderChallenge(
const std::string& challenge) {
std::string raw_response_headers =
"HTTP/1.1 200 OK\r\n"
"x-verified-access-challenge: " +
challenge + "\r\n";
return base::MakeRefCounted<net::HttpResponseHeaders>(
net::HttpUtil::AssembleRawHeaders(raw_response_headers));
}
} // namespace
namespace enterprise_connectors {
class DeviceTrustNavigationThrottleTest : public testing::Test {
public:
DeviceTrustNavigationThrottleTest() : trusted_urls_(GetTrustedUrls()) {}
void SetUp() override {
scoped_feature_list_.InitAndEnableFeature(kDeviceTrustConnectorEnabled);
web_contents_ =
content::WebContentsTester::CreateTestWebContents(&profile_, nullptr);
fake_connector_ = std::make_unique<FakeDeviceTrustConnectorService>(
profile_.GetTestingPrefService());
fake_connector_->Initialize();
fake_connector_->update_policy(
std::make_unique<base::ListValue>(GetTrustedUrls()));
EXPECT_CALL(mock_device_trust_service_, Watches(_))
.WillRepeatedly(Invoke(
[this](const GURL& url) { return fake_connector_->Watches(url); }));
EXPECT_CALL(mock_device_trust_service_, IsEnabled())
.WillRepeatedly(Return(true));
}
std::unique_ptr<DeviceTrustNavigationThrottle> CreateThrottle(
content::NavigationHandle* navigation_handle) {
return std::make_unique<DeviceTrustNavigationThrottle>(
&mock_device_trust_service_, navigation_handle);
}
content::WebContents* web_contents() const { return web_contents_.get(); }
content::RenderFrameHost* main_frame() const {
return web_contents()->GetMainFrame();
}
protected:
content::BrowserTaskEnvironment task_environment_{
base::test::TaskEnvironment::TimeSource::MOCK_TIME};
base::test::ScopedFeatureList scoped_feature_list_;
content::RenderViewHostTestEnabler rvh_test_enabler_;
TestingProfile profile_;
std::unique_ptr<content::WebContents> web_contents_;
test::MockDeviceTrustService mock_device_trust_service_;
std::unique_ptr<FakeDeviceTrustConnectorService> fake_connector_;
base::ListValue trusted_urls_;
};
TEST_F(DeviceTrustNavigationThrottleTest, ExpectHeaderDeviceTrustOnRequest) {
content::MockNavigationHandle test_handle(GURL("https://www.example.com/"),
main_frame());
EXPECT_CALL(test_handle,
SetRequestHeader("X-Device-Trust", "VerifiedAccess"));
auto throttle = CreateThrottle(&test_handle);
EXPECT_EQ(NavigationThrottle::PROCEED, throttle->WillStartRequest().action());
}
TEST_F(DeviceTrustNavigationThrottleTest, NoHeaderDeviceTrustOnRequest) {
content::MockNavigationHandle test_handle(GURL("https://www.no-example.com/"),
main_frame());
EXPECT_CALL(test_handle, SetRequestHeader("X-Device-Trust", "VerifiedAccess"))
.Times(0);
auto throttle = CreateThrottle(&test_handle);
EXPECT_EQ(NavigationThrottle::PROCEED, throttle->WillStartRequest().action());
}
TEST_F(DeviceTrustNavigationThrottleTest, BuildChallengeResponseFromHeader) {
content::MockNavigationHandle test_handle(GURL("https://www.example.com/"),
main_frame());
test_handle.set_response_headers(GetHeaderChallenge(kChallenge));
auto throttle = CreateThrottle(&test_handle);
EXPECT_CALL(test_handle, RemoveRequestHeader("X-Device-Trust"));
EXPECT_CALL(mock_device_trust_service_,
BuildChallengeResponse(kChallenge, _));
EXPECT_EQ(NavigationThrottle::DEFER, throttle->WillStartRequest().action());
base::RunLoop().RunUntilIdle();
}
} // namespace enterprise_connectors
| 2,055 |
2,577 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.runtime.migration;
import static org.camunda.bpm.engine.test.util.ActivityInstanceAssert.describeActivityInstanceTree;
import static org.camunda.bpm.engine.test.util.ExecutionAssert.describeExecutionTree;
import java.util.List;
import org.camunda.bpm.engine.migration.MigrationPlan;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.ActivityInstance;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.engine.test.api.runtime.migration.models.MultiInstanceProcessModels;
import org.camunda.bpm.engine.test.api.runtime.migration.models.ProcessModels;
import org.camunda.bpm.engine.test.util.ProvidedProcessEngineRule;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.RuleChain;
/**
* @author <NAME>
*
*/
public class MigrationRemoveMultiInstanceTest {
protected ProcessEngineRule rule = new ProvidedProcessEngineRule();
protected MigrationTestRule testHelper = new MigrationTestRule(rule);
@Rule
public RuleChain ruleChain = RuleChain.outerRule(rule).around(testHelper);
@Test
public void testRemoveParallelMultiInstanceBody() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.PAR_MI_ONE_TASK_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.ONE_TASK_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("userTask", "userTask")
.build();
// when
testHelper.createProcessInstanceAndMigrate(migrationPlan);
// then
testHelper.assertExecutionTreeAfterMigration()
.hasProcessDefinitionId(targetProcessDefinition.getId())
.matches(
describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
.child("userTask").concurrent().noScope().up()
.child("userTask").concurrent().noScope().up()
.child("userTask").concurrent().noScope()
.done());
ActivityInstance[] userTaskInstances = testHelper.snapshotBeforeMigration.getActivityTree().getActivityInstances("userTask");
testHelper.assertActivityTreeAfterMigration().hasStructure(
describeActivityInstanceTree(targetProcessDefinition.getId())
.activity("userTask", userTaskInstances[0].getId())
.activity("userTask", userTaskInstances[1].getId())
.activity("userTask", userTaskInstances[2].getId())
.done());
List<Task> migratedTasks = testHelper.snapshotAfterMigration.getTasks();
Assert.assertEquals(3, migratedTasks.size());
// and it is possible to successfully complete the migrated instance
for (Task migratedTask : migratedTasks) {
rule.getTaskService().complete(migratedTask.getId());
}
testHelper.assertProcessEnded(testHelper.snapshotBeforeMigration.getProcessInstanceId());
}
@Test
public void testRemoveParallelMultiInstanceBodyVariables() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.PAR_MI_ONE_TASK_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.ONE_TASK_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("userTask", "userTask")
.build();
// when
testHelper.createProcessInstanceAndMigrate(migrationPlan);
// then
Assert.assertEquals(0, rule.getRuntimeService().createVariableInstanceQuery().variableName("nrOfInstances").count());
// the MI body variables are gone
Assert.assertEquals(0, rule.getRuntimeService().createVariableInstanceQuery().variableName("nrOfInstances").count());
Assert.assertEquals(0, rule.getRuntimeService().createVariableInstanceQuery().variableName("nrOfActiveInstances").count());
Assert.assertEquals(0, rule.getRuntimeService().createVariableInstanceQuery().variableName("nrOfCompletedInstances").count());
// and the loop counters are still there (because they logically belong to the inner activity instances)
Assert.assertEquals(3, rule.getRuntimeService().createVariableInstanceQuery().variableName("loopCounter").count());
}
@Test
public void testRemoveParallelMultiInstanceBodyScope() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.PAR_MI_SUBPROCESS_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.SUBPROCESS_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("subProcess", "subProcess")
.mapActivities("userTask", "userTask")
.build();
// when
testHelper.createProcessInstanceAndMigrate(migrationPlan);
// then
ActivityInstance[] subProcessInstances =
testHelper.snapshotBeforeMigration.getActivityTree().getActivityInstances("subProcess");
testHelper.assertExecutionTreeAfterMigration()
.hasProcessDefinitionId(targetProcessDefinition.getId())
.matches(
describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
.child(null).concurrent().noScope()
.child("userTask").scope().id(testHelper.getSingleExecutionIdForActivity(subProcessInstances[0], "subProcess")).up().up()
.child(null).concurrent().noScope()
.child("userTask").scope().id(testHelper.getSingleExecutionIdForActivity(subProcessInstances[1], "subProcess")).up().up()
.child(null).concurrent().noScope()
.child("userTask").scope().id(testHelper.getSingleExecutionIdForActivity(subProcessInstances[2], "subProcess"))
.done());
testHelper.assertActivityTreeAfterMigration().hasStructure(
describeActivityInstanceTree(targetProcessDefinition.getId())
.beginScope("subProcess", subProcessInstances[0].getId())
.activity("userTask", subProcessInstances[0].getActivityInstances("userTask")[0].getId())
.endScope()
.beginScope("subProcess", subProcessInstances[1].getId())
.activity("userTask", subProcessInstances[1].getActivityInstances("userTask")[0].getId())
.endScope()
.beginScope("subProcess", subProcessInstances[2].getId())
.activity("userTask", subProcessInstances[2].getActivityInstances("userTask")[0].getId())
.endScope()
.done());
List<Task> migratedTasks = testHelper.snapshotAfterMigration.getTasks();
Assert.assertEquals(3, migratedTasks.size());
// and it is possible to successfully complete the migrated instance
for (Task migratedTask : migratedTasks) {
rule.getTaskService().complete(migratedTask.getId());
}
testHelper.assertProcessEnded(testHelper.snapshotBeforeMigration.getProcessInstanceId());
}
@Test
public void testRemoveParallelMultiInstanceBodyOneInstanceFinished() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.PAR_MI_ONE_TASK_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.ONE_TASK_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("userTask", "userTask")
.build();
ProcessInstance processInstance = rule.getRuntimeService()
.startProcessInstanceById(migrationPlan.getSourceProcessDefinitionId());
Task firstTask = rule.getTaskService().createTaskQuery().listPage(0, 1).get(0);
rule.getTaskService().complete(firstTask.getId());
// when
testHelper.migrateProcessInstance(migrationPlan, processInstance);
// then
testHelper.assertExecutionTreeAfterMigration()
.hasProcessDefinitionId(targetProcessDefinition.getId())
.matches(
describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
.child("userTask").concurrent().noScope().up()
.child("userTask").concurrent().noScope()
.done());
ActivityInstance[] userTaskInstances = testHelper.snapshotBeforeMigration.getActivityTree().getActivityInstances("userTask");
testHelper.assertActivityTreeAfterMigration().hasStructure(
describeActivityInstanceTree(targetProcessDefinition.getId())
.activity("userTask", userTaskInstances[0].getId())
.activity("userTask", userTaskInstances[1].getId())
.done());
List<Task> migratedTasks = testHelper.snapshotAfterMigration.getTasks();
Assert.assertEquals(2, migratedTasks.size());
// and it is possible to successfully complete the migrated instance
for (Task migratedTask : migratedTasks) {
rule.getTaskService().complete(migratedTask.getId());
}
testHelper.assertProcessEnded(testHelper.snapshotBeforeMigration.getProcessInstanceId());
}
@Test
public void testRemoveSequentialMultiInstanceBody() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.SEQ_MI_ONE_TASK_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.ONE_TASK_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("userTask", "userTask")
.build();
// when
testHelper.createProcessInstanceAndMigrate(migrationPlan);
// then
testHelper.assertExecutionTreeAfterMigration()
.hasProcessDefinitionId(targetProcessDefinition.getId())
.matches(
describeExecutionTree("userTask").scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
.done());
testHelper.assertActivityTreeAfterMigration().hasStructure(
describeActivityInstanceTree(targetProcessDefinition.getId())
.activity("userTask", testHelper.getSingleActivityInstanceBeforeMigration("userTask").getId())
.done());
Task migratedTask = testHelper.snapshotAfterMigration.getTaskForKey("userTask");
Assert.assertNotNull(migratedTask);
// and it is possible to successfully complete the migrated instance
rule.getTaskService().complete(migratedTask.getId());
testHelper.assertProcessEnded(testHelper.snapshotBeforeMigration.getProcessInstanceId());
}
@Test
public void testRemoveSequentialMultiInstanceBodyVariables() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.SEQ_MI_ONE_TASK_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.ONE_TASK_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("userTask", "userTask")
.build();
// when
testHelper.createProcessInstanceAndMigrate(migrationPlan);
// then all MI variables are gone
Assert.assertEquals(0, rule.getRuntimeService().createVariableInstanceQuery().count());
}
@Test
public void testRemoveSequentialMultiInstanceBodyScope() {
// given
ProcessDefinition sourceProcessDefinition = testHelper.deployAndGetDefinition(MultiInstanceProcessModels.SEQ_MI_SUBPROCESS_PROCESS);
ProcessDefinition targetProcessDefinition = testHelper.deployAndGetDefinition(ProcessModels.SUBPROCESS_PROCESS);
MigrationPlan migrationPlan = rule.getRuntimeService()
.createMigrationPlan(sourceProcessDefinition.getId(), targetProcessDefinition.getId())
.mapActivities("subProcess", "subProcess")
.mapActivities("userTask", "userTask")
.build();
// when
testHelper.createProcessInstanceAndMigrate(migrationPlan);
// then
ActivityInstance subProcessInstance = testHelper.getSingleActivityInstanceBeforeMigration("subProcess");
testHelper.assertExecutionTreeAfterMigration()
.hasProcessDefinitionId(targetProcessDefinition.getId())
.matches(
describeExecutionTree(null).scope().id(testHelper.snapshotBeforeMigration.getProcessInstanceId())
.child("userTask").scope().id(testHelper.getSingleExecutionIdForActivityBeforeMigration("subProcess"))
.done());
testHelper.assertActivityTreeAfterMigration().hasStructure(
describeActivityInstanceTree(targetProcessDefinition.getId())
.beginScope("subProcess", subProcessInstance.getId())
.activity("userTask", subProcessInstance.getActivityInstances("userTask")[0].getId())
.done());
Task migratedTask = testHelper.snapshotAfterMigration.getTaskForKey("userTask");
Assert.assertNotNull(migratedTask);
// and it is possible to successfully complete the migrated instance
rule.getTaskService().complete(migratedTask.getId());
testHelper.assertProcessEnded(testHelper.snapshotBeforeMigration.getProcessInstanceId());
}
}
| 4,546 |
887 | package org.javers.core.json.typeadapter.change;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonSerializationContext;
import org.javers.common.collections.Lists;
import org.javers.common.exception.JaversException;
import org.javers.common.exception.JaversExceptionCode;
import org.javers.core.diff.Change;
import org.javers.core.diff.changetype.InitialValueChange;
import org.javers.core.diff.changetype.PropertyChangeMetadata;
import org.javers.core.diff.changetype.TerminalValueChange;
import org.javers.core.diff.changetype.ValueChange;
import org.javers.core.metamodel.type.TypeMapper;
import java.util.List;
class ValueChangeTypeAdapter extends ChangeTypeAdapter<ValueChange> {
private static final String LEFT_VALUE_FIELD = "left";
private static final String RIGHT_VALUE_FIELD = "right";
public ValueChangeTypeAdapter(TypeMapper typeMapper) {
super(typeMapper);
}
@Override
public ValueChange fromJson(JsonElement json, JsonDeserializationContext context) {
JsonObject jsonObject = (JsonObject) json;
PropertyChangeMetadata stub = deserializeStub(jsonObject, context);
Object leftValue = context.deserialize(jsonObject.get(LEFT_VALUE_FIELD), getJaversProperty(stub).getGenericType());
Object rightValue = context.deserialize(jsonObject.get(RIGHT_VALUE_FIELD), getJaversProperty(stub).getGenericType());
Class<? extends Change> changeType = decodeChangeType((JsonObject) json);
if (changeType == ValueChange.class) {
return new ValueChange(stub, leftValue, rightValue);
}
if (changeType == InitialValueChange.class) {
return new InitialValueChange(stub, rightValue);
}
if (changeType == TerminalValueChange.class) {
return new TerminalValueChange(stub, leftValue);
}
throw new JaversException(JaversExceptionCode.NOT_IMPLEMENTED);
}
@Override
public JsonElement toJson(ValueChange change, JsonSerializationContext context) {
JsonObject jsonObject = createJsonObject(change, context);
jsonObject.add(LEFT_VALUE_FIELD, context.serialize(change.getLeft()));
jsonObject.add(RIGHT_VALUE_FIELD, context.serialize(change.getRight()));
return jsonObject;
}
@Override
public Class getValueType() {
throw new JaversException(JaversExceptionCode.NOT_IMPLEMENTED);
}
@Override
public List<Class> getValueTypes() {
return Lists.asList(ValueChange.class, InitialValueChange.class, TerminalValueChange.class);
}
}
| 952 |
381 | package com.tngtech.jgiven.integration.spring.junit5.test;
import com.tngtech.jgiven.integration.spring.junit5.SpringScenarioTest;
import com.tngtech.jgiven.integration.spring.junit5.config.TestSpringConfig;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.springframework.test.context.ContextConfiguration;
@ContextConfiguration( classes = TestSpringConfig.class )
public class SpringScenarioWithParameterizedTest extends SpringScenarioTest<SimpleTestSpringSteps, SimpleTestSpringSteps, SimpleTestSpringSteps> {
@ParameterizedTest( name = "{index} [{arguments}] param name" )
@ValueSource( strings = { "Hello", "World" } )
public void spring_can_inject_beans_into_stages(String param) {
given().a_step_that_is_a_spring_component();
when().method_with_parameter_is_called(param);
then().beans_are_injected();
}
}
| 323 |
310 |
{
"name": "EKKO 616 MKII",
"description": "A delay pedal.",
"url": "https://malekkoheavyindustry.com/product/ekko-616-mkii/"
} | 64 |