max_stars_count (int64: 301 to 224k) | text (string, lengths 6 to 1.05M) | token_count (int64: 3 to 727k)
---|---|---
478 | <filename>tools/neko/include/neko_vm.h
/*
* Copyright (C)2005-2017 Haxe Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
#ifndef _NEKO_VM_H
#define _NEKO_VM_H
#include "neko.h"
typedef void (*neko_printer)( const char *data, int size, void *param );
typedef void (*thread_main_func)( void *param );
typedef struct _neko_vm neko_vm;
typedef void (*neko_stat_func)( neko_vm *vm, const char *kind, int start );
C_FUNCTION_BEGIN
EXTERN void neko_global_init();
EXTERN void neko_global_free();
EXTERN void neko_gc_major();
EXTERN void neko_gc_loop();
EXTERN void neko_gc_stats( int *heap, int *free );
EXTERN int neko_thread_create( thread_main_func init, thread_main_func main, void *param, void **handle );
EXTERN void neko_thread_blocking( thread_main_func f, void *p );
EXTERN bool neko_thread_register( bool t );
EXTERN neko_vm *neko_vm_alloc( void *unused );
EXTERN neko_vm *neko_vm_current();
EXTERN value neko_exc_stack( neko_vm *vm );
EXTERN value neko_call_stack( neko_vm *vm );
EXTERN void *neko_vm_custom( neko_vm *vm, vkind k );
EXTERN void neko_vm_set_custom( neko_vm *vm, vkind k, void *v );
EXTERN value neko_vm_execute( neko_vm *vm, void *module );
EXTERN void neko_vm_select( neko_vm *vm );
EXTERN int neko_vm_jit( neko_vm *vm, int enable_jit );
EXTERN int neko_vm_trusted( neko_vm *vm, int trusted );
EXTERN value neko_default_loader( char **argv, int argc );
EXTERN void neko_vm_redirect( neko_vm *vm, neko_printer print, void *param );
EXTERN void neko_vm_set_stats( neko_vm *vm, neko_stat_func fstats, neko_stat_func pstats );
EXTERN void neko_vm_dump_stack( neko_vm *vm );
EXTERN int neko_is_big_endian();
C_FUNCTION_END
#endif
/* ************************************************************************ */
| 916 |
1,443 | <filename>users/andrew-shapton.json
{
"copyright": "<NAME>",
"url": "https://github.com/alshapton/Pyntel4004/blob/main/LICENSE",
"email": "<EMAIL>",
"format": "text",
"license": "mit",
"theme": "default",
"gravatar": false
} | 100 |
346 | #ifndef _IMP_ABOUTUS_H
#define _IMP_ABOUTUS_H
void RenderIMPAboutUs( void );
void ExitIMPAboutUs( void );
void EnterIMPAboutUs( void );
void HandleIMPAboutUs( void );
#endif
| 75 |
384 | <reponame>hejamu/gromacs
/*
* This file is part of the GROMACS molecular simulation package.
*
* Copyright (c) 2012,2014,2015,2016,2017 by the GROMACS development team.
* Copyright (c) 2018,2019,2020, by the GROMACS development team, led by
* <NAME>, <NAME>, <NAME>, and <NAME>,
* and including many others, as listed in the AUTHORS file in the
* top-level source directory and at http://www.gromacs.org.
*
* GROMACS is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation; either version 2.1
* of the License, or (at your option) any later version.
*
* GROMACS is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with GROMACS; if not, see
* http://www.gnu.org/licenses, or write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* If you want to redistribute modifications to GROMACS, please
* consider that scientific software is very special. Version
* control is crucial - bugs must be traceable. We will be happy to
* consider code for inclusion in the official distribution, but
* derived work must not be called official GROMACS. Details are found
* in the README & COPYING files - if they are missing, get the
* official version at http://www.gromacs.org.
*
* To help us fund GROMACS development, we humbly ask that you cite
* the research papers on the package. Check out http://www.gromacs.org.
*/
/*! \internal \file
* \brief
* Tests for string utility functions and classes.
*
* For development, the tests can be run with a '-stdout' command-line option
* to print out the help to stdout instead of using the XML reference
* framework.
*
* \author <NAME> <<EMAIL>>
* \ingroup module_utility
*/
#include "gmxpre.h"
#include "gromacs/utility/stringutil.h"
#include <string>
#include <vector>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "gromacs/utility/arrayref.h"
#include "gromacs/utility/exceptions.h"
#include "testutils/refdata.h"
#include "testutils/stringtest.h"
namespace gmx
{
namespace test
{
namespace
{
/********************************************************************
* Tests for simple string utilities
*/
TEST(StringUtilityTest, StartsWith)
{
EXPECT_TRUE(gmx::startsWith("foobar", "foo"));
EXPECT_TRUE(gmx::startsWith("foobar", ""));
EXPECT_TRUE(gmx::startsWith("", ""));
EXPECT_FALSE(gmx::startsWith("", "foobar"));
EXPECT_FALSE(gmx::startsWith("foo", "foobar"));
EXPECT_FALSE(gmx::startsWith("foobar", "oob"));
EXPECT_TRUE(gmx::startsWith(std::string("foobar"), "foo"));
EXPECT_TRUE(gmx::startsWith(std::string("foobar"), ""));
EXPECT_TRUE(gmx::startsWith(std::string(""), ""));
EXPECT_FALSE(gmx::startsWith(std::string(""), "foobar"));
EXPECT_FALSE(gmx::startsWith(std::string("foo"), "foobar"));
EXPECT_FALSE(gmx::startsWith(std::string("foobar"), "oob"));
}
TEST(StringUtilityTest, EndsWith)
{
EXPECT_TRUE(gmx::endsWith("foobar", "bar"));
EXPECT_TRUE(gmx::endsWith("foobar", nullptr));
EXPECT_TRUE(gmx::endsWith("foobar", ""));
EXPECT_TRUE(gmx::endsWith("", ""));
EXPECT_FALSE(gmx::endsWith("", "foobar"));
EXPECT_FALSE(gmx::endsWith("foobar", "bbar"));
EXPECT_FALSE(gmx::endsWith("foobar", "barr"));
EXPECT_FALSE(gmx::endsWith("foobar", "foofoobar"));
}
TEST(StringUtilityTest, StripSuffixIfPresent)
{
EXPECT_EQ("foo", gmx::stripSuffixIfPresent("foobar", "bar"));
EXPECT_EQ("foobar", gmx::stripSuffixIfPresent("foobar", nullptr));
EXPECT_EQ("foobar", gmx::stripSuffixIfPresent("foobar", ""));
EXPECT_EQ("foobar", gmx::stripSuffixIfPresent("foobar", "bbar"));
EXPECT_EQ("foobar", gmx::stripSuffixIfPresent("foobar", "barr"));
EXPECT_EQ("foobar", gmx::stripSuffixIfPresent("foobar", "foofoobar"));
}
TEST(StringUtilityTest, StripString)
{
EXPECT_EQ("", gmx::stripString(""));
EXPECT_EQ("foo", gmx::stripString("foo"));
EXPECT_EQ("foo", gmx::stripString(" foo"));
EXPECT_EQ("foo", gmx::stripString("foo "));
EXPECT_EQ("f o o", gmx::stripString(" f o o "));
}
TEST(StringUtilityTest, SplitString)
{
using ::testing::ElementsAre;
using ::testing::IsEmpty;
using ::testing::Matcher;
Matcher<std::vector<std::string>> matcher = ElementsAre("foo", "bar");
EXPECT_THAT(gmx::splitString("foo bar"), matcher);
EXPECT_THAT(gmx::splitString(" foo bar"), matcher);
EXPECT_THAT(gmx::splitString("foo bar "), matcher);
EXPECT_THAT(gmx::splitString(" foo \t bar "), matcher);
EXPECT_THAT(gmx::splitString(""), IsEmpty());
EXPECT_THAT(gmx::splitString(" "), IsEmpty());
}
TEST(StringUtilityTest, SplitDelimitedString)
{
using ::testing::ElementsAre;
using ::testing::IsEmpty;
EXPECT_THAT(gmx::splitDelimitedString("foo;bar", ';'), ElementsAre("foo", "bar"));
EXPECT_THAT(gmx::splitDelimitedString(";foo;bar;", ';'), ElementsAre("", "foo", "bar", ""));
EXPECT_THAT(gmx::splitDelimitedString("foo;;bar", ';'), ElementsAre("foo", "", "bar"));
EXPECT_THAT(gmx::splitDelimitedString("foo", ';'), ElementsAre("foo"));
EXPECT_THAT(gmx::splitDelimitedString(";", ';'), ElementsAre("", ""));
EXPECT_THAT(gmx::splitDelimitedString("", ';'), IsEmpty());
}
TEST(StringUtilityTest, SplitAndTrimDelimitedString)
{
using ::testing::ElementsAre;
using ::testing::IsEmpty;
EXPECT_THAT(splitAndTrimDelimitedString("", ';'), IsEmpty());
EXPECT_THAT(splitAndTrimDelimitedString(" \t\n ", ';'), ElementsAre(""));
EXPECT_THAT(splitAndTrimDelimitedString("foo", ';'), ElementsAre("foo"));
EXPECT_THAT(splitAndTrimDelimitedString(" foo ", ';'), ElementsAre("foo"));
EXPECT_THAT(splitAndTrimDelimitedString("foo;bar", ';'), ElementsAre("foo", "bar"));
EXPECT_THAT(splitAndTrimDelimitedString(";foo;bar", ';'), ElementsAre("", "foo", "bar"));
EXPECT_THAT(splitAndTrimDelimitedString("foo;bar;", ';'), ElementsAre("foo", "bar", ""));
EXPECT_THAT(splitAndTrimDelimitedString(";foo;bar;", ';'), ElementsAre("", "foo", "bar", ""));
EXPECT_THAT(splitAndTrimDelimitedString("foo;;bar", ';'), ElementsAre("foo", "", "bar"));
EXPECT_THAT(splitAndTrimDelimitedString("foo ; bar ", ';'), ElementsAre("foo", "bar"));
EXPECT_THAT(splitAndTrimDelimitedString(" ; foo ; bar ", ';'), ElementsAre("", "foo", "bar"));
EXPECT_THAT(splitAndTrimDelimitedString(" foo ; bar ; ", ';'), ElementsAre("foo", "bar", ""));
EXPECT_THAT(splitAndTrimDelimitedString(" ; foo\n ; bar ; ", ';'),
ElementsAre("", "foo", "bar", ""));
EXPECT_THAT(splitAndTrimDelimitedString(" foo ; ; \tbar", ';'), ElementsAre("foo", "", "bar"));
}
TEST(StringUtilityTest, CanCompareCaseInsensitive)
{
EXPECT_TRUE(equalCaseInsensitive("foo", "foo"));
EXPECT_FALSE(equalCaseInsensitive("foo", "bar"));
EXPECT_TRUE(equalCaseInsensitive("foo", "FOO"));
EXPECT_FALSE(equalCaseInsensitive("foo", "foobar"));
EXPECT_FALSE(equalCaseInsensitive("foobar", "foo"));
}
/*! \brief
* Helper to test that string comparison works with switched input positions.
*
* \param[in] foo First string to check.
* \param[in] bar Second string to check.
* \param[in] length Max comparison length to use.
* \param[in] expectedResult Whether we expect the two strings to match.
*/
void checkEqualCaseInsensitive(const std::string& foo, const std::string& bar, int length, bool expectedResult)
{
EXPECT_EQ(equalCaseInsensitive(foo, bar, length), expectedResult);
EXPECT_EQ(equalCaseInsensitive(bar, foo, length), expectedResult);
}
TEST(StringUtilityTest, CanCompareCaseInsensitiveInLength)
{
checkEqualCaseInsensitive("foo", "bar", 0, true);
checkEqualCaseInsensitive("foo", "foo", 3, true);
checkEqualCaseInsensitive("foo", "bar", 3, false);
checkEqualCaseInsensitive("foo", "FOO", 3, true);
checkEqualCaseInsensitive("foo", "foobar", 3, true);
checkEqualCaseInsensitive("foo", "foobar", 5, false);
checkEqualCaseInsensitive("foo", "foobar", 6, false);
checkEqualCaseInsensitive("foo", "FooBAR", 3, true);
checkEqualCaseInsensitive("foo", "FooBAR", 5, false);
checkEqualCaseInsensitive("foo", "FooBAR", 6, false);
checkEqualCaseInsensitive("fooo", "foo", 3, true);
checkEqualCaseInsensitive("fooo", "foo", 4, false);
checkEqualCaseInsensitive("foobar", "foo", 4, false);
checkEqualCaseInsensitive("foobar", "foob", 4, true);
}
/********************************************************************
* Tests for formatString()
*/
TEST(FormatStringTest, HandlesBasicFormatting)
{
EXPECT_EQ("12 abc", gmx::formatString("%d %s", 12, "abc"));
}
TEST(FormatStringTest, HandlesLongStrings)
{
std::string longString = gmx::formatString("%*c%d", 2000, 'x', 10);
EXPECT_EQ(2002U, longString.length());
EXPECT_EQ("x10", longString.substr(1999));
}
/********************************************************************
* Tests for StringFormatter
*/
TEST(StringFormatterTest, HandlesBasicFormatting)
{
int value = 103;
EXPECT_EQ("103", gmx::StringFormatter("%d")(value));
EXPECT_EQ("null", gmx::StringFormatter("null")(value));
}
/********************************************************************
* Tests for formatAndJoin
*/
TEST(formatAndJoinTest, Works)
{
const char* const words[] = { "The", "quick", "brown", "fox" };
EXPECT_EQ("The .quick .brown .fox ",
gmx::formatAndJoin(
gmx::ArrayRef<const char* const>(words), ".", gmx::StringFormatter("%-10s")));
const int values[] = { 0, 1, 4 };
EXPECT_EQ("0,1,4",
gmx::formatAndJoin(gmx::ArrayRef<const int>(values), ",", gmx::StringFormatter("%d")));
}
/********************************************************************
* Tests for joinStrings
*/
TEST(JoinStringsTest, Works)
{
const char* const words[] = { "The", "quick", "brown", "fox" };
gmx::ArrayRef<const char* const> refToWords(words);
EXPECT_EQ("The; quick; brown; fox",
gmx::joinStrings(refToWords.begin(), refToWords.end(), "; "));
EXPECT_EQ("The-quick-brown-fox", gmx::joinStrings(refToWords, "-"));
EXPECT_EQ("The-quick-brown-fox", gmx::joinStrings(words, "-"));
}
/********************************************************************
* Tests for replaceAll() and replaceAllWords()
*/
TEST(ReplaceAllTest, HandlesEmptyStrings)
{
EXPECT_EQ("", gmx::replaceAll("", "aaa", "bbbb"));
EXPECT_EQ("", gmx::replaceAllWords("", "aaa", "bbbb"));
}
TEST(ReplaceAllTest, HandlesNoMatches)
{
const std::string text("Text with no matches");
EXPECT_EQ(text, gmx::replaceAll(text, "aaa", "bbbb"));
EXPECT_EQ(text, gmx::replaceAllWords(text, "aaa", "bbbb"));
}
TEST(ReplaceAllTest, HandlesMatchesAtEnds)
{
EXPECT_EQ("bbbbtext", gmx::replaceAll("aaatext", "aaa", "bbbb"));
EXPECT_EQ("textbbbb", gmx::replaceAll("textaaa", "aaa", "bbbb"));
EXPECT_EQ("bbbb text", gmx::replaceAllWords("aaa text", "aaa", "bbbb"));
EXPECT_EQ("text bbbb", gmx::replaceAllWords("text aaa", "aaa", "bbbb"));
}
TEST(ReplaceAllTest, HandlesMultipleMatches)
{
const std::string text("Text aaa with multiple aaa matches");
EXPECT_EQ("Text bbbb with multiple bbbb matches", gmx::replaceAll(text, "aaa", "bbbb"));
EXPECT_EQ("Text bbbb with multiple bbbb matches", gmx::replaceAllWords(text, "aaa", "bbbb"));
}
TEST(ReplaceAllTest, HandlesWordBoundaries)
{
const std::string text("Text aaax with one word aaa match");
EXPECT_EQ("Text aaax with one word bbbb match", gmx::replaceAllWords(text, "aaa", "bbbb"));
}
TEST(ReplaceAllTest, HandlesPossibleRecursiveMatches)
{
const std::string text("Text with recursive aaabbbbbb matches");
EXPECT_EQ("Text with recursive aaaaaabbb matches", gmx::replaceAll(text, "aaabbb", "aaaaaa"));
}
/********************************************************************
* Tests for TextLineWrapper
*/
//! Simple test string for wrapping.
const char g_wrapText[] = "A quick brown fox jumps over the lazy dog";
//! Test string for wrapping with embedded line breaks.
const char g_wrapText2[] = "A quick brown fox jumps\nover the lazy dog";
//! Test string for wrapping with embedded line breaks and an empty line.
const char g_wrapText3[] = "A quick brown fox jumps\n\nover the lazy dog";
//! Test string for wrapping with a long word.
const char g_wrapTextLongWord[] =
"A quick brown fox jumps awordthatoverflowsaline over the lazy dog";
//! Test string for wrapping with extra whitespace.
const char g_wrapTextWhitespace[] = " A quick brown fox jumps \n over the lazy dog";
//! Test fixture for gmx::TextLineWrapper.
typedef gmx::test::StringTestBase TextLineWrapperTest;
TEST_F(TextLineWrapperTest, HandlesEmptyStrings)
{
gmx::TextLineWrapper wrapper;
EXPECT_EQ("", wrapper.wrapToString(""));
EXPECT_EQ("", wrapper.wrapToString(" "));
EXPECT_TRUE(wrapper.wrapToVector("").empty());
{
std::vector<std::string> wrapped(wrapper.wrapToVector(" "));
ASSERT_EQ(1U, wrapped.size());
EXPECT_EQ("", wrapped[0]);
}
}
TEST_F(TextLineWrapperTest, HandlesTrailingWhitespace)
{
gmx::TextLineWrapper wrapper;
EXPECT_EQ("line", wrapper.wrapToString("line "));
EXPECT_EQ("line\n", wrapper.wrapToString("line \n"));
wrapper.settings().setKeepFinalSpaces(true);
EXPECT_EQ("line ", wrapper.wrapToString("line "));
EXPECT_EQ("line \n", wrapper.wrapToString("line \n"));
}
TEST_F(TextLineWrapperTest, HandlesTrailingNewlines)
{
gmx::TextLineWrapper wrapper;
EXPECT_EQ("line", wrapper.wrapToString("line"));
EXPECT_EQ("line\n", wrapper.wrapToString("line\n"));
EXPECT_EQ("line\n\n", wrapper.wrapToString("line\n\n"));
EXPECT_EQ("\n", wrapper.wrapToString("\n"));
EXPECT_EQ("\n\n", wrapper.wrapToString("\n\n"));
{
std::vector<std::string> wrapped(wrapper.wrapToVector("line"));
ASSERT_EQ(1U, wrapped.size());
EXPECT_EQ("line", wrapped[0]);
}
{
std::vector<std::string> wrapped(wrapper.wrapToVector("line\n"));
ASSERT_EQ(1U, wrapped.size());
EXPECT_EQ("line", wrapped[0]);
}
{
std::vector<std::string> wrapped(wrapper.wrapToVector("line\n\n"));
ASSERT_EQ(2U, wrapped.size());
EXPECT_EQ("line", wrapped[0]);
EXPECT_EQ("", wrapped[1]);
}
{
std::vector<std::string> wrapped(wrapper.wrapToVector("\n"));
ASSERT_EQ(1U, wrapped.size());
EXPECT_EQ("", wrapped[0]);
}
{
std::vector<std::string> wrapped(wrapper.wrapToVector("\n\n"));
ASSERT_EQ(2U, wrapped.size());
EXPECT_EQ("", wrapped[0]);
EXPECT_EQ("", wrapped[1]);
}
}
TEST_F(TextLineWrapperTest, WrapsCorrectly)
{
gmx::TextLineWrapper wrapper;
wrapper.settings().setLineLength(10);
checkText(wrapper.wrapToString(g_wrapText), "WrappedAt10");
std::vector<std::string> wrapped(wrapper.wrapToVector(g_wrapText));
checker().checkSequence(wrapped.begin(), wrapped.end(), "WrappedToVector");
wrapper.settings().setLineLength(13);
checkText(wrapper.wrapToString(g_wrapText), "WrappedAt13");
wrapper.settings().setLineLength(14);
checkText(wrapper.wrapToString(g_wrapText), "WrappedAt14");
checkText(wrapper.wrapToString(g_wrapTextLongWord), "WrappedWithLongWord");
}
TEST_F(TextLineWrapperTest, WrapsCorrectlyWithExistingBreaks)
{
gmx::TextLineWrapper wrapper;
checkText(wrapper.wrapToString(g_wrapText2), "WrappedWithNoLimit");
wrapper.settings().setLineLength(10);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedAt10");
wrapper.settings().setLineLength(14);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedAt14");
}
TEST_F(TextLineWrapperTest, HandlesIndent)
{
gmx::TextLineWrapper wrapper;
wrapper.settings().setIndent(2);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedWithNoLimit");
wrapper.settings().setLineLength(16);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedAt14");
}
TEST_F(TextLineWrapperTest, HandlesIndentWithEmptyLines)
{
gmx::TextLineWrapper wrapper;
wrapper.settings().setIndent(2);
checkText(wrapper.wrapToString(g_wrapText3), "WrappedWithNoLimit");
wrapper.settings().setLineLength(16);
checkText(wrapper.wrapToString(g_wrapText3), "WrappedAt14");
}
TEST_F(TextLineWrapperTest, HandlesHangingIndent)
{
gmx::TextLineWrapper wrapper;
wrapper.settings().setFirstLineIndent(2);
wrapper.settings().setIndent(4);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedWithNoLimit");
wrapper.settings().setLineLength(16);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedAt14/12");
}
TEST_F(TextLineWrapperTest, HandlesContinuationCharacter)
{
gmx::TextLineWrapper wrapper;
wrapper.settings().setFirstLineIndent(2);
wrapper.settings().setIndent(4);
wrapper.settings().setContinuationChar('\\');
wrapper.settings().setLineLength(16);
checkText(wrapper.wrapToString(g_wrapText2), "WrappedAt14/12");
}
TEST_F(TextLineWrapperTest, WrapsCorrectlyWithExtraWhitespace)
{
gmx::TextLineWrapper wrapper;
wrapper.settings().setLineLength(14);
checkText(wrapper.wrapToString(g_wrapTextWhitespace), "WrappedAt14");
wrapper.settings().setKeepFinalSpaces(true);
checkText(wrapper.wrapToString(g_wrapTextWhitespace), "WrappedAt14WithTrailingWhitespace");
}
} // namespace
} // namespace test
} // namespace gmx
| 6,715 |
365 | <gh_stars>100-1000
/*
* Copyright (c) 2007 Intel Corporation. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#define _GNU_SOURCE 1
#include "sysdeps.h"
#include "va.h"
#include "va_backend.h"
#include "va_internal.h"
#include "va_trace.h"
#include "va_fool.h"
#include "va_x11.h"
#include "va_dri2.h"
#include "va_dricommon.h"
#include "va_nvctrl.h"
#include "va_fglrx.h"
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
struct driver_name_map {
const char *key;
int key_len;
const char *name;
};
static const struct driver_name_map g_dri2_driver_name_map[] = {
{ "i965", 4, "iHD" }, // Intel iHD VAAPI driver with i965 DRI driver
{ "i965", 4, "i965" }, // Intel i965 VAAPI driver with i965 DRI driver
{ "iris", 4, "iHD" }, // Intel iHD VAAPI driver with iris DRI driver
{ "iris", 4, "i965" }, // Intel i965 VAAPI driver with iris DRI driver
{ NULL, 0, NULL }
};
static int va_DisplayContextIsValid(
VADisplayContextP pDisplayContext
)
{
return (pDisplayContext != NULL &&
pDisplayContext->pDriverContext != NULL);
}
static void va_DisplayContextDestroy(
VADisplayContextP pDisplayContext
)
{
VADriverContextP ctx;
struct dri_state *dri_state;
if (pDisplayContext == NULL)
return;
ctx = pDisplayContext->pDriverContext;
dri_state = ctx->drm_state;
if (dri_state && dri_state->close)
dri_state->close(ctx);
free(pDisplayContext->pDriverContext->drm_state);
free(pDisplayContext->pDriverContext);
free(pDisplayContext);
}
static VAStatus va_DRI2_GetNumCandidates(
VADisplayContextP pDisplayContext,
int *num_candidates
)
{
char *driver_name = NULL;
const struct driver_name_map *m = NULL;
VADriverContextP ctx = pDisplayContext->pDriverContext;
*num_candidates = 0;
if (!(va_isDRI2Connected(ctx, &driver_name) && driver_name))
return VA_STATUS_ERROR_UNKNOWN;
for (m = g_dri2_driver_name_map; m->key != NULL; m++) {
if (strlen(driver_name) >= m->key_len &&
strncmp(driver_name, m->key, m->key_len) == 0) {
(*num_candidates)++;
}
}
free(driver_name);
/*
* If the dri2 driver name does not have a mapped vaapi driver name, then
* assume they have the same name.
*/
if (*num_candidates == 0)
*num_candidates = 1;
return VA_STATUS_SUCCESS;
}
static VAStatus va_DRI2_GetDriverName(
VADisplayContextP pDisplayContext,
char **driver_name_ptr,
int candidate_index
)
{
const struct driver_name_map *m = NULL;
int current_index = 0;
VADriverContextP ctx = pDisplayContext->pDriverContext;
*driver_name_ptr = NULL;
if (!(va_isDRI2Connected(ctx, driver_name_ptr) && *driver_name_ptr))
return VA_STATUS_ERROR_UNKNOWN;
for (m = g_dri2_driver_name_map; m->key != NULL; m++) {
if (strlen(*driver_name_ptr) >= m->key_len &&
strncmp(*driver_name_ptr, m->key, m->key_len) == 0) {
if (current_index == candidate_index) {
break;
}
current_index++;
}
}
/*
* If the dri2 driver name does not have a mapped vaapi driver name, then
* assume they have the same name.
*/
if (!m->name)
return VA_STATUS_SUCCESS;
/* Use the mapped vaapi driver name */
free(*driver_name_ptr);
*driver_name_ptr = strdup(m->name);
if (!*driver_name_ptr)
return VA_STATUS_ERROR_ALLOCATION_FAILED;
return VA_STATUS_SUCCESS;
}
static VAStatus va_NVCTRL_GetDriverName(
VADisplayContextP pDisplayContext,
char **driver_name,
int candidate_index
)
{
VADriverContextP ctx = pDisplayContext->pDriverContext;
int direct_capable, driver_major, driver_minor, driver_patch;
Bool result;
if (candidate_index != 0)
return VA_STATUS_ERROR_INVALID_PARAMETER;
result = VA_NVCTRLQueryDirectRenderingCapable(ctx->native_dpy, ctx->x11_screen,
&direct_capable);
if (!result || !direct_capable)
return VA_STATUS_ERROR_UNKNOWN;
result = VA_NVCTRLGetClientDriverName(ctx->native_dpy, ctx->x11_screen,
&driver_major, &driver_minor,
&driver_patch, driver_name);
if (!result)
return VA_STATUS_ERROR_UNKNOWN;
return VA_STATUS_SUCCESS;
}
static VAStatus va_FGLRX_GetDriverName(
VADisplayContextP pDisplayContext,
char **driver_name,
int candidate_index
)
{
VADriverContextP ctx = pDisplayContext->pDriverContext;
int driver_major, driver_minor, driver_patch;
Bool result;
if (candidate_index != 0)
return VA_STATUS_ERROR_INVALID_PARAMETER;
result = VA_FGLRXGetClientDriverName(ctx->native_dpy, ctx->x11_screen,
&driver_major, &driver_minor,
&driver_patch, driver_name);
if (!result)
return VA_STATUS_ERROR_UNKNOWN;
return VA_STATUS_SUCCESS;
}
static VAStatus va_DisplayContextGetDriverName(
VADisplayContextP pDisplayContext,
char **driver_name, int candidate_index
)
{
VAStatus vaStatus;
if (driver_name)
*driver_name = NULL;
else
return VA_STATUS_ERROR_UNKNOWN;
vaStatus = va_DRI2_GetDriverName(pDisplayContext, driver_name, candidate_index);
if (vaStatus != VA_STATUS_SUCCESS)
vaStatus = va_NVCTRL_GetDriverName(pDisplayContext, driver_name, candidate_index);
if (vaStatus != VA_STATUS_SUCCESS)
vaStatus = va_FGLRX_GetDriverName(pDisplayContext, driver_name, candidate_index);
return vaStatus;
}
static VAStatus va_DisplayContextGetNumCandidates(
VADisplayContextP pDisplayContext,
int *num_candidates
)
{
VAStatus vaStatus;
vaStatus = va_DRI2_GetNumCandidates(pDisplayContext, num_candidates);
/* A call to va_DisplayContextGetDriverName will fallback to other
* methods (i.e. NVCTRL, FGLRX) when DRI2 is unsuccessful. All of those
* fallbacks only have 1 candidate driver.
*/
if (vaStatus != VA_STATUS_SUCCESS)
*num_candidates = 1;
return VA_STATUS_SUCCESS;
}
VADisplay vaGetDisplay(
Display *native_dpy /* implementation specific */
)
{
VADisplayContextP pDisplayContext;
VADriverContextP pDriverContext;
struct dri_state *dri_state;
if (!native_dpy)
return NULL;
pDisplayContext = va_newDisplayContext();
if (!pDisplayContext)
return NULL;
pDisplayContext->vaIsValid = va_DisplayContextIsValid;
pDisplayContext->vaDestroy = va_DisplayContextDestroy;
pDisplayContext->vaGetNumCandidates = va_DisplayContextGetNumCandidates;
pDisplayContext->vaGetDriverNameByIndex = va_DisplayContextGetDriverName;
pDriverContext = va_newDriverContext(pDisplayContext);
if (!pDriverContext) {
free(pDisplayContext);
return NULL;
}
pDriverContext->native_dpy = (void *)native_dpy;
pDriverContext->x11_screen = XDefaultScreen(native_dpy);
pDriverContext->display_type = VA_DISPLAY_X11;
dri_state = calloc(1, sizeof(*dri_state));
if (!dri_state) {
free(pDisplayContext);
free(pDriverContext);
return NULL;
}
dri_state->base.fd = -1;
dri_state->base.auth_type = VA_NONE;
pDriverContext->drm_state = dri_state;
return (VADisplay)pDisplayContext;
}
void va_TracePutSurface(
VADisplay dpy,
VASurfaceID surface,
void *draw, /* the target Drawable */
short srcx,
short srcy,
unsigned short srcw,
unsigned short srch,
short destx,
short desty,
unsigned short destw,
unsigned short desth,
VARectangle *cliprects, /* client supplied clip list */
unsigned int number_cliprects, /* number of clip rects in the clip list */
unsigned int flags /* de-interlacing flags */
);
VAStatus vaPutSurface(
VADisplay dpy,
VASurfaceID surface,
Drawable draw, /* X Drawable */
short srcx,
short srcy,
unsigned short srcw,
unsigned short srch,
short destx,
short desty,
unsigned short destw,
unsigned short desth,
VARectangle *cliprects, /* client supplied clip list */
unsigned int number_cliprects, /* number of clip rects in the clip list */
unsigned int flags /* de-interlacing flags */
)
{
VADriverContextP ctx;
if (va_fool_postp)
return VA_STATUS_SUCCESS;
CHECK_DISPLAY(dpy);
ctx = CTX(dpy);
VA_TRACE_LOG(va_TracePutSurface, dpy, surface, (void *)draw, srcx, srcy, srcw, srch,
destx, desty, destw, desth,
cliprects, number_cliprects, flags);
return ctx->vtable->vaPutSurface(ctx, surface, (void *)draw, srcx, srcy, srcw, srch,
destx, desty, destw, desth,
cliprects, number_cliprects, flags);
}
| 4,201 |
2,151 | <reponame>zipated/src
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef GPU_IPC_SERVICE_DIRECT_COMPOSITION_CHILD_SURFACE_WIN_H_
#define GPU_IPC_SERVICE_DIRECT_COMPOSITION_CHILD_SURFACE_WIN_H_
#include <windows.h>
#include <d3d11.h>
#include <dcomp.h>
#include <wrl/client.h>
#include "gpu/ipc/service/gpu_ipc_service_export.h"
#include "ui/gl/gl_surface_egl.h"
namespace gpu {
class GPU_IPC_SERVICE_EXPORT DirectCompositionChildSurfaceWin
: public gl::GLSurfaceEGL {
public:
DirectCompositionChildSurfaceWin(const gfx::Size& size,
bool is_hdr,
bool has_alpha,
bool use_dcomp_surface,
bool allow_tearing);
// GLSurfaceEGL implementation.
using GLSurfaceEGL::Initialize;
bool Initialize(gl::GLSurfaceFormat format) override;
void Destroy() override;
gfx::Size GetSize() override;
bool IsOffscreen() override;
void* GetHandle() override;
gfx::SwapResult SwapBuffers(const PresentationCallback& callback) override;
bool FlipsVertically() const override;
bool SupportsPostSubBuffer() override;
bool OnMakeCurrent(gl::GLContext* context) override;
bool SupportsDCLayers() const override;
bool SetDrawRectangle(const gfx::Rect& rect) override;
gfx::Vector2d GetDrawOffset() const override;
void SetVSyncEnabled(bool enabled) override;
const Microsoft::WRL::ComPtr<IDCompositionSurface>& dcomp_surface() const {
return dcomp_surface_;
}
const Microsoft::WRL::ComPtr<IDXGISwapChain1>& swap_chain() const {
return swap_chain_;
}
uint64_t dcomp_surface_serial() const { return dcomp_surface_serial_; }
EGLSurface default_surface_for_debugging() { return default_surface_; }
EGLSurface real_surface_for_debugging() { return real_surface_; }
protected:
~DirectCompositionChildSurfaceWin() override;
private:
// Releases previous surface or swap chain, and initializes new surface or
// swap chain.
bool InitializeSurface();
// Release the texture that's currently being drawn to. If will_discard is
// true then the surface should be discarded without swapping any contents
// to it. Returns false if this fails.
bool ReleaseDrawTexture(bool will_discard);
// This is a placeholder surface used when not rendering to the
// DirectComposition surface.
EGLSurface default_surface_ = 0;
// This is the real surface representing the backbuffer. It may be null
// outside of a BeginDraw/EndDraw pair.
EGLSurface real_surface_ = 0;
bool first_swap_ = true;
const gfx::Size size_;
const bool is_hdr_;
const bool has_alpha_;
const bool use_dcomp_surface_;
const bool allow_tearing_;
gfx::Rect swap_rect_;
gfx::Vector2d draw_offset_;
bool vsync_enabled_ = true;
// This is a number that increments once for every EndDraw on a surface, and
// is used to determine when the contents have changed so Commit() needs to
// be called on the device.
uint64_t dcomp_surface_serial_ = 0;
Microsoft::WRL::ComPtr<ID3D11Device> d3d11_device_;
Microsoft::WRL::ComPtr<IDCompositionDevice2> dcomp_device_;
Microsoft::WRL::ComPtr<IDCompositionSurface> dcomp_surface_;
Microsoft::WRL::ComPtr<IDXGISwapChain1> swap_chain_;
Microsoft::WRL::ComPtr<ID3D11Texture2D> draw_texture_;
// Keep track of whether the texture has been rendered to, as the first draw
// to it must overwrite the entire thing.
bool has_been_rendered_to_ = false;
DISALLOW_COPY_AND_ASSIGN(DirectCompositionChildSurfaceWin);
};
} // namespace gpu
#endif // GPU_IPC_SERVICE_DIRECT_COMPOSITION_CHILD_SURFACE_WIN_H_
| 1,328 |
21,382 | <gh_stars>1000+
from ray.util.accelerators.accelerators import NVIDIA_TESLA_V100, \
NVIDIA_TESLA_P100, NVIDIA_TESLA_T4, NVIDIA_TESLA_P4, NVIDIA_TESLA_K80
__all__ = [
"NVIDIA_TESLA_V100", "NVIDIA_TESLA_P100", "NVIDIA_TESLA_T4",
"NVIDIA_TESLA_P4", "NVIDIA_TESLA_K80"
]
| 138 |
834 | # -*- coding:utf-8 -*-
from collections import OrderedDict
import torch.nn as nn
from torch.nn.modules.batchnorm import _BatchNorm
from ...cv_core import kaiming_init, constant_init
from ..utils import brick as vn_layer
from ..builder import BACKBONES
@BACKBONES.register_module()
class RRDarknet53(nn.Module):
# Used for recursively loading weights
custom_layers = (vn_layer.Stage, vn_layer.Stage.custom_layers)
def __init__(self, pretrained=None, input_channels=32):
super().__init__()
stage_cfg = {'stage_2': 2, 'stage_3': 3, 'stage_4': 9, 'stage_5': 9, 'stage_6': 5}
# Network
layer_list = [
# first scale, smallest
OrderedDict([
('stage_1', vn_layer.Conv2dBatchLeaky(3, input_channels, 3, 1)),
('stage_2', vn_layer.Stage(input_channels, stage_cfg['stage_2'])),
('stage_3', vn_layer.Stage(input_channels * (2 ** 1), stage_cfg['stage_3'])),
('stage_4', vn_layer.Stage(input_channels * (2 ** 2), stage_cfg['stage_4'])),
]),
# second scale
OrderedDict([
('stage_5', vn_layer.Stage(input_channels * (2 ** 3), stage_cfg['stage_5'])),
]),
# third scale, largest
OrderedDict([
('stage_6', vn_layer.Stage(input_channels * (2 ** 4), stage_cfg['stage_6'])),
]),
]
self.layers = nn.ModuleList([nn.Sequential(layer_dict) for layer_dict in layer_list])
self.init_weights(pretrained)
def __modules_recurse(self, mod=None):
""" This function will recursively loop over all module children.
Args:
mod (torch.nn.Module, optional): Module to loop over; Default **self**
"""
if mod is None:
mod = self
for module in mod.children():
if isinstance(module, (nn.ModuleList, nn.Sequential, RRDarknet53.custom_layers)):
yield from self.__modules_recurse(module)
else:
yield module
def init_weights(self, pretrained=None):
if isinstance(pretrained, str):
weights = vn_layer.WeightLoader(pretrained)
for module in self.__modules_recurse():
try:
weights.load_layer(module)
print(f'Layer loaded: {module}')
if weights.start >= weights.size:
print(f'Finished loading weights [{weights.start}/{weights.size} weights]')
break
except NotImplementedError:
print(f'Layer skipped: {module.__class__.__name__}')
else:
for m in self.modules():
if isinstance(m, nn.Conv2d):
kaiming_init(m)
elif isinstance(m, (_BatchNorm, nn.GroupNorm)):
constant_init(m, 1)
def forward(self, x):
stage_4 = self.layers[0](x)
stage_5 = self.layers[1](stage_4)
stage_6 = self.layers[2](stage_5)
return [stage_6, stage_5, stage_4]  # feature maps output from small to large spatial size
| 1,571 |
530 | from .all_variable_usages_are_allowed import AllVariableUsagesAreAllowed
from .all_variable_uses_defined import AllVariableUsesDefined
from .all_variables_used import AllVariablesUsed
from .argument_names import ArgumentNames
from .argument_uniqueness import ArgumentUniqueness
from .directives_are_defined import DirectivesAreDefined
from .directives_are_in_valid_locations import DirectivesAreInValidLocations
from .directives_are_unique_per_location import DirectivesAreUniquePerLocation
from .executable_definitions import ExecutableDefinition
from .field_selections_on_objects_interfaces_and_unions_types import (
FieldSelectionsOnObjectsInterfacesAndUnionsTypes,
)
from .fragment_must_be_used import FragmentMustBeUsed
from .fragment_name_uniqueness import FragmentNameUniqueness
from .fragment_spread_is_possible import FragmentSpreadIsPossible
from .fragment_spread_target_defined import FragmentSpreadTargetDefined
from .fragment_spread_type_existence import FragmentSpreadTypeExistence
from .fragment_spreads_must_not_form_cycles import (
FragmentSpreadsMustNotFormCycles,
)
from .fragments_on_composite_types import FragmentsOnCompositeTypes
from .input_object_field_uniqueness import InputObjectFieldUniqueness
from .leaf_field_selections import LeafFieldSelections
from .lone_anonymous_operation import LoneAnonymousOperation
from .operation_name_uniqueness import OperationNameUniqueness
from .required_arguments import RequiredArguments
from .single_root_field import SingleRootField
from .values_of_correct_type import ValuesOfCorrectType
from .variable_uniqueness import VariableUniqueness
from .variables_are_input_types import VariablesAreInputTypes
# TODO make this automatically via reflection
RULE_SET = {
AllVariableUsagesAreAllowed.RULE_NAME: AllVariableUsagesAreAllowed(),
AllVariablesUsed.RULE_NAME: AllVariablesUsed(),
AllVariableUsesDefined.RULE_NAME: AllVariableUsesDefined(),
ArgumentNames.RULE_NAME: ArgumentNames(),
ArgumentUniqueness.RULE_NAME: ArgumentUniqueness(),
DirectivesAreDefined.RULE_NAME: DirectivesAreDefined(),
DirectivesAreInValidLocations.RULE_NAME: DirectivesAreInValidLocations(),
DirectivesAreUniquePerLocation.RULE_NAME: DirectivesAreUniquePerLocation(),
ExecutableDefinition.RULE_NAME: ExecutableDefinition(),
FieldSelectionsOnObjectsInterfacesAndUnionsTypes.RULE_NAME: FieldSelectionsOnObjectsInterfacesAndUnionsTypes(),
FragmentMustBeUsed.RULE_NAME: FragmentMustBeUsed(),
FragmentNameUniqueness.RULE_NAME: FragmentNameUniqueness(),
FragmentsOnCompositeTypes.RULE_NAME: FragmentsOnCompositeTypes(),
FragmentSpreadIsPossible.RULE_NAME: FragmentSpreadIsPossible(),
FragmentSpreadsMustNotFormCycles.RULE_NAME: FragmentSpreadsMustNotFormCycles(
True
),
FragmentSpreadTargetDefined.RULE_NAME: FragmentSpreadTargetDefined(),
FragmentSpreadTypeExistence.RULE_NAME: FragmentSpreadTypeExistence(),
InputObjectFieldUniqueness.RULE_NAME: InputObjectFieldUniqueness(),
LeafFieldSelections.RULE_NAME: LeafFieldSelections(),
LoneAnonymousOperation.RULE_NAME: LoneAnonymousOperation(),
RequiredArguments.RULE_NAME: RequiredArguments(),
SingleRootField.RULE_NAME: SingleRootField(),
OperationNameUniqueness.RULE_NAME: OperationNameUniqueness(),
ValuesOfCorrectType.RULE_NAME: ValuesOfCorrectType(),
VariablesAreInputTypes.RULE_NAME: VariablesAreInputTypes(),
VariableUniqueness.RULE_NAME: VariableUniqueness(),
}
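# A possible sketch for the TODO above (an assumption, not part of this package): scan the
# package for classes exposing a RULE_NAME attribute and instantiate them. Rules that need
# constructor arguments (e.g. FragmentSpreadsMustNotFormCycles(True)) would still require
# special-casing, so the snippet is kept commented out.
#
# import importlib
# import inspect
# import pkgutil
#
# RULE_SET = {}
# for mod_info in pkgutil.iter_modules(__path__):
#     module = importlib.import_module(f"{__name__}.{mod_info.name}")
#     for _, cls in inspect.getmembers(module, inspect.isclass):
#         if cls.__module__ == module.__name__ and hasattr(cls, "RULE_NAME"):
#             RULE_SET[cls.RULE_NAME] = cls()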
| 1,044 |
608 | <reponame>favoritas37/qzxing<gh_stars>100-1000
// -*- mode:c++; tab-width:2; indent-tabs-mode:nil; c-basic-offset:2 -*-
#ifndef ZXING_EXCEPTION_H
#define ZXING_EXCEPTION_H
/*
* Exception.h
* ZXing
*
* Copyright 2010 ZXing authors All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <zxing/ZXing.h>
#include <string>
#include <exception>
namespace zxing {
class Exception : public std::exception {
private:
char const* const message;
public:
Exception() ZXING_NOEXCEPT;
Exception(const char* msg) ZXING_NOEXCEPT;
Exception(Exception const& that) ZXING_NOEXCEPT;
~Exception() ZXING_NOEXCEPT;
char const* what() const ZXING_NOEXCEPT;
private:
static char const* copy(char const*);
void deleteMessage();
};
}
#endif // ZXING_EXCEPTION_H
| 434 |
360 | package com.github.davidmoten.geo;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
@State(Scope.Benchmark)
public class Benchmarks {
private final LatLong centre = GeoHash.decodeHash("dre7");
@Benchmark
public void hashContains() {
GeoHash.hashContains("dre7", centre.getLat(), centre.getLon());
}
@Benchmark
public void decodeHash() {
GeoHash.decodeHash("dre7");
}
}
| 210 |
852 | <reponame>ckamtsikis/cmssw
#include <map>
#include <string>
#include "TH1D.h"
#include "TH2D.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/Utilities/interface/InputTag.h"
#include "FWCore/Framework/interface/EDAnalyzer.h"
#include "FWCore/Framework/interface/Frameworkfwd.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "DataFormats/PatCandidates/interface/Muon.h"
/**
\class PatZToMuMuAnalyzer PatZToMuMuAnalyzer.cc "PhysicsTools/PatExamples/plugins/PatZToMuMuAnalyzer.h"
\brief Module to analyze the performance of muon reconstruction on the example of Z->mumu events
Module to analyze the performance of muon reconstruction on the example of Z->mumu events: transverse
momentum and eta of the muon candidates and the mass of the Z boson candidate are plotted from inner,
outer and global tracks. The mass is recalculated by an extra finction. The difference of the outer
track and the global track are plotted for the transverse momentum, eta and phi of the two muon candi-
dates, for global muons as far as available. The only input parameters are:
_muons_ --> indicating the muon collection of choice.
_shift_ --> indicating the relative shift of the transverse momentum for the estimate of the effect
on the invariant mass.
The shift is applied to all mass calculations.
*/
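/*
   A minimal configuration sketch for illustration (an assumption, not taken from this
   package): only the two parameters documented above, "muons" and "shift", come from the
   code; the process attribute name and the input collection label are hypothetical.

     process.analyzePatZToMuMu = cms.EDAnalyzer("PatZToMuMuAnalyzer",
         muons = cms.InputTag("cleanPatMuons"),  # hypothetical muon collection label
         shift = cms.double(1.0)                 # 1.0 applies no momentum shift
     )
*/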
class PatZToMuMuAnalyzer : public edm::EDAnalyzer {
public:
/// typedef's to simplify get functions
typedef math::XYZVector Vector;
typedef math::XYZTLorentzVector LorentzVector;
/// default constructor
explicit PatZToMuMuAnalyzer(const edm::ParameterSet& cfg);
/// default destructor
~PatZToMuMuAnalyzer() override{};
private:
/// everything that needs to be done during the event loop
void analyze(const edm::Event& event, const edm::EventSetup& setup) override;
/// calculate the mass of the Z boson from the tracker momenta by hand
double mass(const math::XYZVector& t1, const math::XYZVector& t2) const;
/// check if histogram was booked
bool booked(const std::string histName) const { return hists_.find(histName) != hists_.end(); };
/// fill histogram if it had been booked before
void fill(const std::string histName, double value) const {
if (booked(histName))
hists_.find(histName)->second->Fill(value);
};
/// fill a predefined set of histograms from inner outer or global tracks for first and second mu candidate
void fill(std::string hists, const reco::TrackRef& t1, const reco::TrackRef& t2) const;
/// input for muons
edm::EDGetTokenT<edm::View<pat::Muon> > muonsToken_;
/// shift in transverse momentum to determine a
/// rough uncertainty on the Z mass estimation
double shift_;
/// management of 1d histograms
std::map<std::string, TH1D*> hists_;
};
inline double PatZToMuMuAnalyzer::mass(const Vector& t1, const Vector& t2) const {
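  // Invariant mass M = sqrt((E1 + E2)^2 - |p1 + p2|^2), treating each track as a muon with
  // m_mu = 0.1057 GeV. The configured shift scales only the transverse components (px, py);
  // note that the energy term below uses the unshifted momentum magnitude.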
return (LorentzVector(shift_ * t1.x(), shift_ * t1.y(), t1.z(), sqrt((0.1057 * 0.1057) + t1.mag2())) +
LorentzVector(shift_ * t2.x(), shift_ * t2.y(), t2.z(), sqrt((0.1057 * 0.1057) + t2.mag2())))
.mass();
}
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "CommonTools/UtilAlgos/interface/TFileService.h"
PatZToMuMuAnalyzer::PatZToMuMuAnalyzer(const edm::ParameterSet& cfg)
: muonsToken_(consumes<edm::View<pat::Muon> >(cfg.getParameter<edm::InputTag>("muons"))),
shift_(cfg.getParameter<double>("shift")) {
edm::Service<TFileService> fileService;
// mass plot around Z peak from global tracks
hists_["globalMass"] = fileService->make<TH1D>("globalMass", "Mass_{Z} (global) (GeV)", 90, 30., 120.);
// eta from global tracks
hists_["globalEta"] = fileService->make<TH1D>("globalEta", "#eta (global)", 48, -2.4, 2.4);
// pt from global tracks
hists_["globalPt"] = fileService->make<TH1D>("globalPt", "p_{T} (global) (GeV)", 100, 0., 100.);
// mass plot around Z peak from inner tracks
hists_["innerMass"] = fileService->make<TH1D>("innerMass", "Mass_{Z} (inner) (GeV)", 90, 30., 120.);
// eta from inner tracks
hists_["innerEta"] = fileService->make<TH1D>("innerEta", "#eta (inner)", 48, -2.4, 2.4);
// pt from inner tracks
hists_["innerPt"] = fileService->make<TH1D>("innerPt", "p_{T} (inner) (GeV)", 100, 0., 100.);
// mass plot around Z peak from outer tracks
hists_["outerMass"] = fileService->make<TH1D>("outerMass", "Mass_{Z} (outer) (GeV)", 90, 30., 120.);
// eta from outer tracks
hists_["outerEta"] = fileService->make<TH1D>("outerEta", "#eta (outer)", 48, -2.4, 2.4);
// pt from outer tracks
hists_["outerPt"] = fileService->make<TH1D>("outerPt", "p_{T} (outer) (GeV)", 100, 0., 100.);
// delta pt between global and outer track
hists_["deltaPt"] = fileService->make<TH1D>("deltaPt", "#Delta p_{T} (GeV)", 100, -20., 20.);
// delta eta between global and outer track
hists_["deltaEta"] = fileService->make<TH1D>("deltaEta", "#Delta #eta", 100, -0.2, 0.2);
// delta phi between global and outer track
hists_["deltaPhi"] = fileService->make<TH1D>("deltaPhi", "#Delta #phi", 100, -0.2, 0.2);
}
void PatZToMuMuAnalyzer::fill(std::string hists, const reco::TrackRef& t1, const reco::TrackRef& t2) const {
if (t1.isAvailable()) {
// fill pt for the first muon
fill(std::string(hists).append("Pt"), t1->pt());
// fill eta for the first muon
fill(std::string(hists).append("Eta"), t1->eta());
}
if (t2.isAvailable()) {
// fill pt for the second muon
fill(std::string(hists).append("Pt"), t2->pt());
// fill eta for the second muon
fill(std::string(hists).append("Eta"), t2->eta());
}
if (t1.isAvailable() && t2.isAvailable()) {
// fill invariant mass of the Z boson candidate
fill(std::string(hists).append("Mass"), mass(t1->momentum(), t2->momentum()));
}
}
void PatZToMuMuAnalyzer::analyze(const edm::Event& event, const edm::EventSetup& setup) {
// pat candidate collection
edm::Handle<edm::View<pat::Muon> > muons;
event.getByToken(muonsToken_, muons);
// Fill some basic muon quantities as
// reconstructed from inner and outer
// track
for (edm::View<pat::Muon>::const_iterator mu1 = muons->begin(); mu1 != muons->end(); ++mu1) {
for (edm::View<pat::Muon>::const_iterator mu2 = muons->begin(); mu2 != muons->end(); ++mu2) {
if (mu2 > mu1) { // prevent double counting
if (mu1->charge() * mu2->charge() < 0) { // check only muon pairs of unequal charge
fill(std::string("inner"), mu1->innerTrack(), mu2->innerTrack());
fill(std::string("outer"), mu1->outerTrack(), mu2->outerTrack());
fill(std::string("global"), mu1->globalTrack(), mu2->globalTrack());
if (mu1->isGlobalMuon()) {
fill("deltaPt", mu1->outerTrack()->pt() - mu1->globalTrack()->pt());
fill("deltaEta", mu1->outerTrack()->eta() - mu1->globalTrack()->eta());
fill("deltaPhi", mu1->outerTrack()->phi() - mu1->globalTrack()->phi());
}
if (mu2->isGlobalMuon()) {
fill("deltaPt", mu2->outerTrack()->pt() - mu2->globalTrack()->pt());
fill("deltaEta", mu2->outerTrack()->eta() - mu2->globalTrack()->eta());
fill("deltaPhi", mu2->outerTrack()->phi() - mu2->globalTrack()->phi());
}
}
}
}
}
}
#include "FWCore/Framework/interface/MakerMacros.h"
DEFINE_FWK_MODULE(PatZToMuMuAnalyzer);
| 2,778 |
14,668 | <gh_stars>1000+
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/command_buffer/service/shared_image_backing_factory_gl_texture.h"
#include <list>
#include <utility>
#include "components/viz/common/resources/resource_sizes.h"
#include "gpu/command_buffer/common/mailbox.h"
#include "gpu/command_buffer/common/shared_image_usage.h"
#include "gpu/command_buffer/service/service_utils.h"
#include "gpu/command_buffer/service/shared_image_backing_gl_texture.h"
#include "gpu/config/gpu_preferences.h"
#include "ui/gl/gl_gl_api_implementation.h"
#include "ui/gl/progress_reporter.h"
namespace gpu {
namespace {
using ScopedResetAndRestoreUnpackState =
SharedImageBackingGLCommon::ScopedResetAndRestoreUnpackState;
using ScopedRestoreTexture = SharedImageBackingGLCommon::ScopedRestoreTexture;
using InitializeGLTextureParams =
SharedImageBackingGLCommon::InitializeGLTextureParams;
} // anonymous namespace
///////////////////////////////////////////////////////////////////////////////
// SharedImageBackingFactoryGLTexture
SharedImageBackingFactoryGLTexture::SharedImageBackingFactoryGLTexture(
const GpuPreferences& gpu_preferences,
const GpuDriverBugWorkarounds& workarounds,
const GpuFeatureInfo& gpu_feature_info,
gl::ProgressReporter* progress_reporter)
: SharedImageBackingFactoryGLCommon(gpu_preferences,
workarounds,
gpu_feature_info,
progress_reporter) {}
SharedImageBackingFactoryGLTexture::~SharedImageBackingFactoryGLTexture() =
default;
std::unique_ptr<SharedImageBacking>
SharedImageBackingFactoryGLTexture::CreateSharedImage(
const Mailbox& mailbox,
viz::ResourceFormat format,
SurfaceHandle surface_handle,
const gfx::Size& size,
const gfx::ColorSpace& color_space,
GrSurfaceOrigin surface_origin,
SkAlphaType alpha_type,
uint32_t usage,
bool is_thread_safe) {
DCHECK(!is_thread_safe);
return CreateSharedImageInternal(mailbox, format, surface_handle, size,
color_space, surface_origin, alpha_type,
usage, base::span<const uint8_t>());
}
std::unique_ptr<SharedImageBacking>
SharedImageBackingFactoryGLTexture::CreateSharedImage(
const Mailbox& mailbox,
viz::ResourceFormat format,
const gfx::Size& size,
const gfx::ColorSpace& color_space,
GrSurfaceOrigin surface_origin,
SkAlphaType alpha_type,
uint32_t usage,
base::span<const uint8_t> pixel_data) {
return CreateSharedImageInternal(mailbox, format, kNullSurfaceHandle, size,
color_space, surface_origin, alpha_type,
usage, pixel_data);
}
std::unique_ptr<SharedImageBacking>
SharedImageBackingFactoryGLTexture::CreateSharedImage(
const Mailbox& mailbox,
int client_id,
gfx::GpuMemoryBufferHandle handle,
gfx::BufferFormat buffer_format,
gfx::BufferPlane plane,
SurfaceHandle surface_handle,
const gfx::Size& size,
const gfx::ColorSpace& color_space,
GrSurfaceOrigin surface_origin,
SkAlphaType alpha_type,
uint32_t usage) {
NOTIMPLEMENTED_LOG_ONCE();
return nullptr;
}
std::unique_ptr<SharedImageBacking>
SharedImageBackingFactoryGLTexture::CreateSharedImageForTest(
const Mailbox& mailbox,
GLenum target,
GLuint service_id,
bool is_cleared,
viz::ResourceFormat format,
const gfx::Size& size,
uint32_t usage) {
auto result = std::make_unique<SharedImageBackingGLTexture>(
mailbox, format, size, gfx::ColorSpace(), kTopLeft_GrSurfaceOrigin,
kPremul_SkAlphaType, usage, false /* is_passthrough */);
InitializeGLTextureParams params;
params.target = target;
params.internal_format = viz::GLInternalFormat(format);
params.format = viz::GLDataFormat(format);
params.type = viz::GLDataType(format);
params.is_cleared = is_cleared;
result->InitializeGLTexture(service_id, params);
return std::move(result);
}
bool SharedImageBackingFactoryGLTexture::IsSupported(
uint32_t usage,
viz::ResourceFormat format,
bool thread_safe,
gfx::GpuMemoryBufferType gmb_type,
GrContextType gr_context_type,
bool* allow_legacy_mailbox,
bool is_pixel_used) {
if (is_pixel_used && gr_context_type != GrContextType::kGL) {
return false;
}
if (thread_safe) {
return false;
}
if (gmb_type != gfx::EMPTY_BUFFER) {
return false;
}
// Doesn't support contexts other than GL for OOPR Canvas
if (gr_context_type != GrContextType::kGL &&
((usage & SHARED_IMAGE_USAGE_DISPLAY) ||
(usage & SHARED_IMAGE_USAGE_RASTER))) {
return false;
}
// Needs interop factory
if ((usage & SHARED_IMAGE_USAGE_WEBGPU) ||
(usage & SHARED_IMAGE_USAGE_VIDEO_DECODE) ||
(usage & SHARED_IMAGE_USAGE_SCANOUT)) {
return false;
}
*allow_legacy_mailbox = gr_context_type == GrContextType::kGL;
return true;
}
std::unique_ptr<SharedImageBacking>
SharedImageBackingFactoryGLTexture::CreateSharedImageInternal(
const Mailbox& mailbox,
viz::ResourceFormat format,
SurfaceHandle surface_handle,
const gfx::Size& size,
const gfx::ColorSpace& color_space,
GrSurfaceOrigin surface_origin,
SkAlphaType alpha_type,
uint32_t usage,
base::span<const uint8_t> pixel_data) {
const FormatInfo& format_info = format_info_[format];
GLenum target = GL_TEXTURE_2D;
if (!CanCreateSharedImage(size, pixel_data, format_info, target)) {
return nullptr;
}
const bool for_framebuffer_attachment =
(usage & (SHARED_IMAGE_USAGE_RASTER |
SHARED_IMAGE_USAGE_GLES2_FRAMEBUFFER_HINT)) != 0;
InitializeGLTextureParams params;
params.target = target;
// TODO(piman): We pretend the texture was created in an ES2 context, so that
// it can be used in other ES2 contexts, and so we have to pass gl_format as
// the internal format in the LevelInfo. https://crbug.com/628064
params.internal_format = format_info.gl_format;
params.format = format_info.gl_format;
params.type = format_info.gl_type;
params.is_cleared = !pixel_data.empty();
params.has_immutable_storage = format_info.supports_storage;
params.framebuffer_attachment_angle =
for_framebuffer_attachment && texture_usage_angle_;
auto result = std::make_unique<SharedImageBackingGLTexture>(
mailbox, format, size, color_space, surface_origin, alpha_type, usage,
use_passthrough_);
result->InitializeGLTexture(0, params);
gl::GLApi* api = gl::g_current_gl_context;
ScopedRestoreTexture scoped_restore(api, target);
api->glBindTextureFn(target, result->GetGLServiceId());
if (format_info.supports_storage) {
{
gl::ScopedProgressReporter scoped_progress_reporter(progress_reporter_);
api->glTexStorage2DEXTFn(target, 1, format_info.storage_internal_format,
size.width(), size.height());
}
if (!pixel_data.empty()) {
ScopedResetAndRestoreUnpackState scoped_unpack_state(
api, attribs_, true /* uploading_data */);
gl::ScopedProgressReporter scoped_progress_reporter(progress_reporter_);
api->glTexSubImage2DFn(target, 0, 0, 0, size.width(), size.height(),
format_info.adjusted_format, format_info.gl_type,
pixel_data.data());
}
} else if (format_info.is_compressed) {
ScopedResetAndRestoreUnpackState scoped_unpack_state(api, attribs_,
!pixel_data.empty());
gl::ScopedProgressReporter scoped_progress_reporter(progress_reporter_);
api->glCompressedTexImage2DFn(target, 0, format_info.image_internal_format,
size.width(), size.height(), 0,
pixel_data.size(), pixel_data.data());
} else {
ScopedResetAndRestoreUnpackState scoped_unpack_state(api, attribs_,
!pixel_data.empty());
gl::ScopedProgressReporter scoped_progress_reporter(progress_reporter_);
api->glTexImage2DFn(target, 0, format_info.image_internal_format,
size.width(), size.height(), 0,
format_info.adjusted_format, format_info.gl_type,
pixel_data.data());
}
if (gl::g_current_gl_driver->ext.b_GL_KHR_debug) {
const std::string label =
"SharedImage_GLTexture" + CreateLabelForSharedImageUsage(usage);
api->glObjectLabelFn(GL_TEXTURE, result->GetGLServiceId(), -1,
label.c_str());
}
result->SetCompatibilitySwizzle(format_info.swizzle);
return std::move(result);
}
} // namespace gpu
| 3,585 |
846 | package com.dslplatform.json;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
public class SkippingTest {
@CompiledJson
public static class Single {
public int x;
}
@CompiledJson
public static class SingleImmutable {
@JsonAttribute(name = "x")
public final long y;
public SingleImmutable(long y) {
this.y = y;
}
}
private static final DslJson<Object> dslJson = new DslJson<>();
@Test
public void canSkipOverObject1() throws IOException {
byte[] input = "{\"x\":1,\"a\":2}".getBytes("UTF-8");
Single s = dslJson.deserialize(Single.class, input, input.length);
Assert.assertEquals(1, s.x);
}
@Test
public void canSkipOverObject2() throws IOException {
byte[] input = "{\"a\":1,\"x\":2}".getBytes("UTF-8");
Single s = dslJson.deserialize(Single.class, input, input.length);
Assert.assertEquals(2, s.x);
}
@Test
public void canSkipOverObject3() throws IOException {
byte[] input = "{\"a\":1,\"b\":3,\"x\":2}".getBytes("UTF-8");
Single s = dslJson.deserialize(Single.class, input, input.length);
Assert.assertEquals(2, s.x);
}
@Test
public void canSkipOverObject4() throws IOException {
byte[] input = "{\"a\":1,\"b\":3,\"x\":2,\"c\":null}".getBytes("UTF-8");
Single s = dslJson.deserialize(Single.class, input, input.length);
Assert.assertEquals(2, s.x);
}
@Test
public void noSkippingOverObject5() throws IOException {
byte[] input = "{\"x\":2}".getBytes("UTF-8");
Single s = dslJson.deserialize(Single.class, input, input.length);
Assert.assertEquals(2, s.x);
}
@Test
public void doublePropertyObject6() throws IOException {
byte[] input = "{\"x\":2,\"x\":4}".getBytes("UTF-8");
Single s = dslJson.deserialize(Single.class, input, input.length);
Assert.assertEquals(4, s.x);
}
@Test
public void canSkipOverImmutable1() throws IOException {
byte[] input = "{\"x\":1,\"a\":2}".getBytes("UTF-8");
SingleImmutable s = dslJson.deserialize(SingleImmutable.class, input, input.length);
Assert.assertEquals(1, s.y);
}
@Test
public void canSkipOverImmutable2() throws IOException {
byte[] input = "{\"a\":1,\"x\":2}".getBytes("UTF-8");
SingleImmutable s = dslJson.deserialize(SingleImmutable.class, input, input.length);
Assert.assertEquals(2, s.y);
}
@Test
public void canSkipOverImmutable3() throws IOException {
byte[] input = "{\"a\":1,\"b\":3,\"x\":2}".getBytes("UTF-8");
SingleImmutable s = dslJson.deserialize(SingleImmutable.class, input, input.length);
Assert.assertEquals(2, s.y);
}
@Test
public void canSkipOverImmutable4() throws IOException {
byte[] input = "{\"a\":1,\"b\":3,\"x\":2,\"c\":null}".getBytes("UTF-8");
SingleImmutable s = dslJson.deserialize(SingleImmutable.class, input, input.length);
Assert.assertEquals(2, s.y);
}
@Test
public void noSkippingOverImmutable5() throws IOException {
byte[] input = "{\"x\":2}".getBytes("UTF-8");
SingleImmutable s = dslJson.deserialize(SingleImmutable.class, input, input.length);
Assert.assertEquals(2, s.y);
}
@Test
public void doublePropertyImmutable6() throws IOException {
byte[] input = "{\"x\":2,\"x\":4}".getBytes("UTF-8");
SingleImmutable s = dslJson.deserialize(SingleImmutable.class, input, input.length);
Assert.assertEquals(4, s.y);
}
private static JsonReader.BindObject<Unnesting> binder = dslJson.tryFindBinder(Unnesting.class);
private static ThreadLocal<JsonReader> unnestingReader = ThreadLocal.withInitial(dslJson::newReader);
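	// Note (comment added for clarity, not part of the original source): this class
	// demonstrates re-binding a nested JSON payload onto the enclosing instance. The
	// "jsonString" property receives a JSON document encoded as a string; its setter
	// runs that string through a thread-local reader and the pre-resolved binder so the
	// nested "a"/"b" values are bound onto this same object, as exercised by nestedBinding().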
@CompiledJson
public static class Unnesting {
public String a;
public String b;
@JsonAttribute(name = "c")
public String anUnknownFieldName;
public String getJsonString() { return null; }
public void setJsonString(String value) throws IOException {
JsonReader localReader = unnestingReader.get();
byte[] input = value.getBytes(StandardCharsets.UTF_8);
localReader.process(input, input.length);
localReader.read();
binder.bind(localReader, this);
}
}
@Test
public void nestedBinding() throws IOException {
byte[] input = "{\"jsonString\":\"{\\\"a\\\": \\\"value1\\\", \\\"b\\\": \\\"value2\\\"}\",\"c\":\"Some string\"}".getBytes("UTF-8");
Unnesting s = dslJson.deserialize(Unnesting.class, input, input.length);
Assert.assertEquals("value1", s.a);
Assert.assertEquals("value2", s.b);
Assert.assertEquals("Some string", s.anUnknownFieldName);
}
}
| 1,837 |
483 | <reponame>yangchong211/YCRefreshView<gh_stars>100-1000
package org.yczbj.ycrefreshview.app;
import android.app.Activity;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Color;
import android.os.Build;
import android.view.Display;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import java.io.IOException;
import java.lang.reflect.Field;
public class SysUtils {
public static int Dp2Px(Context context, float dp) {
if (context==null){
            // avoid a NullPointerException
context = BaseApp.getApp();
}
final float scale = context.getResources().getDisplayMetrics().density;
return (int) (dp * scale + 0.5f);
}
public static int getScreenWidth(Activity activity){
int width = 0;
WindowManager windowManager = activity.getWindowManager();
Display display = windowManager.getDefaultDisplay();
width=display.getWidth();
return width;
}
}
| 322 |
543 | <reponame>fredells/riiablo<gh_stars>100-1000
package com.riiablo.onet;
import java.nio.ByteBuffer;
public interface UnicastEndpoint<T> extends Endpoint<T> {
void sendMessage(ByteBuffer bb);
void sendMessage(Object qos, ByteBuffer bb);
}
| 87 |
432 | <filename>debugger/src/socsim_plugin/types_amba.h
/**
* @file
* @copyright Copyright 2016 GNSS Sensor Ltd. All right reserved.
* @author <NAME> - <EMAIL>
* @brief System Bus configuration types declaration.
*/
#ifndef __DEBUGGER_SOCSIM_TYPES_AMBA_H__
#define __DEBUGGER_SOCSIM_TYPES_AMBA_H__
#include <inttypes.h>
namespace debugger {
static const uint64_t CFG_NASTI_DATA_BITS = 128;
static const uint64_t CFG_NASTI_DATA_BYTES = CFG_NASTI_DATA_BITS / 8;
static const uint64_t CFG_NASTI_DATA_WORDS32 = CFG_NASTI_DATA_BYTES / 4;
static const uint64_t CFG_NASTI_ADDR_BITS = 32;
static const uint64_t CFG_NASTI_ADDR_OFFSET = 4;
static const uint64_t CFG_NASTI_CFG_ADDR_BITS = CFG_NASTI_ADDR_BITS - 12;
static const int CFG_NASTI_BOOTROM = 0;
static const int CFG_NASTI_FWROM = CFG_NASTI_BOOTROM + 1;
static const int CFG_NASTI_SRAM = CFG_NASTI_FWROM + 1;
static const int CFG_NASTI_UART = CFG_NASTI_SRAM + 1;
static const int CFG_NASTI_GPIO = CFG_NASTI_UART + 1;
static const int CFG_NASTI_IRQCTRL = CFG_NASTI_GPIO + 1;
static const int CFG_NASTI_GNSSENGINE = CFG_NASTI_IRQCTRL + 1;
static const int CFG_NASTI_RFCTRL = CFG_NASTI_GNSSENGINE + 1;
static const int CFG_NASTI_FSE_GPS = CFG_NASTI_RFCTRL + 1;
static const int CFG_NASTI_ETHMAC = CFG_NASTI_FSE_GPS + 1;
static const int CFG_NASTI_DSU = CFG_NASTI_ETHMAC + 1;
static const int CFG_NASTI_PNP = CFG_NASTI_DSU + 1;
static const int CFG_NASTI_SLAVES_TOTAL = CFG_NASTI_PNP + 1;
} // namespace debugger
#endif // __DEBUGGER_SOCSIM_TYPES_AMBA_H__
| 705 |
428 | <gh_stars>100-1000
class base
{
virtual int method(int);
};
class start : public base
{
virtual int method(int);
};
| 42 |
1,767 | package com.annimon.stream.intstreamtests;
import com.annimon.stream.IntStream;
import com.annimon.stream.function.IntConsumer;
import com.annimon.stream.function.IntSupplier;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public final class PeekTest {
@Test
public void testPeek() {
assertEquals(0, IntStream.empty().peek(new IntConsumer() {
@Override
public void accept(int value) {
throw new IllegalStateException();
}
}).count());
assertEquals(10, IntStream.generate(new IntSupplier() {
int value = 2;
@Override
public int getAsInt() {
int v = value;
value *= 2;
return v;
}
}).peek(new IntConsumer() {
int curValue = 1;
@Override
public void accept(int value) {
if (value != curValue * 2)
throw new IllegalStateException();
curValue = value;
}
}).limit(10).count());
}
}
| 541 |
1,350 | <reponame>Shashi-rk/azure-sdk-for-java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.apimanagement.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/** Subscription key parameter names details. */
@Fluent
public final class SubscriptionKeyParameterNamesContract {
@JsonIgnore private final ClientLogger logger = new ClientLogger(SubscriptionKeyParameterNamesContract.class);
/*
* Subscription key header name.
*/
@JsonProperty(value = "header")
private String headerProperty;
/*
* Subscription key query string parameter name.
*/
@JsonProperty(value = "query")
private String query;
/**
* Get the headerProperty property: Subscription key header name.
*
* @return the headerProperty value.
*/
public String headerProperty() {
return this.headerProperty;
}
/**
* Set the headerProperty property: Subscription key header name.
*
* @param headerProperty the headerProperty value to set.
* @return the SubscriptionKeyParameterNamesContract object itself.
*/
public SubscriptionKeyParameterNamesContract withHeaderProperty(String headerProperty) {
this.headerProperty = headerProperty;
return this;
}
/**
* Get the query property: Subscription key query string parameter name.
*
* @return the query value.
*/
public String query() {
return this.query;
}
/**
* Set the query property: Subscription key query string parameter name.
*
* @param query the query value to set.
* @return the SubscriptionKeyParameterNamesContract object itself.
*/
public SubscriptionKeyParameterNamesContract withQuery(String query) {
this.query = query;
return this;
}
/**
* Validates the instance.
*
* @throws IllegalArgumentException thrown if the instance is not valid.
*/
public void validate() {
}
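    // Illustrative usage (comment added, not part of the original source); the parameter
    // names below are placeholders, not values mandated by this class:
    //   new SubscriptionKeyParameterNamesContract()
    //       .withHeaderProperty("Ocp-Apim-Subscription-Key")
    //       .withQuery("subscription-key");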
}
| 749 |
578 |
#pragma once
#include <infra/Config.h>
#include <infra/Forward.h>
#include <type/Forward.h>
#include <pool/Forward.h>
#ifndef TWO_WEBCL_EXPORT
#define TWO_WEBCL_EXPORT TWO_IMPORT
#endif
namespace two {
}
| 104 |
1,909 | <gh_stars>1000+
package org.knowm.xchange.simulated;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.knowm.xchange.ExchangeSpecification;
/**
* An instance of {@link AccountFactory} represents a single set of user accounts. A user account is
* identified by its {@link ExchangeSpecification#getApiKey()} and consist of a set of per-currency
* balances.
*
* <p>If shared between {@link SimulatedExchange} instances, this ensures that they all share the
* same scope of user accounts.
*
* @author <NAME>
*/
public class AccountFactory {
private final ConcurrentMap<String, Account> accounts = new ConcurrentHashMap<>();
Account get(String apiKey) {
return accounts.computeIfAbsent(apiKey, key -> new Account());
}
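  // Illustrative usage (comment added, not part of the original source): exchanges that
  // share one AccountFactory resolve the same Account for the same API key, e.g.
  //   AccountFactory accounts = new AccountFactory();
  //   Account alice = accounts.get("alice-api-key"); // "alice-api-key" is a made-up key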
}
| 231 |
2,151 | <gh_stars>1000+
/**************************************************************************
Copyright 2002 Tungsten Graphics Inc., Cedar Park, Texas.
All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
on the rights to use, copy, modify, merge, publish, distribute, sub
license, and/or sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
TUNGSTEN GRAPHICS AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
**************************************************************************/
/*
* Authors:
* <NAME> <<EMAIL>>
*
*/
#ifndef VBO_SAVE_H
#define VBO_SAVE_H
#include "main/mfeatures.h"
#include "main/mtypes.h"
#include "vbo.h"
#include "vbo_attrib.h"
struct vbo_save_copied_vtx {
GLfloat buffer[VBO_ATTRIB_MAX * 4 * VBO_MAX_COPIED_VERTS];
GLuint nr;
};
/* For display lists, this structure holds a run of vertices of the
* same format, and a strictly well-formed set of begin/end pairs,
* starting on the first vertex and ending at the last. Vertex
* copying on buffer breaks is precomputed according to these
* primitives, though there are situations where the copying will need
* correction at execute-time, perhaps by replaying the list as
* immediate mode commands.
*
* On executing this list, the 'current' values may be updated with
* the values of the final vertex, and often no fixup of the start of
* the vertex list is required.
*
* Eval and other commands that don't fit into these vertex lists are
* compiled using the fallback opcode mechanism provided by dlist.c.
*/
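/* Illustrative example (comment added for clarity, not in the original source):
 * a display list compiling a single glBegin(GL_TRIANGLES)/glVertex.../glEnd() run
 * with one vertex format would end up as one vbo_save_vertex_list whose prim
 * array records that single begin/end pair.
 */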
struct vbo_save_vertex_list {
GLubyte attrsz[VBO_ATTRIB_MAX];
GLenum attrtype[VBO_ATTRIB_MAX];
GLuint vertex_size;
/* Copy of the final vertex from node->vertex_store->bufferobj.
* Keep this in regular (non-VBO) memory to avoid repeated
* map/unmap of the VBO when updating GL current data.
*/
GLfloat *current_data;
GLuint current_size;
GLuint buffer_offset;
GLuint count; /**< vertex count */
GLuint wrap_count; /* number of copied vertices at start */
GLboolean dangling_attr_ref; /* current attr implicitly referenced
outside the list */
struct _mesa_prim *prim;
GLuint prim_count;
struct vbo_save_vertex_store *vertex_store;
struct vbo_save_primitive_store *prim_store;
};
/* These buffers should be a reasonable size to support upload to
 * hardware. The current vbo implementation will re-upload on any
 * changes, so don't make them too big, or apps which dynamically create
 * dlists and use them only a few times will suffer.
*
 * Consider a strategy of uploading regions from the VBO on demand in the
 * case of dynamic vbos. Then make the dlist code signal that
 * likelihood as it occurs. No reason we couldn't change usage
* internally even though this probably isn't allowed for client VBOs?
*/
#define VBO_SAVE_BUFFER_SIZE (8*1024) /* dwords */
#define VBO_SAVE_PRIM_SIZE 128
#define VBO_SAVE_PRIM_MODE_MASK 0x3f
#define VBO_SAVE_PRIM_WEAK 0x40
#define VBO_SAVE_PRIM_NO_CURRENT_UPDATE 0x80
#define VBO_SAVE_FALLBACK 0x10000000
/* Storage to be shared among several vertex_lists.
*/
struct vbo_save_vertex_store {
struct gl_buffer_object *bufferobj;
GLfloat *buffer;
GLuint used;
GLuint refcount;
};
struct vbo_save_primitive_store {
struct _mesa_prim buffer[VBO_SAVE_PRIM_SIZE];
GLuint used;
GLuint refcount;
};
struct vbo_save_context {
struct gl_context *ctx;
GLvertexformat vtxfmt;
GLvertexformat vtxfmt_noop; /**< Used if out_of_memory is true */
struct gl_client_array arrays[VBO_ATTRIB_MAX];
const struct gl_client_array *inputs[VBO_ATTRIB_MAX];
GLubyte attrsz[VBO_ATTRIB_MAX];
GLenum attrtype[VBO_ATTRIB_MAX];
GLubyte active_sz[VBO_ATTRIB_MAX];
GLuint vertex_size;
GLboolean out_of_memory; /**< True if last VBO allocation failed */
GLfloat *buffer;
GLuint count;
GLuint wrap_count;
GLuint replay_flags;
struct _mesa_prim *prim;
GLuint prim_count, prim_max;
struct vbo_save_vertex_store *vertex_store;
struct vbo_save_primitive_store *prim_store;
GLfloat *buffer_ptr; /* cursor, points into buffer */
GLfloat vertex[VBO_ATTRIB_MAX*4]; /* current values */
GLfloat *attrptr[VBO_ATTRIB_MAX];
GLuint vert_count;
GLuint max_vert;
GLboolean dangling_attr_ref;
GLuint opcode_vertex_list;
struct vbo_save_copied_vtx copied;
GLfloat *current[VBO_ATTRIB_MAX]; /* points into ctx->ListState */
GLubyte *currentsz[VBO_ATTRIB_MAX];
};
#if FEATURE_dlist
void vbo_save_init( struct gl_context *ctx );
void vbo_save_destroy( struct gl_context *ctx );
void vbo_save_fallback( struct gl_context *ctx, GLboolean fallback );
/* save_loopback.c:
*/
void vbo_loopback_vertex_list( struct gl_context *ctx,
const GLfloat *buffer,
const GLubyte *attrsz,
const struct _mesa_prim *prim,
GLuint prim_count,
GLuint wrap_count,
GLuint vertex_size);
/* Callbacks:
*/
void vbo_save_EndList( struct gl_context *ctx );
void vbo_save_NewList( struct gl_context *ctx, GLuint list, GLenum mode );
void vbo_save_EndCallList( struct gl_context *ctx );
void vbo_save_BeginCallList( struct gl_context *ctx, struct gl_display_list *list );
void vbo_save_SaveFlushVertices( struct gl_context *ctx );
GLboolean vbo_save_NotifyBegin( struct gl_context *ctx, GLenum mode );
void vbo_save_playback_vertex_list( struct gl_context *ctx, void *data );
void vbo_save_api_init( struct vbo_save_context *save );
GLfloat *
vbo_save_map_vertex_store(struct gl_context *ctx,
struct vbo_save_vertex_store *vertex_store);
void
vbo_save_unmap_vertex_store(struct gl_context *ctx,
struct vbo_save_vertex_store *vertex_store);
#else /* FEATURE_dlist */
static inline void
vbo_save_init( struct gl_context *ctx )
{
}
static inline void
vbo_save_destroy( struct gl_context *ctx )
{
}
#endif /* FEATURE_dlist */
#endif /* VBO_SAVE_H */
| 2,374 |
714 | <reponame>Eliane1991/mybatis-generator-gui-extension<gh_stars>100-1000
package com.spawpaw.mybatis.generator.gui;
import com.spawpaw.mybatis.generator.gui.controller.BaseController;
import com.spawpaw.mybatis.generator.gui.util.Constants;
import java.util.Locale;
/**
* Created By <EMAIL> 2018.1.20
* Description:
 * Entry point of the whole application
*
* @author BenBenShang <EMAIL>
*/
public class GeneratorGuiRunner {
public static void main(String[] args) {
        // set your language (only supports CHINESE or ENGLISH)
Constants.setLocale(Locale.getDefault());
// Constants.setLocale(Locale.CHINA);
// Constants.setLocale(Locale.ENGLISH);
BaseController.launchWindow(args);
}
}
| 291 |
739 | from __pyjamas__ import JS, INT
class Set:
def __init__(self, data=None):
JS("""
@{{self}}['__object'] = {};
@{{self}}['update'](@{{data}});
""")
def add(self, value):
JS(""" @{{self}}['__object'][pyjslib['hash'](@{{value}})] = @{{value}};""")
def clear(self):
JS(""" @{{self}}['__object'] = {};""")
def __contains__(self, value):
JS(""" return (@{{self}}['__object'][pyjslib['hash'](@{{value}})]) ? true : false;""")
def discard(self, value):
JS(""" delete @{{self}}['__object'][pyjslib['hash'](@{{value}})];""")
def issubset(self, items):
JS("""
for (var i in @{{self}}['__object']) {
if (!@{{items}}['__contains__'](i)) return false;
}
return true;
""")
def issuperset(self, items):
JS("""
for (var i in @{{items}}) {
if (!@{{self}}['__contains__'](i)) return false;
}
return true;
""")
def __iter__(self):
JS("""
var items=new pyjslib['list']();
for (var key in @{{self}}['__object']) items['append'](@{{self}}['__object'][key]);
return items['__iter__']();
""")
def __len__(self):
size=0
JS("""
for (var i in @{{self}}['__object']) @{{size}}++;
""")
return INT(size)
def pop(self):
JS("""
for (var key in @{{self}}['__object']) {
var value = @{{self}}['__object'][key];
delete @{{self}}['__object'][key];
return value;
}
""")
def remove(self, value):
self.discard(value)
def update(self, data):
JS("""
if (pyjslib['isArray'](@{{data}})) {
for (var i in @{{data}}) {
@{{self}}['__object'][pyjslib['hash'](@{{data}}[i])]=@{{data}}[i];
}
}
else if (pyjslib['isIteratable'](@{{data}})) {
var iter=@{{data}}['__iter__']();
var i=0;
try {
while (true) {
var item=iter['next']();
@{{self}}['__object'][pyjslib['hash'](item)]=item;
}
}
catch (e) {
if (e != pyjslib['StopIteration']) throw e;
}
}
""")
| 1,284 |
1,326 | import org.mangosdk.spi.ProviderFor;
@ProviderFor(Object.class)
public class ProvidesClassTestClass {
}
| 37 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.autofill_assistant.onboarding;
import android.content.Context;
import android.content.DialogInterface.OnDismissListener;
import android.view.Gravity;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AlertDialog;
import org.chromium.base.Callback;
import org.chromium.chrome.autofill_assistant.R;
import org.chromium.chrome.browser.autofill_assistant.AssistantInfoPageUtil;
import org.chromium.chrome.browser.autofill_assistant.LayoutUtils;
import java.util.Map;
/**
* Coordinator responsible for showing the dialog onboarding screen when the user is using the
* Autofill Assistant for the first time.
*/
class DialogOnboardingCoordinator extends BaseOnboardingCoordinator {
@Nullable
AlertDialog mDialog;
DialogOnboardingCoordinator(AssistantInfoPageUtil infoPageUtil, String experimentIds,
Map<String, String> parameters, Context context) {
super(infoPageUtil, experimentIds, parameters, context);
}
@Override
ScrollView createViewImpl() {
ScrollView baseView = (ScrollView) LayoutUtils.createInflater(mContext).inflate(
R.layout.autofill_assistant_base_onboarding, /* root= */ null);
ViewGroup onboardingContentContainer =
baseView.findViewById(R.id.onboarding_layout_container);
LinearLayout buttonsLayout = new LinearLayout(mContext);
buttonsLayout.setLayoutParams(new LinearLayout.LayoutParams(
LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT));
buttonsLayout.setGravity(Gravity.BOTTOM | Gravity.END);
buttonsLayout.setOrientation(LinearLayout.HORIZONTAL);
LayoutUtils.createInflater(mContext).inflate(
R.layout.autofill_assistant_onboarding_no_button, /* root= */ buttonsLayout);
LayoutUtils.createInflater(mContext).inflate(
R.layout.autofill_assistant_onboarding_yes_button, /* root= */ buttonsLayout);
onboardingContentContainer.addView(buttonsLayout);
return baseView;
}
@Override
void initViewImpl(Callback<Integer> callback) {
mDialog = new AlertDialog
.Builder(getContext(),
org.chromium.chrome.autofill_assistant.R.style
.Theme_Chromium_AlertDialog)
.create();
mDialog.setOnDismissListener((OnDismissListener) dialog
-> onUserAction(
/* result= */ AssistantOnboardingResult.DISMISSED, callback));
mDialog.setView(mView);
}
@Override
void showViewImpl() {
mDialog.show();
}
@Override
public void hide() {
if (mDialog != null) {
mDialog.cancel();
mDialog = null;
}
}
@Override
public boolean isInProgress() {
return mDialog != null;
}
}
| 1,289 |
534 | <gh_stars>100-1000
package mekanism.api.chemical.infuse;
import mekanism.api.chemical.IChemicalHandler;
import mekanism.api.chemical.IMekanismChemicalHandler;
import mekanism.api.chemical.ISidedChemicalHandler;
public interface IInfusionHandler extends IChemicalHandler<InfuseType, InfusionStack>, IEmptyInfusionProvider {
/**
* A sided variant of {@link IInfusionHandler}
*/
interface ISidedInfusionHandler extends ISidedChemicalHandler<InfuseType, InfusionStack>, IInfusionHandler {
}
interface IMekanismInfusionHandler extends IMekanismChemicalHandler<InfuseType, InfusionStack, IInfusionTank>, ISidedInfusionHandler {
}
} | 207 |
310 | <filename>doma-processor/src/main/java/org/seasar/doma/internal/apt/processor/AbstractGeneratingProcessor.java
package org.seasar.doma.internal.apt.processor;
import java.lang.annotation.Annotation;
import java.util.Set;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.ElementFilter;
import org.seasar.doma.internal.ClassName;
import org.seasar.doma.internal.apt.generator.Generator;
import org.seasar.doma.internal.apt.generator.JavaFileGenerator;
import org.seasar.doma.internal.apt.generator.Printer;
import org.seasar.doma.internal.apt.meta.TypeElementMeta;
import org.seasar.doma.internal.apt.meta.TypeElementMetaFactory;
public abstract class AbstractGeneratingProcessor<M extends TypeElementMeta>
extends AbstractProcessor {
protected AbstractGeneratingProcessor(Class<? extends Annotation> supportedAnnotationType) {
super(supportedAnnotationType);
}
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
if (roundEnv.processingOver()) {
return true;
}
for (TypeElement a : annotations) {
final TypeElementMetaFactory<M> factory = createTypeElementMetaFactory();
for (TypeElement typeElement : ElementFilter.typesIn(roundEnv.getElementsAnnotatedWith(a))) {
handleTypeElement(
typeElement,
t -> {
M meta = factory.createTypeElementMeta(typeElement);
if (!meta.isError()) {
generate(typeElement, meta);
}
});
}
}
return true;
}
protected abstract TypeElementMetaFactory<M> createTypeElementMetaFactory();
protected void generate(TypeElement typeElement, M meta) {
JavaFileGenerator<M> javaFileGenerator =
new JavaFileGenerator<>(ctx, this::createClassName, this::createGenerator);
javaFileGenerator.generate(typeElement, meta);
}
protected abstract ClassName createClassName(TypeElement typeElement, M meta);
protected abstract Generator createGenerator(ClassName className, Printer printer, M meta);
}
| 720 |
337 | <reponame>qussarah/declare<gh_stars>100-1000
interface A {
public void foo();
}
abstract class C implements B {
@Override
public void <caret>foo() {
}
} | 68 |
310 | <reponame>dreeves/usesthis
{
"name": "XCAPTURE-1",
"description": "A USB-based video capture device.",
"url": "https://solarisjapan.com/products/xcapture-1-usb-3-0-hd-capture-unit"
} | 80 |
1,003 | package com.pixplicity.multiviewpager.sample.test;
import android.test.ActivityInstrumentationTestCase2;
import android.view.View;
import com.pixplicity.multiviewpager.MultiViewPager;
import com.pixplicity.multiviewpager.sample.PagerActivity;
import com.pixplicity.multiviewpager.sample.R;
public class PagerActivityTest extends ActivityInstrumentationTestCase2<PagerActivity> {
public PagerActivityTest() {
super(PagerActivity.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
final PagerActivity mPagerTestActivity = getActivity();
final View view = mPagerTestActivity.findViewById(R.id.pager);
assertNotNull("View with id:" + String.valueOf(R.id.pager), view);
assertTrue("view is instance of class MultiViewPager", view instanceof MultiViewPager);
final MultiViewPager pager = (MultiViewPager) view;
assertEquals("view pager has number of pages", 4, pager.getChildCount());
}
}
| 358 |
1,674 | <reponame>ajmgh/polipo<gh_stars>1000+
/*
Copyright (c) 2003-2006 by <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
extern AtomPtr forbiddenUrl;
extern int forbiddenRedirectCode;
typedef struct _RedirectRequest {
AtomPtr url;
struct _RedirectRequest *next;
int (*handler)(int, AtomPtr, AtomPtr, AtomPtr, void*);
void *data;
} RedirectRequestRec, *RedirectRequestPtr;
void preinitForbidden(void);
void initForbidden(void);
int urlIsUncachable(char *url, int length);
int urlForbidden(AtomPtr url,
int (*handler)(int, AtomPtr, AtomPtr, AtomPtr, void*),
void *closure);
void redirectorKill(void);
int redirectorStreamHandler1(int status,
FdEventHandlerPtr event,
StreamRequestPtr srequest);
int redirectorStreamHandler2(int status,
FdEventHandlerPtr event,
StreamRequestPtr srequest);
void redirectorTrigger(void);
int
runRedirector(pid_t *pid_return, int *read_fd_return, int *write_fd_return);
int tunnelIsMatched(char *url, int lurl, char *hostname, int lhost);
| 726 |
45,293 | package test;
public final class Simple {
public Simple() { }
}
| 22 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.apisupport.project.ui.customizer;
import org.netbeans.modules.apisupport.project.ModuleDependency;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.net.URL;
import java.util.SortedSet;
import java.util.TreeSet;
import javax.swing.DefaultListModel;
import javax.swing.JPanel;
import org.netbeans.modules.apisupport.project.api.ManifestManager;
import org.netbeans.modules.apisupport.project.ui.ApisupportAntUIUtils;
import org.netbeans.modules.apisupport.project.universe.ModuleEntry;
import org.netbeans.modules.apisupport.project.universe.NbPlatform;
import org.openide.awt.HtmlBrowser;
import org.openide.util.NbBundle;
/**
* Represents panel for editing dependency details. Shown e.g. after <em>Edit</em>
* button on the <code>CustomizerLibraries</code> panel has been pushed.
*
* @author <NAME>
*/
public final class EditDependencyPanel extends JPanel {
private final ModuleDependency origDep;
private final URL javadoc;
private final ManifestManager.PackageExport[] pp;
private final DefaultListModel packagesModel = new DefaultListModel();
/** Creates new form EditDependencyPanel */
public EditDependencyPanel(final ModuleDependency dep, final NbPlatform platform) {
this.origDep = dep;
this.pp = origDep.getModuleEntry().getPublicPackages();
initComponents();
initDependency();
javadoc = origDep.getModuleEntry().getJavadoc(platform);
showJavadocButton.setEnabled(javadoc != null);
getAccessibleContext().setAccessibleDescription(
NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.title.AccessibleContext.accessibleName"));
}
private void refresh() {
specVerValue.setEnabled(specVer.isSelected());
includeInCP.setEnabled(hasAvailablePackages());
if (!includeInCP.isEnabled()) {
includeInCP.setSelected(false);
} // else leave the user's selection
}
private boolean hasAvailablePackages() {
return implVer.isSelected() || pp.length > 0;
}
/** Called first time dialog is opened. */
private void initDependency() {
ModuleEntry me = origDep.getModuleEntry();
ApisupportAntUIUtils.setText(codeNameBaseValue, me.getCodeNameBase());
ApisupportAntUIUtils.setText(jarLocationValue, me.getJarLocation().getAbsolutePath());
ApisupportAntUIUtils.setText(releaseVersionValue, origDep.getReleaseVersion());
ApisupportAntUIUtils.setText(specVerValue, origDep.hasImplementationDependency() ?
me.getSpecificationVersion() :
origDep.getSpecificationVersion());
implVer.setSelected(origDep.hasImplementationDependency());
availablePkg.setEnabled(hasAvailablePackages());
includeInCP.setSelected(origDep.hasCompileDependency());
refreshAvailablePackages();
refresh();
ActionListener versionListener = new ActionListener() {
public @Override void actionPerformed(ActionEvent arg0) {
refreshAvailablePackages();
}
};
implVer.addActionListener(versionListener);
specVer.addActionListener(versionListener);
}
public void refreshAvailablePackages() {
packagesModel.clear();
if (hasAvailablePackages()) {
            // XXX should show all subpackages in case recursion is set
            // to true, instead of e.g. org/**
SortedSet<String> packages = new TreeSet<String>();
for (int i = 0; i < pp.length; i++) { // add public packages
packages.add(pp[i].getPackage() + (pp[i].isRecursive() ? ".**" : "")); // NOI18N
}
if (implVer.isSelected()) { // add all packages
packages.addAll(origDep.getModuleEntry().getAllPackageNames());
}
for (String pkg : packages) {
packagesModel.addElement(pkg);
}
} else {
packagesModel.addElement(NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel_empty"));
}
availablePkg.setModel(packagesModel);
}
public ModuleDependency getEditedDependency() {
try {
return new ModuleDependency(origDep.getModuleEntry(),
releaseVersionValue.getText().trim(),
specVerValue.getText().trim(),
includeInCP.isSelected(),
implVer.isSelected());
} catch (NumberFormatException x) {
// XXX would be better to notify the user somehow
return origDep;
}
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
versionGroup = new javax.swing.ButtonGroup();
codeNameBase = new javax.swing.JLabel();
jarLocation = new javax.swing.JLabel();
releaseVersion = new javax.swing.JLabel();
releaseVersionValue = new javax.swing.JTextField();
specVer = new javax.swing.JRadioButton();
specVerValue = new javax.swing.JTextField();
implVer = new javax.swing.JRadioButton();
includeInCP = new javax.swing.JCheckBox();
availablePkgSP = new javax.swing.JScrollPane();
availablePkg = new javax.swing.JList();
codeNameBaseValue = new javax.swing.JTextField();
jarLocationValue = new javax.swing.JTextField();
showJavadocButton = new javax.swing.JButton();
setBorder(javax.swing.BorderFactory.createEmptyBorder(6, 6, 6, 6));
setPreferredSize(new java.awt.Dimension(400, 300));
setLayout(new java.awt.GridBagLayout());
codeNameBase.setLabelFor(codeNameBaseValue);
org.openide.awt.Mnemonics.setLocalizedText(codeNameBase, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "LBL_CNB")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 12);
add(codeNameBase, gridBagConstraints);
codeNameBase.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.codeNameBase.AccessibleContext.accessibleDescription")); // NOI18N
jarLocation.setLabelFor(jarLocationValue);
org.openide.awt.Mnemonics.setLocalizedText(jarLocation, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "LBL_JAR")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 0, 12);
add(jarLocation, gridBagConstraints);
jarLocation.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.jarLocation.AccessibleContext.accessibleDescription")); // NOI18N
releaseVersion.setLabelFor(releaseVersionValue);
org.openide.awt.Mnemonics.setLocalizedText(releaseVersion, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "LBL_MajorReleaseVersion")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(24, 0, 0, 12);
add(releaseVersion, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(24, 0, 0, 0);
add(releaseVersionValue, gridBagConstraints);
releaseVersionValue.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.releaseVersionValue.AccessibleContext.accessibleName")); // NOI18N
releaseVersionValue.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.releaseVersionValue.AccessibleContext.accessibleDescription")); // NOI18N
versionGroup.add(specVer);
specVer.setSelected(true);
org.openide.awt.Mnemonics.setLocalizedText(specVer, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "LBL_SpecificationVersion")); // NOI18N
specVer.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
versionChanged(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 3;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 12);
add(specVer, gridBagConstraints);
specVer.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.specVer.AccessibleContext.accessibleDescription")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
add(specVerValue, gridBagConstraints);
specVerValue.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.specVerValue.AccessibleContext.accessibleName")); // NOI18N
specVerValue.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.specVerValue.AccessibleContext.accessibleDescription")); // NOI18N
versionGroup.add(implVer);
org.openide.awt.Mnemonics.setLocalizedText(implVer, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "LBL_ImplementationVersion")); // NOI18N
implVer.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
versionChanged(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 4;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(0, 0, 0, 12);
add(implVer, gridBagConstraints);
implVer.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.implVer.AccessibleContext.accessibleDescription")); // NOI18N
org.openide.awt.Mnemonics.setLocalizedText(includeInCP, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "LBL_IncludeAPIPackages")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 5;
gridBagConstraints.gridwidth = 2;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(24, 0, 0, 0);
add(includeInCP, gridBagConstraints);
includeInCP.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.includeInCP.AccessibleContext.accessibleDescription")); // NOI18N
availablePkgSP.setViewportView(availablePkg);
availablePkg.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.availablePkg.AccessibleContext.accessibleName")); // NOI18N
availablePkg.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.availablePkg.AccessibleContext.accessibleDescription")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 6;
gridBagConstraints.gridwidth = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.weighty = 1.0;
add(availablePkgSP, gridBagConstraints);
codeNameBaseValue.setEditable(false);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
add(codeNameBaseValue, gridBagConstraints);
codeNameBaseValue.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.codeNameBaseValue.AccessibleContext.accessibleName")); // NOI18N
codeNameBaseValue.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.codeNameBaseValue.AccessibleContext.accessibleDescription")); // NOI18N
jarLocationValue.setEditable(false);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 0, 0);
add(jarLocationValue, gridBagConstraints);
jarLocationValue.getAccessibleContext().setAccessibleName(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.jarLocationValue.AccessibleContext.accessibleName")); // NOI18N
jarLocationValue.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.jarLocationValue.AccessibleContext.accessibleDescription")); // NOI18N
org.openide.awt.Mnemonics.setLocalizedText(showJavadocButton, org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "CTL_ShowJavadoc")); // NOI18N
showJavadocButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
showJavadoc(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 7;
gridBagConstraints.gridwidth = java.awt.GridBagConstraints.REMAINDER;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
gridBagConstraints.insets = new java.awt.Insets(24, 0, 0, 0);
add(showJavadocButton, gridBagConstraints);
showJavadocButton.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(EditDependencyPanel.class, "EditDependencyPanel.showJavadocButton.AccessibleContext.accessibleDescription")); // NOI18N
}// </editor-fold>//GEN-END:initComponents
private void showJavadoc(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_showJavadoc
HtmlBrowser.URLDisplayer.getDefault().showURL(javadoc);
}//GEN-LAST:event_showJavadoc
private void versionChanged(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_versionChanged
refresh();
if (implVer.isSelected()) { // automatic compile-time dependency
includeInCP.setSelected(true);
}
}//GEN-LAST:event_versionChanged
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JList availablePkg;
private javax.swing.JScrollPane availablePkgSP;
private javax.swing.JLabel codeNameBase;
private javax.swing.JTextField codeNameBaseValue;
private javax.swing.JRadioButton implVer;
private javax.swing.JCheckBox includeInCP;
private javax.swing.JLabel jarLocation;
private javax.swing.JTextField jarLocationValue;
private javax.swing.JLabel releaseVersion;
private javax.swing.JTextField releaseVersionValue;
private javax.swing.JButton showJavadocButton;
private javax.swing.JRadioButton specVer;
private javax.swing.JTextField specVerValue;
private javax.swing.ButtonGroup versionGroup;
// End of variables declaration//GEN-END:variables
}
| 7,221 |
530 | <reponame>meghasfdc/jmc
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The contents of this file are subject to the terms of either the Universal Permissive License
* v 1.0 as shown at http://oss.oracle.com/licenses/upl
*
* or the following license:
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided with
* the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.openjdk.jmc.ui.common.labelingrules;
import java.util.HashMap;
import org.openjdk.jmc.ui.common.labelingrules.NameConverter.ValueArrayInfo;
/**
* Constant mapping database.
*/
final class Constants {
// A mapping from constants to their expanded form.
private final static HashMap<String, Object> constants = new HashMap<>();
private final static Constants instance = new Constants();
static Constants getInstance() {
return instance;
}
public Constants() {
initDefaultConstants();
}
private void initDefaultConstants() {
// setConstant("ThisPID", MessageFormat.format("{0,number,#}", new Object[] {Integer.valueOf(MBeanToolkit.getThisPID())})); //$NON-NLS-1$ //$NON-NLS-2$
for (ValueArrayInfo info : ValueArrayInfo.values()) {
setConstant(info.getValueName(), info.getMatchExpression());
}
setConstant("StrConstDebug", Messages.NameConverter_DEBUG); //$NON-NLS-1$
setConstant("StrConstJVMJRockit", Messages.NameConverter_JVM_TYPE_JROCKIT); //$NON-NLS-1$
setConstant("StrConstJVMUnknown", Messages.NameConverter_JVM_TYPE_UNKNOWN); //$NON-NLS-1$
}
/**
* Adds a constant to this name converter.
*
* @param key
* the name of the constant.
* @param value
* the value to assign the constant.
*/
public void setConstant(String key, Object value) {
if (value == null) {
throw new IllegalArgumentException("You may not add a variable which has a value that is null!"); //$NON-NLS-1$
}
constants.put(key, value);
}
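	// Example usage (comment added, illustrative only; the key below is hypothetical):
	//   Constants.getInstance().setConstant("StrConstExample", "Example");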
public Object getConstant(String key) {
return constants.get(key);
}
public boolean containsConstant(String key) {
return constants.containsKey(key);
}
}
| 1,106 |
3,402 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.stream.core.storage.columnar.protocol;
/**
 * The Footer section of the file provides the necessary information to interpret the rest of the file, including the version info and the length of the segment metadata.
*
*
*/
public class Footer {
//Version of the storage format for the segment
private int version;
//The start of the segmentMetaInfo within the file
private long segmentInfoOffSet;
//The length of the segmentMetaInfo in bytes
private long segmentInfoLength;
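    // Typical read path (comment added for clarity, inferred from the field semantics):
    // a reader locates this footer at the end of the segment file, then seeks to
    // segmentInfoOffSet and reads segmentInfoLength bytes to decode the segment metadata.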
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public long getSegmentInfoOffSet() {
return segmentInfoOffSet;
}
public void setSegmentInfoOffSet(long segmentInfoOffSet) {
this.segmentInfoOffSet = segmentInfoOffSet;
}
public long getSegmentInfoLength() {
return segmentInfoLength;
}
public void setSegmentInfoLength(long segmentInfoLength) {
this.segmentInfoLength = segmentInfoLength;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (int) (segmentInfoLength ^ (segmentInfoLength >>> 32));
result = prime * result + (int) (segmentInfoOffSet ^ (segmentInfoOffSet >>> 32));
result = prime * result + version;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Footer other = (Footer) obj;
if (segmentInfoLength != other.segmentInfoLength)
return false;
if (segmentInfoOffSet != other.segmentInfoOffSet)
return false;
if (version != other.version)
return false;
return true;
}
}
| 939 |
2,890 | package com.github.ltsopensource.spring.boot;
import com.github.ltsopensource.core.cluster.AbstractJobNode;
import com.github.ltsopensource.core.cluster.NodeType;
import com.github.ltsopensource.core.commons.utils.CollectionUtils;
import com.github.ltsopensource.jobclient.JobClient;
import com.github.ltsopensource.jobclient.JobClientBuilder;
import com.github.ltsopensource.jobclient.support.JobCompletedHandler;
import com.github.ltsopensource.spring.boot.annotation.EnableJobClient;
import com.github.ltsopensource.spring.boot.annotation.JobCompletedHandler4JobClient;
import com.github.ltsopensource.spring.boot.properties.JobClientProperties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.Map;
/**
* @author <NAME> (<EMAIL>) on 4/9/16.
*/
@Configuration
@ConditionalOnBean(annotation = EnableJobClient.class)
@EnableConfigurationProperties(JobClientProperties.class)
public class JobClientAutoConfiguration extends AbstractAutoConfiguration {
@Autowired(required = false)
private JobClientProperties properties;
private JobClient jobClient;
@Bean
public JobClient jobClient() {
return jobClient;
}
@Override
protected void initJobNode() {
jobClient = JobClientBuilder.buildByProperties(properties);
Map<String, Object> handlers = applicationContext.getBeansWithAnnotation(JobCompletedHandler4JobClient.class);
if (CollectionUtils.isNotEmpty(handlers)) {
if (handlers.size() > 1) {
throw new IllegalArgumentException("annotation @" + JobCompletedHandler4JobClient.class.getSimpleName() + " only should have one");
}
for (Map.Entry<String, Object> entry : handlers.entrySet()) {
Object handler = entry.getValue();
if (handler instanceof JobCompletedHandler) {
jobClient.setJobCompletedHandler((JobCompletedHandler) entry.getValue());
} else {
throw new IllegalArgumentException(entry.getKey() + " is not instance of " + JobCompletedHandler.class.getName());
}
}
}
}
@Override
protected NodeType nodeType() {
return NodeType.JOB_CLIENT;
}
@Override
protected AbstractJobNode getJobNode() {
return jobClient;
}
}
| 923 |
675 | #pragma once
#include "../shader_node_glsl.h"
namespace Echo
{
#ifdef ECHO_EDITOR_MODE
class ShaderNodeGrayScale : public ShaderNodeGLSL
{
ECHO_CLASS(ShaderNodeGrayScale, ShaderNodeGLSL)
public:
ShaderNodeGrayScale();
virtual ~ShaderNodeGrayScale();
// name
virtual QString name() const override { return "GrayScale"; }
// category
virtual QString category() const override { return "Color"; }
};
#endif
}
| 156 |
2,332 | import dash
import pandas as pd
import pathlib
import dash_html_components as html
import dash_core_components as dcc
from dash.dependencies import Input, Output
from dash.exceptions import PreventUpdate
from helpers import make_dash_table, create_plot
app = dash.Dash(
__name__,
meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}],
)
server = app.server
DATA_PATH = pathlib.Path(__file__).parent.joinpath("data").resolve()
# read from datasheet
df = pd.read_csv(DATA_PATH.joinpath("small_molecule_drugbank.csv")).drop(
["Unnamed: 0"], axis=1
)
STARTING_DRUG = "Levobupivacaine"
DRUG_DESCRIPTION = df.loc[df["NAME"] == STARTING_DRUG]["DESC"].iloc[0]
DRUG_IMG = df.loc[df["NAME"] == STARTING_DRUG]["IMG_URL"].iloc[0]
FIGURE = create_plot(
x=df["PKA"],
y=df["LOGP"],
z=df["SOL"],
size=df["MW"],
color=df["MW"],
name=df["NAME"],
)
app.layout = html.Div(
[
html.Div(
[html.Img(src=app.get_asset_url("dash-logo.png"))], className="app__banner"
),
html.Div(
[
html.Div(
[
html.Div(
[
html.H3(
"dash for drug discovery",
className="uppercase title",
),
html.Span("Hover ", className="uppercase bold"),
html.Span(
"over a drug in the graph to see its structure."
),
html.Br(),
html.Span("Select ", className="uppercase bold"),
html.Span(
"a drug in the dropdown to add it to the drug candidates at the bottom."
),
]
)
],
className="app__header",
),
html.Div(
[
dcc.Dropdown(
id="chem_dropdown",
multi=True,
value=[STARTING_DRUG],
options=[{"label": i, "value": i} for i in df["NAME"]],
)
],
className="app__dropdown",
),
html.Div(
[
html.Div(
[
dcc.RadioItems(
id="charts_radio",
options=[
{"label": "3D Scatter", "value": "scatter3d"},
{"label": "2D Scatter", "value": "scatter"},
{
"label": "2D Histogram",
"value": "histogram2d",
},
],
labelClassName="radio__labels",
inputClassName="radio__input",
value="scatter3d",
className="radio__group",
),
dcc.Graph(
id="clickable-graph",
hoverData={"points": [{"pointNumber": 0}]},
figure=FIGURE,
),
],
className="two-thirds column",
),
html.Div(
[
html.Div(
[
html.Img(
id="chem_img",
src=DRUG_IMG,
className="chem__img",
)
],
className="chem__img__container",
),
html.Div(
[
html.A(
STARTING_DRUG,
id="chem_name",
href="https://www.drugbank.ca/drugs/DB01002",
target="_blank",
),
html.P(DRUG_DESCRIPTION, id="chem_desc"),
],
className="chem__desc__container",
),
],
className="one-third column",
),
],
className="container card app__content bg-white",
),
html.Div(
[
html.Table(
make_dash_table([STARTING_DRUG], df),
id="table-element",
className="table__container",
)
],
className="container bg-white p-0",
),
],
className="app__container",
),
]
)
def df_row_from_hover(hoverData):
""" Returns row for hover point as a Pandas Series. """
try:
point_number = hoverData["points"][0]["pointNumber"]
molecule_name = str(FIGURE["data"][0]["text"][point_number]).strip()
return df.loc[df["NAME"] == molecule_name]
except KeyError as error:
print(error)
return pd.Series()
@app.callback(
Output("clickable-graph", "figure"),
[Input("chem_dropdown", "value"), Input("charts_radio", "value")],
)
def highlight_molecule(chem_dropdown_values, plot_type):
"""
Selected chemical dropdown values handler.
:params chem_dropdown_values: selected dropdown values
:params plot_type: selected plot graph
"""
return create_plot(
x=df["PKA"],
y=df["LOGP"],
z=df["SOL"],
size=df["MW"],
color=df["MW"],
name=df["NAME"],
markers=chem_dropdown_values,
plot_type=plot_type,
)
@app.callback(Output("table-element", "children"), [Input("chem_dropdown", "value")])
def update_table(chem_dropdown_value):
"""
Update the table rows.
:params chem_dropdown_values: selected dropdown values
"""
return make_dash_table(chem_dropdown_value, df)
@app.callback(
[
Output("chem_name", "children"),
Output("chem_name", "href"),
Output("chem_img", "src"),
Output("chem_desc", "children"),
],
[Input("clickable-graph", "hoverData")],
)
def chem_info_on_hover(hoverData):
"""
Display chemical information on graph hover.
Update the image, link, description.
:params hoverData: data on graph hover
"""
if hoverData is None:
raise PreventUpdate
try:
row = df_row_from_hover(hoverData)
if row.empty:
raise Exception
return (
row["NAME"].iloc[0],
row["PAGE"].iloc[0],
row["IMG_URL"].iloc[0],
row["DESC"].iloc[0],
)
except Exception as error:
print(error)
raise PreventUpdate
if __name__ == "__main__":
app.run_server(debug=True)
| 5,002 |
5,166 | # Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# snippet-sourcedescription:[authorize_cluster_access.py demonstrates how to enable access to Amazon Redshift clusters.]
# snippet-service:[redshift]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[Amazon Redshift]
# snippet-keyword:[Code Sample]
# snippet-sourcetype:[full-example]
# snippet-sourcedate:[2019-01-28]
# snippet-sourceauthor:[AWS]
# snippet-start:[redshift.python.authorize_cluster_access.complete]
import boto3
from botocore.exceptions import ClientError
def authorize_cluster_access(IpAddress='0.0.0.0/0'):
"""Enable access to Amazon Redshift clusters
Defines a security group inbound rule for the default VPC. The rule
enables access to Redshift clusters by IP addresses referenced in the
IpAddress argument. To define the rule, EC2 permissions are required.
:param IpAddress: string; IP addresses to authorize access to Redshift
clusters. Default: '0.0.0.0/0' allows access from any computer, which is
reasonable for demonstration purposes, but is not appropriate in a
production environment.
:return: True if cluster access is enabled, else False
"""
ec2_client = boto3.client('ec2')
# Redshift uses port 5439 by default. If Redshift was configured to use
# a different port, specify the FromPort= and ToPort= arguments accordingly.
try:
ec2_client.authorize_security_group_ingress(GroupName='default',
IpProtocol='tcp',
FromPort=5439,
ToPort=5439,
CidrIp=IpAddress)
except ClientError as e:
print(f'ERROR: {e}')
return False
return True
# snippet-end:[redshift.python.authorize_cluster_access.complete]
def main():
"""Test authorize_cluster_access()"""
if not authorize_cluster_access():
print('FAIL: authorize_cluster_access()')
exit(1)
if __name__ == '__main__':
main()
| 964 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.websvc.design.view.widget;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Paint;
import java.awt.Rectangle;
import java.awt.Stroke;
import org.netbeans.api.visual.border.Border;
import java.awt.geom.Area;
import java.awt.geom.RoundRectangle2D;
import org.netbeans.api.visual.widget.Widget;
/**
* @author <NAME>
*/
public class RoundedBorder3D implements Border {
private static final Color SHADOW_COLOR = new Color(208,208,208);
private static final Color SELECTED_BORDER_COLOR = new Color(255,153,0);
private Widget widget;
private int radius;
private int insetWidth;
private int insetHeight;
private Color drawColor;
private int depth = 3;
    /**
     * Creates a rounded border with a 3D drop shadow.
     * @param widget the widget this border decorates; its selection and focus state drive how the border is drawn
     * @param radius corner radius of the rounded rectangle
     * @param depth depth of the drop shadow, in pixels
     * @param insetWidth horizontal inset of the border
     * @param insetHeight vertical inset of the border
     * @param drawColor color used to draw the border when the widget is not selected
     */
public RoundedBorder3D(Widget widget, int radius, int depth, int insetWidth, int insetHeight, Color drawColor) {
this.widget = widget;
this.radius = radius;
this.depth = depth;
this.insetWidth = insetWidth;
this.insetHeight = insetHeight;
this.drawColor = drawColor;
}
public Insets getInsets() {
return new Insets(insetHeight, insetWidth, insetHeight+depth, insetWidth+depth);
}
public void paint(Graphics2D gr, Rectangle bounds) {
Paint oldPaint = gr.getPaint();
RoundRectangle2D rect = new RoundRectangle2D.Double(bounds.x+0.5f,
bounds.y + 0.5f, bounds.width - depth - 1,
bounds.height - depth - 1, radius, radius);
if (drawColor != null) {
RoundRectangle2D outerRect = new RoundRectangle2D.Double(
bounds.x + depth + 0.5f, bounds.y + depth + 0.5f,
bounds.width - depth - 1, bounds.height - depth - 1, radius, radius);
Area raisedArea = new Area(outerRect);
raisedArea.subtract(new Area(rect));
gr.setPaint(SHADOW_COLOR);
gr.fill(raisedArea);
gr.setPaint(widget.getState().isSelected()?SELECTED_BORDER_COLOR:drawColor);
Stroke s = gr.getStroke ();
if(widget.getState().isFocused())
gr.setStroke (new BasicStroke(1, BasicStroke.CAP_BUTT,
BasicStroke.JOIN_ROUND, BasicStroke.JOIN_MITER, new float[] {2,2}, 0));
gr.draw(rect);
gr.setStroke (s);
}
gr.setPaint(oldPaint);
}
public boolean isOpaque() {
return true;
}
}
| 1,390 |
12,278 | // Copyright <NAME> 2007.
// Copyright <NAME> 2010.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
// Note that this file contains quickbook mark-up as well as code
// and comments, don't change any of the special comment mark-ups!
#ifdef _MSC_VER
# pragma warning (disable : 4305) // 'initializing' : truncation from 'long double' to 'const eval_type'
# pragma warning (disable : 4244) // 'conversion' : truncation from 'long double' to 'const eval_type'
#endif
//[policy_ref_snip4
#include <boost/math/distributions/normal.hpp>
using boost::math::normal_distribution;
using namespace boost::math::policies;
// Define a policy:
typedef policy<
promote_float<false>
> my_policy;
// Define the new normal distribution using my_policy:
typedef normal_distribution<float, my_policy> my_norm;
// Get a quantile:
float q = quantile(my_norm(), 0.05f);
//] [policy_ref_snip4]
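// Note (added): promote_float<false> disables the library's default internal promotion of
// float arguments to double, so the quantile above is evaluated entirely at float precision.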
#include <iostream>
using std::cout; using std::endl;
int main()
{
cout << " quantile(my_norm(), 0.05f) = " << q << endl; // -1.64485
}
| 411 |
8,456 | package redis.clients.jedis.graph;
import java.util.HashMap;
import java.util.Map;
import redis.clients.jedis.CommandArguments;
import redis.clients.jedis.exceptions.JedisException;
import redis.clients.jedis.graph.GraphProtocol.GraphCommand;
import redis.clients.jedis.graph.GraphProtocol.GraphKeyword;
import redis.clients.jedis.params.IParams;
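// Illustrative usage (added, not part of the original source; graph name and query text are assumptions):
//   CommandArguments args = GraphQueryParams.queryParams("MATCH (n) RETURN n")
//       .readonly()
//       .timeout(1000)
//       .getArguments("myGraph");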
public class GraphQueryParams implements IParams {
private boolean readonly;
private String query;
private Map<String, Object> params;
private Long timeout;
/**
* Query string must be set later.
*/
public GraphQueryParams() {
}
/**
* Query string must be set later.
*/
public static GraphQueryParams queryParams() {
return new GraphQueryParams();
}
public GraphQueryParams(String query) {
this.query = query;
}
public static GraphQueryParams queryParams(String query) {
return new GraphQueryParams(query);
}
public GraphQueryParams readonly() {
return readonly(true);
}
public GraphQueryParams readonly(boolean readonly) {
this.readonly = readonly;
return this;
}
public GraphQueryParams query(String queryStr) {
this.query = queryStr;
return this;
}
public GraphQueryParams params(Map<String, Object> params) {
this.params = params;
return this;
}
public GraphQueryParams addParam(String key, Object value) {
if (this.params == null) this.params = new HashMap<>();
this.params.put(key, value);
return this;
}
public GraphQueryParams timeout(long timeout) {
this.timeout = timeout;
return this;
}
@Override
public void addParams(CommandArguments args) {
if (query == null) throw new JedisException("Query string must be set.");
if (params == null) {
args.add(query);
} else {
args.add(RedisGraphQueryUtil.prepareQuery(query, params));
}
args.add(GraphKeyword.__COMPACT);
if (timeout != null) {
args.add(GraphKeyword.TIMEOUT).add(timeout).blocking();
}
}
public CommandArguments getArguments(String graphName) {
return new CommandArguments(!readonly ? GraphCommand.QUERY : GraphCommand.RO_QUERY)
.key(graphName).addParams(this);
}
}
| 763 |
521 | /* get localized short TIME format */
#include "../fb.h"
#include <langinfo.h>
int fb_DrvIntlGetTimeFormat( char *buffer, size_t len )
{
int do_esc = FALSE, do_fmt = FALSE;
char *pszOutput = buffer;
char achAddBuffer[2] = { 0 };
const char *pszAdd = NULL;
size_t remaining = len - 1, add_len = 0;
const char *pszCurrent = nl_langinfo( T_FMT );
DBG_ASSERT(buffer!=NULL);
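	/* Illustrative example (added): a locale T_FMT of "%H:%M:%S" is rewritten as "HH:mm:ss". */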
while ( *pszCurrent!=0 ) {
char ch = *pszCurrent;
if( do_esc ) {
do_esc = FALSE;
achAddBuffer[0] = ch;
pszAdd = achAddBuffer;
add_len = 1;
} else if ( do_fmt ) {
int succeeded = TRUE;
do_fmt = FALSE;
switch (ch) {
case 'n':
pszAdd = "\n";
add_len = 1;
break;
case 't':
pszAdd = "\t";
add_len = 1;
break;
case '%':
pszAdd = "%";
add_len = 1;
break;
case 'H':
pszAdd = "HH";
add_len = 2;
break;
case 'I':
pszAdd = "hh";
add_len = 2;
break;
case 'M':
pszAdd = "mm";
add_len = 2;
break;
case 'p':
pszAdd = "tt";
add_len = 2;
break;
case 'r':
pszAdd = "hh:mm:ss tt";
add_len = 11;
break;
case 'R':
pszAdd = "HH:mm";
add_len = 5;
break;
case 'S':
pszAdd = "ss";
add_len = 2;
break;
case 'T':
case 'X':
pszAdd = "HH:mm:ss";
add_len = 8;
break;
default:
/* Unsupported format */
succeeded = FALSE;
break;
}
if( !succeeded )
break;
} else {
switch (ch) {
case '%':
do_fmt = TRUE;
break;
case '\\':
do_esc = TRUE;
break;
default:
achAddBuffer[0] = ch;
pszAdd = achAddBuffer;
add_len = 1;
break;
}
}
if( add_len!=0 ) {
if( remaining < add_len ) {
return FALSE;
}
strcpy( pszOutput, pszAdd );
pszOutput += add_len;
remaining -= add_len;
add_len = 0;
}
++pszCurrent;
}
return TRUE;
}
| 1,838 |
347 | <reponame>jihwahn1018/ovirt-engine
package org.ovirt.engine.core.utils;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import org.junit.jupiter.api.Test;
import org.ovirt.engine.core.common.businessentities.VMStatus;
public class ObjectIdentityCheckerTest {
@Test
public void testIsUpdateable() {
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
assertFalse(oic.isFieldUpdatable("name"), "Should be false by default");
oic.addPermittedFields("name");
assertTrue(oic.isFieldUpdatable("name"), "Should be true now");
}
@Test
public void testNoChanges() {
Jedi jedi1 = new Jedi();
Jedi jedi2 = new Jedi();
Collection<String> changes = ObjectIdentityChecker.getChangedFields(jedi1, jedi2);
assertEquals(0, changes.size(), "Should be no changes");
}
@Test
public void testChanges() {
Jedi jedi1 = new Jedi();
Jedi jedi2 = new Jedi();
jedi2.saberColor = "red"; // Gone to the dark side
Collection<String> changes = ObjectIdentityChecker.getChangedFields(jedi1, jedi2);
assertEquals(1, changes.size(), "Should be 1 changes");
}
@Test
public void testIsFieldsUpdated() {
Jedi jedi1 = new Jedi();
Jedi jedi2 = new Jedi();
jedi2.saberColor = "red"; // Gone to the dark side
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
boolean changed = oic.isFieldsUpdated(jedi1, jedi2, Collections.singletonList("name"));
assertFalse(changed, "No Change");
changed = oic.isFieldsUpdated(jedi1, jedi2, Collections.singletonList("saberColor"));
assertTrue(changed, "1 Change");
}
@Test
public void testHotsetUpdateableWhenHotsetRequested() {
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
oic.addHotsetField("name", EnumSet.of(VMStatus.Up));
assertTrue(oic.isFieldUpdatable(VMStatus.Up, "name", null, true),
"hot set requested for hot set fields should be true in state Up");
}
@Test
public void testHotsetUpdateableWhenHotsetRequestedAndStatusOtherThanHotSettable() {
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
oic.addHotsetField("name", EnumSet.of(VMStatus.Up));
assertFalse(oic.isFieldUpdatable(null, "name", null, true),
"hot set requested for hot set fields should be false in state other than Up");
}
@Test
public void testHotsetNotUpdateableWhenHotsetNotRequested() {
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
assertFalse(oic.isFieldUpdatable("name"), "Should be false by default");
oic.addHotsetField("name", EnumSet.of(VMStatus.Up));
assertFalse(oic.isFieldUpdatable(null, "name", null, false),
"hot set not requested should return false even if field is hot set");
}
@Test
public void testHotsetUpdateableWhenHotsetRequestedWithStatus() {
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
oic.addField(VMStatus.Down, "name");
oic.addHotsetField("name", EnumSet.of(VMStatus.Up));
assertTrue(oic.isFieldUpdatable(VMStatus.Down, "name", null, true),
"hot set requested for hot set fields should be true");
}
@Test
public void testHotsetUpdateableWhenHotsetNotRequestedWithStatus() {
ObjectIdentityChecker oic = new ObjectIdentityChecker(Jedi.class);
oic.addField(VMStatus.Down, "name");
oic.addHotsetField("name", EnumSet.of(VMStatus.Up));
assertTrue(oic.isFieldUpdatable(VMStatus.Down, "name", null, false),
"hot set not requested field should be updateable according to status");
}
}
| 1,598 |
664 | <reponame>adem4ik/LIII
#pragma once
#ifdef ALLOW_TRAFFIC_CONTROL
#include "traffic_limitation/InterceptingApplication.h"
#else
#include "qtsingleapplication/qtsingleapplication.h"
#endif // ALLOW_TRAFFIC_CONTROL
#include "utilities/translatable.h"
#include "utilities/utils.h"
class Application
#ifdef ALLOW_TRAFFIC_CONTROL
: public traffic_limitation::InterceptingApp,
#else
: public QtSingleApplication,
#endif // ALLOW_TRAFFIC_CONTROL
public utilities::Translatable
{
Q_OBJECT
public:
Application(const QString& id, int& argc, char** argv);
void passCmdLine();
void checkFirewallException(QMainWindow* mainWindow);
bool allowAsSecondInstance() const { return alowAsSecondInstance_; }
bool isMissionDone() const { return missionDone; }
private:
    bool missionDone; // flag to ignore all other commands if the mission was already completed in the constructor
bool alowAsSecondInstance_;
void checkSpecialCmdLine();
};
| 329 |
31,446 | {"version":3,"e2ee":{"value":false,"updatedTime":0},"activeMasterKeyId":{"value":"","updatedTime":0},"masterKeys":[]} | 34 |
319 | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import sys
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.networks.base_network import BaseNetwork
from models.networks.generator import AdaptiveFeatureGenerator, DomainClassifier, ReverseLayerF
from util.util import vgg_preprocess
import util.util as util
class ResidualBlock(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size=3, padding=1, stride=1):
super(ResidualBlock, self).__init__()
self.padding1 = nn.ReflectionPad2d(padding)
self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0, stride=stride)
self.bn1 = nn.InstanceNorm2d(out_channels)
self.prelu = nn.PReLU()
self.padding2 = nn.ReflectionPad2d(padding)
self.conv2 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0, stride=stride)
self.bn2 = nn.InstanceNorm2d(out_channels)
def forward(self, x):
residual = x
out = self.padding1(x)
out = self.conv1(out)
out = self.bn1(out)
out = self.prelu(out)
out = self.padding2(out)
out = self.conv2(out)
out = self.bn2(out)
out += residual
out = self.prelu(out)
return out
class WTA_scale(torch.autograd.Function):
"""
We can implement our own custom autograd Functions by subclassing
torch.autograd.Function and implementing the forward and backward passes
which operate on Tensors.
"""
@staticmethod
def forward(ctx, input, scale=1e-4):
"""
In the forward pass we receive a Tensor containing the input and return a
Tensor containing the output. You can cache arbitrary Tensors for use in the
backward pass using the save_for_backward method.
"""
activation_max, index_max = torch.max(input, -1, keepdim=True)
input_scale = input * scale # default: 1e-4
output_max_scale = torch.where(input == activation_max, input, input_scale)
mask = (input == activation_max).type(torch.float)
ctx.save_for_backward(input, mask)
return output_max_scale
@staticmethod
def backward(ctx, grad_output):
"""
In the backward pass we receive a Tensor containing the gradient of the loss
with respect to the output, and we need to compute the gradient of the loss
with respect to the input.
"""
# import pdb
# pdb.set_trace()
input, mask = ctx.saved_tensors
mask_ones = torch.ones_like(mask)
mask_small_ones = torch.ones_like(mask) * 1e-4
grad_scale = torch.where(mask == 1, mask_ones, mask_small_ones)
grad_input = grad_output.clone() * grad_scale
return grad_input, None
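# Illustrative note (added): WTA_scale is used as a custom autograd Function, e.g.
# `scaled = WTA_scale.apply(scores, 1e-4)`; the per-row maximum passes through unchanged
# while every other entry (and its gradient) is scaled down.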
class VGG19_feature_color_torchversion(nn.Module):
'''
NOTE: there is no need to pre-process the input
input tensor should range in [0,1]
'''
def __init__(self, pool='max', vgg_normal_correct=False, ic=3):
super(VGG19_feature_color_torchversion, self).__init__()
self.vgg_normal_correct = vgg_normal_correct
self.conv1_1 = nn.Conv2d(ic, 64, kernel_size=3, padding=1)
self.conv1_2 = nn.Conv2d(64, 64, kernel_size=3, padding=1)
self.conv2_1 = nn.Conv2d(64, 128, kernel_size=3, padding=1)
self.conv2_2 = nn.Conv2d(128, 128, kernel_size=3, padding=1)
self.conv3_1 = nn.Conv2d(128, 256, kernel_size=3, padding=1)
self.conv3_2 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
self.conv3_3 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
self.conv3_4 = nn.Conv2d(256, 256, kernel_size=3, padding=1)
self.conv4_1 = nn.Conv2d(256, 512, kernel_size=3, padding=1)
self.conv4_2 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
self.conv4_3 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
self.conv4_4 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
self.conv5_1 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
self.conv5_2 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
self.conv5_3 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
self.conv5_4 = nn.Conv2d(512, 512, kernel_size=3, padding=1)
if pool == 'max':
self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)
self.pool3 = nn.MaxPool2d(kernel_size=2, stride=2)
self.pool4 = nn.MaxPool2d(kernel_size=2, stride=2)
self.pool5 = nn.MaxPool2d(kernel_size=2, stride=2)
elif pool == 'avg':
self.pool1 = nn.AvgPool2d(kernel_size=2, stride=2)
self.pool2 = nn.AvgPool2d(kernel_size=2, stride=2)
self.pool3 = nn.AvgPool2d(kernel_size=2, stride=2)
self.pool4 = nn.AvgPool2d(kernel_size=2, stride=2)
self.pool5 = nn.AvgPool2d(kernel_size=2, stride=2)
def forward(self, x, out_keys, preprocess=True):
'''
NOTE: input tensor should range in [0,1]
'''
out = {}
if preprocess:
x = vgg_preprocess(x, vgg_normal_correct=self.vgg_normal_correct)
out['r11'] = F.relu(self.conv1_1(x))
out['r12'] = F.relu(self.conv1_2(out['r11']))
out['p1'] = self.pool1(out['r12'])
out['r21'] = F.relu(self.conv2_1(out['p1']))
out['r22'] = F.relu(self.conv2_2(out['r21']))
out['p2'] = self.pool2(out['r22'])
out['r31'] = F.relu(self.conv3_1(out['p2']))
out['r32'] = F.relu(self.conv3_2(out['r31']))
out['r33'] = F.relu(self.conv3_3(out['r32']))
out['r34'] = F.relu(self.conv3_4(out['r33']))
out['p3'] = self.pool3(out['r34'])
out['r41'] = F.relu(self.conv4_1(out['p3']))
out['r42'] = F.relu(self.conv4_2(out['r41']))
out['r43'] = F.relu(self.conv4_3(out['r42']))
out['r44'] = F.relu(self.conv4_4(out['r43']))
out['p4'] = self.pool4(out['r44'])
out['r51'] = F.relu(self.conv5_1(out['p4']))
out['r52'] = F.relu(self.conv5_2(out['r51']))
out['r53'] = F.relu(self.conv5_3(out['r52']))
out['r54'] = F.relu(self.conv5_4(out['r53']))
out['p5'] = self.pool5(out['r54'])
return [out[key] for key in out_keys]
class NoVGGCorrespondence(BaseNetwork):
# input is Al, Bl, channel = 1, range~[0,255]
def __init__(self, opt):
self.opt = opt
super().__init__()
opt.spade_ic = opt.semantic_nc
self.adaptive_model_seg = AdaptiveFeatureGenerator(opt)
opt.spade_ic = 3
self.adaptive_model_img = AdaptiveFeatureGenerator(opt)
del opt.spade_ic
if opt.weight_domainC > 0 and (not opt.domain_rela):
self.domain_classifier = DomainClassifier(opt)
if 'down' not in opt:
opt.down = 4
if opt.warp_stride == 2:
opt.down = 2
assert (opt.down == 2) or (opt.down == 4)
self.down = opt.down
self.feature_channel = 64
self.in_channels = self.feature_channel * 4
self.inter_channels = 256
coord_c = 3 if opt.use_coordconv else 0
label_nc = opt.semantic_nc if opt.maskmix else 0
self.layer = nn.Sequential(
ResidualBlock(self.feature_channel * 4 + label_nc + coord_c, self.feature_channel * 4 + label_nc + coord_c, kernel_size=3, padding=1, stride=1),
ResidualBlock(self.feature_channel * 4 + label_nc + coord_c, self.feature_channel * 4 + label_nc + coord_c, kernel_size=3, padding=1, stride=1),
ResidualBlock(self.feature_channel * 4 + label_nc + coord_c, self.feature_channel * 4 + label_nc + coord_c, kernel_size=3, padding=1, stride=1),
ResidualBlock(self.feature_channel * 4 + label_nc + coord_c, self.feature_channel * 4 + label_nc + coord_c, kernel_size=3, padding=1, stride=1))
self.phi = nn.Conv2d(in_channels=self.in_channels + label_nc + coord_c, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0)
self.theta = nn.Conv2d(in_channels=self.in_channels + label_nc + coord_c, out_channels=self.inter_channels, kernel_size=1, stride=1, padding=0)
self.upsampling_bi = nn.Upsample(scale_factor=opt.down, mode='bilinear') #for show
if opt.warp_bilinear:
self.upsampling = nn.Upsample(scale_factor=opt.down, mode='bilinear')
else:
self.upsampling = nn.Upsample(scale_factor=opt.down)
self.zero_tensor = None
# model = [nn.ReflectionPad2d(1),
# nn.Conv2d(opt.semantic_nc, 128, kernel_size=3, padding=0, stride=1),
# nn.InstanceNorm2d(128),
# nn.PReLU(),
# nn.ReflectionPad2d(1),
# nn.Conv2d(128, self.feature_channel * 2, kernel_size=3, padding=0, stride=1),
# nn.InstanceNorm2d(self.feature_channel * 2),
# nn.PReLU()]
# self.layer_mask_head = nn.Sequential(*model)
# self.layer_mix = nn.Conv2d(in_channels=self.feature_channel * 6, out_channels=self.feature_channel * 4, kernel_size=1, stride=1, padding=0)
def addcoords(self, x):
bs, _, h, w = x.shape
xx_ones = torch.ones([bs, h, 1], dtype=x.dtype, device=x.device)
xx_range = torch.arange(w, dtype=x.dtype, device=x.device).unsqueeze(0).repeat([bs, 1]).unsqueeze(1)
xx_channel = torch.matmul(xx_ones, xx_range).unsqueeze(1)
yy_ones = torch.ones([bs, 1, w], dtype=x.dtype, device=x.device)
yy_range = torch.arange(h, dtype=x.dtype, device=x.device).unsqueeze(0).repeat([bs, 1]).unsqueeze(-1)
yy_channel = torch.matmul(yy_range, yy_ones).unsqueeze(1)
xx_channel = xx_channel.float() / (w - 1)
yy_channel = yy_channel.float() / (h - 1)
xx_channel = 2 * xx_channel - 1
yy_channel = 2 * yy_channel - 1
rr_channel = torch.sqrt(torch.pow(xx_channel, 2) + torch.pow(yy_channel, 2))
concat = torch.cat((x, xx_channel, yy_channel, rr_channel), dim=1)
return concat
def forward(self,
ref_img,
real_img,
seg_map,
ref_seg_map,
temperature=0.01,
detach_flag=False,
WTA_scale_weight=1,
alpha=1,
return_corr=False):
coor_out = {}
batch_size = ref_img.shape[0]
image_height = ref_img.shape[2]
image_width = ref_img.shape[3]
feature_height = int(image_height / self.opt.down)
feature_width = int(image_width / self.opt.down)
if self.opt.mask_noise: #add noise to mask
noise = torch.randn_like(seg_map, requires_grad=False) * 0.1
noise[seg_map == 0] = 0
seg_input = seg_map + noise
else:
seg_input = seg_map
adaptive_feature_seg = self.adaptive_model_seg(seg_input, seg_input)
adaptive_feature_img = self.adaptive_model_img(ref_img, ref_img)
adaptive_feature_seg = util.feature_normalize(adaptive_feature_seg)
adaptive_feature_img = util.feature_normalize(adaptive_feature_img)
if self.opt.isTrain and self.opt.novgg_featpair > 0:
adaptive_feature_img_pair = self.adaptive_model_img(real_img, real_img)
adaptive_feature_img_pair = util.feature_normalize(adaptive_feature_img_pair)
coor_out['loss_novgg_featpair'] = F.l1_loss(adaptive_feature_seg, adaptive_feature_img_pair) * self.opt.novgg_featpair
if self.opt.use_coordconv:
adaptive_feature_seg = self.addcoords(adaptive_feature_seg)
adaptive_feature_img = self.addcoords(adaptive_feature_img)
seg = F.interpolate(seg_map, size=adaptive_feature_seg.size()[2:], mode='nearest')
ref_seg = F.interpolate(ref_seg_map, size=adaptive_feature_img.size()[2:], mode='nearest')
if self.opt.maskmix:
cont_features = self.layer(torch.cat((adaptive_feature_seg, seg), 1))
if self.opt.noise_for_mask and ((not self.opt.isTrain) or (self.opt.isTrain and self.opt.epoch > self.opt.mask_epoch)):
noise = torch.randn_like(ref_seg, requires_grad=False) * 0.01
ref_features = self.layer(torch.cat((adaptive_feature_img, noise), 1))
else:
ref_features = self.layer(torch.cat((adaptive_feature_img, ref_seg), 1))
else:
cont_features = self.layer(adaptive_feature_seg)
ref_features = self.layer(adaptive_feature_img)
# pairwise cosine similarity
theta = self.theta(cont_features)
if self.opt.match_kernel == 1:
theta = theta.view(batch_size, self.inter_channels, -1) # 2*256*(feature_height*feature_width)
else:
theta = F.unfold(theta, kernel_size=self.opt.match_kernel, padding=int(self.opt.match_kernel // 2))
dim_mean = 1 if self.opt.PONO_C else -1
theta = theta - theta.mean(dim=dim_mean, keepdim=True) # center the feature
theta_norm = torch.norm(theta, 2, 1, keepdim=True) + sys.float_info.epsilon
theta = torch.div(theta, theta_norm)
theta_permute = theta.permute(0, 2, 1) # 2*(feature_height*feature_width)*256
phi = self.phi(ref_features)
if self.opt.match_kernel == 1:
phi = phi.view(batch_size, self.inter_channels, -1) # 2*256*(feature_height*feature_width)
else:
phi = F.unfold(phi, kernel_size=self.opt.match_kernel, padding=int(self.opt.match_kernel // 2))
phi = phi - phi.mean(dim=dim_mean, keepdim=True) # center the feature
phi_norm = torch.norm(phi, 2, 1, keepdim=True) + sys.float_info.epsilon
phi = torch.div(phi, phi_norm)
f = torch.matmul(theta_permute, phi) # 2*(feature_height*feature_width)*(feature_height*feature_width)
if detach_flag:
f = f.detach()
#f_similarity = f.unsqueeze(dim=1)
# similarity_map = torch.max(f_similarity, -1, keepdim=True)[0]
# similarity_map = similarity_map.view(batch_size, 1, feature_height, feature_width)
# f can be negative
if WTA_scale_weight == 1:
f_WTA = f
else:
f_WTA = WTA_scale.apply(f, WTA_scale_weight)
f_WTA = f_WTA / temperature
if return_corr:
return f_WTA
f_div_C = F.softmax(f_WTA.squeeze(), dim=-1) # 2*1936*1936; softmax along the horizontal line (dim=-1)
# downsample the reference color
if self.opt.warp_patch:
ref = F.unfold(ref_img, self.opt.down, stride=self.opt.down)
else:
ref = F.avg_pool2d(ref_img, self.opt.down)
channel = ref.shape[1]
ref = ref.view(batch_size, channel, -1)
ref = ref.permute(0, 2, 1)
y = torch.matmul(f_div_C, ref) # 2*1936*channel
if self.opt.warp_patch:
y = y.permute(0, 2, 1)
y = F.fold(y, 256, self.opt.down, stride=self.opt.down)
else:
y = y.permute(0, 2, 1).contiguous()
y = y.view(batch_size, channel, feature_height, feature_width) # 2*3*44*44
if (not self.opt.isTrain) and self.opt.show_corr:
coor_out['warp_out_bi'] = y if self.opt.warp_patch else self.upsampling_bi(y)
coor_out['warp_out'] = y if self.opt.warp_patch else self.upsampling(y)
if self.opt.warp_mask_losstype == 'direct' or self.opt.show_warpmask:
ref_seg = F.interpolate(ref_seg_map, scale_factor= 1/self.opt.down, mode='nearest')
channel = ref_seg.shape[1]
ref_seg = ref_seg.view(batch_size, channel, -1)
ref_seg = ref_seg.permute(0, 2, 1)
warp_mask = torch.matmul(f_div_C, ref_seg) # 2*1936*channel
warp_mask = warp_mask.permute(0, 2, 1).contiguous()
coor_out['warp_mask'] = warp_mask.view(batch_size, channel, feature_height, feature_width) # 2*3*44*44
elif self.opt.warp_mask_losstype == 'cycle':
f_div_C_v = F.softmax(f_WTA.transpose(1, 2), dim=-1) # 2*1936*1936; softmax along the vertical line
seg = F.interpolate(seg_map, scale_factor=1 / self.opt.down, mode='nearest')
channel = seg.shape[1]
seg = seg.view(batch_size, channel, -1)
seg = seg.permute(0, 2, 1)
warp_mask_to_ref = torch.matmul(f_div_C_v, seg) # 2*1936*channel
warp_mask = torch.matmul(f_div_C, warp_mask_to_ref) # 2*1936*channel
warp_mask = warp_mask.permute(0, 2, 1).contiguous()
coor_out['warp_mask'] = warp_mask.view(batch_size, channel, feature_height, feature_width) # 2*3*44*44
else:
warp_mask = None
if self.opt.warp_cycle_w > 0:
f_div_C_v = F.softmax(f_WTA.transpose(1, 2), dim=-1)
if self.opt.warp_patch:
y = F.unfold(y, self.opt.down, stride=self.opt.down)
y = y.permute(0, 2, 1)
warp_cycle = torch.matmul(f_div_C_v, y)
warp_cycle = warp_cycle.permute(0, 2, 1)
warp_cycle = F.fold(warp_cycle, 256, self.opt.down, stride=self.opt.down)
coor_out['warp_cycle'] = warp_cycle
else:
channel = y.shape[1]
y = y.view(batch_size, channel, -1).permute(0, 2, 1)
warp_cycle = torch.matmul(f_div_C_v, y).permute(0, 2, 1).contiguous()
coor_out['warp_cycle'] = warp_cycle.view(batch_size, channel, feature_height, feature_width)
if self.opt.two_cycle:
real_img = F.avg_pool2d(real_img, self.opt.down)
real_img = real_img.view(batch_size, channel, -1)
real_img = real_img.permute(0, 2, 1)
warp_i2r = torch.matmul(f_div_C_v, real_img).permute(0, 2, 1).contiguous() #warp input to ref
warp_i2r = warp_i2r.view(batch_size, channel, feature_height, feature_width)
warp_i2r2i = torch.matmul(f_div_C, warp_i2r.view(batch_size, channel, -1).permute(0, 2, 1))
coor_out['warp_i2r'] = warp_i2r
coor_out['warp_i2r2i'] = warp_i2r2i.permute(0, 2, 1).contiguous().view(batch_size, channel, feature_height, feature_width)
return coor_out
| 9,137 |
3,402 | <reponame>ApacheSourceCode/kylin<filename>core-metadata/src/main/java/org/apache/kylin/dimension/DimensionEncoding.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.dimension;
import java.io.Externalizable;
import org.apache.kylin.common.util.StringUtil;
import org.apache.kylin.metadata.datatype.DataTypeSerializer;
/**
* Dimension encoding maps a dimension (String) to bytes of fixed length.
*
 * It is similar to Dictionary in that 1) the encoded bytes have a fixed length; 2) the mapping
 * is bi-directional; 3) the mapping preserves order. It differs from Dictionary in that the
 * encoded bytes can be very long, while a dictionary ID is at most 4 bytes. This means it is
 * hard to enumerate all values of an encoding, thus TupleFilterDictionaryTranslater
 * cannot work on DimensionEncoding.
*/
public abstract class DimensionEncoding implements Externalizable {
private static final long serialVersionUID = 1L;
// it's convention that all 0xff means NULL
public static final byte NULL = (byte) 0xff;
public static boolean isNull(byte[] bytes, int offset, int length) {
// all 0xFF is NULL
if (length == 0) {
return false;
}
for (int i = 0; i < length; i++) {
if (bytes[i + offset] != NULL) {
return false;
}
}
return true;
}
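    // Illustrative example (added): parseEncodingConf("integer:4") returns
    // { "integer", new String[]{ "4" } } -- the encoding name followed by its argument list.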
public static Object[] parseEncodingConf(String encoding) {
String[] parts = encoding.split("\\s*[(),:]\\s*");
if (parts == null || parts.length == 0 || parts[0].isEmpty())
throw new IllegalArgumentException("Not supported row key col encoding: '" + encoding + "'");
final String encodingName = parts[0];
final String[] encodingArgs = parts[parts.length - 1].isEmpty() //
? StringUtil.subArray(parts, 1, parts.length - 1) : StringUtil.subArray(parts, 1, parts.length);
return new Object[] { encodingName, encodingArgs };
}
/** return the fixed length of encoded bytes */
abstract public int getLengthOfEncoding();
/** encode given value to bytes, note the NULL convention */
abstract public void encode(String value, byte[] output, int outputOffset);
/** decode given bytes to value string, note the NULL convention */
abstract public String decode(byte[] bytes, int offset, int len);
/** return a DataTypeSerializer that does the same encoding/decoding on ByteBuffer */
abstract public DataTypeSerializer<Object> asDataTypeSerializer();
}
| 1,012 |
555 | package japa.parser.ast.body;
import japa.parser.ast.expr.AnnotationExpr;
import java.util.List;
/**
* An element which can be the target of annotations.
*
* @author <NAME>
* @since July 2014
*/
public interface AnnotableNode {
public List<AnnotationExpr> getAnnotations();
}
| 95 |
543 | # Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tensorflow as tf
class XDeepFmOuterProduct(tf.keras.layers.Layer):
"""
Layer implementing the outer product transformation used in
the Compressed Interaction Network (CIN) proposed in
in https://arxiv.org/abs/1803.05170. Treats the feature dimension
H_k of a B x H_k x D feature embedding tensor as a feature map
of the D embedding elements, and computes element-wise multiplication
interaction between these maps and those from an initial input tensor
x_0 before taking the inner product with a parameter matrix.
Parameters
------------
dim : int
Feature dimension of the layer. Output will be of shape
(batch_size, dim, embedding_dim)
"""
def __init__(self, dim, **kwargs):
self.dim = dim
super().__init__(**kwargs)
def build(self, input_shapes):
if not isinstance(input_shapes[0], tuple):
raise ValueError("Should be called on a list of inputs.")
if len(input_shapes) != 2:
raise ValueError("Should only have two inputs, found {}".format(len(input_shapes)))
for shape in input_shapes:
if len(shape) != 3:
raise ValueError("Found shape {} without 3 dimensions".format(shape))
if input_shapes[0][-1] != input_shapes[1][-1]:
raise ValueError(
"Last dimension should match, found dimensions {} and {}".format(
input_shapes[0][-1], input_shapes[1][-1]
)
)
# H_k x H_{k-1} x m
shape = (self.dim, input_shapes[0][1], input_shapes[1][1])
self.kernel = self.add_weight(
name="kernel", initializer="glorot_uniform", trainable=True, shape=shape
)
self.built = True
def call(self, inputs):
"""
Parameters
------------
inputs : array-like(tf.Tensor)
The two input tensors, the first of which should be the
output of the previous layer, and the second of which
should be the input to the CIN.
"""
x_k_minus_1, x_0 = inputs
# need to do shape manipulations so that we
# can do element-wise multiply
x_k_minus_1 = tf.expand_dims(x_k_minus_1, axis=2) # B x H_{k-1} x 1 x D
x_k_minus_1 = tf.tile(x_k_minus_1, [1, 1, x_0.shape[1], 1]) # B x H_{k-1} x m x D
x_k_minus_1 = tf.transpose(x_k_minus_1, (1, 0, 2, 3)) # H_{k-1} x B x m x D
z_k = x_k_minus_1 * x_0 # H_{k-1} x B x m x D
z_k = tf.transpose(z_k, (1, 0, 2, 3)) # B x H_{k-1} x m x D
# now we need to map to B x H_k x D
x_k = tf.tensordot(self.kernel, z_k, axes=[[1, 2], [1, 2]])
x_k = tf.transpose(x_k, (1, 0, 2))
return x_k
def compute_output_shape(self, input_shapes):
return (input_shapes[0][0], self.dim, input_shapes[0][2])
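# --- Illustrative usage sketch (added; not part of the original file) ---
# The shapes are assumptions chosen only to show how the two inputs relate: x_0 is the
# CIN input of shape (batch, m, D) and x_prev is the previous layer output (batch, H_{k-1}, D).
# build() is invoked explicitly with plain tuples, matching the isinstance check above.
if __name__ == "__main__":
    batch, m, h_prev, d = 8, 10, 12, 16
    x_0 = tf.random.uniform((batch, m, d))  # initial feature embeddings
    x_prev = tf.random.uniform((batch, h_prev, d))  # output of the previous CIN layer
    layer = XDeepFmOuterProduct(dim=32)
    layer.build([(None, h_prev, d), (None, m, d)])  # [previous-layer shape, x_0 shape]
    out = layer([x_prev, x_0])
    print(out.shape)  # expected: (8, 32, 16)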
| 1,458 |
3,301 | <reponame>zhangjun0x01/Alink
package com.alibaba.alink.operator.common.tensorflow;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple4;
import org.apache.flink.ml.api.misc.param.Params;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.types.Row;
import com.alibaba.alink.common.mapper.ModelMapper;
import com.alibaba.alink.common.dl.utils.PythonFileUtils;
import com.alibaba.alink.common.dl.utils.ZipFileUtil;
import com.alibaba.alink.operator.common.io.csv.CsvUtil;
import com.alibaba.alink.params.tensorflow.savedmodel.TFTableModelPredictParams;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Base64.Decoder;
import java.util.Comparator;
import java.util.List;
/**
 * {@link TFTableModelPredictModelMapper} provides inference for a TensorFlow SavedModel stored in Alink model format.
*/
public class TFTableModelPredictModelMapper extends ModelMapper implements Serializable {
private final BaseTFSavedModelPredictRowMapper mapper;
public TFTableModelPredictModelMapper(TableSchema modelSchema, TableSchema dataSchema, Params params) {
super(modelSchema, dataSchema, params);
mapper = new BaseTFSavedModelPredictRowMapper(dataSchema, params);
}
@Override
public void open() {
mapper.open();
}
@Override
public void close() {
mapper.close();
}
@Override
public void loadModel(List <Row> modelRows) {
String workDir;
try {
workDir = PythonFileUtils.createTempWorkDir("temp_");
} catch (Exception e) {
throw new RuntimeException("Cannot create temporary work directory.", e);
}
modelRows = new ArrayList <>(modelRows);
modelRows.sort(Comparator.comparingLong(d -> (Long) d.getField(0)));
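		// Row 0 (after sorting by the index in field 0) carries the zip file name; the remaining
		// rows carry base64-encoded chunks of the zipped SavedModel payload.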
String zipFilename = (String) modelRows.get(0).getField(1);
Path zipPath = Paths.get(workDir, zipFilename);
try (FileOutputStream fos = new FileOutputStream(zipPath.toFile())) {
Decoder decoder = Base64.getDecoder();
for (int i = 1, modelRowsSize = modelRows.size(); i < modelRowsSize; i += 1) {
Row modelRow = modelRows.get(i);
fos.write(decoder.decode((String) modelRow.getField(1)));
}
} catch (Exception e) {
throw new RuntimeException(String.format("Cannot extract data to %s", zipFilename), e);
}
String targetDir = zipFilename.substring(0, zipFilename.indexOf(".zip"));
Path targetPath = Paths.get(workDir, targetDir);
try {
ZipFileUtil.unzipFileIntoDirectory(zipPath.toFile(), targetPath.toFile());
} catch (IOException e) {
throw new RuntimeException(String.format("Failed to unzip %s to %s.", zipPath.toString(), targetPath.toString()), e);
}
mapper.setModelPath(targetPath.toAbsolutePath().toString());
}
@Override
public ModelMapper createNew(List <Row> newModelRows) {
return super.createNew(newModelRows);
}
@Override
protected Tuple4 <String[], String[], TypeInformation <?>[], String[]> prepareIoSchema(TableSchema modelSchema,
TableSchema dataSchema,
Params params) {
String[] tfInputCols = params.get(TFTableModelPredictParams.SELECTED_COLS);
if (null == tfInputCols) {
tfInputCols = dataSchema.getFieldNames();
}
String tfOutputSchemaStr = params.get(TFTableModelPredictParams.OUTPUT_SCHEMA_STR);
TableSchema tfOutputSchema = CsvUtil.schemaStr2Schema(tfOutputSchemaStr);
String[] reservedCols = params.get(TFTableModelPredictParams.RESERVED_COLS);
return Tuple4.of(tfInputCols,
tfOutputSchema.getFieldNames(),
tfOutputSchema.getFieldTypes(),
reservedCols);
}
@Override
protected void map(SlicedSelectedSample selection, SlicedResult result) throws Exception {
mapper.map(selection, result);
}
}
| 1,405 |
3,102 | <filename>clang/test/CodeGen/arm-pcs.c
// REQUIRES: arm-registered-target
// RUN: %clang_cc1 -triple arm-none-linux-gnueabi -emit-llvm -w -o - < %s | FileCheck %s
typedef int __attribute__((pcs("aapcs"))) (*aapcs_fn)(void);
typedef int __attribute__((pcs("aapcs-vfp"))) (*aapcs_vfp_fn)(void);
aapcs_fn bar;
int foo(aapcs_vfp_fn baz) {
// CHECK-LABEL: define i32 @foo
// CHECK: call arm_aapcscc
// CHECK: call arm_aapcs_vfpcc
return bar() + baz();
}
| 206 |
675 | // This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2015 <NAME> <<EMAIL>>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#define EIGEN_USE_THREADS
#include <atomic>
#include <stdlib.h>
#include "main.h"
#include <Eigen/CXX11/Tensor>
static void test_notification_single()
{
ThreadPool thread_pool(1);
std::atomic<int> counter(0);
Eigen::Notification n;
auto func = [&n, &counter](){ n.Wait(); ++counter;};
thread_pool.Schedule(func);
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
// The thread should be waiting for the notification.
VERIFY_IS_EQUAL(counter, 0);
// Unblock the thread
n.Notify();
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
// Verify the counter has been incremented
VERIFY_IS_EQUAL(counter, 1);
}
// Like test_notification_single() but enqueues multiple threads to
// validate that all threads get notified by Notify().
static void test_notification_multiple()
{
ThreadPool thread_pool(1);
std::atomic<int> counter(0);
Eigen::Notification n;
auto func = [&n, &counter](){ n.Wait(); ++counter;};
thread_pool.Schedule(func);
thread_pool.Schedule(func);
thread_pool.Schedule(func);
thread_pool.Schedule(func);
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
VERIFY_IS_EQUAL(counter, 0);
n.Notify();
std::this_thread::sleep_for(std::chrono::milliseconds(1000));
VERIFY_IS_EQUAL(counter, 4);
}
EIGEN_DECLARE_TEST(cxx11_tensor_notification)
{
CALL_SUBTEST(test_notification_single());
CALL_SUBTEST(test_notification_multiple());
}
| 625 |
406 | package org.broad.igv.htsget;
import htsjdk.tribble.Feature;
import htsjdk.variant.vcf.VCFHeader;
import org.broad.igv.feature.genome.Genome;
import org.junit.Test;
import java.util.Iterator;
import static org.junit.Assert.*;
public class HtsgetVariantSourceTest {
@Test
public void testGetHeader() throws Exception {
String url = "https://htsget.ga4gh.org/variants/giab.NA12878";
Genome genome = null;
HtsgetUtils.Metadata metadata = HtsgetUtils.getMetadata(url);
HtsgetVariantSource source = new HtsgetVariantSource(metadata, genome);
VCFHeader header = (VCFHeader) source.getHeader();
assertNotNull(header);
}
@Test
public void testReadFeatures() throws Exception {
String url = "https://htsget.ga4gh.org/variants/giab.NA12878";
String chr = "8";
int start = 128732400 - 1;
int end = 128770475;
Genome genome = null;
HtsgetUtils.Metadata metadata = HtsgetUtils.getMetadata(url);
HtsgetVariantSource source = new HtsgetVariantSource(metadata, genome);
Iterator<Feature> featureIterator = source.getFeatures(chr, start, end);
int featureCount = 0;
while (featureIterator.hasNext()) {
Feature f = featureIterator.next();
featureCount++;
}
assertEquals(11, featureCount);
}
// public static void main(String [] args) throws Exception {
// (new HtsgetVariantSourceTest()).testReadFeatures();
// }
} | 623 |
852 | <filename>L1TriggerConfig/L1ScalesProducers/src/L1JetEtScaleOnlineProd.cc
// -*- C++ -*-
//
// Package: L1JetEtScaleOnlineProd
// Class: L1JetEtScaleOnlineProd
//
/**\class L1JetEtScaleOnlineProd L1JetEtScaleOnlineProd.h L1TriggerConfig/L1ScalesProducers/src/L1JetEtScaleOnlineProd.cc
Description: Online producer for L1 jet Et scales
Implementation:
<Notes on implementation>
*/
//
// Original Author: <NAME>
// Created: Tue Sep 16 22:43:22 CEST 2008
//
//
// system include files
// user include files
#include "CondTools/L1Trigger/interface/L1ConfigOnlineProdBase.h"
#include "CondFormats/L1TObjects/interface/L1CaloEtScale.h"
#include "CondFormats/DataRecord/interface/L1JetEtScaleRcd.h"
//
// class declaration
//
class L1JetEtScaleOnlineProd : public L1ConfigOnlineProdBase<L1JetEtScaleRcd, L1CaloEtScale> {
public:
L1JetEtScaleOnlineProd(const edm::ParameterSet&);
~L1JetEtScaleOnlineProd() override;
std::unique_ptr<L1CaloEtScale> newObject(const std::string& objectKey) override;
private:
// ----------member data ---------------------------
};
//
// constants, enums and typedefs
//
//
// static data member definitions
//
//
// constructors and destructor
//
L1JetEtScaleOnlineProd::L1JetEtScaleOnlineProd(const edm::ParameterSet& iConfig)
: L1ConfigOnlineProdBase<L1JetEtScaleRcd, L1CaloEtScale>(iConfig) {
//the following line is needed to tell the framework what
// data is being produced
  //now do whatever other initialization is needed
}
L1JetEtScaleOnlineProd::~L1JetEtScaleOnlineProd() {
  // do anything here that needs to be done at destruction time
// (e.g. close files, deallocate resources etc.)
}
std::unique_ptr<L1CaloEtScale> L1JetEtScaleOnlineProd::newObject(const std::string& objectKey) {
// get scales keys
l1t::OMDSReader::QueryResults scalesKeyResults = m_omdsReader.basicQuery("GCT_SCALES_KEY",
"CMS_GCT",
"GCT_PHYS_PARAMS",
"GCT_PHYS_PARAMS.CONFIG_KEY",
m_omdsReader.singleAttribute(objectKey));
std::string scalesKey;
if (scalesKeyResults.queryFailed()) {
edm::LogError("L1-O2O") << "Problem with key for L1JetEtScaleRcd : GCT scales key query failed ";
} else if (scalesKeyResults.numberRows() != 1) {
edm::LogError("L1-O2O") << "Problem with key for L1JetEtScaleRcd : " << (scalesKeyResults.numberRows())
<< " rows were returned when getting GCT scales key";
} else {
scalesKeyResults.fillVariable(scalesKey);
}
// get jet scale key
l1t::OMDSReader::QueryResults jetScaleKeyResults =
m_omdsReader.basicQuery("SC_CENJET_ET_THRESHOLD_FK", "CMS_GT", "L1T_SCALES", "L1T_SCALES.ID", scalesKeyResults);
std::string jetScaleKey;
if (jetScaleKeyResults.queryFailed()) {
edm::LogError("L1-O2O") << "Problem with key for L1GctJetEtScaleRcd : jet scale key query failed ";
} else if (jetScaleKeyResults.numberRows() != 1) {
edm::LogError("L1-O2O") << "Problem with key for L1GctJetEtScaleRcd : " << (jetScaleKeyResults.numberRows())
<< " rows were returned when getting jet Et scale key";
} else {
jetScaleKeyResults.fillVariable(jetScaleKey);
}
// get thresholds
std::vector<std::string> queryStrings;
queryStrings.push_back("ET_GEV_BIN_LOW_0");
queryStrings.push_back("ET_GEV_BIN_LOW_1");
queryStrings.push_back("ET_GEV_BIN_LOW_2");
queryStrings.push_back("ET_GEV_BIN_LOW_3");
queryStrings.push_back("ET_GEV_BIN_LOW_4");
queryStrings.push_back("ET_GEV_BIN_LOW_5");
queryStrings.push_back("ET_GEV_BIN_LOW_6");
queryStrings.push_back("ET_GEV_BIN_LOW_7");
queryStrings.push_back("ET_GEV_BIN_LOW_8");
queryStrings.push_back("ET_GEV_BIN_LOW_9");
queryStrings.push_back("ET_GEV_BIN_LOW_10");
queryStrings.push_back("ET_GEV_BIN_LOW_11");
queryStrings.push_back("ET_GEV_BIN_LOW_12");
queryStrings.push_back("ET_GEV_BIN_LOW_13");
queryStrings.push_back("ET_GEV_BIN_LOW_14");
queryStrings.push_back("ET_GEV_BIN_LOW_15");
queryStrings.push_back("ET_GEV_BIN_LOW_16");
queryStrings.push_back("ET_GEV_BIN_LOW_17");
queryStrings.push_back("ET_GEV_BIN_LOW_18");
queryStrings.push_back("ET_GEV_BIN_LOW_19");
queryStrings.push_back("ET_GEV_BIN_LOW_20");
queryStrings.push_back("ET_GEV_BIN_LOW_21");
queryStrings.push_back("ET_GEV_BIN_LOW_22");
queryStrings.push_back("ET_GEV_BIN_LOW_23");
queryStrings.push_back("ET_GEV_BIN_LOW_24");
queryStrings.push_back("ET_GEV_BIN_LOW_25");
queryStrings.push_back("ET_GEV_BIN_LOW_26");
queryStrings.push_back("ET_GEV_BIN_LOW_27");
queryStrings.push_back("ET_GEV_BIN_LOW_28");
queryStrings.push_back("ET_GEV_BIN_LOW_29");
queryStrings.push_back("ET_GEV_BIN_LOW_30");
queryStrings.push_back("ET_GEV_BIN_LOW_31");
queryStrings.push_back("ET_GEV_BIN_LOW_32");
queryStrings.push_back("ET_GEV_BIN_LOW_33");
queryStrings.push_back("ET_GEV_BIN_LOW_34");
queryStrings.push_back("ET_GEV_BIN_LOW_35");
queryStrings.push_back("ET_GEV_BIN_LOW_36");
queryStrings.push_back("ET_GEV_BIN_LOW_37");
queryStrings.push_back("ET_GEV_BIN_LOW_38");
queryStrings.push_back("ET_GEV_BIN_LOW_39");
queryStrings.push_back("ET_GEV_BIN_LOW_40");
queryStrings.push_back("ET_GEV_BIN_LOW_41");
queryStrings.push_back("ET_GEV_BIN_LOW_42");
queryStrings.push_back("ET_GEV_BIN_LOW_43");
queryStrings.push_back("ET_GEV_BIN_LOW_44");
queryStrings.push_back("ET_GEV_BIN_LOW_45");
queryStrings.push_back("ET_GEV_BIN_LOW_46");
queryStrings.push_back("ET_GEV_BIN_LOW_47");
queryStrings.push_back("ET_GEV_BIN_LOW_48");
queryStrings.push_back("ET_GEV_BIN_LOW_49");
queryStrings.push_back("ET_GEV_BIN_LOW_50");
queryStrings.push_back("ET_GEV_BIN_LOW_51");
queryStrings.push_back("ET_GEV_BIN_LOW_52");
queryStrings.push_back("ET_GEV_BIN_LOW_53");
queryStrings.push_back("ET_GEV_BIN_LOW_54");
queryStrings.push_back("ET_GEV_BIN_LOW_55");
queryStrings.push_back("ET_GEV_BIN_LOW_56");
queryStrings.push_back("ET_GEV_BIN_LOW_57");
queryStrings.push_back("ET_GEV_BIN_LOW_58");
queryStrings.push_back("ET_GEV_BIN_LOW_59");
queryStrings.push_back("ET_GEV_BIN_LOW_60");
queryStrings.push_back("ET_GEV_BIN_LOW_61");
queryStrings.push_back("ET_GEV_BIN_LOW_62");
queryStrings.push_back("ET_GEV_BIN_LOW_63");
l1t::OMDSReader::QueryResults scaleResults = m_omdsReader.basicQuery(
queryStrings, "CMS_GT", "L1T_SCALE_CALO_ET_THRESHOLD", "L1T_SCALE_CALO_ET_THRESHOLD.ID", jetScaleKeyResults);
std::vector<double> thresholds;
if (scaleResults.queryFailed() || scaleResults.numberRows() != 1) // check query successful
{
edm::LogError("L1-O2O") << "Problem with L1JetEtScale key : when reading scale.";
} else {
for (std::vector<std::string>::iterator thresh = queryStrings.begin(); thresh != queryStrings.end(); ++thresh) {
float tempScale = 0.0;
scaleResults.fillVariable(*thresh, tempScale);
thresholds.push_back(tempScale);
}
}
// get region LSB
double rgnEtLsb = 0.;
l1t::OMDSReader::QueryResults lsbResults = m_omdsReader.basicQuery("GCT_RGN_ET_LSB",
"CMS_GCT",
"GCT_PHYS_PARAMS",
"GCT_PHYS_PARAMS.CONFIG_KEY",
m_omdsReader.singleAttribute(objectKey));
if (lsbResults.queryFailed()) {
edm::LogError("L1-O2O") << "Problem with L1JetEtScale key.";
} else {
lsbResults.fillVariable("GCT_RGN_ET_LSB", rgnEtLsb);
}
// return object
return std::make_unique<L1CaloEtScale>(rgnEtLsb, thresholds);
}
// ------------ method called to produce the data ------------
//define this as a plug-in
DEFINE_FWK_EVENTSETUP_MODULE(L1JetEtScaleOnlineProd);
| 3,778 |
345 | package com.github.theholywaffle.teamspeak3.commands;
/*
* #%L
* TeamSpeak 3 Java API
* %%
* Copyright (C) 2017 <NAME>, <NAME>
* %%
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
* #L%
*/
import com.github.theholywaffle.teamspeak3.api.ClientProperty;
import com.github.theholywaffle.teamspeak3.api.ReasonIdentifier;
import com.github.theholywaffle.teamspeak3.commands.parameter.ArrayParameter;
import com.github.theholywaffle.teamspeak3.commands.parameter.KeyValueParam;
import com.github.theholywaffle.teamspeak3.commands.parameter.OptionParam;
import java.util.Map;
public final class ClientCommands {
private ClientCommands() {
throw new Error("No instances");
}
public static Command clientEdit(int clientId, Map<ClientProperty, String> options) {
CommandBuilder builder = new CommandBuilder("clientedit", 2);
builder.add(new KeyValueParam("clid", clientId));
builder.addProperties(options);
return builder.build();
}
public static Command clientFind(String pattern) {
if (pattern == null || pattern.isEmpty()) {
throw new IllegalArgumentException("Client name pattern must be a non-empty string");
}
return new CommandBuilder("clientfind", 1).add(new KeyValueParam("pattern", pattern)).build();
}
public static Command clientGetDBIdFromUId(String clientUId) {
if (clientUId == null || clientUId.isEmpty()) {
throw new IllegalArgumentException("Client UId must be a non-empty string");
}
return new CommandBuilder("clientgetdbidfromuid", 1).add(new KeyValueParam("cluid", clientUId)).build();
}
public static Command clientGetIds(String clientUId) {
if (clientUId == null || clientUId.isEmpty()) {
throw new IllegalArgumentException("Client UId must be a non-empty string");
}
return new CommandBuilder("clientgetids", 1).add(new KeyValueParam("cluid", clientUId)).build();
}
public static Command clientInfo(int clientId) {
return new CommandBuilder("clientinfo", 1).add(new KeyValueParam("clid", clientId)).build();
}
public static Command clientKick(ReasonIdentifier reason, String reasonMessage, int... clientIds) {
if (clientIds == null || clientIds.length == 0) {
throw new IllegalArgumentException("Client ID array cannot be null or empty");
}
CommandBuilder builder = new CommandBuilder("clientkick", 3);
builder.add(new KeyValueParam("reasonid", reason.getIndex()));
builder.addIf(reasonMessage != null, new KeyValueParam("reasonmsg", reasonMessage));
ArrayParameter clients = new ArrayParameter(clientIds.length);
for (final int id : clientIds) {
clients.add(new KeyValueParam("clid", id));
}
builder.add(clients);
return builder.build();
}
public static Command clientList() {
CommandBuilder builder = new CommandBuilder("clientlist", 11);
builder.add(new OptionParam("uid"));
builder.add(new OptionParam("away"));
builder.add(new OptionParam("voice"));
builder.add(new OptionParam("times"));
builder.add(new OptionParam("groups"));
builder.add(new OptionParam("info"));
builder.add(new OptionParam("icon"));
builder.add(new OptionParam("country"));
builder.add(new OptionParam("ip"));
builder.add(new OptionParam("badges"));
builder.add(new OptionParam("location"));
return builder.build();
}
public static Command clientMove(int clientId, int channelId, String channelPassword) {
CommandBuilder builder = new CommandBuilder("clientmove", 3);
builder.add(new KeyValueParam("clid", clientId));
builder.add(new KeyValueParam("cid", channelId));
builder.addIf(channelPassword != null, new KeyValueParam("cpw", channelPassword));
return builder.build();
}
public static Command clientMove(int[] clientIds, int channelId, String channelPassword) {
if (clientIds == null || clientIds.length == 0) {
throw new IllegalArgumentException("Client ID array cannot be null or empty");
}
CommandBuilder builder = new CommandBuilder("clientmove", 3);
builder.add(new KeyValueParam("cid", channelId));
builder.addIf(channelPassword != null, new KeyValueParam("cpw", channelPassword));
ArrayParameter clients = new ArrayParameter(clientIds.length);
for (final int clientId : clientIds) {
clients.add(new KeyValueParam("clid", clientId));
}
builder.add(clients);
return builder.build();
}
public static Command clientPoke(int clientId, String message) {
CommandBuilder builder = new CommandBuilder("clientpoke", 2);
builder.add(new KeyValueParam("clid", clientId));
builder.add(new KeyValueParam("msg", message));
return builder.build();
}
public static Command clientSetServerQueryLogin(String username) {
CommandBuilder builder = new CommandBuilder("clientsetserverquerylogin", 1);
builder.add(new KeyValueParam("client_login_name", username));
return builder.build();
}
public static Command clientUpdate(Map<ClientProperty, String> options) {
return new CommandBuilder("clientupdate", 1).addProperties(options).build();
}
public static Command sendTextMessage(int targetMode, int targetId, String message) {
CommandBuilder builder = new CommandBuilder("sendtextmessage", 3);
builder.add(new KeyValueParam("targetmode", targetMode));
builder.add(new KeyValueParam("target", targetId));
builder.add(new KeyValueParam("msg", message));
return builder.build();
}
}
| 1,884 |
2,151 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/at_exit.h"
#include "base/i18n/icu_util.h"
#include "content/browser/appcache/appcache_manifest_parser.h" // nogncheck
#include "url/gurl.h"
namespace content {
struct IcuEnvironment {
IcuEnvironment() { CHECK(base::i18n::InitializeICU()); }
// used by ICU integration.
base::AtExitManager at_exit_manager;
};
IcuEnvironment* env = new IcuEnvironment();
// Entry point for LibFuzzer.
extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
AppCacheManifest manifest;
const GURL kUrl("http://www.example.com");
ParseManifest(kUrl, reinterpret_cast<const char*>(data), size,
PARSE_MANIFEST_ALLOWING_DANGEROUS_FEATURES, manifest);
return 0;
}
} // namespace content
| 319 |
1,107 | <reponame>malxau/yori
/**
* @file ysetup/ysetup.c
*
* Yori shell GUI installer
*
* Copyright (c) 2018-2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <yoripch.h>
#include <yorilib.h>
#include <yoripkg.h>
#include "resource.h"
#include "ysetup.h"
VOID
SetupGuiUpdateStatus(
__in PCYORI_STRING Text,
__in PVOID Context
)
{
HWND hDlg;
hDlg = (HWND)Context;
ASSERT(YoriLibIsStringNullTerminated(Text));
SetDlgItemText(hDlg, IDC_STATUS, Text->StartOfString);
}
/**
Install the user specified set of packages and options from the dialog.
@param hDlg Specifies the hWnd of the dialog box.
@return TRUE to indicate success, FALSE to indicate failure.
*/
BOOL
SetupGuiInstallSelectedFromDialog(
__in HWND hDlg
)
{
DWORD LengthNeeded;
YORI_STRING InstallDir;
BOOL Result = FALSE;
YSETUP_INSTALL_TYPE InstallType;
DWORD InstallOptions;
YORI_STRING ErrorText;
YoriLibInitEmptyString(&ErrorText);
InstallOptions = 0;
//
// Query the install directory and attempt to create it
//
LengthNeeded = (DWORD)SendDlgItemMessage(hDlg, IDC_INSTALLDIR, WM_GETTEXTLENGTH, 0, 0);
if (!YoriLibAllocateString(&InstallDir, LengthNeeded + 1)) {
MessageBox(hDlg, _T("Installation failed."), _T("Installation failed."), MB_ICONSTOP);
return FALSE;
}
InstallDir.LengthInChars = GetDlgItemText(hDlg, IDC_INSTALLDIR, InstallDir.StartOfString, InstallDir.LengthAllocated);
//
// Count the number of packages we want to install
//
if (IsDlgButtonChecked(hDlg, IDC_COMPLETE)) {
InstallType = InstallTypeComplete;
} else if (IsDlgButtonChecked(hDlg, IDC_COREONLY)) {
InstallType = InstallTypeCore;
} else {
InstallType = InstallTypeTypical;
}
if (IsDlgButtonChecked(hDlg, IDC_SYMBOLS)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_SYMBOLS;
}
if (IsDlgButtonChecked(hDlg, IDC_SOURCE)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_SOURCE;
}
if (IsDlgButtonChecked(hDlg, IDC_DESKTOP_SHORTCUT)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_DESKTOP_SHORTCUT;
}
if (IsDlgButtonChecked(hDlg, IDC_START_SHORTCUT)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_START_SHORTCUT;
}
if (IsDlgButtonChecked(hDlg, IDC_TERMINAL_PROFILE)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_TERMINAL_PROFILE;
}
if (IsDlgButtonChecked(hDlg, IDC_USER_PATH)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_USER_PATH;
}
if (IsDlgButtonChecked(hDlg, IDC_SYSTEM_PATH)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_SYSTEM_PATH;
}
if (IsDlgButtonChecked(hDlg, IDC_UNINSTALL)) {
InstallOptions = InstallOptions | YSETUP_INSTALL_UNINSTALL;
}
Result = SetupInstallSelectedWithOptions(&InstallDir, InstallType, InstallOptions, SetupGuiUpdateStatus, hDlg, &ErrorText);
if (Result) {
MessageBox(hDlg, ErrorText.StartOfString, _T("Installation complete."), MB_ICONINFORMATION);
} else {
MessageBox(hDlg, ErrorText.StartOfString, _T("Installation failed."), MB_ICONSTOP);
}
YoriLibFreeStringContents(&InstallDir);
YoriLibFreeStringContents(&ErrorText);
return Result;
}
/**
The DialogProc for the setup dialog box.
@param hDlg Specifies the hWnd of the dialog box.
@param uMsg Specifies the window message received by the dialog box.
@param wParam Specifies the first parameter to the window message.
@param lParam Specifies the second parameter to the window message.
@return TRUE to indicate the message was successfully processed.
*/
BOOL CALLBACK
SetupGuiDialogProc(
__in HWND hDlg,
__in UINT uMsg,
__in WPARAM wParam,
__in LPARAM lParam
)
{
RECT rcDesktop, rcDlg, rcNew;
WORD CtrlId;
HICON hIcon;
YORI_STRING InstallDir;
DWORD OsVerMajor;
DWORD OsVerMinor;
DWORD OsBuildNumber;
UNREFERENCED_PARAMETER(lParam);
switch(uMsg) {
case WM_COMMAND:
switch(LOWORD(wParam)) {
case IDC_COREONLY:
case IDC_TYPICAL:
case IDC_COMPLETE:
for (CtrlId = IDC_COREONLY; CtrlId <= IDC_COMPLETE; CtrlId++) {
CheckDlgButton(hDlg, CtrlId, FALSE);
}
CheckDlgButton(hDlg, LOWORD(wParam), TRUE);
break;
case IDC_DESKTOP_SHORTCUT:
case IDC_START_SHORTCUT:
case IDC_TERMINAL_PROFILE:
case IDC_SYSTEM_PATH:
case IDC_USER_PATH:
case IDC_SOURCE:
case IDC_SYMBOLS:
case IDC_UNINSTALL:
CtrlId = LOWORD(wParam);
if (IsDlgButtonChecked(hDlg, CtrlId)) {
CheckDlgButton(hDlg, CtrlId, FALSE);
} else {
CheckDlgButton(hDlg, CtrlId, TRUE);
}
break;
case IDC_OK:
if (!SetupGuiInstallSelectedFromDialog(hDlg)) {
EndDialog(hDlg, FALSE);
} else {
EndDialog(hDlg, TRUE);
}
return TRUE;
case IDC_CANCEL:
EndDialog(hDlg, FALSE);
return TRUE;
case IDC_BROWSE:
if (DllShell32.pSHBrowseForFolderW != NULL &&
DllShell32.pSHGetPathFromIDListW != NULL) {
YORI_BROWSEINFO BrowseInfo;
PVOID ShellIdentifierForPath;
ZeroMemory(&BrowseInfo, sizeof(BrowseInfo));
BrowseInfo.hWndOwner = hDlg;
BrowseInfo.Title = _T("Please select a folder to install Yori:");
BrowseInfo.Flags = 0x51;
ShellIdentifierForPath = DllShell32.pSHBrowseForFolderW(&BrowseInfo);
if (ShellIdentifierForPath != NULL) {
YoriLibAllocateString(&InstallDir, MAX_PATH);
DllShell32.pSHGetPathFromIDListW(ShellIdentifierForPath, InstallDir.StartOfString);
SetDlgItemText(hDlg, IDC_INSTALLDIR, InstallDir.StartOfString);
YoriLibFreeStringContents(&InstallDir);
if (DllOle32.pCoTaskMemFree != NULL) {
DllOle32.pCoTaskMemFree(ShellIdentifierForPath);
}
}
}
}
break;
case WM_CLOSE:
EndDialog(hDlg, 0);
return TRUE;
case WM_INITDIALOG:
hIcon = LoadIcon(GetModuleHandle(NULL), MAKEINTRESOURCE(1));
SendMessage(hDlg, WM_SETICON, ICON_SMALL, (LPARAM)hIcon);
SendMessage(hDlg, WM_SETICON, ICON_BIG, (LPARAM)hIcon);
GetWindowRect(GetDesktopWindow(), &rcDesktop);
GetWindowRect(hDlg, &rcDlg);
rcNew.left = ((rcDesktop.right - rcDesktop.left) - (rcDlg.right - rcDlg.left)) / 2;
rcNew.top = ((rcDesktop.bottom - rcDesktop.top) - (rcDlg.bottom - rcDlg.top)) / 2;
SetWindowPos(hDlg, HWND_TOP, rcNew.left, rcNew.top, 0, 0, SWP_NOSIZE);
{
TCHAR Version[32];
#if YORI_BUILD_ID
YoriLibSPrintf(Version, _T("%i.%02i.%i"), YORI_VER_MAJOR, YORI_VER_MINOR, YORI_BUILD_ID);
#else
YoriLibSPrintf(Version, _T("%i.%02i"), YORI_VER_MAJOR, YORI_VER_MINOR);
#endif
SetDlgItemText(hDlg, IDC_VERSION, Version);
}
SetupGetDefaultInstallDir(&InstallDir);
SetDlgItemText(hDlg, IDC_INSTALLDIR, InstallDir.StartOfString);
YoriLibFreeStringContents(&InstallDir);
CheckDlgButton(hDlg, IDC_TYPICAL, TRUE);
//
// On NT 3.5x try to set the font to something not bold that has
// similar geometry to NT 4.0. This helps ensure the text fits
// within the controls, and it just looks nicer. Unfortunately
// the dialog has already been created by this point, so the size
// of the controls and the dialog is set according to the default
// font's specification. Since the default font is larger than
// this one, the result is typically a needlessly large window.
//
YoriLibGetOsVersion(&OsVerMajor, &OsVerMinor, &OsBuildNumber);
if (OsVerMajor < 4) {
HFONT hFont;
HDC hDC;
DWORD Index;
UINT ControlArray[] = {
IDC_INSTALLDIR,
IDC_OK,
IDC_CANCEL,
IDC_BROWSE,
IDC_STATUS,
IDC_VERSION,
IDC_LABEL_INSTALLDIR,
IDC_LABEL_INSTALLTYPE,
IDC_LABEL_COREDESC,
IDC_LABEL_TYPICALDESC,
IDC_LABEL_COMPLETEDESC,
IDC_LABEL_INSTALLOPTIONS,
IDC_COREONLY,
IDC_TYPICAL,
IDC_COMPLETE,
IDC_DESKTOP_SHORTCUT,
IDC_START_SHORTCUT,
IDC_TERMINAL_PROFILE,
IDC_SYSTEM_PATH,
IDC_USER_PATH,
IDC_SOURCE,
IDC_SYMBOLS,
IDC_UNINSTALL,
};
hDC = GetWindowDC(hDlg);
hFont = CreateFont(-MulDiv(8, GetDeviceCaps(hDC, LOGPIXELSY), 72),
0,
0,
0,
FW_NORMAL,
FALSE,
FALSE,
FALSE,
DEFAULT_CHARSET,
OUT_DEFAULT_PRECIS,
CLIP_DEFAULT_PRECIS,
DEFAULT_QUALITY,
FF_DONTCARE,
_T("MS Sans Serif"));
ReleaseDC(hDlg, hDC);
for (Index = 0; Index < sizeof(ControlArray)/sizeof(ControlArray[0]); Index++) {
SendDlgItemMessage(hDlg, ControlArray[Index], WM_SETFONT, (WPARAM)hFont, MAKELPARAM(FALSE, 0));
}
SendMessage(hDlg, WM_SETFONT, (WPARAM)hFont, MAKELPARAM(TRUE, 0));
//
// Since we already have an NT 3.5x branch, disable controls
// that depend on explorer
//
EnableWindow(GetDlgItem(hDlg, IDC_BROWSE), FALSE);
EnableWindow(GetDlgItem(hDlg, IDC_DESKTOP_SHORTCUT), FALSE);
EnableWindow(GetDlgItem(hDlg, IDC_TERMINAL_PROFILE), FALSE);
EnableWindow(GetDlgItem(hDlg, IDC_UNINSTALL), FALSE);
SetDlgItemText(hDlg, IDC_START_SHORTCUT, _T("Install Program Manager &shortcut"));
} else if (!SetupPlatformSupportsShortcuts()) {
//
// On NT 4 RTM, we can create a start menu shortcut via DDE,
// but not a Desktop shortcut.
//
EnableWindow(GetDlgItem(hDlg, IDC_DESKTOP_SHORTCUT), FALSE);
}
return TRUE;
}
return FALSE;
}
/**
Display a dialog allowing the user to select the installation options and
perform the requested operation.
@return TRUE to indicate successful completion, FALSE to indicate failure.
*/
BOOL
SetupGuiDisplayUi(VOID)
{
CONSOLE_SCREEN_BUFFER_INFO ScreenInfo;
DWORD OsVerMajor;
DWORD OsVerMinor;
DWORD OsBuildNumber;
if (GetConsoleScreenBufferInfo(GetStdHandle(STD_OUTPUT_HANDLE), &ScreenInfo)) {
if (ScreenInfo.dwCursorPosition.X == 0 && ScreenInfo.dwCursorPosition.Y == 0) {
FreeConsole();
}
}
//
    //  When running on NT 3.5x, attempt to provide a 3D appearance from
// Ctl3D32.dll. Since this is cosmetic, just continue on failure.
//
YoriLibGetOsVersion(&OsVerMajor, &OsVerMinor, &OsBuildNumber);
if (OsVerMajor < 4) {
YoriLibLoadCtl3d32Functions();
if (DllCtl3d.pCtl3dRegister != NULL && DllCtl3d.pCtl3dAutoSubclass != NULL) {
DllCtl3d.pCtl3dRegister(NULL);
DllCtl3d.pCtl3dAutoSubclass(NULL);
}
}
if (DllCabinet.pFdiCopy == NULL ||
(DllWinInet.hDll == NULL && DllWinHttp.hDll == NULL)) {
YORI_STRING MessageString;
LPCSTR DllMissingWarning;
DllMissingWarning = SetupGetDllMissingMessage();
YoriLibInitEmptyString(&MessageString);
        YoriLibYPrintf(&MessageString, _T("%hs"), DllMissingWarning);
MessageBox(NULL,
MessageString.StartOfString,
_T("YSetup"),
MB_ICONEXCLAMATION);
YoriLibFreeStringContents(&MessageString);
return TRUE;
}
DialogBox(NULL, MAKEINTRESOURCE(SETUPDIALOG), NULL, (DLGPROC)SetupGuiDialogProc);
return TRUE;
}
// vim:sw=4:ts=4:et:
| 7,461 |
1,037 | <reponame>mutalisk999/bibi<filename>application/services/payment/wechat.py
# -*- coding: UTF-8 -*-
# File: __init__.py
# Author: SivaCoHan <<EMAIL>>
# Date: 2015-04-27
import time
import logging
import json
import qrcode
import hashlib
import urllib
import random
import requests
from xml.etree import ElementTree
from dict2xml import dict2xml
from collections import OrderedDict
from urllib.parse import urlencode
from .exceptions import MissingParameter
from .exceptions import ParameterValueError
from .exceptions import TokenAuthorizationError
log = logging.getLogger(__name__)
class WXPay(object):
    ''' WeChat Pay base class '''
URL_UINFIEDORDER = 'https://api.mch.weixin.qq.com/pay/unifiedorder'
    URL_VERIFY_ORDER = 'https://api.mch.weixin.qq.com/pay/orderquery'
    # Refund endpoint; requests against it require the merchant API client certificate.
    URL_REFUND_ORDER = 'https://api.mch.weixin.qq.com/secapi/pay/refund'
def __init__(self, appid, mch_id, key, ip,
notify_url=None, appsecret=None):
self.appid = appid
self.mch_id = mch_id
self.key = key
self.appsecret = appsecret
self.ip = ip
self.notify_url = notify_url
        self.cert_path = "path/to/apiclient_cert.pem"  # placeholder for the merchant PEM certificate path
def generate_nonce_str(self, length=32):
        ''' Generate a random nonce string. '''
hashChar = [
'0', '1', '2', '3', '4', '5', '6', '7',
'8', '9',
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P',
'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X',
'Y', 'Z',
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p',
'q', 'r', 's', 't', 'u', 'v', 'w', 'x',
'y', 'z',
]
rand_list = [hashChar[random.randint(0, 61)] for i in range(0, length)]
nonce_str = ''.join(rand_list)
return nonce_str
def generate_sign(self, sign_dict):
        ''' Generate the request signature; only MD5 signing is supported. '''
params_dict = OrderedDict(sorted(sign_dict.items(),
key=lambda t: t[0]))
params_dict['key'] = self.key
foo_sign = []
for k in params_dict:
            if isinstance(params_dict[k], bytes):
                params_dict[k] = params_dict[k].decode('utf-8')
            foo_sign.append('%s=%s' % (k, params_dict[k], ))
        foo_sign = '&'.join(foo_sign)
        sign = hashlib.md5(foo_sign.encode('utf-8')).hexdigest().upper()
return sign
def unifiedorder(self, product, openid=None, trade_type=None):
        ''' Unified order API (unifiedorder). '''
assert isinstance(product, dict)
assert trade_type in ('JSAPI', 'NATIVE', 'APP')
post_dict = {
'appid': self.appid,
'attach': product['attach'],
'body': product['body'],
'mch_id': self.mch_id,
'nonce_str': self.generate_nonce_str(),
'notify_url': self.notify_url,
'out_trade_no': product['out_trade_no'],
'spbill_create_ip': self.ip,
            'total_fee': int(product['total_fee']*100),  # WeChat amounts are expressed in fen (cents)
'trade_type': trade_type,
}
if trade_type == 'JSAPI':
post_dict['openid'] = openid
if openid is None:
                raise MissingParameter(u'openid is required for JSAPI payments')
post_dict['sign'] = self.generate_sign(post_dict)
ret_xml = dict2xml(post_dict, wrap='xml')
r = requests.post(self.URL_UINFIEDORDER, data=ret_xml.encode('utf-8'))
r.encoding = 'UTF-8'
data = r.text.encode('utf-8')
ret_dict = {}
x = ElementTree.fromstring(data)
if x.find('return_code').text.upper() == 'FAIL':
raise ParameterValueError(x.find('return_msg').text)
if x.find('result_code').text.upper() == 'FAIL':
raise ParameterValueError(x.find('err_code').text)
if trade_type == 'NATIVE':
ret_dict['prepay_id'] = x.find('prepay_id').text
ret_dict['code_url'] = x.find('code_url').text
else:
ret_dict['prepay_id'] = x.find('prepay_id').text
return ret_dict
def refundorder(self,out_trade_no=None,transaction_id=None,total_fee=None,refund_fee=None):
"""退款接口"""
post_dict = {
'appid': self.appid,
'mch_id': self.mch_id,
'nonce_str': self.generate_nonce_str(),
'out_trade_no': out_trade_no,
"out_refund_no" : out_trade_no,
"transaction_id" : transaction_id,
"total_fee" : total_fee,
'refund_fee': refund_fee,
"op_user_id" : self.mch_id
}
post_dict["sign"] = self.generate_sign(post_dict)
ret_xml = dict2xml(post_dict, wrap='xml')
log.debug("请求参数")
log.debug(ret_xml)
r = requests.post(self.URL_REFUND_ORDER, data=ret_xml.encode('utf-8') ,cert=self.cert_path)
r.encoding = 'UTF-8'
data = r.text.encode('utf-8')
ret_dict = {}
x = ElementTree.fromstring(data)
if x.find('return_code').text.upper() == 'FAIL':
raise ParameterValueError(x.find('return_msg').text)
if x.find('result_code').text.upper() == 'FAIL':
raise ParameterValueError(x.find('err_code').text)
if x.find('return_code').text.upper() == "SUCCESS" and x.find('result_code').text.upper() == "SUCCESS":
return True
return False
def verify_notify(self, xml_str):
        ''' Verify the signature of an asynchronous notify callback. '''
xml_dict = {}
x = ElementTree.fromstring(xml_str)
xml_dict['appid'] = x.find('appid').text
xml_dict['attach'] = x.find('attach').text
xml_dict['bank_type'] = x.find('bank_type').text
xml_dict['cash_fee'] = x.find('cash_fee').text
xml_dict['fee_type'] = x.find('fee_type').text
xml_dict['is_subscribe'] = x.find('is_subscribe').text
xml_dict['mch_id'] = x.find('mch_id').text
xml_dict['nonce_str'] = x.find('nonce_str').text
xml_dict['openid'] = x.find('openid').text
xml_dict['out_trade_no'] = x.find('out_trade_no').text
xml_dict['result_code'] = x.find('result_code').text
xml_dict['return_code'] = x.find('return_code').text
xml_dict['sign'] = x.find('sign').text
xml_dict['time_end'] = x.find('time_end').text
xml_dict['total_fee'] = x.find('total_fee').text
xml_dict['trade_type'] = x.find('trade_type').text
xml_dict['transaction_id'] = x.find('transaction_id').text
sign = xml_dict.pop('sign')
if sign == self.generate_sign(xml_dict):
return True, xml_dict
else:
            raise TokenAuthorizationError(u'signature verification failed')
def generate_notify_resp(self, resp_dict):
assert set(resp_dict.keys()) == set(['return_code', 'return_msg'])
xml_str = dict2xml(resp_dict, wrap='xml')
return xml_str
def verify_order(self, out_trade_no=None, transaction_id=None):
if out_trade_no is None and transaction_id is None:
            raise MissingParameter(u'out_trade_no and transaction_id cannot both be empty')
params_dict = {
'appid': self.appid,
'mch_id': self.mch_id,
'nonce_str': self.generate_nonce_str(),
}
if transaction_id is not None:
params_dict['transaction_id'] = transaction_id
elif out_trade_no is not None:
params_dict['out_trade_no'] = out_trade_no
params_dict['sign'] = self.generate_sign(params_dict)
xml_str = dict2xml(params_dict, wrap='xml')
r = requests.post(self.URL_VERIFY_ORDER, xml_str)
r.encoding = 'UTF-8'
data = r.text.encode('UTF-8')
xml_dict = {}
x = ElementTree.fromstring(data)
xml_dict['return_code'] = x.find('return_code').text
xml_dict['return_msg'] = x.find('return_msg').text
if xml_dict['return_code'] == 'FAIL':
return xml_dict
xml_dict['appid'] = x.find('appid').text
xml_dict['mch_id'] = x.find('mch_id').text
# xml_dict['device_info'] = x.find('device_info').text
xml_dict['nonce_str'] = x.find('nonce_str').text
xml_dict['sign'] = x.find('sign').text
xml_dict['result_code'] = x.find('result_code').text
# xml_dict['err_code'] = x.find('err_code').text
# xml_dict['err_code_des'] = x.find('err_code_des').text
xml_dict['openid'] = x.find('openid').text
xml_dict['is_subscribe'] = x.find('is_subscribe').text
xml_dict['trade_type'] = x.find('trade_type').text
xml_dict['bank_type'] = x.find('bank_type').text
xml_dict['total_fee'] = x.find('total_fee').text
xml_dict['fee_type'] = x.find('fee_type').text
xml_dict['cash_fee'] = x.find('cash_fee').text
# xml_dict['cash_fee_type'] = x.find('cash_fee_type').text
# xml_dict['coupon_fee'] = x.find('coupon_fee').text
# xml_dict['coupon_count'] = int(x.find('coupon_count').text)
# for i in range(xml_dict['coupon_count']):
# xml_dict['coupon_batch_id_%d' % i+1] = x.find('coupon_batch_id_%d' % i+1).text
# xml_dict['coupon_id_%d' % i+1] = x.find('coupon_id_%d' % i+1).text
# xml_dict['coupon_fee_%d' % i+1] = x.find('coupon_fee_%d' % i+1).text
xml_dict['transaction_id'] = x.find('transaction_id').text
xml_dict['out_trade_no'] = x.find('out_trade_no').text
xml_dict['attach'] = x.find('attach').text
xml_dict['time_end'] = x.find('time_end').text
xml_dict['trade_state'] = x.find('trade_state').text
sign = xml_dict.pop('sign')
if sign == self.generate_sign(xml_dict):
return xml_dict
else:
            raise TokenAuthorizationError(u'signature verification failed')
class QRWXPay(WXPay):
    ''' Native (QR code) payment API. '''
URL_QR = 'weixin://wxpay/bizpayurl?%s'
def _generate_qr_url(self, product_id):
        '''
        Generate the QR URL.
        This is WeChat payment mode 1: a QR code is pre-generated, and after the user
        scans it WeChat calls the callback URL configured on the WeChat platform.
        '''
url_dict = {
'appid': self.appid,
'mch_id': self.mch_id,
'nonce_str': self.generate_nonce_str(),
'product_id': str(product_id),
'time_stamp': str(int(time.time())),
}
url_dict['sign'] = self.generate_sign(url_dict)
        url_str = self.URL_QR % urlencode(url_dict)
return url_str
def unifiedorder(self, product, openid=None):
ret_dict = super(QRWXPay, self).unifiedorder(product,
trade_type='NATIVE')
return ret_dict
def _generate_unfiedorder_url(self, product):
        '''
        Generate the QR URL.
        This is WeChat payment mode 2: the code_url is produced by the unified order API.
        '''
ret = self.unifiedorder(product=product)
return ret['code_url']
def _generate_qr(self, url):
        '''
        Generate a QR code image for the given URL.
        Pillow is recommended for handling the returned image.
        '''
img = qrcode.make(url)
return img
def generate_static_qr(self, product_id):
        '''
        Generate a static QR code for a product (WeChat payment mode 1).
        Returns a Pillow image.
        '''
url = self._generate_qr_url(product_id)
img = self._generate_qr(url)
return img
def generate_product_qr(self, product):
        '''
        Generate a QR code for a product (WeChat payment mode 2).
        The QR code is valid for two hours.
        Returns a Pillow image.
        '''
url = self._generate_unfiedorder_url(product)
img = self._generate_qr(url)
return img
def _callback_xml2dict(self, xml_str):
ret_dict = {}
x = ElementTree.fromstring(xml_str)
ret_dict['appid'] = x.find('appid').text
ret_dict['openid'] = x.find('openid').text
ret_dict['mch_id'] = x.find('mch_id').text
ret_dict['is_subscribe'] = x.find('is_subscribe').text
ret_dict['nonce_str'] = x.find('nonce_str').text
ret_dict['product_id'] = x.find('product_id').text
ret_dict['sign'] = x.find('sign').text
return ret_dict
def verify_callback(self, xml_str):
        ''' Verify the signature of the payment mode 1 callback. '''
xml_dict = self._callback_xml2dict(xml_str)
sign = xml_dict.pop('sign')
if sign == self.generate_sign(xml_dict):
return True, xml_dict
else:
            raise TokenAuthorizationError(u'signature verification failed')
def generate_cb_resp(self, resp_dict):
ret_dict = {
'appid': self.appid,
'mch_id': self.mch_id,
'nonce_str': self.generate_nonce_str(),
'prepay_id': resp_dict['prepay_id'],
'return_code': resp_dict['return_code'], # 'SUCCESS', 'FAIL'
'return_msg': resp_dict['return_msg'], # 'OK'
'result_code': resp_dict['result_code'], # 'SUCCESS', 'FAIL'
'err_code_des': resp_dict['err_code_des'], # 'OK'
}
ret_dict['sign'] = self.generate_sign(ret_dict)
ret_xml = dict2xml(ret_dict, wrap='xml')
return ret_xml
class JSWXPay(WXPay):
    ''' JSAPI payment API. '''
URL_REDIRECT = '''https://open.weixin.qq.com/connect/oauth2/authorize?%s'''\
'''#wechat_redirect'''
URL_OPENID = '''https://api.weixin.qq.com/sns/oauth2/access_token?%s'''\
'''&grant_type=authorization_code'''
def generate_redirect_url(self, url_dict):
        ''' Generate the OAuth redirect URL; the redirect yields a code that is then exchanged for an openid. '''
params_dict = {
'appid': self.appid,
'redirect_uri': url_dict['redirect_uri'],
'response_type': 'code',
'scope': 'snsapi_base',
'state': url_dict['state'],
}
for k in params_dict:
            if isinstance(params_dict[k], bytes):
                params_dict[k] = params_dict[k].decode('utf-8')
        foo_url = urlencode(params_dict)
url = self.URL_REDIRECT % foo_url
return url
def generate_openid(self, code):
        ''' Exchange the OAuth code for the user openid. '''
if self.appsecret is None:
            raise MissingParameter(u'appsecret is missing')
params_dict = {
'appid': self.appid,
'secret': self.appsecret,
'code': code,
}
foo_url = []
for k in params_dict:
            if isinstance(params_dict[k], bytes):
                params_dict[k] = params_dict[k].decode('utf-8')
foo_url.append('%s=%s' % (k, params_dict[k], ))
foo_url = '&'.join(foo_url)
url = self.URL_OPENID % foo_url
r = requests.get(url)
r.encoding = 'UTF-8'
data = json.loads(r.text)
return data['openid']
def unifiedorder(self, product, openid=None):
ret_dict = super(JSWXPay, self).unifiedorder(product,
openid=openid,
trade_type='JSAPI')
return ret_dict
def generate_jsapi(self, product, openid):
        ''' Place the actual order and build the parameters for the JS bridge. '''
uni_dict = self.unifiedorder(product, openid)
ret_dict = {
'appId': self.appid,
'timeStamp': str(int(time.time())),
'nonceStr': self.generate_nonce_str(),
'package': 'prepay_id=%s' % uni_dict['prepay_id'],
'signType': 'MD5',
}
ret_dict['paySign'] = self.generate_sign(ret_dict)
return ret_dict
class APPWXPay(WXPay):
    ''' APP payment API. '''
def unifiedorder(self, product, openid=None):
ret_dict = super(APPWXPay, self).unifiedorder(product,
trade_type='APP')
return ret_dict
def generate_req(self, product):
        ''' Place the actual order. '''
uni_dict = self.unifiedorder(product)
ret_dict = {
'appid': self.appid,
'partnerid': self.mch_id,
'prepayid': uni_dict['prepay_id'],
'package': 'Sign=WXPay',
'noncestr': self.generate_nonce_str(),
'timestamp': str(int(time.time())),
}
ret_dict['sign'] = self.generate_sign(ret_dict)
return ret_dict
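# Usage sketch for the JSAPI flow (all credential values below are placeholders):
#   pay = JSWXPay(appid='wx0123456789abcdef', mch_id='1230000109', key='your-api-key',
#                 ip='203.0.113.10', notify_url='https://example.com/wxpay/notify',
#                 appsecret='your-app-secret')
#   openid = pay.generate_openid(code)
#   js_params = pay.generate_jsapi({'attach': 'order', 'body': 'demo goods',
#                                   'out_trade_no': '20150427001', 'total_fee': 1.00},
#                                  openid)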
| 8,508 |
2,151 | <reponame>kjthegod/chromium
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_UI_AUTOFILL_PASSWORD_GENERATION_POPUP_OBSERVER_H_
#define CHROME_BROWSER_UI_AUTOFILL_PASSWORD_GENERATION_POPUP_OBSERVER_H_
namespace autofill {
// Observer for PasswordGenerationPopup events. Currently only used for testing.
class PasswordGenerationPopupObserver {
public:
virtual void OnPopupShown(bool password_visible) = 0;
virtual void OnPopupHidden() = 0;
};
} // namespace autofill
#endif // CHROME_BROWSER_UI_AUTOFILL_PASSWORD_GENERATION_POPUP_OBSERVER_H_
| 244 |
731 | class HtmlGenerator:
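    """Builds the HTML sections of theHarvester scan report."""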
def __init__(self, word):
self.domain = word
async def generatepreviousscanresults(self, previousscanresults):
try:
if previousscanresults[0] == 'No results':
html = '''
<h2><span style="color: #000000;"><strong>Previous scan report </strong></span></h2>
<p> </p>
<table style="height: 63px; border-color: #000000;" border="#000000" width="811">
<tbody>
<tr>
<td style="width: 156.042px; text-align: center;"><strong>Date</strong></td>
<td style="width: 156.042px; text-align: center;"><strong>Domain</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Plugin</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Record type</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Result</strong></td>
</tr>
'''
for i in previousscanresults:
html += '<tr>'
html += '<td style="width: 156.042px;">' + str(i) + "</td>"
html += '<td style="width: 156.042px;">' + str(i) + "</td>"
html += '<td style="width: 157.153px;">' + str(i) + "</td>"
html += '<td style="width: 157.153px;">' + str(i) + "</td>"
html += '<td style="width: 157.153px;">' + str(i) + "</td>"
html += '</tr>'
else:
html = '''
<h2><span style="color: #000000;"><strong>Previous scan report </strong></span></h2>
<p> </p>
<table style="height: 63px; border-color: #000000;" border="#000000" width="811">
<tbody>
<tr>
<td style="width: 156.042px; text-align: center;"><strong>Date</strong></td>
<td style="width: 156.042px; text-align: center;"><strong>Domain</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Plugin</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Record type</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Result</strong></td>
</tr>
                '''
                for i in previousscanresults:
                    html += '<tr>'
html += '<td style="width: 156.042px;">' + str(i[0]) + "</td>"
html += '<td style="width: 156.042px;">' + str(i[1]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[2]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[3]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[4]) + "</td>"
html += '</tr>'
html += '''
</tbody>
</table>
<p> </p>
<p> </p>
<p> </p>
<p> </p>
'''
return html
except Exception as e:
print(f'Error generating the previous scan results HTML code: {e}')
async def generatelatestscanresults(self, latestscanresults):
try:
html = '''
<h2><span style="color: #000000;"><strong>Latest scan report </strong></span></h2>
<p> </p>
<table style="height: 63px; border-color: #000000;" border="#000000" width="811">
<tbody>
<tr>
<td style="width: 156.042px; text-align: center;"><strong>Date</strong></td>
<td style="width: 156.042px; text-align: center;"><strong>Domain</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Plugin</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Record type</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Result</strong></td>
</tr>
'''
for i in latestscanresults:
html += '<tr>'
html += '<td style="width: 156.042px;">' + str(i[0]) + "</td>"
html += '<td style="width: 156.042px;">' + str(i[1]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[2]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[3]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[4]) + "</td>"
html += '</tr>'
html += '''
</tbody>
</table>
<p> </p>
<p> </p>
<p> </p>
<p> </p>
'''
return html
except Exception as e:
print(f'Error generating the latest scan results HTML code: {e}')
async def beginhtml(self):
html = '''
<!doctype html>
<html>
<head><script src="https://cdn.plot.ly/plotly-latest.min.js" type="text/javascript"></script></head>
<title>theHarvester Scan Report</title>
<body>
<h1 style="text-align: center;"><span style="color: #ff0000;">theHarvester Scan Report</span></h1>
'''
return html
async def generatedashboardcode(self, scanboarddata):
try:
totalnumberofdomains = scanboarddata['domains']
totalnumberofhosts = scanboarddata['host']
totalnumberofip = scanboarddata['ip']
totalnumberofvhost = scanboarddata['vhost']
totalnumberofemail = scanboarddata['email']
totalnumberofshodan = scanboarddata['shodan']
html = '''
<h2 style="text-align: center;"><span style="color: #ff0000;">Scan dashboard</span></h2>
<table style="height: 108px; border-color: #000000; margin-left: auto; margin-right: auto;" border=" #000000" width="713">
<tbody>
<tr>
<td style="width: 113px; text-align: center;background: #ffff38"><h2><strong>Domains</strong></h2></td>
<td style="width: 108px; text-align: center;background: #1f77b4"><h2><strong>Hosts</strong></h2></td>
<td style="width: 119px; text-align: center;background: #ff7f0e"><h2><strong>IP Addresses</strong></h2></td>
<td style="width: 111px; text-align: center;background: #2ca02c"><h2><strong>Vhosts</strong></h2></td>
<td style="width: 110px; text-align: center;background: #9467bd"><h2><strong>Emails</strong></h2></td>
<td style="width: 110px; text-align: center;background: #d62728"><h2><strong>Shodan</strong></h2></td>
</tr>
<tr>
<td style="width: 113px; text-align: center;background: #ffff38"><h2><strong>''' + str(totalnumberofdomains) + '''</strong></h2></td>
<td style="width: 108px; text-align: center;background: #1f77b4"><h2><strong>''' + str(totalnumberofhosts) + '''</strong></h2></td>
<td style="width: 119px; text-align: center;background: #ff7f0e"><h2><strong>''' + str(totalnumberofip) + '''</strong></h2></td>
<td style="width: 111px; text-align: center;background: #2ca02c"><h2><strong>''' + str(totalnumberofvhost) + '''</strong></h2></td>
<td style="width: 110px; text-align: center;background: #9467bd"><h2><strong>''' + str(totalnumberofemail) + '''</strong></h2></td>
<td style="width: 110px; text-align: center;background: #d62728"><h2><strong>''' + str(totalnumberofshodan) + '''</strong></h2></td>
</tr>
</tbody>
</table>
<p> </p>
<p> </p>
'''
return html
except Exception as e:
print(f'Error generating dashboard HTML code: {e}')
async def generatepluginscanstatistics(self, scanstatistics):
try:
html = '''
<h2 style="text-align: center;"><span style="color: #ff0000;">theHarvester plugin statistics</span></h2>
<p> </p>
<table style="height: 63px; border-color: #000000; margin-left: auto; margin-right: auto;" border="#000000" width="811">
<tbody>
<tr>
<td style="width: 156.042px; text-align: center;"><strong>Domain</strong></td>
<td style="width: 156.042px; text-align: center;"><strong>Date</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Recordtype</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Source</strong></td>
<td style="width: 157.153px; text-align: center;"><strong>Total results</strong></td>
</tr>
'''
for i in scanstatistics:
html += '<tr>'
html += '<td style="width: 156.042px;">' + str(i[0]) + "</td>"
html += '<td style="width: 156.042px;">' + str(i[1]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[2]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[3]) + "</td>"
html += '<td style="width: 157.153px;">' + str(i[4]) + "</td>"
html += '</tr>'
html += '''
</tbody>
</table>
<p> </p>
<p> </p>
'''
return html
except Exception as e:
print(f'Error generating scan statistics HTML code: {e}')
| 3,701 |
678 | /**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/ChatKit.framework/ChatKit
*/
#import <ChatKit/CKPreferredServiceManager.h>
@interface CKMessagesAppPreferredServiceManager : CKPreferredServiceManager {
}
- (unsigned)__optionsForConversation:(id)conversation; // 0x85435
- (unsigned)_optionsForRecipients:(id)recipients; // 0x852fd
- (id)preferredServiceForAggregateConversation:(id)aggregateConversation newComposition:(BOOL)composition checkWithServer:(BOOL)server canSend:(BOOL *)send error:(id *)error; // 0x85361
@end
| 191 |
12,252 | <reponame>evtr/keycloak
/*
* Copyright 2021 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.validate.validators;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import org.keycloak.models.KeycloakSession;
import org.keycloak.provider.ConfiguredProvider;
import org.keycloak.provider.ProviderConfigProperty;
import org.keycloak.validate.AbstractStringValidator;
import org.keycloak.validate.ValidationContext;
import org.keycloak.validate.ValidationError;
import org.keycloak.validate.ValidationResult;
import org.keycloak.validate.ValidatorConfig;
/**
* Validate String against configured RegEx pattern - accepts plain string and collection of strings, for basic behavior
* like null/blank values handling and collections support see {@link AbstractStringValidator}.
*/
public class PatternValidator extends AbstractStringValidator implements ConfiguredProvider {
public static final String ID = "pattern";
public static final PatternValidator INSTANCE = new PatternValidator();
public static final String CFG_PATTERN = "pattern";
public static final String MESSAGE_NO_MATCH = "error-pattern-no-match";
public static final String CFG_ERROR_MESSAGE = "error-message";
private static final List<ProviderConfigProperty> configProperties = new ArrayList<>();
static {
ProviderConfigProperty property;
property = new ProviderConfigProperty();
property.setName(CFG_PATTERN);
property.setLabel("RegExp pattern");
property.setHelpText("RegExp pattern the value must match. Java Pattern syntax is used.");
property.setType(ProviderConfigProperty.STRING_TYPE);
configProperties.add(property);
property = new ProviderConfigProperty();
property.setName(CFG_ERROR_MESSAGE);
property.setLabel("Error message key");
property.setHelpText("Key of the error message in i18n bundle. Dafault message key is " + MESSAGE_NO_MATCH);
property.setType(ProviderConfigProperty.STRING_TYPE);
configProperties.add(property);
}
@Override
public String getId() {
return ID;
}
@Override
protected void doValidate(String value, String inputHint, ValidationContext context, ValidatorConfig config) {
Pattern pattern = config.getPattern(CFG_PATTERN);
if (!pattern.matcher(value).matches()) {
context.addError(new ValidationError(ID, inputHint, config.getStringOrDefault(CFG_ERROR_MESSAGE, MESSAGE_NO_MATCH), config.getString(CFG_PATTERN)));
}
}
@Override
public ValidationResult validateConfig(KeycloakSession session, ValidatorConfig config) {
Set<ValidationError> errors = new LinkedHashSet<>();
if (config == null || config == ValidatorConfig.EMPTY || !config.containsKey(CFG_PATTERN)) {
errors.add(new ValidationError(ID, CFG_PATTERN, ValidatorConfigValidator.MESSAGE_CONFIG_MISSING_VALUE));
} else {
Object maybePattern = config.get(CFG_PATTERN);
try {
Pattern pattern = config.getPattern(CFG_PATTERN);
if (pattern == null) {
errors.add(new ValidationError(ID, CFG_PATTERN, ValidatorConfigValidator.MESSAGE_CONFIG_INVALID_VALUE, maybePattern));
}
} catch (PatternSyntaxException pse) {
errors.add(new ValidationError(ID, CFG_PATTERN, ValidatorConfigValidator.MESSAGE_CONFIG_INVALID_VALUE, maybePattern, pse.getMessage()));
}
}
return new ValidationResult(errors);
}
@Override
public String getHelpText() {
return "RegExp Pattern validator";
}
@Override
public List<ProviderConfigProperty> getConfigProperties() {
return configProperties;
}
}
| 1,595 |
973 | //
// TM & (c) 2021 Lucasfilm Entertainment Company Ltd. and Lucasfilm Ltd.
// All rights reserved. See LICENSE.txt for license.
//
#ifndef MATERIALX_CAMERA_H
#define MATERIALX_CAMERA_H
#include <MaterialXRender/Export.h>
#include <MaterialXCore/Types.h>
MATERIALX_NAMESPACE_BEGIN
/// Shared pointer to a Camera
using CameraPtr = std::shared_ptr<class Camera>;
/// @class Camera
/// A simple camera class, supporting transform matrices and arcball
/// functionality for object-viewing applications.
class MX_RENDER_API Camera
{
public:
Camera() :
_worldMatrix(Matrix44::IDENTITY),
_viewMatrix(Matrix44::IDENTITY),
_projectionMatrix(Matrix44::IDENTITY),
_arcballActive(false),
_arcballQuat(Quaternion::IDENTITY),
_arcballDelta(Quaternion::IDENTITY),
_arcballSpeed(2.0f)
{
}
~Camera() { }
/// Create a new camera.
static CameraPtr create() { return std::make_shared<Camera>(); }
/// @name Transform Matrices
/// @{
/// Set the world matrix.
void setWorldMatrix(const Matrix44& mat)
{
_worldMatrix = mat;
}
/// Return the world matrix.
const Matrix44& getWorldMatrix() const
{
return _worldMatrix;
}
/// Set the view matrix.
void setViewMatrix(const Matrix44& mat)
{
_viewMatrix = mat;
}
/// Return the view matrix.
const Matrix44& getViewMatrix() const
{
return _viewMatrix;
}
/// Set the projection matrix.
void setProjectionMatrix(const Matrix44& mat)
{
_projectionMatrix = mat;
}
/// Return the projection matrix.
const Matrix44& getProjectionMatrix() const
{
return _projectionMatrix;
}
/// Compute our full model-view-projection matrix.
Matrix44 getWorldViewProjMatrix() const
{
return _worldMatrix * _viewMatrix * _projectionMatrix;
}
/// Derive viewer position from the view matrix.
Vector3 getViewPosition() const
{
Matrix44 invView = _viewMatrix.getInverse();
return Vector3(invView[3][0], invView[3][1], invView[3][2]);
}
/// Derive viewer direction from the view matrix.
Vector3 getViewDirection() const
{
Matrix44 invView = _viewMatrix.getInverse();
return Vector3(invView[2][0], invView[2][1], invView[2][2]);
}
/// @}
/// @name Viewport
/// @{
/// Set the size of the viewport window.
void setViewportSize(const Vector2& size)
{
_viewportSize = size;
}
/// Return the size of the viewport window.
const Vector2& getViewportSize() const
{
return _viewportSize;
}
/// Project a position from object to viewport space.
Vector3 projectToViewport(Vector3 v)
{
v = transformPointPerspective(getWorldViewProjMatrix(), v);
v = v * 0.5f + Vector3(0.5f);
v[0] *= _viewportSize[0];
v[1] *= _viewportSize[1];
return v;
}
/// Unproject a position from viewport to object space.
Vector3 unprojectFromViewport(Vector3 v)
{
v[0] /= _viewportSize[0];
v[1] /= _viewportSize[1];
v = v * 2.0f - Vector3(1.0f);
v = transformPointPerspective(getWorldViewProjMatrix().getInverse(), v);
return v;
}
/// @}
/// @name Arcball
/// @{
/// Indicates a button state change, with pos being the instantaneous location of the mouse.
void arcballButtonEvent(const Vector2& pos, bool pressed);
/// Apply mouse motion to the arcball state.
bool applyArcballMotion(const Vector2& pos);
/// Return the arcball matrix.
Matrix44 arcballMatrix() const
{
return Matrix44::createRotation(_arcballDelta * _arcballQuat);
}
/// @}
/// @name Utilities
/// @{
/// Create a view matrix given an eye position, a target position and an up vector.
static Matrix44 createViewMatrix(const Vector3& eye,
const Vector3& target,
const Vector3& up);
    /// Create a perspective projection matrix given a set of clip planes.
static Matrix44 createPerspectiveMatrix(float left, float right,
float bottom, float top,
float nearP, float farP);
/// Create an orthographic projection matrix given a set of clip planes.
static Matrix44 createOrthographicMatrix(float left, float right,
float bottom, float top,
float nearP, float farP);
/// Apply a perspective transform to the given 3D point, performing a
/// homogeneous divide on the transformed result.
static Vector3 transformPointPerspective(const Matrix44& m, const Vector3& v)
{
Vector4 res = m.multiply(Vector4(v[0], v[1], v[2], 1.0f));
return Vector3(res[0], res[1], res[2]) / res[3];
}
/// @}
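    // Illustrative usage (variable names are caller-defined):
    //   CameraPtr camera = Camera::create();
    //   camera->setViewMatrix(Camera::createViewMatrix(eye, target, up));
    //   camera->setProjectionMatrix(Camera::createPerspectiveMatrix(l, r, b, t, nearP, farP));
    //   camera->setViewportSize(Vector2(width, height));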
protected:
// Transform matrices
Matrix44 _worldMatrix;
Matrix44 _viewMatrix;
Matrix44 _projectionMatrix;
// Viewport size
Vector2 _viewportSize;
// Arcball properties
bool _arcballActive;
Vector2 _arcballLastPos;
Quaternion _arcballQuat;
Quaternion _arcballDelta;
float _arcballSpeed;
};
MATERIALX_NAMESPACE_END
#endif
| 2,201 |
12,004 | # coding=utf-8
# *** WARNING: this file was generated by test. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
__all__ = [
'SsisEnvironmentReferenceResponse',
'SsisEnvironmentResponse',
'SsisFolderResponse',
'SsisPackageResponse',
'SsisParameterResponse',
'SsisProjectResponse',
'SsisVariableResponse',
'StorageAccountKeyResponse',
]
@pulumi.output_type
class SsisEnvironmentReferenceResponse(dict):
"""
Ssis environment reference.
"""
def __init__(__self__, *,
environment_folder_name: Optional[str] = None,
environment_name: Optional[str] = None,
id: Optional[float] = None,
reference_type: Optional[str] = None):
"""
Ssis environment reference.
:param str environment_folder_name: Environment folder name.
:param str environment_name: Environment name.
:param float id: Environment reference id.
:param str reference_type: Reference type
"""
if environment_folder_name is not None:
pulumi.set(__self__, "environment_folder_name", environment_folder_name)
if environment_name is not None:
pulumi.set(__self__, "environment_name", environment_name)
if id is not None:
pulumi.set(__self__, "id", id)
if reference_type is not None:
pulumi.set(__self__, "reference_type", reference_type)
@property
@pulumi.getter(name="environmentFolderName")
def environment_folder_name(self) -> Optional[str]:
"""
Environment folder name.
"""
return pulumi.get(self, "environment_folder_name")
@property
@pulumi.getter(name="environmentName")
def environment_name(self) -> Optional[str]:
"""
Environment name.
"""
return pulumi.get(self, "environment_name")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Environment reference id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="referenceType")
def reference_type(self) -> Optional[str]:
"""
Reference type
"""
return pulumi.get(self, "reference_type")
@pulumi.output_type
class SsisEnvironmentResponse(dict):
"""
Ssis environment.
"""
def __init__(__self__, *,
type: str,
description: Optional[str] = None,
folder_id: Optional[float] = None,
id: Optional[float] = None,
name: Optional[str] = None,
variables: Optional[Sequence['outputs.SsisVariableResponse']] = None):
"""
Ssis environment.
:param str type: The type of SSIS object metadata.
Expected value is 'Environment'.
:param str description: Metadata description.
:param float folder_id: Folder id which contains environment.
:param float id: Metadata id.
:param str name: Metadata name.
:param Sequence['SsisVariableResponse'] variables: Variable in environment
"""
pulumi.set(__self__, "type", 'Environment')
if description is not None:
pulumi.set(__self__, "description", description)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if variables is not None:
pulumi.set(__self__, "variables", variables)
@property
@pulumi.getter
def type(self) -> str:
"""
The type of SSIS object metadata.
Expected value is 'Environment'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Metadata description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[float]:
"""
Folder id which contains environment.
"""
return pulumi.get(self, "folder_id")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Metadata id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Metadata name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def variables(self) -> Optional[Sequence['outputs.SsisVariableResponse']]:
"""
Variable in environment
"""
return pulumi.get(self, "variables")
@pulumi.output_type
class SsisFolderResponse(dict):
"""
Ssis folder.
"""
def __init__(__self__, *,
type: str,
description: Optional[str] = None,
id: Optional[float] = None,
name: Optional[str] = None):
"""
Ssis folder.
:param str type: The type of SSIS object metadata.
Expected value is 'Folder'.
:param str description: Metadata description.
:param float id: Metadata id.
:param str name: Metadata name.
"""
pulumi.set(__self__, "type", 'Folder')
if description is not None:
pulumi.set(__self__, "description", description)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def type(self) -> str:
"""
The type of SSIS object metadata.
Expected value is 'Folder'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Metadata description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Metadata id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Metadata name.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class SsisPackageResponse(dict):
"""
Ssis Package.
"""
def __init__(__self__, *,
type: str,
description: Optional[str] = None,
folder_id: Optional[float] = None,
id: Optional[float] = None,
name: Optional[str] = None,
parameters: Optional[Sequence['outputs.SsisParameterResponse']] = None,
project_id: Optional[float] = None,
project_version: Optional[float] = None):
"""
Ssis Package.
:param str type: The type of SSIS object metadata.
Expected value is 'Package'.
:param str description: Metadata description.
:param float folder_id: Folder id which contains package.
:param float id: Metadata id.
:param str name: Metadata name.
:param Sequence['SsisParameterResponse'] parameters: Parameters in package
:param float project_id: Project id which contains package.
:param float project_version: Project version which contains package.
"""
pulumi.set(__self__, "type", 'Package')
if description is not None:
pulumi.set(__self__, "description", description)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
if project_id is not None:
pulumi.set(__self__, "project_id", project_id)
if project_version is not None:
pulumi.set(__self__, "project_version", project_version)
@property
@pulumi.getter
def type(self) -> str:
"""
The type of SSIS object metadata.
Expected value is 'Package'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Metadata description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[float]:
"""
Folder id which contains package.
"""
return pulumi.get(self, "folder_id")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Metadata id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Metadata name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def parameters(self) -> Optional[Sequence['outputs.SsisParameterResponse']]:
"""
Parameters in package
"""
return pulumi.get(self, "parameters")
@property
@pulumi.getter(name="projectId")
def project_id(self) -> Optional[float]:
"""
Project id which contains package.
"""
return pulumi.get(self, "project_id")
@property
@pulumi.getter(name="projectVersion")
def project_version(self) -> Optional[float]:
"""
Project version which contains package.
"""
return pulumi.get(self, "project_version")
@pulumi.output_type
class SsisParameterResponse(dict):
"""
Ssis parameter.
"""
def __init__(__self__, *,
data_type: Optional[str] = None,
default_value: Optional[str] = None,
description: Optional[str] = None,
design_default_value: Optional[str] = None,
id: Optional[float] = None,
name: Optional[str] = None,
required: Optional[bool] = None,
sensitive: Optional[bool] = None,
sensitive_default_value: Optional[str] = None,
value_set: Optional[bool] = None,
value_type: Optional[str] = None,
variable: Optional[str] = None):
"""
Ssis parameter.
:param str data_type: Parameter type.
:param str default_value: Default value of parameter.
:param str description: Parameter description.
:param str design_default_value: Design default value of parameter.
:param float id: Parameter id.
:param str name: Parameter name.
:param bool required: Whether parameter is required.
:param bool sensitive: Whether parameter is sensitive.
:param str sensitive_default_value: Default sensitive value of parameter.
:param bool value_set: Parameter value set.
:param str value_type: Parameter value type.
:param str variable: Parameter reference variable.
"""
if data_type is not None:
pulumi.set(__self__, "data_type", data_type)
if default_value is not None:
pulumi.set(__self__, "default_value", default_value)
if description is not None:
pulumi.set(__self__, "description", description)
if design_default_value is not None:
pulumi.set(__self__, "design_default_value", design_default_value)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if required is not None:
pulumi.set(__self__, "required", required)
if sensitive is not None:
pulumi.set(__self__, "sensitive", sensitive)
if sensitive_default_value is not None:
pulumi.set(__self__, "sensitive_default_value", sensitive_default_value)
if value_set is not None:
pulumi.set(__self__, "value_set", value_set)
if value_type is not None:
pulumi.set(__self__, "value_type", value_type)
if variable is not None:
pulumi.set(__self__, "variable", variable)
@property
@pulumi.getter(name="dataType")
def data_type(self) -> Optional[str]:
"""
Parameter type.
"""
return pulumi.get(self, "data_type")
@property
@pulumi.getter(name="defaultValue")
def default_value(self) -> Optional[str]:
"""
Default value of parameter.
"""
return pulumi.get(self, "default_value")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Parameter description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="designDefaultValue")
def design_default_value(self) -> Optional[str]:
"""
Design default value of parameter.
"""
return pulumi.get(self, "design_default_value")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Parameter id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Parameter name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def required(self) -> Optional[bool]:
"""
Whether parameter is required.
"""
return pulumi.get(self, "required")
@property
@pulumi.getter
def sensitive(self) -> Optional[bool]:
"""
Whether parameter is sensitive.
"""
return pulumi.get(self, "sensitive")
@property
@pulumi.getter(name="sensitiveDefaultValue")
def sensitive_default_value(self) -> Optional[str]:
"""
Default sensitive value of parameter.
"""
return pulumi.get(self, "sensitive_default_value")
@property
@pulumi.getter(name="valueSet")
def value_set(self) -> Optional[bool]:
"""
Parameter value set.
"""
return pulumi.get(self, "value_set")
@property
@pulumi.getter(name="valueType")
def value_type(self) -> Optional[str]:
"""
Parameter value type.
"""
return pulumi.get(self, "value_type")
@property
@pulumi.getter
def variable(self) -> Optional[str]:
"""
Parameter reference variable.
"""
return pulumi.get(self, "variable")
@pulumi.output_type
class SsisProjectResponse(dict):
"""
Ssis project.
"""
def __init__(__self__, *,
type: str,
description: Optional[str] = None,
environment_refs: Optional[Sequence['outputs.SsisEnvironmentReferenceResponse']] = None,
folder_id: Optional[float] = None,
id: Optional[float] = None,
name: Optional[str] = None,
parameters: Optional[Sequence['outputs.SsisParameterResponse']] = None,
version: Optional[float] = None):
"""
Ssis project.
:param str type: The type of SSIS object metadata.
Expected value is 'Project'.
:param str description: Metadata description.
:param Sequence['SsisEnvironmentReferenceResponse'] environment_refs: Environment reference in project
:param float folder_id: Folder id which contains project.
:param float id: Metadata id.
:param str name: Metadata name.
:param Sequence['SsisParameterResponse'] parameters: Parameters in project
:param float version: Project version.
"""
pulumi.set(__self__, "type", 'Project')
if description is not None:
pulumi.set(__self__, "description", description)
if environment_refs is not None:
pulumi.set(__self__, "environment_refs", environment_refs)
if folder_id is not None:
pulumi.set(__self__, "folder_id", folder_id)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def type(self) -> str:
"""
The type of SSIS object metadata.
Expected value is 'Project'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Metadata description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="environmentRefs")
def environment_refs(self) -> Optional[Sequence['outputs.SsisEnvironmentReferenceResponse']]:
"""
Environment reference in project
"""
return pulumi.get(self, "environment_refs")
@property
@pulumi.getter(name="folderId")
def folder_id(self) -> Optional[float]:
"""
Folder id which contains project.
"""
return pulumi.get(self, "folder_id")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Metadata id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Metadata name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def parameters(self) -> Optional[Sequence['outputs.SsisParameterResponse']]:
"""
Parameters in project
"""
return pulumi.get(self, "parameters")
@property
@pulumi.getter
def version(self) -> Optional[float]:
"""
Project version.
"""
return pulumi.get(self, "version")
@pulumi.output_type
class SsisVariableResponse(dict):
"""
Ssis variable.
"""
def __init__(__self__, *,
data_type: Optional[str] = None,
description: Optional[str] = None,
id: Optional[float] = None,
name: Optional[str] = None,
sensitive: Optional[bool] = None,
sensitive_value: Optional[str] = None,
value: Optional[str] = None):
"""
Ssis variable.
:param str data_type: Variable type.
:param str description: Variable description.
:param float id: Variable id.
:param str name: Variable name.
:param bool sensitive: Whether variable is sensitive.
:param str sensitive_value: Variable sensitive value.
:param str value: Variable value.
"""
if data_type is not None:
pulumi.set(__self__, "data_type", data_type)
if description is not None:
pulumi.set(__self__, "description", description)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if sensitive is not None:
pulumi.set(__self__, "sensitive", sensitive)
if sensitive_value is not None:
pulumi.set(__self__, "sensitive_value", sensitive_value)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter(name="dataType")
def data_type(self) -> Optional[str]:
"""
Variable type.
"""
return pulumi.get(self, "data_type")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Variable description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def id(self) -> Optional[float]:
"""
Variable id.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Variable name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def sensitive(self) -> Optional[bool]:
"""
Whether variable is sensitive.
"""
return pulumi.get(self, "sensitive")
@property
@pulumi.getter(name="sensitiveValue")
def sensitive_value(self) -> Optional[str]:
"""
Variable sensitive value.
"""
return pulumi.get(self, "sensitive_value")
@property
@pulumi.getter
def value(self) -> Optional[str]:
"""
Variable value.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class StorageAccountKeyResponse(dict):
"""
An access key for the storage account.
"""
def __init__(__self__, *,
creation_time: str,
key_name: str,
permissions: str,
value: str):
"""
An access key for the storage account.
:param str creation_time: Creation time of the key, in round trip date format.
:param str key_name: Name of the key.
:param str permissions: Permissions for the key -- read-only or full permissions.
:param str value: Base 64-encoded value of the key.
"""
pulumi.set(__self__, "creation_time", creation_time)
pulumi.set(__self__, "key_name", key_name)
pulumi.set(__self__, "permissions", permissions)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter(name="creationTime")
def creation_time(self) -> str:
"""
Creation time of the key, in round trip date format.
"""
return pulumi.get(self, "creation_time")
@property
@pulumi.getter(name="keyName")
def key_name(self) -> str:
"""
Name of the key.
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter
def permissions(self) -> str:
"""
Permissions for the key -- read-only or full permissions.
"""
return pulumi.get(self, "permissions")
@property
@pulumi.getter
def value(self) -> str:
"""
Base 64-encoded value of the key.
"""
return pulumi.get(self, "value")
| 10,073 |
852 | <reponame>ckamtsikis/cmssw
#include "DPGAnalysis/Skims/interface/TriggerMatchProducer.h"
#include "FWCore/Framework/interface/MakerMacros.h"
#include "DataFormats/METReco/interface/MET.h"
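// Instantiate TriggerMatchProducer for reco::MET and register it as an EDM plugin.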
typedef TriggerMatchProducer<reco::MET> trgMatchMETProducer;
DEFINE_FWK_MODULE(trgMatchMETProducer);
| 110 |
1,091 | /*
* Copyright 2019-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.pi.service;
import org.onosproject.event.AbstractEvent;
import org.onosproject.net.pi.model.PiPipeconf;
import org.onosproject.net.pi.model.PiPipeconfId;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Event related to the PiPipeconfService.
*/
public class PiPipeconfEvent extends AbstractEvent<PiPipeconfEvent.Type, PiPipeconfId> {
private final PiPipeconf pipeconf;
/**
* Type of pipeconf event.
*/
public enum Type {
REGISTERED,
UNREGISTERED
}
/**
     * Creates a new pipeconf event for the given type and pipeconf.
*
* @param type type of event
* @param pipeconf pipeconf
*/
public PiPipeconfEvent(Type type, PiPipeconf pipeconf) {
super(type, checkNotNull(pipeconf).id());
this.pipeconf = pipeconf;
}
/**
     * Creates a new pipeconf event for the given type and pipeconf ID.
*
* @param type type of event
* @param pipeconfId pipeconf ID
*/
public PiPipeconfEvent(Type type, PiPipeconfId pipeconfId) {
super(type, pipeconfId);
pipeconf = null;
}
/**
* Returns the pipeconf instance associated to this event, or null if one
* was not provided. For example, {@link Type#UNREGISTERED} events are not
* expected to carry the pipeconf instance that was unregistered, but just
* the ID (via {@link #subject()}).
*
* @return pipeconf instance or null
*/
public PiPipeconf pipeconf() {
return pipeconf;
}
}
| 765 |
823 | /**************************************************************************
Copyright (c) 2017 sewenew
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*************************************************************************/
#ifndef SEWENEW_REDISPLUSPLUS_TEST_STREAM_CMDS_TEST_HPP
#define SEWENEW_REDISPLUSPLUS_TEST_STREAM_CMDS_TEST_HPP
#include <vector>
#include <string>
#include <thread>
#include <chrono>
#include <unordered_map>
#include "utils.h"
namespace sw {
namespace redis {
namespace test {
template <typename RedisInstance>
void StreamCmdsTest<RedisInstance>::run() {
_test_stream_cmds();
_test_group_cmds();
}
template <typename RedisInstance>
void StreamCmdsTest<RedisInstance>::_test_stream_cmds() {
auto key = test_key("stream");
KeyDeleter<RedisInstance> deleter(_redis, key);
std::vector<std::pair<std::string, std::string>> attrs = {
{"f1", "v1"},
{"f2", "v2"}
};
const std::vector<std::string> ids = {"1565427842-0", "1565427842-1"};
REDIS_ASSERT(_redis.xadd(key, ids.at(0), attrs.begin(), attrs.end()) == ids.at(0),
"failed to test xadd");
std::vector<std::pair<std::string, std::string>> keys = {std::make_pair(key, "0-0")};
Result result;
_redis.xread(keys.begin(), keys.end(), 1, std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 1
&& result.find(key) != result.end()
&& result[key].size() == 1
&& result[key].at(0).first == ids.at(0)
&& result[key].at(0).second.size() == 2,
"failed to test xread");
result.clear();
_redis.xread(key, std::string("0-0"), 1, std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 1
&& result.find(key) != result.end()
&& result[key].size() == 1
&& result[key].at(0).first == ids.at(0)
&& result[key].at(0).second.size() == 2,
"failed to test xread");
result.clear();
keys = {std::make_pair(key, ids.at(0))};
_redis.xread(keys.begin(),
keys.end(),
std::chrono::seconds(1),
2,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 0, "failed to test xread");
_redis.xread(key,
ids.at(0),
std::chrono::seconds(1),
2,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 0, "failed to test xread");
REDIS_ASSERT(_redis.xadd(key, ids.at(1),
attrs.begin(),
attrs.end()) == ids.at(1),
"failed to test xadd");
REDIS_ASSERT(_redis.xlen(key) == 2, "failed to test xlen");
std::vector<Item> items;
_redis.xrange(key, "-", "+", 2, std::back_inserter(items));
REDIS_ASSERT(items.size() == 2 &&
items.at(0).first == ids.at(0) &&
items.at(1).first == ids.at(1),
"failed to test xrange with count");
items.clear();
_redis.xrange(key, "-", "+", 1, std::back_inserter(items));
REDIS_ASSERT(items.size() == 1 &&
items.at(0).first == ids.at(0),
"failed to test xrange with count");
items.clear();
_redis.xrevrange(key, "+", "-", 2, std::back_inserter(items));
REDIS_ASSERT(items.size() == 2 &&
items.at(0).first == ids.at(1) &&
items.at(1).first == ids.at(0),
"failed to test xrevrange with count");
items.clear();
_redis.xrevrange(key, "+", "-", 1, std::back_inserter(items));
REDIS_ASSERT(items.size() == 1 &&
items.at(0).first == ids.at(1),
"failed to test xrevrange with count");
REDIS_ASSERT(_redis.xtrim(key, 1, false) == 1, "failed to test xtrim");
items.clear();
_redis.xrange(key, "-", "+", std::back_inserter(items));
REDIS_ASSERT(items.size() == 1 && items[0].first == ids.at(1),
"failed to test xrange");
items.clear();
_redis.xrevrange(key, "+", "-", std::back_inserter(items));
REDIS_ASSERT(items.size() == 1 && items[0].first == ids.at(1),
"failed to test xrevrange");
REDIS_ASSERT(_redis.xdel(key, {ids.at(1), std::string("111-111")}) == 1,
"failed to test xdel");
}
template <typename RedisInstance>
void StreamCmdsTest<RedisInstance>::_test_group_cmds() {
auto key = test_key("stream");
KeyDeleter<RedisInstance> deleter(_redis, key);
auto group = "group";
auto consumer1 = "consumer1";
_redis.xgroup_create(key, group, "$", true);
std::vector<std::pair<std::string, std::string>> attrs = {
{"f1", "v1"},
{"f2", "v2"}
};
auto id = _redis.xadd(key, "*", attrs.begin(), attrs.end());
auto keys = {std::make_pair(key, ">")};
Result result;
_redis.xreadgroup(group,
consumer1,
keys.begin(),
keys.end(),
1,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 1
&& result.find(key) != result.end()
&& result[key].size() == 1
&& result[key][0].first == id,
"failed to test xreadgroup");
result.clear();
_redis.xreadgroup(group,
consumer1,
key,
std::string(">"),
1,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 0, "failed to test xreadgroup");
result.clear();
_redis.xreadgroup(group,
"not-exist-consumer",
keys.begin(),
keys.end(),
1,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 0, "failed to test xreadgroup");
result.clear();
_redis.xreadgroup(group,
consumer1,
keys.begin(),
keys.end(),
std::chrono::seconds(1),
1,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 0, "failed to test xreadgroup");
result.clear();
_redis.xreadgroup(group,
consumer1,
key,
">",
std::chrono::seconds(1),
1,
std::inserter(result, result.end()));
REDIS_ASSERT(result.size() == 0, "failed to test xreadgroup");
using PendingResult = std::vector<std::tuple<std::string, std::string, long long, long long>>;
PendingResult pending_result;
_redis.xpending(key, group, "-", "+", 1, consumer1, std::back_inserter(pending_result));
REDIS_ASSERT(pending_result.size() == 1
&& std::get<0>(pending_result[0]) == id
&& std::get<1>(pending_result[0]) == consumer1,
"failed to test xpending");
std::this_thread::sleep_for(std::chrono::seconds(1));
auto consumer2 = "consumer2";
std::vector<Item> items;
auto ids = {id};
_redis.xclaim(key,
group,
consumer2,
std::chrono::milliseconds(10),
ids,
std::back_inserter(items));
REDIS_ASSERT(items.size() == 1 && items[0].first == id, "failed to test xclaim");
std::this_thread::sleep_for(std::chrono::seconds(1));
items.clear();
_redis.xclaim(key, group, consumer1, std::chrono::milliseconds(10), id, std::back_inserter(items));
REDIS_ASSERT(items.size() == 1 && items[0].first == id, "failed to test xclaim: " + std::to_string(items.size()));
_redis.xack(key, group, id);
REDIS_ASSERT(_redis.xgroup_delconsumer(key, group, consumer1) == 0,
"failed to test xgroup_delconsumer");
REDIS_ASSERT(_redis.xgroup_delconsumer(key, group, consumer2) == 0,
"failed to test xgroup_delconsumer");
REDIS_ASSERT(_redis.xgroup_destroy(key, group) == 1,
"failed to test xgroup_destroy");
}
}
}
}
#endif // end SEWENEW_REDISPLUSPLUS_TEST_STREAM_CMDS_TEST_HPP
| 3,938 |
1,338 | <reponame>Kirishikesan/haiku
/*
* Copyright 2012 Haiku, Inc. All rights reserved.
* Distributed under the terms of the MIT License.
*
* Authors:
* <NAME>, <EMAIL>
*/
#ifndef REPLYBUILDER_H
#define REPLYBUILDER_H
#include <SupportDefs.h>
#include "RPCCallbackReply.h"
#include "XDR.h"
class ReplyBuilder {
public:
ReplyBuilder(uint32 xid);
~ReplyBuilder();
RPC::CallbackReply* Reply();
status_t GetAttr(status_t status, int mask,
uint64 size, uint64 change);
status_t Recall(status_t status);
private:
void _InitHeader();
static uint32 _HaikuErrorToNFS4(status_t error);
status_t fStatus;
XDR::Stream::Position fStatusPosition;
uint32 fOpCount;
XDR::Stream::Position fOpCountPosition;
RPC::CallbackReply* fReply;
};
#endif // REPLYBUILDER_H
| 364 |
895 | <filename>package.nls.ko.json<gh_stars>100-1000
{
"plantuml.exportCurrent.title": "현재 다이어그램 내보내기",
"plantuml.exportDocument.title": "현재 다이어그램 파일 내보내기",
"plantuml.exportWorkspace.title": "작업공간의 다이어그램 내보내기",
"plantuml.preview.title": "현재 다이어그램 미리보기",
"plantuml.URLCurrent.title": "현재 다이어그램에 대한 URL 생성하기",
"plantuml.URLDocument.title": "현재 다이어그램 파일에 대한 URL 생성하기",
"plantuml.extractSource.title": "그림에서 PlantUML 다이어그램 소스 추출하기",
"plantuml.configuration.configTitle": "PlantUML 환경설정",
"plantuml.configuration.exportFormat": "내보내기 형식. 매번 내보낼 때마다 선택하려면 비워두십시오.",
"plantuml.configuration.jar": "plantuml.jar의 다른 위치. 통합 jar를 쓰려면 비워두십시오.",
"plantuml.configuration.diagramsRoot": "모든 다이어그램 파일들을 저장할 위치 (작업공간의 상대 폴더).",
"plantuml.configuration.fileExtensions": "내보낼 파일을 찾을 때 사용할 확장자. 특히 작업공간 설정에서, \".java\" 같은 확장자를 추가하여 소스코드 파일의 다이어그램을 내보낼 수 있습니다.",
"plantuml.configuration.exportSubFolder": "호스트 파일과 같은 이름의 폴더에 다이어그램을 내보낼 수 있습니다.",
"plantuml.configuration.exportConcurrency": "여러개의 다이어그램을 내보내기 할 때 동시작업개수를 지정합니다.",
"plantuml.configuration.exportOutDir": "내보낸 작업공간 다이어그램이 이 디렉터리에 보관됩니다. (작업공간의 상대 폴더).",
"plantuml.configuration.previewAutoUpdate": "미리보기 화면을 자동으로 업데이트할지를 결정합니다.",
"plantuml.configuration.previewSnapIndicators": "미리보기 화면에서 스냅 지시자를 표시할지를 결정합니다.",
"plantuml.configuration.server": "UML 다이어그램을 바로 생성할 수 있는 Plantuml 서버. 데이터가 공개되도 괜찮다면 https://www.plantuml.com/plantuml 공식서버를 사용할 수 있습니다.",
"plantuml.configuration.urlFormat": "URL 형식. URL을 만들 때마다 선택하려면 비워두십시오.",
"plantuml.configuration.urlResult": "URL 결과 형식. 단순URL 또는 MarkDown용.",
"plantuml.configuration.render": "내보내기와 미리보기 모두에 사용할 다이어그램 처리기.\n로컬: 기존 방식으로 로컬에서 다이어그램을 처리합니다. 먼저 JAVA와 GraphViz를 설치해야합니다.\nPlantUMLServer: \"plantuml.server\"에 지정된 서버를 사용하여 다이어그램을 처리합니다. 훨씬 빠르지만 서버를 필요로 합니다.\n로컬 설정이 기본값입니다.",
"plantuml.configuration.commandArgs": "commandArgs는 \"-DPLANTUML_LIMIT_SIZE=8192\" 같은 추가 옵션을 java에 전달할 수 있습니다.",
"plantuml.configuration.jarArgs": "jarArgs는 \"-config plantuml.config\" 같은 추가 옵셥을 plantuml.jar에 전달할 수 있습니다.",
"plantuml.configuration.exportMapFile": "내보내기 할 때 이미지 맵 (.cmapx) 파일도 내보낼지를 결정합니다.",
"plantuml.configuration.java": "Java 실행파일 위치.",
"plantuml.configuration.includepaths": "소스 폴더와 'diagramsRoot'외에 포함할 경로를 지정합니다.",
"plantuml.untrusted.description": "Required for loading vulnerable workspace settings"
}
| 2,328 |
4,145 | # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, List
import numpy as np
import torch.utils.data as pt_data
from torch.utils.data import IterableDataset
__all__ = ['ConcatDataset']
class ConcatDataset(IterableDataset):
"""
A dataset that accepts as argument multiple datasets and then samples from them based on the specified
sampling technique.
Args:
datasets (list): A list of datasets to sample from.
shuffle (bool): Whether to shuffle individual datasets. Only works with non-iterable datasets.
Defaults to True.
sampling_technique (str): Sampling technique to choose which dataset to draw a sample from.
Defaults to 'temperature'. Currently supports 'temperature', 'random' and 'round-robin'.
sampling_temperature (int): Temperature value for sampling. Only used when sampling_technique = 'temperature'.
Defaults to 5.
sampling_probabilities (list): Probability values for sampling. Only used when sampling_technique = 'random'.
global_rank (int): Worker rank, used for partitioning map style datasets. Defaults to 0.
world_size (int): Total number of processes, used for partitioning map style datasets. Defaults to 1.
"""
def __init__(
self,
datasets: List[Any],
shuffle: bool = True,
sampling_technique: str = 'temperature',
sampling_temperature: int = 5,
sampling_probabilities: List[float] = None,
global_rank: int = 0,
world_size: int = 1,
):
super().__init__()
supported_sampling_techniques = ['temperature', 'random', 'round-robin']
self.datasets = datasets
self.iterables = [None] * len(datasets)
self.shuffle = shuffle
self.global_rank = global_rank
self.world_size = world_size
self.sampling_kwargs = {}
if sampling_technique == 'temperature':
self.index_generator = ConcatDataset.temperature_generator
self.sampling_kwargs['temperature'] = sampling_temperature
elif sampling_technique == 'random':
self.index_generator = ConcatDataset.random_generator
self.sampling_kwargs['p'] = sampling_probabilities
elif sampling_technique == 'round-robin':
self.index_generator = ConcatDataset.round_robin_generator
else:
raise ValueError(f"Currently we only support sampling techniques in {supported_sampling_techniques}.")
self.length = 0
if isinstance(datasets[0], IterableDataset):
self.kind = 'iterable'
else:
self.kind = 'map'
for idx, dataset in enumerate(datasets):
isiterable = isinstance(dataset, IterableDataset)
if (isiterable and not self.kind == 'iterable') or (not isiterable and self.kind == 'iterable'):
raise ValueError("All datasets in ConcatDataset must be of the same kind (Iterable or Map).")
if self.kind == 'map':
self.length += len(dataset) // world_size
else:
self.length += len(dataset)
def get_iterable(self, dataset):
if isinstance(dataset, IterableDataset):
return dataset.__iter__()
else:
indices = np.arange(len(dataset))
if self.shuffle:
np.random.shuffle(indices)
return iter(indices)
def __iter__(self):
worker_info = pt_data.get_worker_info()
if worker_info is None:
max_elements = self.length
wid = 0
wnum = 1
else:
wid = worker_info.id
wnum = worker_info.num_workers
max_elements = len(range(wid, self.length, wnum))
if self.kind == 'map':
for idx in range(len(self.datasets)):
start_idx = (len(self.datasets[idx]) // self.world_size) * self.global_rank
end_idx = start_idx + (len(self.datasets[idx]) // self.world_size)
if self.global_rank == self.world_size - 1:
end_idx = len(self.datasets[idx])
indices = range(start_idx + wid, end_idx, wnum)
self.datasets[idx] = pt_data.Subset(self.datasets[idx], indices)
for idx, dataset in enumerate(self.datasets):
iterable = self.get_iterable(dataset)
self.iterables[idx] = iterable
n = 0
ind_gen = self.index_generator(self.datasets, **self.sampling_kwargs)
while n < max_elements:
n += 1
try:
ind = next(ind_gen)
except StopIteration:
return
try:
val = next(self.iterables[ind])
if self.kind == 'map':
val = self.datasets[ind][val]
yield val
except StopIteration:
self.iterables[ind] = self.get_iterable(self.datasets[ind])
n -= 1
def __len__(self):
return self.length
@staticmethod
def temperature_generator(datasets, **kwargs):
temp = kwargs.get('temperature')
if not temp:
raise ValueError("Temperature generator expects a 'temperature' keyword argument.")
lengths = []
num = len(datasets)
for dataset in datasets:
lengths.append(len(dataset))
p = np.array(lengths) / np.sum(lengths)
p = np.power(p, 1 / temp)
p = p / np.sum(p)
while True:
ind = np.random.choice(np.arange(num), p=p)
yield ind
@staticmethod
def round_robin_generator(datasets, **kwargs):
num = len(datasets)
while True:
for i in range(num):
yield i
@staticmethod
def random_generator(datasets, **kwargs):
p = kwargs.get('p')
if not p:
raise ValueError("Random generator expects a 'p' keyowrd argument for sampling probabilities.")
num = len(datasets)
if len(p) != num:
raise ValueError("Length of probabilities list must be equal to the number of datasets.")
while True:
ind = np.random.choice(np.arange(num), p=p)
yield ind
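# Usage sketch (illustrative only; `en_dataset` and `de_dataset` are hypothetical
# map-style datasets, not part of this module):
#   combined = ConcatDataset([en_dataset, de_dataset],
#                            sampling_technique='temperature',
#                            sampling_temperature=5)
#   loader = pt_data.DataLoader(combined, batch_size=32)
# With temperature sampling the draw probabilities are flattened towards uniform;
# 'round-robin' alternates strictly and 'random' follows sampling_probabilities.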
| 2,995 |
471 | <gh_stars>100-1000
/////////////////////////////////////////////////////////////////////////////
// Name: nativdlg.h
// Purpose: Native Windows dialog sample
// Author: <NAME>
// Modified by:
// Created: 04/01/98
// Copyright: (c) <NAME>
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// Define a new application
class MyApp: public wxApp
{
public:
MyApp(void){};
bool OnInit(void);
};
class MyFrame: public wxFrame
{
public:
wxWindow *panel;
MyFrame(wxWindow *parent, const wxWindowID id, const wxString& title, const wxPoint& pos, const wxSize& size);
void OnQuit(wxCommandEvent& event);
void OnTest1(wxCommandEvent& event);
wxDECLARE_EVENT_TABLE();
};
class MyDialog : public wxDialog
{
public:
void OnOk(wxCommandEvent& event);
void OnCancel(wxCommandEvent& event);
wxDECLARE_EVENT_TABLE();
};
#define RESOURCE_QUIT 4
#define RESOURCE_TEST1 2
| 350 |
621 | <filename>carina-webdriver/src/main/java/com/qaprosoft/carina/core/foundation/utils/appletv/RemoteControlKeyword.java
package com.qaprosoft.carina.core.foundation.utils.appletv;
public enum RemoteControlKeyword {
HOME("home"),
LEFT("left"),
RIGHT("right"),
UP("up"),
DOWN("down"),
MENU("menu"),
SELECT("select"),
PLAY("playpause");
private String controlKeyword;
private RemoteControlKeyword(String controlKeyword) {
this.setControlKeyword(controlKeyword);
}
public String getControlKeyword() {
return controlKeyword;
}
public void setControlKeyword(String controlKeyword) {
this.controlKeyword = controlKeyword;
}
} | 222 |
1,251 | <reponame>palerdot/BlingFire
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
#ifndef _FA_RSDFA_CA_H_
#define _FA_RSDFA_CA_H_
#include "FAConfig.h"
#include "FASecurity.h"
namespace BlingFire
{
///
/// client-side interface for Rabin-Scott DFA
///
class FARSDfaCA {
public:
/// returns the initial state
virtual const int GetInitial () const = 0;
/// returns true if the State is a final
virtual const bool IsFinal (const int State) const = 0;
/// for the given State and the Iw returns the Destination state
/// returns -1 if transition does not exist
virtual const int GetDest (const int State, const int Iw) const = 0;
/// returns automaton's alphabet
virtual const int GetIWs (
__out_ecount_opt (MaxIwCount) int * pIws,
const int MaxIwCount
) const = 0;
};
}
#endif
| 350 |
4,054 | <reponame>Anlon-Burke/vespa<filename>fsa/src/alltest/segmenter_test.cpp<gh_stars>1000+
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
/**
* @author <NAME>
* @date 2004/08/20
* @version $Id$
* @file segmenter_test.cpp
* @brief Test for the Segmenter class
*
*/
#include <iostream>
#include <iomanip>
#include <vespa/fsa/segmenter.h>
using namespace fsa;
int main(int argc, char **argv)
{
FSA dict(argc>=2? argv[1] : "__testfsa__.__fsa__");
Segmenter segmenter(dict);
Segmenter::Segments segments;
const Segmenter::Segmentation *segmentation;
std::string text;
while(!std::cin.eof()){
getline(std::cin,text);
if(text.size()>3){
segmenter.segment(text,segments);
std::cout << "List of all segments:" << std::endl;
for(unsigned int i=0; i<segments.size(); i++){
std::cout << " "
<< segments.sgm(i) << ":" << segments.conn(i) << " ["
<< segments.beg(i) << "," << segments.end(i)-1 << "]"
<< std::endl;
}
segmentation=segments.segmentation(Segmenter::SEGMENTATION_WEIGHTED);
std::cout << "Weighted segmentation:" << std::endl << " ";
for(Segmenter::SegmentationConstIterator it=segmentation->begin();
it!=segmentation->end();++it){
std::cout << "(" << segments.sgm(*it) << ")";
}
std::cout << std::endl;
segmentation=segments.segmentation(Segmenter::SEGMENTATION_RIGHTMOST_LONGEST);
std::cout << "Rightmost-longest segmentation:" << std::endl << " ";
for(Segmenter::SegmentationConstIterator it=segmentation->begin();
it!=segmentation->end();++it){
std::cout << "(" << segments.sgm(*it) << ")";
}
std::cout << std::endl;
segmentation=segments.segmentation(Segmenter::SEGMENTATION_LEFTMOST_LONGEST);
std::cout << "Lefttmost-longest segmentation:" << std::endl << " ";
for(Segmenter::SegmentationConstIterator it=segmentation->begin();
it!=segmentation->end();++it){
std::cout << "(" << segments.sgm(*it) << ")";
}
std::cout << std::endl;
}
}
return 0;
}
| 968 |
17,242 | <gh_stars>1000+
// Copyright 2019 The MediaPipe Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MEDIAPIPE_FRAMEWORK_DEMANGLE_H_
#define MEDIAPIPE_FRAMEWORK_DEMANGLE_H_
#ifndef MEDIAPIPE_HAS_CXA_DEMANGLE
// We only support some compilers that support __cxa_demangle.
// TODO: Checks if Android NDK has fixed this issue or not.
#if defined(__ANDROID__) && (defined(__i386__) || defined(__x86_64__))
#define MEDIAPIPE_HAS_CXA_DEMANGLE 0
#elif (__GNUC__ >= 4 || (__GNUC__ >= 3 && __GNUC_MINOR__ >= 4)) && \
!defined(__mips__)
#define MEDIAPIPE_HAS_CXA_DEMANGLE 1
#elif defined(__clang__) && !defined(_MSC_VER)
#define MEDIAPIPE_HAS_CXA_DEMANGLE 1
#else
#define MEDIAPIPE_HAS_CXA_DEMANGLE 0
#endif
#endif
#include <stdlib.h>
#include <string>
#if MEDIAPIPE_HAS_CXA_DEMANGLE
#include <cxxabi.h>
#endif
namespace mediapipe {
// Demangle a mangled symbol name and return the demangled name.
// If 'mangled' isn't mangled in the first place, this function
// simply returns 'mangled' as is.
//
// This function is used for demangling mangled symbol names such as
// '_Z3bazifdPv'. It uses abi::__cxa_demangle() if your compiler has
// the API. Otherwise, this function simply returns 'mangled' as is.
//
// Currently, we support only GCC 3.4.x or later for the following
// reasons.
//
// - GCC 2.95.3 doesn't have cxxabi.h
// - GCC 3.3.5 and ICC 9.0 have a bug. Their abi::__cxa_demangle()
// returns junk values for non-mangled symbol names (ex. function
// names in C linkage). For example,
// abi::__cxa_demangle("main", 0, 0, &status)
// returns "unsigned long" and the status code is 0 (successful).
//
// Also,
//
// - MIPS is not supported because abi::__cxa_demangle() is not defined.
// - Android x86 is not supported because STLs don't define __cxa_demangle
//
// Prefer using MediaPipeTypeStringOrDemangled<T>() when possible (defined
// in type_map.h).
inline std::string Demangle(const char* mangled) {
int status = 0;
char* demangled = nullptr;
#if MEDIAPIPE_HAS_CXA_DEMANGLE
demangled = abi::__cxa_demangle(mangled, nullptr, nullptr, &status);
#endif
std::string out;
  if (status == 0 && demangled != nullptr) {  // Demangling succeeded.
out.append(demangled);
free(demangled);
} else {
out.append(mangled);
}
return out;
}
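// Illustrative use (a sketch, not part of the original header): passing a mangled
// name such as typeid(std::string).name() to Demangle() yields a human-readable
// type name on toolchains where MEDIAPIPE_HAS_CXA_DEMANGLE is 1; on other
// toolchains the input string is returned unchanged.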
} // namespace mediapipe
#endif // MEDIAPIPE_FRAMEWORK_DEMANGLE_H_
| 1,036 |
1,083 | <reponame>agxmaster/polarphp<filename>include/polarphp/pil/optimizer/utils/Devirtualize.h
//===--- Devirtualize.h - Helper for devirtualizing apply -------*- C++ -*-===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This contains helper functions that perform the work of devirtualizing a
// given apply when possible.
//
//===----------------------------------------------------------------------===//
#ifndef POLARPHP_PIL_OPTIMIZER_UTILS_DEVIRTUALIZE_H
#define POLARPHP_PIL_OPTIMIZER_UTILS_DEVIRTUALIZE_H
#include "polarphp/ast/Decl.h"
#include "polarphp/ast/Types.h"
#include "polarphp/pil/lang/PILDeclRef.h"
#include "polarphp/pil/lang/PILFunction.h"
#include "polarphp/pil/lang/PILInstruction.h"
#include "polarphp/pil/lang/PILModule.h"
#include "polarphp/pil/lang/PILType.h"
#include "polarphp/pil/lang/PILValue.h"
#include "polarphp/pil/optimizer/analysis/ClassHierarchyAnalysis.h"
#include "polarphp/pil/optimizer/utils/InstOptUtils.h"
#include "llvm/ADT/ArrayRef.h"
namespace polar {
namespace optremark {
class Emitter;
}
/// Compute all subclasses of a given class.
///
/// \p CHA class hierarchy analysis
/// \p CD class declaration
/// \p ClassType type of the instance
/// \p M PILModule
/// \p Subs a container to be used for storing the set of subclasses
void getAllSubclasses(ClassHierarchyAnalysis *CHA,
ClassDecl *CD,
CanType ClassType,
PILModule &M,
ClassHierarchyAnalysis::ClassList &Subs);
/// Given an apply instruction of a protocol requirement and a witness method
/// for the requirement, compute a substitution suitable for a direct call
/// to the witness method.
///
/// \p Module PILModule
/// \p AI ApplySite that applies a protocol method
/// \p F PILFunction with convention @convention(witness_method)
/// \p CRef a concrete InterfaceConformanceRef
SubstitutionMap getWitnessMethodSubstitutions(PILModule &Module, ApplySite AI,
PILFunction *F,
InterfaceConformanceRef CRef);
/// Attempt to devirtualize the given apply site. If this fails,
/// the returned ApplySite will be null.
///
/// If this succeeds, the caller must call deleteDevirtualizedApply on
/// the original apply site.
ApplySite tryDevirtualizeApply(ApplySite AI,
ClassHierarchyAnalysis *CHA,
optremark::Emitter *ORE = nullptr);
bool canDevirtualizeApply(FullApplySite AI, ClassHierarchyAnalysis *CHA);
bool canDevirtualizeClassMethod(FullApplySite AI, ClassDecl *CD,
optremark::Emitter *ORE = nullptr,
bool isEffectivelyFinalMethod = false);
PILFunction *getTargetClassMethod(PILModule &M, ClassDecl *CD,
MethodInst *MI);
CanType getSelfInstanceType(CanType ClassOrMetatypeType);
/// Devirtualize the given apply site, which is known to be devirtualizable.
///
/// The caller must call deleteDevirtualizedApply on the original apply site.
FullApplySite devirtualizeClassMethod(FullApplySite AI,
PILValue ClassInstance,
ClassDecl *CD,
optremark::Emitter *ORE);
/// Attempt to devirtualize the given apply site, which is known to be
/// of a class method. If this fails, the returned FullApplySite will be null.
///
/// If this succeeds, the caller must call deleteDevirtualizedApply on
/// the original apply site.
FullApplySite
tryDevirtualizeClassMethod(FullApplySite AI,
PILValue ClassInstance,
ClassDecl *CD,
optremark::Emitter *ORE,
bool isEffectivelyFinalMethod = false);
/// Attempt to devirtualize the given apply site, which is known to be
/// of a witness method. If this fails, the returned FullApplySite
/// will be null.
///
/// If this succeeds, the caller must call deleteDevirtualizedApply on
/// the original apply site.
ApplySite tryDevirtualizeWitnessMethod(ApplySite AI, optremark::Emitter *ORE);
/// Delete a successfully-devirtualized apply site. This must always be
/// called after devirtualizing an apply; not only is it not semantically
/// equivalent to leave the old apply in-place, but the PIL isn't necessarily
/// well-formed.
///
/// Devirtualization is responsible for replacing uses of the original
/// apply site with uses of the new one. The only thing this does is delete
/// the instruction and any now-trivially-dead operands; it is separated
/// from the actual devirtualization step only to apply the caller to log
/// information about the original apply site. This is probably not a
/// good enough reason to complicate the API.
void deleteDevirtualizedApply(ApplySite AI);
} // end namespace polar
#endif
| 1,919 |
1,780 | /**
* Created by G-Canvas Open Source Team.
* Copyright (c) 2017, Alibaba, Inc. All rights reserved.
*
* This source code is licensed under the Apache Licence 2.0.
* For the full copyright and license information, please view
* the LICENSE file in the root directory of this source tree.
*/
#include "CharacterSet.h"
Utf8ToUCS2::Utf8ToUCS2(const Utf8ToUCS2 &utf8)
: utf8(0), utf8len(0), ucs2(0), ucs2len(0)
{
}
Utf8ToUCS2::Utf8ToUCS2(const char *utf8In, int utf8lenIn)
: utf8(utf8In), utf8len(utf8lenIn), ucs2(0), ucs2len(0)
{
getUcs2();
}
Utf8ToUCS2::~Utf8ToUCS2()
{
if (ucs2) delete[] ucs2;
}
void Utf8ToUCS2::getUcs2()
{
ucs2 = new unsigned short[utf8len + 1];
char *ptr = (char *)utf8, *last = (char *)utf8 + utf8len;
int offset = 0;
unsigned ch;
for (; ptr < last; offset++)
{
if ((*ptr & 0x80) == 0)
{
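            // 1-byte sequence (0xxxxxxx): plain ASCII, copied through directly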
ucs2[offset] = (*ptr);
++ptr;
}
else if ((*ptr & 0x20) == 0)
{
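            // 2-byte sequence (110xxxxx 10xxxxxx): code points U+0080..U+07FF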
ch = *ptr & 0x1f;
++ptr;
ch <<= 6;
ch += *ptr & 0x3f;
ucs2[offset] = ch;
++ptr;
}
else if ((*ptr & 0x10) == 0)
{
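            // 3-byte sequence (1110xxxx 10xxxxxx 10xxxxxx): code points up to U+FFFF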
ch = *ptr & 0x0f;
++ptr;
ch <<= 6;
ch += *ptr & 0x3f;
++ptr;
ch <<= 6;
ch += *ptr & 0x3f;
ucs2[offset] = ch;
++ptr;
}
else
{
// symbol number is > 0xffff
ucs2[offset] = 'X';
ptr += 4;
}
}
ucs2len = offset;
}
| 903 |
316 | <filename>tests/reference/ast-subroutine3b-98faad7.json<gh_stars>100-1000
{
"basename": "ast-subroutine3b-98faad7",
"cmd": "lfortran --show-ast --no-color {infile} -o {outfile}",
"infile": "tests/subroutine3b.f90",
"infile_hash": "0726b3cb8b3ba1a2fc624a4215146065a6ce9a6a0b460ebe40c172a5",
"outfile": null,
"outfile_hash": null,
"stdout": "ast-subroutine3b-98faad7.stdout",
"stdout_hash": "34ab11412b71a6250d8f0706f3ce3042d4d36be749ec04e050a6d652",
"stderr": null,
"stderr_hash": null,
"returncode": 0
} | 281 |
1,144 | <filename>backend/de.metas.handlingunits.base/src/main/java-gen/de/metas/handlingunits/model/X_M_HU_BestBefore_V.java
// Generated Model - DO NOT CHANGE
package de.metas.handlingunits.model;
import java.sql.ResultSet;
import java.util.Properties;
import javax.annotation.Nullable;
/** Generated Model for M_HU_BestBefore_V
* @author metasfresh (generated)
*/
@SuppressWarnings("unused")
public class X_M_HU_BestBefore_V extends org.compiere.model.PO implements I_M_HU_BestBefore_V, org.compiere.model.I_Persistent
{
private static final long serialVersionUID = 83716577L;
/** Standard Constructor */
public X_M_HU_BestBefore_V (final Properties ctx, final int M_HU_BestBefore_V_ID, @Nullable final String trxName)
{
super (ctx, M_HU_BestBefore_V_ID, trxName);
}
/** Load Constructor */
public X_M_HU_BestBefore_V (final Properties ctx, final ResultSet rs, @Nullable final String trxName)
{
super (ctx, rs, trxName);
}
/** Load Meta Data */
@Override
protected org.compiere.model.POInfo initPO(final Properties ctx)
{
return org.compiere.model.POInfo.getPOInfo(Table_Name);
}
@Override
public void setGuaranteeDaysMin (final int GuaranteeDaysMin)
{
set_ValueNoCheck (COLUMNNAME_GuaranteeDaysMin, GuaranteeDaysMin);
}
@Override
public int getGuaranteeDaysMin()
{
return get_ValueAsInt(COLUMNNAME_GuaranteeDaysMin);
}
@Override
public void setHU_BestBeforeDate (final @Nullable java.sql.Timestamp HU_BestBeforeDate)
{
set_ValueNoCheck (COLUMNNAME_HU_BestBeforeDate, HU_BestBeforeDate);
}
@Override
public java.sql.Timestamp getHU_BestBeforeDate()
{
return get_ValueAsTimestamp(COLUMNNAME_HU_BestBeforeDate);
}
@Override
public void setHU_Expired (final @Nullable java.lang.String HU_Expired)
{
set_ValueNoCheck (COLUMNNAME_HU_Expired, HU_Expired);
}
@Override
public java.lang.String getHU_Expired()
{
return get_ValueAsString(COLUMNNAME_HU_Expired);
}
@Override
public void setHU_ExpiredWarnDate (final @Nullable java.sql.Timestamp HU_ExpiredWarnDate)
{
set_ValueNoCheck (COLUMNNAME_HU_ExpiredWarnDate, HU_ExpiredWarnDate);
}
@Override
public java.sql.Timestamp getHU_ExpiredWarnDate()
{
return get_ValueAsTimestamp(COLUMNNAME_HU_ExpiredWarnDate);
}
@Override
public void setM_HU_ID (final int M_HU_ID)
{
if (M_HU_ID < 1)
set_ValueNoCheck (COLUMNNAME_M_HU_ID, null);
else
set_ValueNoCheck (COLUMNNAME_M_HU_ID, M_HU_ID);
}
@Override
public int getM_HU_ID()
{
return get_ValueAsInt(COLUMNNAME_M_HU_ID);
}
} | 1,035 |
602 | <reponame>jtgrabowski/stargate
/*
* Copyright The Stargate Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.stargate.health;
import com.codahale.metrics.health.HealthCheck;
import java.util.ArrayList;
import java.util.List;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class BundleStateChecker extends HealthCheck {
private static final Logger logger = LoggerFactory.getLogger(BundleStateChecker.class);
private final BundleContext context;
public BundleStateChecker(BundleContext context) {
this.context = context;
}
@Override
protected Result check() {
List<String> inactive = new ArrayList<>();
for (Bundle bundle : context.getBundles()) {
if (bundle.getState() != Bundle.ACTIVE) {
inactive.add(bundle.getSymbolicName());
}
}
if (inactive.isEmpty()) {
return Result.healthy("All bundles active");
} else {
logger.warn("Inactive bundles: {}", inactive);
return Result.unhealthy("Inactive bundles: " + inactive);
}
}
}
| 511 |
5,129 | <reponame>aimore-globality/unilm
import faiss
import json
import logging
import numpy as np
import os
import torch
from src.pequod.data.xretrieval import load_and_cache_examples
from src.pequod.eval.evaluator import Evaluator
from src.pequod.eval.utils_retrieve import mine_bitext, bucc_eval
logger = logging.getLogger(__name__)
def load_embeddings(embed_file, num_sentences=None):
logger.info(' loading from {}'.format(embed_file))
embeds = np.load(embed_file)
return embeds
class BuccEvaluator(Evaluator):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.model_langs = ["share_lang", "order"]
self.proj_matrix_fast = kwargs.get("proj_matrix_fast", None)
if self.proj_matrix_fast is not None:
logger.info("proj_matrix_fast:" + str(self.proj_matrix_fast.size()))
self.proj_matrix_fast = self.proj_matrix_fast[0].float().cuda()
self.res = {}
def get_mean_emb(self, layer_outputs, pool_mask):
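        # Masked mean pooling: zero out padded positions using the attention mask,
        # then average over the number of real tokens in each sequence.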
embs = (layer_outputs * pool_mask.unsqueeze(2).float()).sum(dim=1) / \
pool_mask.sum(dim=1).view(-1, 1).float()
return embs
def get_cxlm_emb(self, layer_outputs):
if self.proj_matrix_fast is None:
raise ValueError
ret = torch.mm(layer_outputs[:,0,:], self.proj_matrix_fast)
# ret = layer_outputs[:,0,:]
return ret
def get_cls_emb(self, layer_outputs):
return layer_outputs[:,0,:]
def bt_norm(self, x):
m = x.mean(0, keepdim=True)
v = x.var(0, unbiased=True, keepdim=True)
return (x-m) / torch.sqrt(v+1e-5)
def get_embeddings(self, batch, outputs, emb_type=None, is_bt_norm=False):
if emb_type is None:
emb_type = self.args.emb_type
last_layer_outputs, first_token_outputs, all_layer_outputs = outputs
if emb_type == "mean":
ret = self.get_mean_emb(all_layer_outputs[self.args.mean_layer_id], batch["attention_mask"])
elif emb_type == "cls":
ret = self.get_cls_emb(all_layer_outputs[-1])
elif emb_type == "cxlm":
ret = self.get_cxlm_emb(all_layer_outputs[self.args.mean_layer_id]) #TODO
else: raise ValueError
if is_bt_norm:
ret = self.bt_norm(ret)
ret = ret.cpu().numpy().astype(np.float32)
# ret = None
del last_layer_outputs, first_token_outputs, all_layer_outputs
torch.cuda.empty_cache()
return ret
def run(self):
args = self.args
self.model.eval()
best_threshold = None
SL, TL = args.src_language, args.tgt_language
for split in ['test']:
# for split in ['dev', 'test']:
prefix = f'{SL}-{TL}.{split}'
if args.extract_embeds:
for lang in [SL, TL]:
file = os.path.join(args.output_dir, f'{prefix}.{lang}.npy')
if os.path.exists(file):
continue
langpair = f'{SL}-{TL}.{split}'
dl1 = self.get_dataloader(langpair, lang)
all_emb1 = []
for batch1 in dl1:
batch1 = self._parse_batch(batch1, has_label=False)
#forward
with torch.no_grad():
outputs1 = self.model(**batch1)
all_emb1.append(self.get_embeddings(batch1, outputs1, is_bt_norm=args.bt_norm))
all_emb1 = np.concatenate(all_emb1)
file = os.path.join(args.output_dir, f'{prefix}.{lang}.npy')
logger.info('save embed {} to file {}'.format(all_emb1.shape, file))
np.save(file, all_emb1)
if args.mine_bitext:
threshold = None
cand2score_file = os.path.join(args.output_dir, 'candidates.tsv')
x = load_embeddings(os.path.join(args.output_dir, f'{prefix}.{SL}.npy'))
y = load_embeddings(os.path.join(args.output_dir, f'{prefix}.{TL}.npy'))
x_text_file = os.path.join(args.data_dir, f'{prefix}.{SL}.txt')
y_text_file = os.path.join(args.data_dir, f'{prefix}.{TL}.txt')
x_id_file = os.path.join(args.data_dir, f'{prefix}.{SL}.id')
y_id_file = os.path.join(args.data_dir, f'{prefix}.{TL}.id')
mine_bitext(x, y, x_text_file, y_text_file, cand2score_file, dist=args.dist, use_shift_embeds=args.use_shift_embeds)
gold_file = os.path.join(args.data_dir, f'{prefix}.gold')
if os.path.exists(gold_file):
predict_file = os.path.join(args.output_dir, f'test-{SL}.tsv')
results = bucc_eval(cand2score_file, gold_file, x_text_file, y_text_file, x_id_file, y_id_file, predict_file, threshold)
with open(os.path.join(args.output_dir, 'final.txt'), 'w', encoding='utf-8') as f:
f.write(json.dumps(results))
best_threshold = results['best-threshold']
logger.info('--Candidates: {}'.format(cand2score_file))
logger.info(' '.join('{}={:.4f}'.format(k,v) for k,v in results.items()))
# if args.layer_ensemble:
# threshold = None
# prefix = 'mean_l2'
# layers = args.ens_layers.split(',')
#
# cand2score_file = os.path.join(args.output_dir, 'candidates.tsv')
#
# x = load_embeddings(os.path.join(args.output_dir, f'{prefix}.{SL}.npy'))
# y = load_embeddings(os.path.join(args.output_dir, f'{prefix}.{TL}.npy'))
#
# x_text_file = os.path.join(args.data_dir, f'{prefix}.{SL}.txt')
# y_text_file = os.path.join(args.data_dir, f'{prefix}.{TL}.txt')
# x_id_file = os.path.join(args.data_dir, f'{prefix}.{SL}.id')
# y_id_file = os.path.join(args.data_dir, f'{prefix}.{TL}.id')
#
# mine_bitext(x, y, x_text_file, y_text_file, cand2score_file, dist=args.dist, use_shift_embeds=args.use_shift_embeds)
# gold_file = os.path.join(args.data_dir, f'{prefix}.gold')
# if os.path.exists(gold_file):
# predict_file = os.path.join(args.output_dir, f'test-{SL}.tsv')
# results = bucc_eval(cand2score_file, gold_file, x_text_file, y_text_file, x_id_file, y_id_file, predict_file, threshold)
#
# with open(os.path.join(args.output_dir, 'final.txt'), 'w', encoding='utf-8') as f:
# f.write(json.dumps(results))
#
# best_threshold = results['best-threshold']
# logger.info('--Candidates: {}'.format(cand2score_file))
# logger.info(' '.join('{}={:.4f}'.format(k,v) for k,v in results.items()))
# output retrieval results
# with open(os.path.join(args.output_dir, 'test-{0}.tsv'.format(lang1)), 'w', encoding='utf-8') as writer:
# for i, pred in enumerate(predictions):
# writer.write(str(pred[0]) + '\n')
def load_and_cache_examples(self, langpair, lang, **kwargs):
args = self.args
cache_key = "%s-%s" % (args.model_key, args.model_type)
return load_and_cache_examples(
args=args,
langpair=langpair,
lang=lang,
tokenizer=self.tokenizer,
key=cache_key,
prefix=args.data_prefix,
)
| 3,320 |
868 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.artemis.client.cdi.extension;
import javax.enterprise.event.Observes;
import javax.enterprise.inject.spi.AfterBeanDiscovery;
import javax.enterprise.inject.spi.Extension;
import javax.enterprise.inject.spi.ProcessBean;
import org.apache.activemq.artemis.core.config.Configuration;
import org.apache.artemis.client.cdi.configuration.ArtemisClientConfiguration;
import org.apache.artemis.client.cdi.logger.ActiveMQCDILogger;
public class ArtemisExtension implements Extension {
private boolean foundEmbeddedConfig = false;
private boolean foundConfiguration = false;
void foundClientConfig(@Observes ProcessBean<?> processBean) {
if (processBean.getBean().getTypes().contains(ArtemisClientConfiguration.class)) {
ActiveMQCDILogger.LOGGER.discoveredConfiguration(processBean);
foundConfiguration = true;
}
}
void foundEmbeddedConfig(@Observes ProcessBean<?> processBean) {
if (processBean.getBean().getTypes().contains(Configuration.class)) {
ActiveMQCDILogger.LOGGER.discoveredClientConfiguration(processBean);
foundEmbeddedConfig = true;
}
}
void afterBeanDiscovery(@Observes AfterBeanDiscovery afterBeanDiscovery) {
if (!foundConfiguration) {
afterBeanDiscovery.addBean(new ArtemisClientConfigBean());
} else {
ActiveMQCDILogger.LOGGER.notUsingDefaultConfiguration();
}
if (!foundEmbeddedConfig) {
afterBeanDiscovery.addBean(new ArtemisEmbeddedServerConfigBean());
} else {
ActiveMQCDILogger.LOGGER.notUsingDefaultClientConfiguration();
}
}
}
| 779 |
630 | <reponame>Anyz01/FeatureFu<filename>expr/src/main/java/com/linkedin/featurefu/expr/Expr.java<gh_stars>100-1000
/*
* Copyright 2015 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.linkedin.featurefu.expr;
/**
* This interface represents an s-expression: http://en.wikipedia.org/wiki/S-expression
*
* an s-expression is classically defined:
* 1. an atom (constant or variable), or
* 2. an expression of the form (operator x y) where x and y are s-expressions
*
* therefore the inheritance hierarchy: Expr -> Atom -> Constant
* \ \__-> Variable
* \_->Expression
*
* Operator -> dozens of Operator implementations
*
* More s-expression parser implementation in different languages
* http://rosettacode.org/wiki/S-Expressions
*
* Author: <NAME> <http://www.linkedin.com/in/lijuntang>
*/
public interface Expr {
/**
* Each Expr can be evaluated, that's the whole purpose
* @return evaluation result
*/
public double evaluate();
/**
* For displaying purpose, to be more readable for human
* @return human friendly string
*/
public String toString();
}
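// Illustration (not part of the original source): under this model the s-expression
// (+ 1 (* 2 3)) parses into an Expression whose evaluate() returns 7.0, while a bare
// constant such as 2.5 is an Atom (Constant) that evaluates to itself.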
| 583 |
460 | <reponame>dyzmapl/BumpTop
/*------------------------------------------------------------------------------
* Copyright (C) 2003-2006 <NAME> and the CLucene Team
*
* Distributable under the terms of either the Apache License (Version 2.0) or
* the GNU Lesser General Public License, as specified in the COPYING file.
------------------------------------------------------------------------------*/
#ifndef _lucene_analysis_standard_StandardAnalyzer
#define _lucene_analysis_standard_StandardAnalyzer
#if defined(_LUCENE_PRAGMA_ONCE)
# pragma once
#endif
#include "CLucene/util/VoidMap.h"
#include "CLucene/util/Reader.h"
#include "CLucene/analysis/AnalysisHeader.h"
#include "CLucene/analysis/Analyzers.h"
#include "StandardFilter.h"
#include "StandardTokenizer.h"
CL_NS_DEF2(analysis,standard)
/** Represents a standard analyzer. */
class StandardAnalyzer : public Analyzer
{
private:
CL_NS(util)::CLSetList<const TCHAR*> stopSet;
public:
/** Builds an analyzer.*/
StandardAnalyzer();
/** Builds an analyzer with the given stop words. */
StandardAnalyzer( const TCHAR** stopWords);
~StandardAnalyzer();
/**
* Constructs a StandardTokenizer filtered by a
* StandardFilter, a LowerCaseFilter and a StopFilter.
*/
TokenStream* tokenStream(const TCHAR* fieldName, CL_NS(util)::Reader* reader)
;
};
CL_NS_END2
#endif
| 478 |
2,591 | <reponame>meisenla/liquibase
package liquibase.change.core;
import liquibase.change.ColumnConfig;
import liquibase.parser.core.ParsedNode;
import liquibase.parser.core.ParsedNodeException;
import liquibase.resource.ResourceAccessor;
public class LoadDataColumnConfig extends ColumnConfig {
private Integer index;
private String header;
private Boolean allowUpdate;
private LoadDataChange.LOAD_DATA_TYPE loadType;
public Integer getIndex() {
return index;
}
public void setIndex(Integer index) {
this.index = index;
}
public String getHeader() {
return header;
}
public void setHeader(String header) {
this.header = header;
}
/**
* Returns true if this Column should be updated. Returns null if update hasn't been explicitly assigned.
*/
public Boolean getAllowUpdate() {
return allowUpdate;
}
public void setAllowUpdate(Boolean getAllowUpdate) {
this.allowUpdate = getAllowUpdate;
}
@Override
public void load(ParsedNode parsedNode, ResourceAccessor resourceAccessor) throws ParsedNodeException {
super.load(parsedNode, resourceAccessor);
this.index = parsedNode.getChildValue(null, "index", Integer.class);
this.header = parsedNode.getChildValue(null, "header", String.class);
this.allowUpdate = parsedNode.getChildValue(null, "allowUpdate", Boolean.class);
}
public ColumnConfig setType(LoadDataChange.LOAD_DATA_TYPE value) {
super.setType(value.toString());
this.loadType = value;
return this;
}
/**
* Return {@link #getType()} as a standard enum, or null if the type is null OR {@link liquibase.change.core.LoadDataChange.LOAD_DATA_TYPE#UNKNOWN} if it doesn't match a standard type.
* @return
*/
public LoadDataChange.LOAD_DATA_TYPE getTypeEnum() {
final String type = this.getType();
if (type == null) {
return null;
}
if (this.loadType == null) {
try {
this.loadType = LoadDataChange.LOAD_DATA_TYPE.valueOf(type.toUpperCase());
} catch (IllegalArgumentException e) {
return LoadDataChange.LOAD_DATA_TYPE.UNKNOWN;
}
}
return this.loadType;
}
}
| 884 |
4,772 | package example.service;
import example.repo.Customer1349Repository;
import org.springframework.stereotype.Service;
@Service
public class Customer1349Service {
public Customer1349Service(Customer1349Repository repo) {}
}
| 64 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-h382-3v8j-jf8j",
"modified": "2022-05-01T17:42:32Z",
"published": "2022-05-01T17:42:32Z",
"aliases": [
"CVE-2007-0222"
],
"details": "Directory traversal vulnerability in the EmChartBean server side component for Oracle Application Server 10g allows remote attackers to read arbitrary files via unknown vectors, probably \"\\..\" sequences in the beanId parameter. NOTE: this is likely a duplicate of another CVE that Oracle addressed in CPU Jan 2007, but due to lack of details by Oracle, it is unclear which BugID this issue is associated with, so the other CVE cannot be determined. Possibilities include EM02 (CVE-2007-0292) or EM05 (CVE-2007-0293).",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2007-0222"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/23794"
},
{
"type": "WEB",
"url": "http://securitytracker.com/id?1017522"
},
{
"type": "WEB",
"url": "http://www.oracle.com/technetwork/topics/security/cpujan2007-101493.html"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/457105/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/458657/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/22027"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/22083"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 720 |
348 | {"nom":"Senneville-sur-Fécamp","circ":"9ème circonscription","dpt":"Seine-Maritime","inscrits":670,"abs":319,"votants":351,"blancs":20,"nuls":5,"exp":326,"res":[{"nuance":"REM","nom":"<NAME>","voix":205},{"nuance":"FN","nom":"<NAME>","voix":121}]} | 97 |
375 | import os
import idc
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)), ".idaprobe")
with open(filename, "wb") as fd:
fd.write(b"WOO")
idc.Exit(-5)
| 70 |
344 | <reponame>ProEditor/VSCode_SourceAnalysis
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// Do not edit this file. It is machine generated.
{
"typingsReference.already.exists": "{0} esiste già. Verificare che il file sia incluso nel file jsconfig.json del progetto",
"typingsReference.error.download": "Non è possibile recuperare il file d.ts in {0}: {1}",
"typingsReference.error.write": "Problema durante la creazione di {0}: {1}",
"typingsReference.success.nojsconfig": "Download di {0} riuscito",
"typingsReference.success.withjsconfig": "Download di {0} riuscito. Verificare che il file d.ts sia incluso nel file 'jsconfig.json' del progetto."
} | 261 |
315 | <reponame>tharunkb/javamicroservice
package com.apssouza.listeners;
import com.apssouza.events.TodoServiceMethodInvokedEvent;
import com.apssouza.monitors.TodoServiceMethodInvokedStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Component;
/**
* TodoService method invoked listener
*
* @author apssouza
*/
@Component
public class TodoServiceMethodListener {
@Autowired
private TodoServiceMethodInvokedStore methodMonitor;
@EventListener
public void onMethodCalled(TodoServiceMethodInvokedEvent event){
methodMonitor.addNewEvent(event);
}
}
| 239 |
682 | <reponame>pengwei1024/JsBridge
package com.apkfuns.jsbridgesample.module;
import android.widget.Toast;
import com.apkfuns.jsbridge.module.JSBridgeMethod;
import com.apkfuns.jsbridge.module.JsModule;
/**
* Created by pengwei on 2017/6/13.
*/
public class MultiLayerModule2 extends JsModule {
@Override
public String getModuleName() {
return "native.extend.api";
}
@JSBridgeMethod
public void toast(String msg) {
Toast.makeText(getContext(), msg, Toast.LENGTH_SHORT).show();
}
}
| 201 |
377 | <filename>inception/inception-api-dao/src/main/java/de/tudarmstadt/ukp/clarin/webanno/api/dao/casstorage/CasPersistenceUtils.java
/*
* Licensed to the Technische Universität Darmstadt under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The Technische Universität Darmstadt
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.tudarmstadt.ukp.clarin.webanno.api.dao.casstorage;
import static de.tudarmstadt.ukp.clarin.webanno.api.annotation.util.WebAnnoCasUtil.getRealCas;
import static org.apache.uima.cas.impl.Serialization.deserializeCASComplete;
import static org.apache.uima.cas.impl.Serialization.serializeCASComplete;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.apache.commons.io.FileUtils;
import org.apache.uima.cas.CAS;
import org.apache.uima.cas.SerialFormat;
import org.apache.uima.cas.impl.CASCompleteSerializer;
import org.apache.uima.cas.impl.CASImpl;
import org.apache.uima.util.CasIOUtils;
import org.apache.uima.util.TypeSystemUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import de.tudarmstadt.ukp.clarin.webanno.api.annotation.util.WebAnnoCasUtil;
import de.tudarmstadt.ukp.dkpro.core.api.metadata.type.DocumentMetaData;
public final class CasPersistenceUtils
{
private final static Logger LOG = LoggerFactory.getLogger(CasPersistenceUtils.class);
private CasPersistenceUtils()
{
// No instances
}
public static void writeSerializedCas(CAS aCas, File aFile) throws IOException
{
FileUtils.forceMkdir(aFile.getParentFile());
CASCompleteSerializer serializer = null;
CAS realCas = getRealCas(aCas);
// UIMA-6162 Workaround: synchronize CAS during de/serialization
synchronized (((CASImpl) realCas).getBaseCAS()) {
try {
serializer = serializeCASComplete((CASImpl) getRealCas(aCas));
// BEGIN SAFEGUARD --------------
// Safeguard that we do NOT write a CAS which can afterwards not be read and thus
// would render the document broken within the project
// Reason we do this: https://issues.apache.org/jira/browse/UIMA-6162
CAS dummy = WebAnnoCasUtil.createCas();
deserializeCASComplete(serializer, (CASImpl) getRealCas(dummy));
// END SAFEGUARD --------------
}
catch (Exception e) {
if (LOG.isDebugEnabled()) {
preserveForDebugging(aFile, aCas, serializer);
}
throw new IOException(e);
}
try (ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(aFile))) {
os.writeObject(serializer);
}
}
}
private static void preserveForDebugging(File aFile, CAS aCas,
CASCompleteSerializer aSerializer)
{
long ts = System.currentTimeMillis();
try (FileOutputStream xmiout = new FileOutputStream(
new File(aFile.getPath() + ".borked-" + ts + ".xmi"))) {
CasIOUtils.save(aCas, xmiout, SerialFormat.XMI);
}
catch (Exception e2) {
LOG.error("Debug XMI serialization failed: {}", e2.getMessage(), e2);
}
try (FileOutputStream tsout = new FileOutputStream(
new File(aFile.getPath() + ".borked-" + ts + ".ts.xml"))) {
TypeSystemUtil.typeSystem2TypeSystemDescription(aCas.getTypeSystem()).toXML(tsout);
}
catch (Exception e2) {
LOG.error("Debug type system serialization failed: {}", e2.getMessage(), e2);
}
try (ObjectOutputStream os = new ObjectOutputStream(
new FileOutputStream(new File(aFile.getPath() + ".borked-" + ts + ".ser")))) {
os.writeObject(aSerializer);
}
catch (Exception e2) {
LOG.error("Debug serialization failed: {}", e2.getMessage(), e2);
}
}
public static void readSerializedCas(CAS aCas, File aFile) throws IOException
{
CAS realCas = getRealCas(aCas);
// UIMA-6162 Workaround: synchronize CAS during de/serialization
synchronized (((CASImpl) realCas).getBaseCAS()) {
try (ObjectInputStream is = new ObjectInputStream(new FileInputStream(aFile))) {
CASCompleteSerializer serializer = (CASCompleteSerializer) is.readObject();
deserializeCASComplete(serializer, (CASImpl) realCas);
// Workaround for UIMA adding back deleted DocumentAnnotations
// https://issues.apache.org/jira/browse/UIMA-6199
// If there is a DocumentMetaData annotation, then we can drop any of the default
// UIMA DocumentAnnotation instances (excluding the DocumentMetaData of course)
if (!aCas.select(DocumentMetaData.class.getName()).isEmpty()) {
aCas.select(CAS.TYPE_NAME_DOCUMENT_ANNOTATION).filter(
fs -> !DocumentMetaData.class.getName().equals(fs.getType().getName()))
.forEach(aCas::removeFsFromIndexes);
}
}
catch (ClassNotFoundException e) {
throw new IOException(e);
}
}
}
}
| 2,478 |
879 | <filename>plugin/flatNetworkProvider/src/main/java/org/zstack/network/service/flat/FlatNetworkServiceValidator.java
package org.zstack.network.service.flat;
import org.zstack.core.ScatteredValidator;
import java.lang.reflect.Method;
import java.util.List;
/**
 * @author yh.w
 * Created 10:43 2021/10/26
*/
public class FlatNetworkServiceValidator extends ScatteredValidator {
private static List<Method> methods;
static {
// method signature: static void xxx(String hostUuid)
methods = collectValidatorMethods(FlatNetworkServiceValidatorMethod.class, String.class);
}
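    /**
     * Invokes every registered validator for the given host UUID. Returns true if any validator
     * signals via SkipApplyFlatNetworkServiceException that applying the flat network service
     * should be skipped, false otherwise.
     */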
public boolean validate(String hostUuid) {
try {
invokeValidatorMethods(methods, hostUuid);
} catch (SkipApplyFlatNetworkServiceException e) {
return true;
}
return false;
}
}
| 331 |
831 | /*
* Copyright (C) 2019 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ExceptionThrowers.h"
#include "Context.h"
void throwJavaException(JNIEnv *env, const char *exceptionClass, const char *fmt, ...) {
char msg[512];
va_list args;
va_start (args, fmt);
vsnprintf(msg, sizeof(msg), fmt, args);
va_end (args);
env->ThrowNew(env->FindClass(exceptionClass), msg);
}
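// Formats a printf-style message and throws a QuickJsException constructed from the class and
// constructor references held in the Context.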
void throwJsExceptionFmt(JNIEnv *env, const Context *context, const char *fmt, ...) {
char msg[512];
va_list args;
va_start (args, fmt);
vsnprintf(msg, sizeof(msg), fmt, args);
va_end (args);
jobject exception = env->NewObject(context->quickJsExceptionClass,
context->quickJsExceptionConstructor,
env->NewStringUTF(msg),
NULL);
env->Throw(static_cast<jthrowable>(exception));
}
| 518 |
773 |
import os
import time
import signal
import socket
import logging
import SocketServer
import base
logger = logging.getLogger(__name__)
class BaseTCPHandler(SocketServer.BaseRequestHandler):
def generate_response(self, size):
""" This generates a wrong HTTP request, Content-Length is higher
than the response size.
"""
r = 'HTTP/1.1 200 OK\n'
r += 'Content-Type: text/plain\n'
r += 'Content-Length: {0}\n'.format(size)
r += '\n'
r += 'test'
return r
def receive(self):
data = self.request.recv(4096)
logger.info("Received from {0}: {1}".format(self.client_address[0], data))
class TCPHandlerBodyTooShort(BaseTCPHandler):
def handle(self):
self.receive()
self.request.sendall(self.generate_response(4096))
self.request.close()
class TCPHandlerBodyTooLong(BaseTCPHandler):
def handle(self):
self.receive()
self.request.sendall(self.generate_response(2))
self.request.close()
class ParseError(base.TestCase):
""" This test makes sure an HTTP parse error won't kill the server """
def _spawn_server(self, port, handler):
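        """ Fork a child that serves the given handler on `port`; the parent
        polls the port until the child answers, then records the child PID.
        """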
pid = os.fork()
if pid > 0:
while True:
r = self.http_request('localhost', port=port)
if r > 0:
logger.info('FAKE httpd spawned on port {0}. PID: {1}'.format(port, pid))
self.pids.append(pid)
return pid
time.sleep(0.5)
SocketServer.TCPServer.allow_reuse_address = True
server = SocketServer.TCPServer(('localhost', port), handler)
server.allow_reuse_address = True
server.serve_forever()
def setUp(self):
self.pids = []
def tearDown(self):
if not self.pids:
return
for pid in self.pids:
os.kill(pid, signal.SIGKILL)
os.wait()
def test_parseerror_body_too_long(self):
""" Invalid backend: len(payload) > Content-Length """
port = 2080
self._spawn_server(port, TCPHandlerBodyTooLong)
self.register_frontend('foobar', ['http://localhost:{0}'.format(port)])
        # The backend sends more bytes than announced in Content-Length;
        # the proxy is still expected to relay the 200 response
self.assertEqual(self.http_request('foobar'), 200)
def test_parseerror_body_too_short(self):
""" Invalid backend: len(payload) < Content-Length """
port = 2080
self._spawn_server(port, TCPHandlerBodyTooShort)
self.register_frontend('foobar', ['http://localhost:{0}'.format(port)])
# The request will throw a TCP timeout (since all bytes announced in
# the Content-Length cannot be read)
self.assertEqual(self.http_request('foobar'), -1)
def test_parseerror_malformed_client(self):
""" Invalid request made on a valid backend. """
port = 2080
self.spawn_httpd(port)
self.register_frontend('foobar', ['http://localhost:{0}'.format(port)])
self.assertEqual(self.http_request('foobar'), 200)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect on Hipache
sock.connect(('localhost', 1080))
data = 'GET /pipo&%$#(PIPO HTTP/1.1\n'
data += 'Host: foobar\n\n'
sock.sendall(data)
response_code = sock.recv(12).split(' ')[1]
self.assertEqual(response_code, '200')
sock.sendall(data)
sock.close()
| 1,546 |
346 | <filename>src/game/HelpScreen.h
#ifndef _HELP_SCREEN__H_
#define _HELP_SCREEN__H_
//enum used for the different help screens that can come up
enum HelpScreenID
{
HELP_SCREEN_NONE = -1,
HELP_SCREEN_LAPTOP,
HELP_SCREEN_MAPSCREEN,
HELP_SCREEN_MAPSCREEN_NO_ONE_HIRED,
HELP_SCREEN_MAPSCREEN_NOT_IN_ARULCO,
HELP_SCREEN_MAPSCREEN_SECTOR_INVENTORY,
HELP_SCREEN_TACTICAL,
HELP_SCREEN_OPTIONS,
HELP_SCREEN_LOAD_GAME,
HELP_SCREEN_NUMBER_OF_HELP_SCREENS,
};
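//runtime state of the help screen: which screen is active, its layout, paging and scroll position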
struct HELP_SCREEN_STRUCT
{
HelpScreenID bCurrentHelpScreen;
UINT32 uiFlags;
UINT16 usHasPlayerSeenHelpScreenInCurrentScreen;
UINT8 ubHelpScreenDirty;
UINT16 usScreenLocX;
UINT16 usScreenLocY;
UINT16 usScreenWidth;
UINT16 usScreenHeight;
INT32 iLastMouseClickY; //last position the mouse was clicked ( if != -1 )
INT8 bCurrentHelpScreenActiveSubPage; //used to keep track of the current page being displayed
INT8 bNumberOfButtons;
	//used so that if the user checked the box to show the help, it doesn't automatically come up every frame
BOOLEAN fHaveAlreadyBeenInHelpScreenSinceEnteringCurrenScreen;
INT8 bDelayEnteringHelpScreenBy1FrameCount;
UINT16 usLeftMarginPosX;
UINT16 usCursor;
BOOLEAN fWasTheGamePausedPriorToEnteringHelpScreen;
//scroll variables
UINT16 usTotalNumberOfPixelsInBuffer;
UINT32 uiLineAtTopOfTextBuffer;
UINT16 usTotalNumberOfLinesInBuffer;
BOOLEAN fForceHelpScreenToComeUp;
};
extern HELP_SCREEN_STRUCT gHelpScreen;
BOOLEAN ShouldTheHelpScreenComeUp(HelpScreenID, BOOLEAN fForceHelpScreenToComeUp);
void HelpScreenHandler(void);
void InitHelpScreenSystem(void);
void NewScreenSoResetHelpScreen(void);
HelpScreenID HelpScreenDetermineWhichMapScreenHelpToShow();
#endif
| 680 |