Columns:
  max_stars_count (int64): 301 to 224k
  text (string): lengths 6 to 1.05M
  token_count (int64): 3 to 727k
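The rows below follow this three-column schema. As a rough, hypothetical sketch (the dataset path is a placeholder, not the actual source of this preview), a dataset with these columns could be inspected with the Hugging Face datasets library along these lines:

# Hypothetical sketch -- the dataset name is a placeholder, not the real source
# of this preview; it only illustrates the three-column schema shown above.
from datasets import load_dataset

ds = load_dataset("some-org/some-code-dataset", split="train", streaming=True)

# Print the star count, token count, and the first 80 characters of each file.
for row in ds.take(3):
    print(row["max_stars_count"], row["token_count"], row["text"][:80])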
max_stars_count: 340
<reponame>ankane/informers<gh_stars>100-1000
from pathlib import Path
import tempfile

from transformers.convert_graph_to_onnx import convert, quantize

dest = Path(tempfile.mkdtemp(), "ner.onnx")

convert(
    pipeline_name="ner",
    model="dbmdz/bert-large-cased-finetuned-conll03-english",
    output=dest,
    framework="pt",
    opset=11
)

quantize(dest)
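The sample above exports a Hugging Face NER pipeline to ONNX and quantizes it. As a hedged, illustrative follow-on (not part of the dataset row), the exported ner.onnx could be loaded with onnxruntime roughly like this; the model name comes from the sample, everything else is an assumption:

# Hypothetical continuation of the sample above -- assumes onnxruntime is
# installed and loads the unquantized "ner.onnx" written by convert().
import onnxruntime as ort
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "dbmdz/bert-large-cased-finetuned-conll03-english"
)
session = ort.InferenceSession(str(dest))

# Feed only the inputs the exported graph actually declares.
encoded = tokenizer("Informers runs transformer models in Ruby", return_tensors="np")
input_names = {i.name for i in session.get_inputs()}
outputs = session.run(None, {k: v for k, v in encoded.items() if k in input_names})
print(outputs[0].shape)  # e.g. (1, sequence_length, num_labels)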
token_count: 133
max_stars_count: 3,084
<reponame>ixjf/Windows-driver-samples //////////////////////////////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2012 Microsoft Corporation. All Rights Reserved. // // Module Name: // HelperFunctions_ThreadsAndEvents.cpp // // Abstract: // This module contains functions which functions which simplify threads and eventing. // // Naming Convention: // // <Scope><Module><Object><Action><Modifier> // // i.e. // // <Scope> // { // - Function is likely visible to other modules // } // <Module> // { // Hlpr - Function is from HelperFunctions_* Modules. // } // <Object> // { // Event - Function pertains to events. // Thread - Function pertains to threads. // } // <Action> // { // Cleanup - // Set - // Start - // Stop - // Wait - // } // <Modifier> // { // ForCompletion - // ForEvent - Function determines equality between values. // } // // Private Functions: // // Public Functions: // // Author: // <NAME> (DHarper) // // Revision History: // // [ Month ][Day] [Year] - [Revision]-[ Comments ] // May 01, 2010 - 1.0 - Creation // //////////////////////////////////////////////////////////////////////////////////////////////////// #include "HelperFunctions_Include.h" /// . /** @helper_function="HlprEventReset" Purpose: Sets the event to nonsignaled. <br> <br> Notes: <br> <br> MSDN_Ref: HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS685081.aspx <br> */ VOID HlprEventReset(_In_opt_ HANDLE event) { if(event) ResetEvent(event); return; } /** @helper_function="HlprEventSet" Purpose: Signals an event. <br> <br> Notes: <br> <br> MSDN_Ref: HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS686211.aspx <br> */ VOID HlprEventSet(_In_opt_ HANDLE event) { if(event) SetEvent(event); return; } /** @helper_function="HlprThreadStart" Purpose: Generates a new thread. <br> <br> Notes: <br> <br> MSDN_Ref: HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS682396.aspx <br> HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS682453.aspx <br> */ _Success_(return == NO_ERROR) UINT32 HlprThreadStart(_Inout_ THREAD_DATA* pThreadData, _In_opt_ VOID* pData) /* 0 */ { UINT32 status = NO_ERROR; if(pThreadData) { HlprThreadCleanup(pThreadData); if(pThreadData->threadStartRoutine) { pThreadData->threadStartEvent = CreateEvent(0, TRUE, FALSE, 0); pThreadData->threadStopEvent = CreateEvent(0, TRUE, FALSE, 0); pThreadData->threadContinueEvent = CreateEvent(0, TRUE, FALSE, 0); pThreadData->thread = CreateThread(0, 0, (LPTHREAD_START_ROUTINE)pThreadData->threadStartRoutine, pData ? pData : pThreadData, 0, (LPDWORD)&(pThreadData->threadId)); if(pThreadData->threadStartEvent && pThreadData->threadStopEvent && pThreadData->threadContinueEvent && pThreadData->thread) HlprThreadWaitForEvent(pThreadData->threadStartEvent, pThreadData); else { status = ERROR_GEN_FAILURE; HlprThreadCleanup(pThreadData); HlprLogError(L"HlprThreadStart() [status: %#x]", status); } } else { status = ERROR_INVALID_DATA; HlprLogError(L"HlprThreadStart() [status: %#x][pThreadData->threadStartRoutine: %#x]", status, pThreadData->threadStartRoutine); } } else { status = ERROR_INVALID_PARAMETER; HlprLogError(L"HlprThreadStart() [status: %#x][pThread: %#x]", status, pThreadData); } return status; } /** @helper_function="HlprThreadStop" Purpose: Signals a thread to stop and cleans it up. 
<br> <br> Notes: <br> <br> MSDN_Ref: HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS682396.aspx <br> <br> */ _Success_(return == NO_ERROR) UINT32 HlprThreadStop(_Inout_ THREAD_DATA* pThreadData) { UINT32 status = NO_ERROR; const UINT32 RETRY_ATTEMPTS = 5; if(pThreadData) { for(UINT32 i = 0; status != WAIT_TIMEOUT && i < RETRY_ATTEMPTS; i++) { HlprEventSet(pThreadData->threadStopEvent); status = HlprThreadWaitForCompletion(pThreadData); } HlprThreadCleanup(pThreadData); } return status; } /** @helper_function="HlprThreadStop" Purpose: Signals a thread to stop and cleans it up, and frees any allocated memory. <br> <br> Notes: <br> <br> MSDN_Ref: <br> */ _At_(*ppThreadData, _Pre_ _Notnull_) _When_(return != NO_ERROR, _At_(*ppThreadData, _Post_ _Notnull_)) _When_(return == NO_ERROR, _At_(*ppThreadData, _Post_ _Null_)) _Success_(return == NO_ERROR && *ppThreadData == 0) UINT32 HlprThreadStop(_Inout_ THREAD_DATA** ppThreadData) { UINT32 status = NO_ERROR; if(ppThreadData) { status = HlprThreadStop(*ppThreadData); if(status == NO_ERROR) { HLPR_DELETE(*ppThreadData) } } return status; } /** @helper_function="HlprThreadCleanup" Purpose: Cleans up a previously allocated thread. <br> <br> Notes: <br> <br> MSDN_Ref: HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS683190.aspx <br> HTTP://MSDN.Microsoft.com/En-Us/Library/Windows/Desktop/MS686717.aspx <br> */ VOID HlprThreadCleanup(_Inout_opt_ THREAD_DATA* pThreadData) { if(pThreadData) { LPTHREAD_START_ROUTINE threadStartRoutine = pThreadData->threadStartRoutine; UINT32 status = NO_ERROR; if(pThreadData->thread) { if(GetExitCodeThread(pThreadData->thread, (DWORD*)&status)) { if(status == STILL_ACTIVE) { UINT64 timeAlpha = GetTickCount64(); const UINT32 FOUR_MINUTES = 240000; for(UINT64 timeOmega = GetTickCount64(); timeOmega - timeAlpha < FOUR_MINUTES; timeOmega = GetTickCount64()) { HlprEventSet(pThreadData->threadStopEvent); status = HlprThreadWaitForCompletion(pThreadData); if(status == NO_ERROR) break; } if(status != NO_ERROR) { HlprLogInfo(L"Possible Runaway Thread"); #pragma warning(push) #pragma warning(disable: 6258) /// This is a last resort TerminateThread(pThreadData->thread, ERROR_THREAD_WAS_SUSPENDED); #pragma warning(pop) } } } else { status = GetLastError(); HlprLogError(L"HlprThreadCleanup() [status:%#x]", status); } } HLPR_CLOSE_HANDLE(pThreadData->threadStopEvent); HLPR_CLOSE_HANDLE(pThreadData->threadStartEvent); HLPR_CLOSE_HANDLE(pThreadData->threadContinueEvent); ZeroMemory(pThreadData, sizeof(THREAD_DATA)); pThreadData->threadStartRoutine = threadStartRoutine; } return; } /** @helper_function="HlprThreadWaitForCompletion" Purpose: Waits for a thread to complete. <br> <br> Notes: <br> <br> MSDN_Ref: <br> */ _Success_(return == NO_ERROR) UINT32 HlprThreadWaitForCompletion(_Inout_opt_ THREAD_DATA* pThreadData) { UINT32 status = NO_ERROR; if(pThreadData) { HlprEventSet(pThreadData->threadStopEvent); status = HlprThreadWaitForEvent(pThreadData->thread, pThreadData); } return status; } /** @helper_function="HlprThreadWaitForEvent" Purpose: Waits for a particular event to be set within the thread. 
<br> <br> Notes: <br> <br> MSDN_Ref: <br> */ _Success_(return == NO_ERROR) UINT32 HlprThreadWaitForEvent(_In_ HANDLE eventHandle, _In_ THREAD_DATA* pThreadData) { UINT32 status = NO_ERROR; const UINT32 RETRY_ATTEMPTS = 6; WCHAR* pEventName = 0; if(eventHandle && pThreadData) { if(eventHandle == pThreadData->threadContinueEvent) pEventName = L"Continue Event"; else if(eventHandle == pThreadData->threadStartEvent) pEventName = L"Start Event"; else if(eventHandle == pThreadData->threadStopEvent) pEventName = L"Stop Event"; else if(eventHandle == pThreadData->thread) pEventName = L"Thread"; /// Try for 30 seconds before bailing for(UINT32 i = 0; WaitForSingleObject(eventHandle, 5000) == WAIT_TIMEOUT; i++) { HlprLogInfo(L"HlprThreadWaitForEvent() Waiting for %s ...", pEventName); if(i == RETRY_ATTEMPTS - 1) { status = WAIT_TIMEOUT; HlprLogInfo(L"HlprThreadWaitForEvent() [status: %#x]"); break; } } } return status; }
token_count: 8,768
max_stars_count: 1,416
<gh_stars>1000+
from .aitextgen import aitextgen
from .TokenDataset import TokenDataset
from .tokenizers import train_tokenizer
import fire


def aitextgen_cli(**kwargs):
    """Entrypoint for the CLI"""
    fire.Fire(
        {
            "encode": encode_cli,
            "train": train_cli,
            "generate": generate_cli,
            "train_tokenizer": train_tokenizer_cli,
        }
    )


def encode_cli(file_path: str, **kwargs):
    """Encode + compress a dataset"""
    TokenDataset(file_path, save_cache=True, **kwargs)


def train_cli(file_path: str, **kwargs):
    """Train on a dataset."""
    ai = aitextgen(**kwargs)
    from_cache = file_path.endswith(".tar.gz")
    dataset = TokenDataset(file_path, from_cache=from_cache, **kwargs)
    ai.train(dataset, **kwargs)


def generate_cli(to_file: bool = True, **kwargs):
    """Generate from a trained model, or download one if not present."""
    ai = aitextgen(**kwargs)
    if to_file:
        ai.generate_to_file(**kwargs)
    else:
        ai.generate(**kwargs)


def train_tokenizer_cli(files: str, **kwargs):
    """Trains a tokenizer on the specified file."""
    train_tokenizer(files, **kwargs)
token_count: 505
max_stars_count: 3,227
namespace CGAL {
namespace Surface_mesh_simplification {

/*!
\ingroup PkgSurfaceMeshSimplificationRef

The class `Edge_length_cost` is a model for the `GetCost` concept,
which computes the collapse cost as the squared length of the edge.

\tparam TriangleMesh is the type of surface mesh being simplified,
and must be a model of the `MutableFaceGraph` and `HalfedgeListGraph` concepts.

\cgalModels `GetCost`
*/
template <typename TriangleMesh>
class Edge_length_cost
{
public:

  /// \name Creation
  /// @{

  /*!
  %Default constructor
  */
  Edge_length_cost();

  /// @}

  /// \name Operations
  /// @{

  /*!
  Returns the <I>collapse cost</I> as the squared distance between the points
  of the source and target vertices (that is, `profile.p0()` and `profile.p1()`).
  The argument `placement` is unused.
  */
  boost::optional<typename Edge_profile::FT>
  operator()(const Edge_profile& profile,
             const boost::optional<typename Edge_profile::Point>& placement) const;

  /// @}

};

} // namespace Surface_mesh_simplification
} // namespace CGAL
token_count: 385
max_stars_count: 1,003
/* Copyright © 2016 <NAME> <<EMAIL>> * This code is released under the license described in the LICENSE file */ #include <iostream> #include <unordered_map> #include <functional> #include <args.hxx> void Init(const std::string &progname, std::vector<std::string>::const_iterator beginargs, std::vector<std::string>::const_iterator endargs); void Add(const std::string &progname, std::vector<std::string>::const_iterator beginargs, std::vector<std::string>::const_iterator endargs); using commandtype = std::function<void(const std::string &, std::vector<std::string>::const_iterator, std::vector<std::string>::const_iterator)>; int main(int argc, char **argv) { std::unordered_map<std::string, commandtype> map{ {"init", Init}, {"add", Add}}; const std::vector<std::string> args(argv + 1, argv + argc); args::ArgumentParser parser("This is a git-like program", "Valid commands are init and add"); args::HelpFlag help(parser, "help", "Display this help menu", {'h', "help"}); parser.Prog(argv[0]); parser.ProglinePostfix("{command options}"); args::Flag version(parser, "version", "Show the version of this program", {"version"}); args::ValueFlag<std::string> htmlpath(parser, "html-path", "Specify the html path", {"html-path"}); args::MapPositional<std::string, commandtype> command(parser, "command", "Command to execute", map); command.KickOut(true); try { auto next = parser.ParseArgs(args); std::cout << std::boolalpha; std::cout << "Before command options:" << std::endl; std::cout << "Version called: " << bool{version} << std::endl; std::cout << "html-path called: " << bool{htmlpath} << ", value: " << args::get(htmlpath) << std::endl; if (command) { args::get(command)(argv[0], next, std::end(args)); } else { std::cout << parser; } } catch (args::Help) { std::cout << parser; return 0; } catch (args::Error e) { std::cerr << e.what() << std::endl; std::cerr << parser; return 1; } return 0; } void Init(const std::string &progname, std::vector<std::string>::const_iterator beginargs, std::vector<std::string>::const_iterator endargs) { std::cout << "In Init" << std::endl; args::ArgumentParser parser(""); parser.Prog(progname + " init"); args::HelpFlag help(parser, "help", "Display this help menu", {'h', "help"}); args::ValueFlag<std::string> templatedir(parser, "template-directory", "directory from which templates will be used", {"template"}); args::Flag bare(parser, "bare", "create a bare repository", {"bare"}); args::Flag quiet(parser, "quiet", "be quiet", {'q', "quiet"}); args::Positional<std::string> directory(parser, "directory", "The directory to create in", "."); try { parser.ParseArgs(beginargs, endargs); std::cout << std::boolalpha; std::cout << "templatedir: " << bool{templatedir} << ", value: " << args::get(templatedir) << std::endl; std::cout << "bare: " << bool{bare} << std::endl; std::cout << "quiet: " << bool{quiet} << std::endl; std::cout << "directory: " << bool{directory} << ", value: " << args::get(directory) << std::endl; } catch (args::Help) { std::cout << parser; return; } catch (args::ParseError e) { std::cerr << e.what() << std::endl; std::cerr << parser; return; } } void Add(const std::string &progname, std::vector<std::string>::const_iterator beginargs, std::vector<std::string>::const_iterator endargs) { std::cout << "In Add" << std::endl; args::ArgumentParser parser(""); parser.Prog(progname + " add"); args::HelpFlag help(parser, "help", "Display this help menu", {'h', "help"}); args::Flag dryrun(parser, "dryrun", "dry run", {'n', "dry-run"}); args::Flag verbose(parser, "verbose", "be 
verbose", {'v', "verbose"}); args::Flag refresh(parser, "refresh", "Don't add, only refresh the index", {"refresh"}); args::PositionalList<std::string> pathspec(parser, "pathspec", "pathspecs"); try { parser.ParseArgs(beginargs, endargs); std::cout << std::boolalpha; std::cout << "dryrun: " << bool{dryrun} << std::endl;; std::cout << "verbose: " << bool{verbose} << std::endl; std::cout << "refresh: " << bool{refresh} << std::endl; std::cout << "pathspec: " << bool{pathspec} << std::endl; if (pathspec) { std::cout << "values: " << std::endl; for (const auto &spec: args::get(pathspec)) { std::cout << " - " << spec << std::endl; } } } catch (args::Help) { std::cout << parser; return; } catch (args::ParseError e) { std::cerr << e.what() << std::endl; std::cerr << parser; return; } }
token_count: 2,078
max_stars_count: 984
<reponame>forksnd/win32metadata //*************************************************************************** // // Copyright (c) Microsoft Corporation. All rights reserved. // // CHSTRING.h // // Purpose: Utility library version of MFC CString // //*************************************************************************** #if _MSC_VER > 1000 #pragma once #endif #ifndef _CHSTRING_H #define _CHSTRING_H #include <winapifamily.h> #pragma region Desktop Family #if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) ///////////////////////////////////////////////////////////////////////////// #include <windows.h> #include <limits.h> #include <tchar.h> #include <polarity.h> #pragma warning( disable : 4290 ) // Ignore 'C++ Exception Specification ignored' #include <ProvExce.h> ///////////////////////////////////////////////////////////////////////////// struct _DOUBLE { BYTE doubleBits[sizeof(double)]; }; #ifdef FRAMEWORK_ALLOW_DEPRECATED void POLARITY WINAPI SetCHStringResourceHandle(HINSTANCE handle); #endif ///////////////////////////////////////////////////////////////////////////// // CHString formatting ///////////////////////////////////////////////////////////////////////////// #define TCHAR_ARG WCHAR #define WCHAR_ARG WCHAR #define CHAR_ARG char #if defined(_68K_) || defined(_X86_) #define DOUBLE_ARG _DOUBLE #else #define DOUBLE_ARG double #endif struct CHStringData { long nRefs; int nDataLength; int nAllocLength; WCHAR* data() { return (WCHAR*)(this+1); } }; ///////////////////////////////////////////////////////////////////////////// class POLARITY CHString { protected: LPWSTR m_pchData; // pointer to ref counted string data protected: // implementation helpers CHStringData* GetData() const; // returns data pointer void Init(); void AllocCopy(CHString& dest, int nCopyLen, int nCopyIndex, int nExtraLen) const throw ( CHeap_Exception ) ; void AllocBuffer(int nLen) throw ( CHeap_Exception ) ; void AssignCopy(int nSrcLen, LPCWSTR lpszSrcData) throw ( CHeap_Exception ) ; void ConcatCopy(int nSrc1Len, LPCWSTR lpszSrc1Data, int nSrc2Len, LPCWSTR lpszSrc2Data) throw ( CHeap_Exception ) ; void ConcatInPlace(int nSrcLen, LPCWSTR lpszSrcData); void CopyBeforeWrite() throw ( CHeap_Exception ) ; void AllocBeforeWrite(int nLen) throw ( CHeap_Exception ) ; static inline int WINAPI SafeStrlen(LPCWSTR lpsz) { return (lpsz == NULL) ? 0 : (int)wcslen(lpsz); } // Helper function used to load resource into lpszBuf buffer. 
#ifdef FRAMEWORK_ALLOW_DEPRECATED int LoadStringW(UINT nID, _In_reads_(nMaxBuf) LPWSTR lpszBuf, UINT nMaxBuf) throw ( CHeap_Exception ) ; #endif public: // Constructors/Destruction CHString(); CHString(const CHString& stringSrc); CHString(WCHAR ch, int nRepeat = 1) throw ( CHeap_Exception ) ; CHString(LPCSTR lpsz) throw ( CHeap_Exception ) ; CHString(LPCWSTR lpsz) throw ( CHeap_Exception ) ; CHString(LPCWSTR lpch, int nLength) throw ( CHeap_Exception ) ; inline CHString(const unsigned char* lpsz) { Init(); *this = (LPCSTR)lpsz; } ~CHString(); // Functions void SetAt(int nIndex, WCHAR ch) throw ( CHeap_Exception ) ; void Empty(); // inlines inline int GetLength() const { return GetData()->nDataLength; } inline BOOL IsEmpty() const { return GetData()->nDataLength == 0; } #if (!defined DEBUG && !defined _DEBUG) #ifdef _PREFAST_ #pragma prefast(push) #pragma prefast(disable:26018 ) #endif inline WCHAR GetAt(int nIndex) const{ return m_pchData[nIndex]; } inline WCHAR operator[](int nIndex) const{ return m_pchData[nIndex]; } #ifdef _PREFAST_ #pragma prefast(pop) #endif #else WCHAR GetAt(int nIndex) const; WCHAR operator[](int nIndex) const; #endif inline operator LPCWSTR() const { return m_pchData; } inline int GetAllocLength() const { return GetData()->nAllocLength; } // overloaded assignment const CHString& operator=(const CHString& stringSrc) throw ( CHeap_Exception ) ; const CHString& operator=(WCHAR ch) throw ( CHeap_Exception ) ; const CHString& operator=(LPCSTR lpsz) throw ( CHeap_Exception ) ; const CHString& operator=(LPCWSTR lpsz) throw ( CHeap_Exception ) ; inline const CHString& operator=(const unsigned char* lpsz) throw ( CHeap_Exception ) { *this = (LPCSTR)lpsz; return *this; } inline const CHString& operator=(CHString *p) throw ( CHeap_Exception ) { *this = *p; return *this; } inline const CHString& operator=(char ch) throw ( CHeap_Exception ) { *this = (WCHAR)ch; return *this; } inline const CHString& operator+=(char ch) throw ( CHeap_Exception ) { *this += (WCHAR)ch; return *this; } friend inline CHString operator+(const CHString& string, char ch) throw ( CHeap_Exception ) { return string + (WCHAR)ch; } friend inline CHString operator+(char ch, const CHString& string) throw ( CHeap_Exception ) { return (WCHAR)ch + string; } const CHString& operator+=(const CHString& string) throw ( CHeap_Exception ) ; const CHString& operator+=(WCHAR ch) throw ( CHeap_Exception ) ; const CHString& operator+=(LPCWSTR lpsz) throw ( CHeap_Exception ) ; friend CHString POLARITY WINAPI operator+(const CHString& string1, const CHString& string2) throw ( CHeap_Exception ) ; friend CHString POLARITY WINAPI operator+(const CHString& string, WCHAR ch) throw ( CHeap_Exception ) ; friend CHString POLARITY WINAPI operator+(WCHAR ch, const CHString& string) throw ( CHeap_Exception ) ; friend CHString POLARITY WINAPI operator+(const CHString& string, LPCWSTR lpsz) throw ( CHeap_Exception ) ; friend CHString POLARITY WINAPI operator+(LPCWSTR lpsz, const CHString& string) throw ( CHeap_Exception ) ; // string comparison int Compare(LPCWSTR lpsz) const; inline int CompareNoCase(LPCWSTR lpsz) const { // ignore case return _wcsicmp(m_pchData, lpsz); } // MBCS/Unicode aware inline int Collate(LPCWSTR lpsz) const { // NLS aware // CHString::Collate is often slower than Compare but is MBSC/Unicode // aware as well as locale-sensitive with respect to sort order. return wcscoll(m_pchData, lpsz); } // locale sensitive // Load string from resource file. 
#ifdef FRAMEWORK_ALLOW_DEPRECATED BOOL LoadStringW(UINT nID) throw ( CHeap_Exception ) ; #endif // Access to string implementation buffer as "C" character array LPWSTR GetBuffer(int nMinBufLength) throw ( CHeap_Exception ) ; void ReleaseBuffer(int nNewLength = -1) throw ( CHeap_Exception ) ; LPWSTR GetBufferSetLength(int nNewLength) throw ( CHeap_Exception ) ; void FreeExtra() throw ( CHeap_Exception ) ; // Use LockBuffer/UnlockBuffer to turn refcounting off LPWSTR LockBuffer() ; void UnlockBuffer(); // searching (return starting index, or -1 if not found) // look for a single character match int Find(WCHAR ch) const; // like "C" strchr int FindOneOf(LPCWSTR lpszCharSet) const; int ReverseFind(WCHAR ch) const; // look for a specific sub-string int Find(LPCWSTR lpszSub) const; // like "C" strstr // upper/lower/reverse conversion void MakeUpper() throw ( CHeap_Exception ) ; void MakeLower() throw ( CHeap_Exception ) ; void MakeReverse() throw ( CHeap_Exception ) ; // simple sub-string extraction CHString Mid(int nFirst, int nCount) const throw ( CHeap_Exception ) ; CHString Mid(int nFirst) const throw ( CHeap_Exception ) ; CHString Left(int nCount) const throw ( CHeap_Exception ) ; CHString Right(int nCount) const throw ( CHeap_Exception ) ; CHString SpanIncluding(LPCWSTR lpszCharSet) const throw ( CHeap_Exception ) ; CHString SpanExcluding(LPCWSTR lpszCharSet) const throw ( CHeap_Exception ) ; // trimming whitespace (either side) void TrimRight() throw ( CHeap_Exception ) ; void TrimLeft() throw ( CHeap_Exception ) ; // printf-like formatting using passed string void __cdecl Format(LPCWSTR lpszFormat, ...) throw ( CHeap_Exception ) ; void FormatV(LPCWSTR lpszFormat, va_list argList); // printf-like formatting using referenced string resource #ifdef FRAMEWORK_ALLOW_DEPRECATED void __cdecl Format(UINT nFormatID, ...) throw ( CHeap_Exception ) ; #endif // format using FormatMessage API on passed string // Warning: if you pass string inserts to this function, they must // be LPCSTRs on Win9x and LPCWSTRs on NT. void __cdecl FormatMessageW(LPCWSTR lpszFormat, ...) throw ( CHeap_Exception ) ; // format using FormatMessage API on referenced string resource // Warning: if you pass string inserts to this function, they must // be LPCSTRs on Win9x and LPCWSTRs on NT. #ifdef FRAMEWORK_ALLOW_DEPRECATED void __cdecl FormatMessageW(UINT nFormatID, ...) 
throw ( CHeap_Exception ) ; #endif #ifndef _NO_BSTR_SUPPORT // OLE BSTR support (use for OLE automation) BSTR AllocSysString() const throw ( CHeap_Exception ) ; static void WINAPI Release(CHStringData* pData); void Release(); #endif }; inline BOOL operator==(const CHString& s1, const CHString& s2) { return s1.Compare(s2) == 0; } inline BOOL operator==(const CHString& s1, LPCWSTR s2) { return s1.Compare(s2) == 0; } inline BOOL operator!=(const CHString& s1, const CHString& s2) { return s1.Compare(s2) != 0; } inline BOOL operator!=(const CHString& s1, LPCWSTR s2) { return s1.Compare(s2) != 0; } inline BOOL operator<(const CHString& s1, const CHString& s2) { return s1.Compare(s2) < 0; } inline BOOL operator<(const CHString& s1, LPCWSTR s2) { return s1.Compare(s2) < 0; } inline BOOL operator>(const CHString& s1, const CHString& s2) { return s1.Compare(s2) > 0; } inline BOOL operator>(const CHString& s1, LPCWSTR s2) { return s1.Compare(s2) > 0; } inline BOOL operator<=(const CHString& s1, const CHString& s2) { return s1.Compare(s2) <= 0; } inline BOOL operator<=(const CHString& s1, LPCWSTR s2) { return s1.Compare(s2) <= 0; } inline BOOL operator>=(const CHString& s1, const CHString& s2) { return s1.Compare(s2) >= 0; } inline BOOL operator>=(const CHString& s1, LPCWSTR s2) { return s1.Compare(s2) >= 0; } #endif /* WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP) */ #pragma endregion #endif
token_count: 4,196
max_stars_count: 47,880
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.truth.Truth.assertThat; import static java.lang.Long.MAX_VALUE; import static java.lang.Thread.currentThread; import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import com.google.common.base.Stopwatch; import java.util.Collection; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.PriorityBlockingQueue; import java.util.concurrent.SynchronousQueue; import java.util.concurrent.TimeUnit; import junit.framework.TestCase; /** * Tests for {@link Queues}. * * @author <NAME> */ public class QueuesTest extends TestCase { /* * All the following tests relate to BlockingQueue methods in Queues. */ public static List<BlockingQueue<Object>> blockingQueues() { return ImmutableList.<BlockingQueue<Object>>of( new LinkedBlockingQueue<Object>(), new LinkedBlockingQueue<Object>(10), new SynchronousQueue<Object>(), new ArrayBlockingQueue<Object>(10), new LinkedBlockingDeque<Object>(), new LinkedBlockingDeque<Object>(10), new PriorityBlockingQueue<Object>(10, Ordering.arbitrary())); } /* * We need to perform operations in a thread pool, even for simple cases, because the queue might * be a SynchronousQueue. */ private ExecutorService threadPool; @Override public void setUp() { threadPool = newCachedThreadPool(); } @Override public void tearDown() throws InterruptedException { threadPool.shutdown(); assertTrue("Some worker didn't finish in time", threadPool.awaitTermination(10, SECONDS)); } private static <T> int drain( BlockingQueue<T> q, Collection<? super T> buffer, int maxElements, long timeout, TimeUnit unit, boolean interruptibly) throws InterruptedException { return interruptibly ? 
Queues.drain(q, buffer, maxElements, timeout, unit) : Queues.drainUninterruptibly(q, buffer, maxElements, timeout, unit); } public void testMultipleProducers() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testMultipleProducers(q); } } private void testMultipleProducers(BlockingQueue<Object> q) throws InterruptedException { for (boolean interruptibly : new boolean[] {true, false}) { @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError = threadPool.submit(new Producer(q, 20)); @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError1 = threadPool.submit(new Producer(q, 20)); @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError2 = threadPool.submit(new Producer(q, 20)); @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError3 = threadPool.submit(new Producer(q, 20)); @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError4 = threadPool.submit(new Producer(q, 20)); List<Object> buf = newArrayList(); int elements = drain(q, buf, 100, MAX_VALUE, NANOSECONDS, interruptibly); assertEquals(100, elements); assertEquals(100, buf.size()); assertDrained(q); } } public void testDrainTimesOut() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testDrainTimesOut(q); } } private void testDrainTimesOut(BlockingQueue<Object> q) throws Exception { for (boolean interruptibly : new boolean[] {true, false}) { assertEquals(0, Queues.drain(q, ImmutableList.of(), 1, 10, MILLISECONDS)); Producer producer = new Producer(q, 1); // producing one, will ask for two Future<?> producerThread = threadPool.submit(producer); producer.beganProducing.await(); // make sure we time out Stopwatch timer = Stopwatch.createStarted(); int drained = drain(q, newArrayList(), 2, 10, MILLISECONDS, interruptibly); assertThat(drained).isAtMost(1); assertThat(timer.elapsed(MILLISECONDS)).isAtLeast(10L); // If even the first one wasn't there, clean up so that the next test doesn't see an element. 
producerThread.cancel(true); producer.doneProducing.await(); if (drained == 0) { q.poll(); // not necessarily there if producer was interrupted } } } public void testZeroElements() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testZeroElements(q); } } private void testZeroElements(BlockingQueue<Object> q) throws InterruptedException { for (boolean interruptibly : new boolean[] {true, false}) { // asking to drain zero elements assertEquals(0, drain(q, ImmutableList.of(), 0, 10, MILLISECONDS, interruptibly)); } } public void testEmpty() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testEmpty(q); } } private void testEmpty(BlockingQueue<Object> q) { assertDrained(q); } public void testNegativeMaxElements() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testNegativeMaxElements(q); } } private void testNegativeMaxElements(BlockingQueue<Object> q) throws InterruptedException { @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError = threadPool.submit(new Producer(q, 1)); List<Object> buf = newArrayList(); int elements = Queues.drain(q, buf, -1, MAX_VALUE, NANOSECONDS); assertEquals(0, elements); assertThat(buf).isEmpty(); // Free the producer thread, and give subsequent tests a clean slate. q.take(); } public void testDrain_throws() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testDrain_throws(q); } } private void testDrain_throws(BlockingQueue<Object> q) { @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError = threadPool.submit(new Interrupter(currentThread())); try { Queues.drain(q, ImmutableList.of(), 100, MAX_VALUE, NANOSECONDS); fail(); } catch (InterruptedException expected) { } } public void testDrainUninterruptibly_doesNotThrow() throws Exception { for (BlockingQueue<Object> q : blockingQueues()) { testDrainUninterruptibly_doesNotThrow(q); } } private void testDrainUninterruptibly_doesNotThrow(final BlockingQueue<Object> q) { final Thread mainThread = currentThread(); @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError = threadPool.submit( new Callable<Void>() { @Override public Void call() throws InterruptedException { new Producer(q, 50).call(); new Interrupter(mainThread).run(); new Producer(q, 50).call(); return null; } }); List<Object> buf = newArrayList(); int elements = Queues.drainUninterruptibly(q, buf, 100, MAX_VALUE, NANOSECONDS); // so when this drains all elements, we know the thread has also been interrupted in between assertTrue(Thread.interrupted()); assertEquals(100, elements); assertEquals(100, buf.size()); } public void testNewLinkedBlockingDequeCapacity() { try { Queues.newLinkedBlockingDeque(0); fail("Should have thrown IllegalArgumentException"); } catch (IllegalArgumentException expected) { // any capacity less than 1 should throw IllegalArgumentException } assertEquals(1, Queues.newLinkedBlockingDeque(1).remainingCapacity()); assertEquals(11, Queues.newLinkedBlockingDeque(11).remainingCapacity()); } public void testNewLinkedBlockingQueueCapacity() { try { Queues.newLinkedBlockingQueue(0); fail("Should have thrown IllegalArgumentException"); } catch (IllegalArgumentException expected) { // any capacity less than 1 should throw IllegalArgumentException } assertEquals(1, Queues.newLinkedBlockingQueue(1).remainingCapacity()); assertEquals(11, 
Queues.newLinkedBlockingQueue(11).remainingCapacity()); } /** Checks that #drain() invocations behave correctly for a drained (empty) queue. */ private void assertDrained(BlockingQueue<Object> q) { assertNull(q.peek()); assertInterruptibleDrained(q); assertUninterruptibleDrained(q); } private void assertInterruptibleDrained(BlockingQueue<Object> q) { // nothing to drain, thus this should wait doing nothing try { assertEquals(0, Queues.drain(q, ImmutableList.of(), 0, 10, MILLISECONDS)); } catch (InterruptedException e) { throw new AssertionError(); } // but does the wait actually occurs? @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError = threadPool.submit(new Interrupter(currentThread())); try { // if waiting works, this should get stuck Queues.drain(q, newArrayList(), 1, MAX_VALUE, NANOSECONDS); fail(); } catch (InterruptedException expected) { // we indeed waited; a slow thread had enough time to interrupt us } } // same as above; uninterruptible version private void assertUninterruptibleDrained(BlockingQueue<Object> q) { assertEquals(0, Queues.drainUninterruptibly(q, ImmutableList.of(), 0, 10, MILLISECONDS)); // but does the wait actually occurs? @SuppressWarnings("unused") // https://errorprone.info/bugpattern/FutureReturnValueIgnored Future<?> possiblyIgnoredError = threadPool.submit(new Interrupter(currentThread())); Stopwatch timer = Stopwatch.createStarted(); Queues.drainUninterruptibly(q, newArrayList(), 1, 10, MILLISECONDS); assertThat(timer.elapsed(MILLISECONDS)).isAtLeast(10L); // wait for interrupted status and clear it while (!Thread.interrupted()) { Thread.yield(); } } private static class Producer implements Callable<Void> { final BlockingQueue<Object> q; final int elements; final CountDownLatch beganProducing = new CountDownLatch(1); final CountDownLatch doneProducing = new CountDownLatch(1); Producer(BlockingQueue<Object> q, int elements) { this.q = q; this.elements = elements; } @Override public Void call() throws InterruptedException { try { beganProducing.countDown(); for (int i = 0; i < elements; i++) { q.put(new Object()); } return null; } finally { doneProducing.countDown(); } } } private static class Interrupter implements Runnable { final Thread threadToInterrupt; Interrupter(Thread threadToInterrupt) { this.threadToInterrupt = threadToInterrupt; } @Override public void run() { try { Thread.sleep(100); } catch (InterruptedException e) { throw new AssertionError(); } finally { threadToInterrupt.interrupt(); } } } }
token_count: 4,371
max_stars_count: 6,098
<gh_stars>1000+
import h2o


class FeatureInteraction:
    def _feature_interaction(self, max_interaction_depth=100, max_tree_depth=100,
                             max_deepening=-1, path=None):
        """
        Feature interactions and importance, leaf statistics and split value histograms
        in a tabular form. Available for XGBoost and GBM.

        Metrics:
        Gain - Total gain of each feature or feature interaction.
        FScore - Amount of possible splits taken on a feature or feature interaction.
        wFScore - Amount of possible splits taken on a feature or feature interaction
            weighed by the probability of the splits to take place.
        Average wFScore - wFScore divided by FScore.
        Average Gain - Gain divided by FScore.
        Expected Gain - Total gain of each feature or feature interaction weighed by
            the probability to gather the gain.
        Average Tree Index
        Average Tree Depth

        :param max_interaction_depth: Upper bound for extracted feature interactions depth.
            Defaults to 100.
        :param max_tree_depth: Upper bound for tree depth. Defaults to 100.
        :param max_deepening: Upper bound for interaction start deepening
            (zero deepening => interactions starting at root only). Defaults to -1.
        :param path: (Optional) Path where to save the output in .xlsx format
            (e.g. ``/mypath/file.xlsx``). Please note that Pandas and XlsxWriter need
            to be installed for using this option. Defaults to None.

        :examples:

        >>> boston = h2o.import_file("https://s3.amazonaws.com/h2o-public-test-data/smalldata/gbm_test/BostonHousing.csv")
        >>> predictors = boston.columns[:-1]
        >>> response = "medv"
        >>> boston['chas'] = boston['chas'].asfactor()
        >>> train, valid = boston.split_frame(ratios=[.8])
        >>> boston_xgb = H2OXGBoostEstimator(seed=1234)
        >>> boston_xgb.train(y=response, x=predictors, training_frame=train)
        >>> feature_interactions = boston_xgb.feature_interaction()
        """
        kwargs = {}
        kwargs["model_id"] = self.model_id
        kwargs["max_interaction_depth"] = max_interaction_depth
        kwargs["max_tree_depth"] = max_tree_depth
        kwargs["max_deepening"] = max_deepening

        json = h2o.api("POST /3/FeatureInteraction", data=kwargs)
        if path is not None:
            import pandas as pd
            writer = pd.ExcelWriter(path, engine='xlsxwriter')
            for fi in json['feature_interaction']:
                fi.as_data_frame().to_excel(writer, sheet_name=fi._table_header)
            writer.save()

        return json['feature_interaction']
token_count: 1,034
max_stars_count: 512
<filename>tests/spot/mining/test_mining_earnings_list.py<gh_stars>100-1000
import responses

from tests.util import random_str
from tests.util import mock_http_response
from binance.spot import Spot as Client
from binance.error import ParameterRequiredError

mock_item = {"key_1": "value_1", "key_2": "value_2"}

key = random_str()
secret = random_str()


def test_mining_earnings_list_without_algo():
    """Tests the API endpoint to get earnings list without algo"""

    client = Client(key, secret)
    client.mining_earnings_list.when.called_with("", "test_name").should.throw(
        ParameterRequiredError
    )


def test_mining_earnings_list_without_username():
    """Tests the API endpoint to get earnings list without username"""

    client = Client(key, secret)
    client.mining_earnings_list.when.called_with("sha256", "").should.throw(
        ParameterRequiredError
    )


@mock_http_response(
    responses.GET,
    "/sapi/v1/mining/payment/list\\?algo=sha256&userName=user_name",
    mock_item,
    200,
)
def test_mining_earnings_list():
    """Tests the API endpoint to get earnings list"""

    client = Client(key, secret)
    response = client.mining_earnings_list("sha256", "user_name")
    response.should.equal(mock_item)
token_count: 439
max_stars_count: 348
<reponame>chamberone/Leaflet.PixiOverlay<gh_stars>100-1000
{
  "nom": "Criquiers",
  "circ": "6ème circonscription",
  "dpt": "Seine-Maritime",
  "inscrits": 504,
  "abs": 267,
  "votants": 237,
  "blancs": 8,
  "nuls": 0,
  "exp": 229,
  "res": [
    {"nuance": "FN",  "nom": "<NAME>",     "voix": 95},
    {"nuance": "UDI", "nom": "Mme <NAME>", "voix": 43},
    {"nuance": "REM", "nom": "<NAME>",     "voix": 42},
    {"nuance": "SOC", "nom": "Mme <NAME>", "voix": 30},
    {"nuance": "COM", "nom": "M. <NAME>",  "voix": 12},
    {"nuance": "DIV", "nom": "<NAME>",     "voix": 3},
    {"nuance": "EXG", "nom": "M. <NAME>",  "voix": 2},
    {"nuance": "DVG", "nom": "M. <NAME>",  "voix": 2}
  ]
}
token_count: 229
max_stars_count: 3,027
<reponame>BGTCapital/hummingbot from decimal import Decimal from unittest import TestCase from hummingbot.connector.exchange.coinbase_pro.coinbase_pro_in_flight_order import CoinbaseProInFlightOrder from hummingbot.core.event.events import OrderType, TradeType class CoinbaseProInFlightOrderTests(TestCase): def setUp(self): super().setUp() self.base_token = "BTC" self.quote_token = "USDT" self.trading_pair = f"{self.base_token}-{self.quote_token}" def test_update_with_partial_trade_event(self): order = CoinbaseProInFlightOrder( client_order_id="OID1", exchange_order_id="EOID1", trading_pair=self.trading_pair, order_type=OrderType.LIMIT, trade_type=TradeType.BUY, price=Decimal(10000), amount=Decimal(1) ) trade_event_info = { "type": "match", "trade_id": 1, "sequence": 50, "maker_order_id": "EOID1", "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", "time": "2014-11-07T08:19:27.028459Z", "product_id": "BTC-USDT", "size": "0.1", "price": "10050.0", "side": "buy", "taker_user_id": "5844eceecf7e803e259d0365", "user_id": "5844eceecf7e803e259d0365", "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "taker_fee_rate": "0.005" } update_result = order.update_with_trade_update(trade_event_info) self.assertTrue(update_result) self.assertFalse(order.is_done) self.assertEqual("open", order.last_state) self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base) expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal(str(trade_event_info["price"])) self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) self.assertEqual(Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount, order.fee_paid) self.assertEqual(order.quote_asset, order.fee_asset) def test_update_with_full_fill_trade_event(self): order = CoinbaseProInFlightOrder( client_order_id="OID1", exchange_order_id="EOID1", trading_pair=self.trading_pair, order_type=OrderType.LIMIT, trade_type=TradeType.BUY, price=Decimal(10000), amount=Decimal(1) ) trade_event_info = { "type": "match", "trade_id": 1, "sequence": 50, "maker_order_id": "EOID1", "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", "time": "2014-11-07T08:19:27.028459Z", "product_id": "BTC-USDT", "size": "0.1", "price": "10050.0", "side": "buy", "taker_user_id": "5844eceecf7e803e259d0365", "user_id": "<PASSWORD>", "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "taker_fee_rate": "0.005" } update_result = order.update_with_trade_update(trade_event_info) self.assertTrue(update_result) self.assertFalse(order.is_done) self.assertEqual("open", order.last_state) self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base) expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal( str(trade_event_info["price"])) self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) expected_partial_event_fee = (Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount) self.assertEqual(expected_partial_event_fee, order.fee_paid) complete_event_info = { "type": "match", "trade_id": 2, "sequence": 50, "maker_order_id": "EOID1", "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", "time": "2014-11-07T08:19:27.028459Z", "product_id": "BTC-USDT", "size": "0.9", "price": "10050.0", "side": "buy", "taker_user_id": "<PASSWORD>365", "user_id": "5844eceecf7e803e259d0365", "taker_profile_id": 
"765d1549-9660-4be2-97d4-fa2d65fa3352", "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "taker_fee_rate": "0.001" } update_result = order.update_with_trade_update(complete_event_info) self.assertTrue(update_result) # orders are marked as done with the done event self.assertFalse(order.is_done) self.assertEqual("open", order.last_state) self.assertEqual(order.amount, order.executed_amount_base) expected_executed_quote_amount += Decimal(str(complete_event_info["size"])) * Decimal( str(complete_event_info["price"])) self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) expected_complete_event_fee = (Decimal(complete_event_info["taker_fee_rate"]) * Decimal(str(complete_event_info["size"])) * Decimal(str(complete_event_info["price"]))) self.assertEqual(expected_partial_event_fee + expected_complete_event_fee, order.fee_paid) def test_update_with_repeated_trade_id_is_ignored(self): order = CoinbaseProInFlightOrder( client_order_id="OID1", exchange_order_id="EOID1", trading_pair=self.trading_pair, order_type=OrderType.LIMIT, trade_type=TradeType.BUY, price=Decimal(10000), amount=Decimal(1) ) trade_event_info = { "type": "match", "trade_id": 1, "sequence": 50, "maker_order_id": "EOID1", "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", "time": "2014-11-07T08:19:27.028459Z", "product_id": "BTC-USDT", "size": "0.1", "price": "10050.0", "side": "buy", "taker_user_id": "<PASSWORD>", "user_id": "<PASSWORD>", "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "taker_fee_rate": "0.005" } update_result = order.update_with_trade_update(trade_event_info) self.assertTrue(update_result) complete_event_info = { "type": "match", "trade_id": 1, "sequence": 50, "maker_order_id": "EOID1", "taker_order_id": "132fb6ae-456b-4654-b4e0-d681ac05cea1", "time": "2014-11-07T08:19:27.028459Z", "product_id": "BTC-USDT", "size": "0.9", "price": "10050.0", "side": "buy", "taker_user_id": "<PASSWORD>", "user_id": "5<PASSWORD>d0365", "taker_profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "profile_id": "765d1549-9660-4be2-97d4-fa2d65fa3352", "taker_fee_rate": "0.001" } update_result = order.update_with_trade_update(complete_event_info) self.assertFalse(update_result) self.assertFalse(order.is_done) self.assertEqual("open", order.last_state) self.assertEqual(Decimal(str(trade_event_info["size"])), order.executed_amount_base) expected_executed_quote_amount = Decimal(str(trade_event_info["size"])) * Decimal( str(trade_event_info["price"])) self.assertEqual(expected_executed_quote_amount, order.executed_amount_quote) self.assertEqual(Decimal(trade_event_info["taker_fee_rate"]) * expected_executed_quote_amount, order.fee_paid)
token_count: 4,206
max_stars_count: 12,278
<filename>3rdParty/boost/1.71.0/libs/geometry/test/algorithms/area/area.cpp // Boost.Geometry (aka GGL, Generic Geometry Library) // Unit Test // Copyright (c) 2007-2012 <NAME>, Amsterdam, the Netherlands. // Copyright (c) 2008-2012 <NAME>, Paris, France. // Copyright (c) 2009-2012 <NAME>, London, UK. // This file was modified by Oracle on 2015, 2016, 2017. // Modifications copyright (c) 2015-2017, Oracle and/or its affiliates. // Contributed and/or modified by <NAME>, on behalf of Oracle // Contributed and/or modified by <NAME>, on behalf of Oracle // Parts of Boost.Geometry are redesigned from Geodan's Geographic Library // (geolib/GGL), copyright (c) 1995-2010 Geodan, Amsterdam, the Netherlands. // Use, modification and distribution is subject to the Boost Software License, // Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #include <algorithms/area/test_area.hpp> #include <boost/geometry/geometries/point_xy.hpp> #include <boost/geometry/geometries/point.hpp> #include <boost/geometry/geometries/box.hpp> #include <boost/geometry/geometries/ring.hpp> #include <boost/geometry/geometries/polygon.hpp> #include <test_geometries/all_custom_ring.hpp> #include <test_geometries/all_custom_polygon.hpp> //#define BOOST_GEOMETRY_TEST_DEBUG #include <boost/variant/variant.hpp> template <typename Polygon> void test_polygon() { // Rotated square, length=sqrt(2) -> area=2 test_geometry<Polygon>("POLYGON((1 1,2 2,3 1,2 0,1 1))", 2.0); test_geometry<Polygon>("POLYGON((1 1,2 2,3 1,2 0,1 1))", 2.0); test_geometry<Polygon>("POLYGON((0 0,0 7,4 2,2 0,0 0))", 16.0); test_geometry<Polygon>("POLYGON((1 1,2 1,2 2,1 2,1 1))", -1.0); test_geometry<Polygon>("POLYGON((0 0,0 7,4 2,2 0,0 0), (1 1,2 1,2 2,1 2,1 1))", 15.0); } template <typename P> void test_all() { test_geometry<bg::model::box<P> >("POLYGON((0 0,2 2))", 4.0); test_geometry<bg::model::box<P> >("POLYGON((2 2,0 0))", 4.0); test_polygon<bg::model::polygon<P> >(); test_polygon<all_custom_polygon<P> >(); // clockwise rings (second is wrongly ordered) test_geometry<bg::model::ring<P> >("POLYGON((0 0,0 7,4 2,2 0,0 0))", 16.0); test_geometry<bg::model::ring<P> >("POLYGON((0 0,2 0,4 2,0 7,0 0))", -16.0); test_geometry<all_custom_ring<P> >("POLYGON((0 0,0 7,4 2,2 0,0 0))", 16.0); // ccw test_geometry<bg::model::polygon<P, false> > ("POLYGON((0 0,0 7,4 2,2 0,0 0), (1 1,2 1,2 2,1 2,1 1))", -15.0); test_geometry<bg::model::polygon<P, false> > ("POLYGON((1 0,0 1,-1 0,0 -1,1 0))", 2); typedef typename bg::coordinate_type<P>::type coord_type; if (BOOST_GEOMETRY_CONDITION((boost::is_same<coord_type, double>::value))) { test_geometry<bg::model::polygon<P, false, false> > ("POLYGON((100000001 100000000, 100000000 100000001, \ 99999999 100000000, 100000000 99999999))", 2); } else if (BOOST_GEOMETRY_CONDITION((boost::is_same<coord_type, float>::value))) { test_geometry<bg::model::polygon<P, false, false> > ("POLYGON((100001 100000, 100000 100001, \ 99999 100000, 100000 99999))", 2); } } template <typename P> void test_ccw() { typedef typename bg::coordinate_type<P>::type ct; bg::model::polygon<P, false> ccw_polygon; // counterclockwise rings (second is wrongly ordered) std::string poly1 = "POLYGON((1 1,2 2,3 1,2 0,1 1))"; std::string poly2 = "POLYGON((1 1,2 0,3 1,2 2,1 1))"; std::string poly3 = "POLYGON((0 0,0 7,4 2,2 0,0 0))"; std::string poly4 = "POLYGON((0 0,2 0,4 2,0 7,0 0))"; bg::read_wkt(poly1, ccw_polygon); ct area1 = bg::area(ccw_polygon); bg::read_wkt(poly2, ccw_polygon); ct area2 = bg::area(ccw_polygon); 
bg::read_wkt(poly3, ccw_polygon); ct area3 = bg::area(ccw_polygon); bg::read_wkt(poly4, ccw_polygon); ct area4 = bg::area(ccw_polygon); BOOST_CHECK_CLOSE(area1, -1 * area2, 0.001); BOOST_CHECK_CLOSE(area3, -1 * area4, 0.001); } template <typename P, typename CT> void test_open(CT expected_area) { typedef bg::model::polygon<P, true, false> open_polygon; test_geometry<open_polygon>("POLYGON((1 1,2 2,3 1,2 0))", expected_area); // Note the triangular testcase used in CCW is not sensible for open/close } template <typename P, typename CT> void test_open_ccw(CT expected_area) { typedef bg::model::polygon<P, false, false> open_polygon; test_geometry<open_polygon>("POLYGON((1 1,2 0,3 1,2 2))", expected_area); // Note the triangular testcase used in CCW is not sensible for open/close } template <typename P> void test_poles_ccw() { typedef typename bg::coordinate_type<P>::type ct; bg::model::polygon<P, false> polygon; std::string poly1 = "POLYGON((45 45,45 95,95 45,45 45))"; std::string poly2 = "POLYGON((45 45,95 45,45 95,45 45))"; std::string poly3 = "POLYGON((45 -45,45 -95,95 -45,45 -45))"; std::string poly4 = "POLYGON((45 -45,95 -45,45 -95,45 -45))"; bg::read_wkt(poly1, polygon); ct area1 = bg::area(polygon); bg::read_wkt(poly2, polygon); ct area2 = bg::area(polygon); bg::read_wkt(poly3, polygon); ct area3 = bg::area(polygon); bg::read_wkt(poly4, polygon); ct area4 = bg::area(polygon); BOOST_CHECK_CLOSE(area1, -1 * area2, 0.001); BOOST_CHECK_CLOSE(area3, -1 * area4, 0.001); } template <typename P> void test_empty_input() { bg::model::polygon<P> poly_empty; bg::model::ring<P> ring_empty; test_empty_input(poly_empty); test_empty_input(ring_empty); } void test_large_integers() { typedef bg::model::point<int, 2, bg::cs::cartesian> int_point_type; typedef bg::model::point<double, 2, bg::cs::cartesian> double_point_type; bg::model::polygon<int_point_type> int_poly; bg::model::polygon<double_point_type> double_poly; std::string const polygon_li = "POLYGON((1872000 528000,1872000 192000,\ 1536119 192000,1536000 528000,1200000 528000,\ 1200000 863880,1536000 863880,1872000 863880,\ 1872000 528000))"; bg::read_wkt(polygon_li, int_poly); bg::read_wkt(polygon_li, double_poly); double int_area = bg::area(int_poly); double double_area = bg::area(double_poly); BOOST_CHECK_CLOSE(int_area, double_area, 0.0001); } void test_variant() { typedef bg::model::point<double, 2, bg::cs::cartesian> double_point_type; typedef bg::model::polygon<double_point_type> polygon_type; typedef bg::model::box<double_point_type> box_type; polygon_type poly; std::string const polygon_li = "POLYGON((18 5,18 1,15 1,15 5,12 5,12 8,15 8,18 8,18 5))"; bg::read_wkt(polygon_li, poly); box_type box; std::string const box_li = "BOX(0 0,2 2)"; bg::read_wkt(box_li, box); boost::variant<polygon_type, box_type> v; v = poly; BOOST_CHECK_CLOSE(bg::area(v), bg::area(poly), 0.0001); v = box; BOOST_CHECK_CLOSE(bg::area(v), bg::area(box), 0.0001); } int test_main(int, char* []) { test_all<bg::model::point<int, 2, bg::cs::cartesian> >(); test_all<bg::model::point<float, 2, bg::cs::cartesian> >(); test_all<bg::model::point<double, 2, bg::cs::cartesian> >(); typedef bg::model::point<double, 2, bg::cs::cartesian> pt_crt; typedef bg::model::point<double, 2, bg::cs::spherical_equatorial<bg::degree> > pt_sph; typedef bg::model::point<double, 2, bg::cs::geographic<bg::degree> > pt_geo; // mean Earth's radius^2 double r2 = bg::math::sqr(bg::get_radius<0>(bg::srs::sphere<double>())); test_ccw<pt_crt>(); test_ccw<pt_sph>(); test_ccw<pt_geo>(); 
test_open<pt_crt>(2.0); test_open<pt_sph>(24726179921.523518 / r2); test_open<pt_geo >(24615492936.977146); test_open_ccw<pt_crt>(2.0); test_open_ccw<pt_sph>(24726179921.523518 / r2); test_open_ccw<pt_geo >(24615492936.977146); test_poles_ccw<pt_crt>(); test_poles_ccw<pt_sph>(); test_poles_ccw<pt_geo >(); #ifdef HAVE_TTMATH test_all<bg::model::d2::point_xy<ttmath_big> >(); test_spherical_geo<ttmath_big>(); #endif test_large_integers(); test_variant(); // test_empty_input<bg::model::d2::point_xy<int> >(); return 0; }
token_count: 3,848
max_stars_count: 10,192
/*
 * Copyright (C) 2009-2021 Lightbend Inc. <https://www.lightbend.com>
 */

package jdocs.stream.operators.sourceorflow;

import akka.NotUsed;
import akka.actor.typed.ActorSystem;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;

import java.util.List;
import java.util.concurrent.CompletionStage;

public class Limit {
  public void simple() {
    ActorSystem<?> system = null;
    // #simple
    Source<String, NotUsed> untrustedSource = Source.repeat("element");

    CompletionStage<List<String>> elements =
        untrustedSource.limit(10000).runWith(Sink.seq(), system);
    // #simple
  }
}
token_count: 225
max_stars_count: 343
from mayan.apps.testing.tests.base import GenericViewTestCase from ..events import event_workflow_template_edited from ..permissions import ( permission_workflow_template_edit, permission_workflow_template_view ) from .mixins.workflow_template_mixins import WorkflowTemplateTestMixin from .mixins.workflow_template_transition_mixins import ( WorkflowTransitionFieldTestMixin, WorkflowTransitionFieldViewTestMixin, WorkflowTemplateTransitionViewTestMixin ) class WorkflowTransitionFieldViewTestCase( WorkflowTemplateTestMixin, WorkflowTransitionFieldTestMixin, WorkflowTransitionFieldViewTestMixin, WorkflowTemplateTransitionViewTestMixin, GenericViewTestCase ): def setUp(self): super().setUp() self._create_test_workflow_template() self._create_test_workflow_template_state() self._create_test_workflow_template_state() self._create_test_workflow_template_transition() def test_workflow_template_transition_field_create_view_no_permission(self): workflow_template_transition_field_count = self._test_workflow_template_transition.fields.count() self._clear_events() response = self._request_workflow_template_transition_field_create_view() self.assertEqual(response.status_code, 404) self.assertEqual( self._test_workflow_template_transition.fields.count(), workflow_template_transition_field_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_template_transition_field_create_view_with_access(self): workflow_template_transition_field_count = self._test_workflow_template_transition.fields.count() self.grant_access( obj=self._test_workflow_template, permission=permission_workflow_template_edit ) self._clear_events() response = self._request_workflow_template_transition_field_create_view() self.assertEqual(response.status_code, 302) self.assertEqual( self._test_workflow_template_transition.fields.count(), workflow_template_transition_field_count + 1 ) events = self._get_test_events() self.assertEqual(events.count(), 1) self.assertEqual( events[0].action_object, self._test_workflow_template_transition_field ) self.assertEqual(events[0].actor, self._test_case_user) self.assertEqual(events[0].target, self._test_workflow_template) self.assertEqual(events[0].verb, event_workflow_template_edited.id) def test_workflow_template_transition_field_delete_view_no_permission(self): self._create_test_workflow_template_transition_field() workflow_template_transition_field_count = self._test_workflow_template_transition.fields.count() self._clear_events() response = self._request_workflow_template_transition_field_delete_view() self.assertEqual(response.status_code, 404) self.assertEqual( self._test_workflow_template_transition.fields.count(), workflow_template_transition_field_count ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_template_transition_field_delete_view_with_access(self): self._create_test_workflow_template_transition_field() workflow_template_transition_field_count = self._test_workflow_template_transition.fields.count() self.grant_access( obj=self._test_workflow_template, permission=permission_workflow_template_edit ) self._clear_events() response = self._request_workflow_template_transition_field_delete_view() self.assertEqual(response.status_code, 302) self.assertEqual( self._test_workflow_template_transition.fields.count(), workflow_template_transition_field_count - 1 ) events = self._get_test_events() self.assertEqual(events.count(), 1) self.assertEqual(events[0].action_object, None) self.assertEqual(events[0].actor, 
self._test_case_user) self.assertEqual(events[0].target, self._test_workflow_template) self.assertEqual(events[0].verb, event_workflow_template_edited.id) def test_workflow_template_transition_field_edit_view_no_permission(self): self._create_test_workflow_template_transition_field() workflow_template_transition_field_label = self._test_workflow_template_transition_field.label self._clear_events() response = self._request_workflow_template_transition_field_edit_view() self.assertEqual(response.status_code, 404) self._test_workflow_template_transition_field.refresh_from_db() self.assertEqual( workflow_template_transition_field_label, self._test_workflow_template_transition_field.label ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_template_transition_field_edit_view_with_access(self): self._create_test_workflow_template_transition_field() workflow_template_transition_field_label = self._test_workflow_template_transition_field.label self.grant_access( obj=self._test_workflow_template, permission=permission_workflow_template_edit ) self._clear_events() response = self._request_workflow_template_transition_field_edit_view() self.assertEqual(response.status_code, 302) self._test_workflow_template_transition_field.refresh_from_db() self.assertNotEqual( workflow_template_transition_field_label, self._test_workflow_template_transition_field.label ) events = self._get_test_events() self.assertEqual(events.count(), 1) self.assertEqual( events[0].action_object, self._test_workflow_template_transition_field ) self.assertEqual(events[0].actor, self._test_case_user) self.assertEqual(events[0].target, self._test_workflow_template) self.assertEqual(events[0].verb, event_workflow_template_edited.id) def test_workflow_template_transition_field_list_view_no_permission(self): self._create_test_workflow_template_transition_field() self._clear_events() response = self._request_test_workflow_template_transition_field_list_view() self.assertNotContains( response=response, text=self._test_workflow_template_transition_field.label, status_code=404 ) events = self._get_test_events() self.assertEqual(events.count(), 0) def test_workflow_template_transition_field_list_view_with_access(self): self._create_test_workflow_template_transition_field() self.grant_access( obj=self._test_workflow_template, permission=permission_workflow_template_view ) self._clear_events() response = self._request_test_workflow_template_transition_field_list_view() self.assertContains( response=response, text=self._test_workflow_template_transition_field.label, status_code=200 ) events = self._get_test_events() self.assertEqual(events.count(), 0)
3,036
14,668
<reponame>zealoussnow/chromium
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "components/media_router/common/media_sink.h"

#include "components/media_router/common/mojom/media_route_provider_id.mojom.h"
#include "testing/gmock/include/gmock/gmock.h"

namespace media_router {

TEST(MediaSinkTest, TestEquals) {
  MediaSink sink1("sinkId", "Sink", SinkIconType::CAST,
                  mojom::MediaRouteProviderId::CAST);
  MediaSink sink1_copy(sink1);
  EXPECT_EQ(sink1, sink1_copy);

  // No name.
  MediaSink sink2("sinkId", "", SinkIconType::CAST,
                  mojom::MediaRouteProviderId::CAST);
  EXPECT_FALSE(sink1 == sink2);

  // Sink name is different from sink1's.
  MediaSink sink3("sinkId", "Other Sink", SinkIconType::CAST,
                  mojom::MediaRouteProviderId::CAST);
  EXPECT_FALSE(sink1 == sink3);

  // Sink ID is different from sink1's.
  MediaSink sink4("otherSinkId", "Sink", SinkIconType::CAST,
                  mojom::MediaRouteProviderId::CAST);
  EXPECT_FALSE(sink1 == sink4);

  // Sink icon type is different from sink1's.
  MediaSink sink5("otherSinkId", "Sink", SinkIconType::GENERIC,
                  mojom::MediaRouteProviderId::CAST);
  EXPECT_FALSE(sink1 == sink5);
}

}  // namespace media_router
567
1,104
package com.cg.baseproject.utils.android;

import android.Manifest;
import android.annotation.TargetApi;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.os.Build;
import android.view.View;

import com.cg.baseproject.utils.FileUtils;

import java.io.File;

import github.nisrulz.screenshott.ScreenShott;

/**
 * @author
 * @version 1.0
 * @date 2018/5/31
 */
public class ScreenShotUtils {

    @TargetApi(Build.VERSION_CODES.M)
    public static void easyPermissionCheck(Activity activity) {
        String[] perms = {
                // Put the permissions you want to request here; separate them with commas.
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.READ_EXTERNAL_STORAGE,
        };
        boolean flag = EasyPermissionUtils.checkPermission(activity, perms);
        if (!flag) {
            ToastUtils.showShort("Required permissions not granted, please go to the login page to authorize again");
            // Constants.isLogin = false;
            // activity.startActivity(new Intent(activity, LoginActivity.class));
        }
    }

    private static Bitmap loadBitmapFromView(View v) {
        int w = v.getWidth();
        int h = v.getHeight();
        Bitmap bmp = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
        Canvas c = new Canvas(bmp);
        // c.drawColor(Color.WHITE); /** If the canvas is not filled with white, the bitmap background stays transparent. */
        v.layout(0, 0, w, h);
        v.draw(c);
        return bmp;
    }

    public static String getViewBitmapPath(Activity activity, View view, String dirName, String fileName) {
        String bitmapFilePath = null;
        File file = null;
        try {
            file = FileUtils.saveBitmapFile(activity, dirName, fileName, loadBitmapFromView(view));
            bitmapFilePath = file.getAbsolutePath();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return bitmapFilePath;
    }

    public static String getViewBitmapPath(Activity activity, View view) {
        String bitmapFilePath = null;
        File file = null;
        try {
            file = ScreenShott.getInstance().saveScreenshotToPicturesFolder(activity,
                    loadBitmapFromView(view), "my_screenshot_filename");
            bitmapFilePath = file.getAbsolutePath();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return bitmapFilePath;
    }
}
1,144
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.gsf.testrunner.ui.annotation; import java.awt.Point; import java.awt.event.ActionEvent; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.SwingUtilities; import javax.swing.text.BadLocationException; import javax.swing.text.Document; import javax.swing.text.JTextComponent; import javax.swing.text.StyledDocument; import org.netbeans.editor.AnnotationDesc; import org.netbeans.editor.BaseDocument; import org.netbeans.editor.ImplementationProvider; import org.netbeans.editor.JumpList; import org.netbeans.modules.gsf.testrunner.api.CommonUtils; import org.netbeans.modules.gsf.testrunner.ui.annotation.SelectActionPopup.ActionDescription; import org.netbeans.modules.gsf.testrunner.ui.api.TestMethodController.TestMethod; import org.netbeans.spi.project.ActionProvider; import org.netbeans.spi.project.SingleMethod; import org.openide.awt.ActionID; import org.openide.awt.ActionReference; import org.openide.awt.ActionRegistration; import org.openide.awt.StatusDisplayer; import org.openide.text.CloneableEditorSupport; import org.openide.text.NbDocument; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.openide.util.lookup.Lookups; import org.openide.windows.TopComponent; @ActionID(id = "org.netbeans.modules.gsf.testrunner.ui.RunDebugTestGutterAction", category = "CommonTestRunner") @ActionRegistration(displayName = "#NM_RunGutterAction", lazy = false) @ActionReference(path = "Editors/GlyphGutterActions", position = 190) @Messages("NM_RunGutterAction=Run") public class RunDebugTestGutterAction extends AbstractAction { private static final Logger LOG = Logger.getLogger(RunDebugTestGutterAction.class.getName()); private static final Set<String> fixableAnnotations = new HashSet<>(); static { fixableAnnotations.add("org-netbeans-modules-gsf-testrunner-runnable-test-annotation"); // NOI18N } public RunDebugTestGutterAction() { putValue(NAME, Bundle.NM_RunGutterAction()); } @Override public Object getValue(String key) { if ("supported-annotation-types".equals(key)) {//NOI18N return fixableAnnotations.toArray(new String[0]); } return super.getValue(key); } @Messages({ "ERR_NoTestMethod=No Test Method", "# {0} - method name", "DN_run.single.method=Run {0} method", "# {0} - method name", "DN_debug.single.method=Debug {0} method", "CAP_SelectAction=Select Action", }) public void actionPerformed(ActionEvent e) { Object source = e.getSource(); if (!(source instanceof JTextComponent)) { 
StatusDisplayer.getDefault().setStatusText(Bundle.ERR_NoTestMethod()); return ; //probably right click menu } JTextComponent comp = (JTextComponent) source; Document doc = comp.getDocument(); int caretPos = comp.getCaretPosition(); int line = NbDocument.findLineNumber((StyledDocument) doc, caretPos); AnnotationDesc activeAnnotation = ((BaseDocument) doc).getAnnotations().getActiveAnnotation(line); if (activeAnnotation != null && fixableAnnotations.contains(activeAnnotation.getAnnotationType())) { Map<Integer, TestMethod> annotationLines = (Map<Integer, TestMethod>) doc.getProperty(TestMethodAnnotation.DOCUMENT_ANNOTATION_LINES_KEY); TestMethod testMethod = annotationLines.get(line); if (testMethod != null) { SingleMethod singleMethod = testMethod.method(); List<ActionDescription> actions = new ArrayList<>(); ActionProvider ap = CommonUtils.getInstance().getActionProvider(singleMethod.getFile()); if (ap != null) { for (String command : new String[] {SingleMethod.COMMAND_RUN_SINGLE_METHOD, SingleMethod.COMMAND_DEBUG_SINGLE_METHOD}) { String displayName = NbBundle.getMessage(RunDebugTestGutterAction.class, "DN_" + command, singleMethod.getMethodName()); if (Arrays.asList(ap.getSupportedActions()).contains(command) && ap.isActionEnabled(command, Lookups.singleton(singleMethod))) { actions.add(new ActionDescription(displayName, () -> ap.invokeAction(command, Lookups.singleton(singleMethod)))); } } } if (actions.size() > 1) { final Point[] p = new Point[1]; doc.render(new Runnable() { public void run() { try { int startOffset = NbDocument.findLineOffset((StyledDocument) doc, line); p[0] = comp.modelToView(startOffset).getLocation(); } catch (BadLocationException ex) { LOG.log(Level.WARNING, null, ex); } } }); JumpList.checkAddEntry(comp, caretPos); SwingUtilities.convertPointToScreen(p[0], comp); PopupUtil.showPopup(new SelectActionPopup(Bundle.CAP_SelectAction(), actions), Bundle.CAP_SelectAction(), p[0].x, p[0].y, true, 0); } else if (actions.size() == 1) { actions.get(0).action.run(); } return ; } } Action actions[] = ImplementationProvider.getDefault().getGlyphGutterActions((JTextComponent) source); if (actions == null) return ; int nextAction = 0; while (nextAction < actions.length && actions[nextAction] != this) nextAction++; nextAction++; if (actions.length > nextAction) { Action a = actions[nextAction]; if (a != null && a.isEnabled()){ a.actionPerformed(e); } } } @Override public boolean isEnabled() { TopComponent activetc = TopComponent.getRegistry().getActivated(); if (activetc instanceof CloneableEditorSupport.Pane) { return true; } return false; } }
3,052
15,179
"""Argparser module for Flow""" import argparse from .base import set_base_parser from .helper import add_arg_group, KVAppendAction, _SHOW_ALL_ARGS from ..enums import InfrastructureType def mixin_flow_features_parser(parser): """Add the arguments for the Flow features to the parser :param parser: the parser configure """ from ..enums import FlowInspectType gp = add_arg_group(parser, title='Flow Feature') gp.add_argument('--uses', type=str, help='The YAML file represents a flow') gp.add_argument( '--env', action=KVAppendAction, metavar='KEY: VALUE', nargs='*', help='The map of environment variables that are available inside runtime', ) gp.add_argument( '--inspect', type=FlowInspectType.from_string, choices=list(FlowInspectType), default=FlowInspectType.COLLECT, help=''' The strategy on those inspect pods in the flow. If `REMOVE` is given then all inspect pods are removed when building the flow. ''', ) def mixin_k8s_parser(parser): """Add the arguments for the Kubernetes features to the parser :param parser: the parser configure """ gp = add_arg_group(parser, title='Kubernetes Feature') gp.add_argument( '--infrastructure', type=InfrastructureType.from_string, choices=list(InfrastructureType), default=InfrastructureType.JINA, help='Infrastructure where the Flow runs on. Currently, `local` and `k8s` are supported' if _SHOW_ALL_ARGS else argparse.SUPPRESS, ) def set_flow_parser(parser=None, with_identity=False): """Set the parser for the flow :param parser: an (optional) initial parser to build upon :param with_identity: if to include identity in the parser :return: the parser """ from .peapods.base import mixin_base_ppr_parser if not parser: parser = set_base_parser() mixin_base_ppr_parser(parser, with_identity=with_identity) parser.set_defaults(workspace='./') mixin_flow_features_parser(parser) mixin_k8s_parser(parser) return parser
810
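A minimal usage sketch for set_flow_parser from the argparser module above, assuming the surrounding package and its relative imports are installed; the import path shown is hypothetical.

from jina.parsers.flow import set_flow_parser  # hypothetical import path

parser = set_flow_parser()
args = parser.parse_args(['--uses', 'flow.yml'])
print(args.uses)     # 'flow.yml'
print(args.inspect)  # FlowInspectType.COLLECT by default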
625
<filename>util/jupyter/seq_kernel/redirector.py """ Stdout and stderr redirector. Based on https://eli.thegreenplace.net/2015/redirecting-all-kinds-of-stdout-in-python/ """ from contextlib import contextmanager import ctypes import io import os import sys import tempfile libc = ctypes.CDLL(None) if sys.platform == "darwin": c_stdout = ctypes.c_void_p.in_dll(libc, '__stdoutp') c_stderr = ctypes.c_void_p.in_dll(libc, '__stderrp') else: c_stdout = ctypes.c_void_p.in_dll(libc, 'stdout') c_stderr = ctypes.c_void_p.in_dll(libc, 'stderr') @contextmanager def stdout_stderr_redirector(out_stream, err_stream): # The original fd stdout points to. Usually 1 on POSIX systems. original_stdout_fd = sys.__stdout__.fileno() original_stderr_fd = sys.__stderr__.fileno() def _redirect_stdout(to_fd): """Redirect stdout to the given file descriptor.""" # Flush the C-level buffer stdout libc.fflush(c_stdout) # Flush and close sys.stdout - also closes the file descriptor (fd) sys.__stdout__.close() # Make original_stdout_fd point to the same file as to_fd os.dup2(to_fd, original_stdout_fd) # Create a new sys.stdout that points to the redirected fd sys.__stdout__ = io.TextIOWrapper(os.fdopen(original_stdout_fd, 'wb')) def _redirect_stderr(to_fd): """Redirect stderr to the given file descriptor.""" # Flush the C-level buffer stderr libc.fflush(c_stderr) # Flush and close sys.stderr - also closes the file descriptor (fd) sys.__stderr__.close() # Make original_stderr_fd point to the same file as to_fd os.dup2(to_fd, original_stderr_fd) # Create a new sys.stderr that points to the redirected fd sys.__stderr__ = io.TextIOWrapper(os.fdopen(original_stderr_fd, 'wb')) # Save a copy of the original stdout fd in saved_stdout_fd saved_stdout_fd = os.dup(original_stdout_fd) saved_stderr_fd = os.dup(original_stderr_fd) try: # Create a temporary file and redirect stdout to it tfile_out = tempfile.TemporaryFile(mode='w+b') tfile_err = tempfile.TemporaryFile(mode='w+b') _redirect_stdout(tfile_out.fileno()) _redirect_stderr(tfile_err.fileno()) # Yield to caller, then redirect stdout back to the saved fd yield _redirect_stdout(saved_stdout_fd) _redirect_stderr(saved_stderr_fd) # Copy contents of temporary file to the given stream tfile_out.flush() tfile_err.flush() tfile_out.seek(0, io.SEEK_SET) tfile_err.seek(0, io.SEEK_SET) out_stream.write(tfile_out.read()) err_stream.write(tfile_err.read()) finally: tfile_out.close() tfile_err.close() os.close(saved_stdout_fd) os.close(saved_stderr_fd)
1,299
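A minimal usage sketch for the stdout_stderr_redirector context manager above, assuming it is imported from this module (the import path is hypothetical) and that sys.__stdout__ is backed by a real file descriptor, as in a regular Python process.

import io
from redirector import stdout_stderr_redirector  # hypothetical import path

out_buf, err_buf = io.BytesIO(), io.BytesIO()
with stdout_stderr_redirector(out_buf, err_buf):
    print("captured at the fd level")  # fd-level capture also catches output from C extensions
print(out_buf.getvalue().decode())     # "captured at the fd level\n"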
1,392
import graphene
from django.core.exceptions import ValidationError

from ....core.permissions import OrderPermissions
from ....core.tracing import traced_atomic_transaction
from ....giftcard.utils import deactivate_order_gift_cards
from ....order.actions import cancel_order
from ....order.error_codes import OrderErrorCode
from ...core.mutations import BaseMutation
from ...core.types import OrderError
from ..types import Order


def clean_order_cancel(order):
    if order and not order.can_cancel():
        raise ValidationError(
            {
                "order": ValidationError(
                    "This order can't be canceled.",
                    code=OrderErrorCode.CANNOT_CANCEL_ORDER,
                )
            }
        )


class OrderCancel(BaseMutation):
    order = graphene.Field(Order, description="Canceled order.")

    class Arguments:
        id = graphene.ID(required=True, description="ID of the order to cancel.")

    class Meta:
        description = "Cancel an order."
        permissions = (OrderPermissions.MANAGE_ORDERS,)
        error_type_class = OrderError
        error_type_field = "order_errors"

    @classmethod
    @traced_atomic_transaction()
    def perform_mutation(cls, _root, info, **data):
        order = cls.get_node_or_error(info, data.get("id"), only_type=Order)
        clean_order_cancel(order)

        user = info.context.user
        app = info.context.app
        cancel_order(
            order=order,
            user=user,
            app=app,
            manager=info.context.plugins,
        )
        deactivate_order_gift_cards(order.id, user, app)
        return OrderCancel(order=order)
686
6,139
<reponame>Digitaltransform/tensorboard # Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Test utils for mesh plugin tests.""" import collections import json import numpy as np from tensorboard.util import tb_logging Mesh = collections.namedtuple("Mesh", ("vertices", "faces", "colors")) logger = tb_logging.get_logger() def get_random_mesh( num_vertices, add_faces=False, add_colors=False, batch_size=1 ): """Returns a random point cloud, optionally with random disconnected faces. Args: num_vertices: Number of vertices in the point cloud or mesh. add_faces: Random faces will be generated and added to the mesh when True. add_colors: Random colors will be assigned to each vertex when True. Each color will be in a range of [0, 255]. batch_size: Size of batch dimension in output array. Returns: Mesh namedtuple with vertices and optionally with faces and/or colors. """ vertices = np.random.random([num_vertices, 3]) * 1000 # Add batch dimension. vertices = np.tile(vertices, [batch_size, 1, 1]) faces = None colors = None if add_faces: arranged_vertices = np.random.permutation(num_vertices) faces = [] for i in range(num_vertices - 2): faces.append( [ arranged_vertices[i], arranged_vertices[i + 1], arranged_vertices[i + 2], ] ) faces = np.array(faces) faces = np.tile(faces, [batch_size, 1, 1]).astype(np.int32) if add_colors: colors = np.random.randint(low=0, high=255, size=[num_vertices, 3]) colors = np.tile(colors, [batch_size, 1, 1]).astype(np.uint8) return Mesh(vertices.astype(np.float32), faces, colors) def deserialize_json_response(byte_content): """Deserializes byte content that is a JSON encoding. Args: byte_content: The byte content of a response. Returns: The deserialized python object decoded from JSON. """ return json.loads(byte_content.decode("utf-8")) def deserialize_array_buffer_response(byte_content, data_type): """Deserializes arraybuffer response and optionally tiles the array. Args: byte_content: The byte content of a response. data_type: Numpy type to parse data with. Returns: Flat numpy array with the data. """ return np.frombuffer(byte_content, dtype=data_type)
1,111
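An illustrative call to get_random_mesh from the mesh test utilities above, assuming the function is imported from that module; the expected shapes follow directly from the implementation (faces are built from num_vertices - 2 index triples).

mesh = get_random_mesh(100, add_faces=True, add_colors=True, batch_size=2)
print(mesh.vertices.shape)  # (2, 100, 3), float32
print(mesh.faces.shape)     # (2, 98, 3), int32
print(mesh.colors.shape)    # (2, 100, 3), uint8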
1,444
package mage.cards.b;

import java.util.EnumSet;
import java.util.UUID;
import mage.abilities.common.EntersBattlefieldTappedAbility;
import mage.abilities.common.FetchLandActivatedAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;

/**
 *
 * @author LevelX2
 */
public final class BadRiver extends CardImpl {

    public BadRiver(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.LAND}, "");

        // Bad River enters the battlefield tapped.
        this.addAbility(new EntersBattlefieldTappedAbility());

        // {tap}, Sacrifice Bad River: Search your library for an Island or Swamp card and put it onto the battlefield. Then shuffle your library.
        this.addAbility(new FetchLandActivatedAbility(false, SubType.ISLAND, SubType.SWAMP));
    }

    private BadRiver(final BadRiver card) {
        super(card);
    }

    @Override
    public BadRiver copy() {
        return new BadRiver(this);
    }
}
354
340
<reponame>definitelyNotFBI/utt<gh_stars>100-1000
# Concord
#
# Copyright (c) 2020 VMware, Inc. All Rights Reserved.
#
# This product is licensed to you under the Apache 2.0 license (the "License").
# You may not use this product except in compliance with the Apache 2.0 License.
#
# This product may include a number of subcomponents with separate copyright
# notices and license terms. Your use of these subcomponents is subject to the
# terms and conditions of the subcomponent's license, as noted in the LICENSE
# file.

import os

from exceptions import CmfParseError
from python.py_visitor import PyVisitor
from walker import Walker

header = """########################################
# Autogenerated by cmfc. Do not modify.
########################################

"""


def translate(ast, namespace=None):
    """
    Walk concord message format(CMF) AST and generate Python code.

    Return Python code as a string.
    """
    with open(os.path.join(os.path.dirname(__file__), "serialize.py")) as f:
        base_serializers = f.read() + '\n'
    visitor = PyVisitor()
    walker = Walker(ast, visitor)
    walker.walk()
    return header + base_serializers + visitor.output
350
2,232
/* Copyright (c) 2016 Microsoft Corporation. All rights reserved. Released under Apache 2.0 license as described in the file LICENSE. Author: <NAME> */ #include "kernel/instantiate.h" #include "library/placeholder.h" #include "library/explicit.h" #include "library/choice.h" #include "library/vm/vm.h" #include "library/vm/vm_expr.h" #include "library/vm/vm_name.h" #include "library/vm/vm_list.h" #include "library/vm/vm_option.h" #include "frontends/lean/util.h" #include "frontends/lean/structure_instance.h" namespace lean { vm_obj pexpr_of_expr(vm_obj const & e) { return to_obj(mk_as_is(to_expr(e))); } vm_obj pexpr_is_placeholder(vm_obj const & e) { return mk_vm_bool(is_placeholder(to_expr(e))); } vm_obj pexpr_mk_placeholder() { return to_obj(mk_expr_placeholder()); } vm_obj pexpr_mk_explicit(vm_obj const & e) { return to_obj(mk_explicit(to_expr(e))); } vm_obj pexpr_mk_field_macro(vm_obj const & e, vm_obj const & fname) { return to_obj(mk_field_notation(to_expr(e), to_name(fname))); } vm_obj pexpr_is_choice_macro(vm_obj const & e) { return mk_vm_bool(is_choice(to_expr(e))); } vm_obj pexpr_mk_structure_instance(vm_obj const & info) { name struct_name; buffer<name> field_names; buffer<expr> field_values; buffer<expr> sources; if (!is_none(cfield(info, 0))) { struct_name = to_name(get_some_value(cfield(info, 0))); } to_buffer_name(cfield(info, 1), field_names); to_buffer_expr(cfield(info, 2), field_values); to_buffer_expr(cfield(info, 3), sources); return to_obj(mk_structure_instance(struct_name, field_names, field_values, sources)); } vm_obj pexpr_get_structure_instance_info(vm_obj const & e) { if (!is_structure_instance(to_expr(e))) { return mk_vm_none(); } auto info = get_structure_instance_info(to_expr(e)); optional<name> opt_struct_name; if (info.m_struct_name) { opt_struct_name = info.m_struct_name; } return mk_vm_some(mk_vm_constructor(0, to_obj(opt_struct_name), to_obj(info.m_field_names), to_obj(info.m_field_values), to_obj(info.m_sources))); } void initialize_vm_pexpr() { DECLARE_VM_BUILTIN(name({"pexpr", "of_expr"}), pexpr_of_expr); DECLARE_VM_BUILTIN(name({"pexpr", "is_placeholder"}), pexpr_is_placeholder); DECLARE_VM_BUILTIN(name({"pexpr", "mk_placeholder"}), pexpr_mk_placeholder); DECLARE_VM_BUILTIN(name("pexpr", "mk_explicit"), pexpr_mk_explicit); DECLARE_VM_BUILTIN(name("pexpr", "mk_field_macro"), pexpr_mk_field_macro); DECLARE_VM_BUILTIN(name("pexpr", "is_choice_macro"), pexpr_is_choice_macro); DECLARE_VM_BUILTIN(name("pexpr", "mk_structure_instance"), pexpr_mk_structure_instance); DECLARE_VM_BUILTIN(name("pexpr", "get_structure_instance_info"), pexpr_get_structure_instance_info); } void finalize_vm_pexpr() { } }
1,280
1,814
<gh_stars>1000+ /* Copyright 2019 The Waymo Open Dataset Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "waymo_open_dataset/metrics/breakdown_generator.h" #include <gtest/gtest.h> #include "waymo_open_dataset/label.pb.h" #include "waymo_open_dataset/metrics/test_utils.h" #include "waymo_open_dataset/protos/breakdown.pb.h" #include "waymo_open_dataset/protos/metrics.pb.h" namespace waymo { namespace open_dataset { namespace { TEST(BreakdownGenerator, BreakdownGeneratorAll) { const auto generator = BreakdownGenerator::Create(Breakdown::ONE_SHARD); EXPECT_EQ(1, generator->NumShards()); EXPECT_EQ(0, generator->Shard(Object())); EXPECT_EQ(Breakdown::ONE_SHARD, generator->Id()); } TEST(BreakdownGenerator, BreakdownGeneratorObjectType) { const auto generator = BreakdownGenerator::Create(Breakdown::OBJECT_TYPE); EXPECT_EQ(Label::Type_MAX, generator->NumShards()); Object object; object.mutable_object()->set_type(Label::TYPE_VEHICLE); EXPECT_EQ(Breakdown::OBJECT_TYPE, generator->Id()); } TEST(BreakdownGenerator, BreakdownGeneratorRange) { const auto generator = BreakdownGenerator::Create(Breakdown::RANGE); EXPECT_EQ(3 * Label::Type_MAX, generator->NumShards()); Object object1; *object1.mutable_object()->mutable_box() = BuildAA2dBox(1.0, 0.0, 1.0, 1.0); object1.mutable_object()->set_type(Label::TYPE_VEHICLE); Object object2; *object2.mutable_object()->mutable_box() = BuildAA2dBox(30.0, 0.0, 1.0, 1.0); object2.mutable_object()->set_type(Label::TYPE_SIGN); Object object3; *object3.mutable_object()->mutable_box() = BuildAA2dBox(50.0, 0.0, 1.0, 1.0); object3.mutable_object()->set_type(Label::TYPE_PEDESTRIAN); EXPECT_EQ(0, generator->Shard(object1)); EXPECT_EQ(1 + 3 * (Label::TYPE_SIGN - 1), generator->Shard(object2)); EXPECT_EQ(2 + 3 * (Label::TYPE_PEDESTRIAN - 1), generator->Shard(object3)); EXPECT_EQ(Breakdown::RANGE, generator->Id()); } TEST(BreakdownGenerator, BreakdownGeneratorVelocity) { const auto generator = BreakdownGenerator::Create(Breakdown::VELOCITY); EXPECT_EQ(4 * 5, generator->NumShards()); Object object; object.mutable_object()->set_type(Label::TYPE_VEHICLE); object.mutable_object()->mutable_metadata()->set_speed_x(0.05); object.mutable_object()->mutable_metadata()->set_speed_y(0.05); EXPECT_EQ(0, generator->Shard(object)); object.mutable_object()->set_type(Label::TYPE_PEDESTRIAN); object.mutable_object()->mutable_metadata()->set_speed_x(0.5); object.mutable_object()->mutable_metadata()->set_speed_y(0.5); EXPECT_EQ(5 + 1, generator->Shard(object)); object.mutable_object()->set_type(Label::TYPE_SIGN); object.mutable_object()->mutable_metadata()->set_speed_x(2.); object.mutable_object()->mutable_metadata()->set_speed_y(2.); EXPECT_EQ(10 + 2, generator->Shard(object)); object.mutable_object()->set_type(Label::TYPE_CYCLIST); object.mutable_object()->mutable_metadata()->set_speed_x(5.); object.mutable_object()->mutable_metadata()->set_speed_y(5.); EXPECT_EQ(15 + 3, generator->Shard(object)); 
object.mutable_object()->set_type(Label::TYPE_VEHICLE); object.mutable_object()->mutable_metadata()->set_speed_x(20.); object.mutable_object()->mutable_metadata()->set_speed_y(15.); EXPECT_EQ(4, generator->Shard(object)); } } // namespace } // namespace open_dataset } // namespace waymo
1,416
1,144
/* * Copyright (C) 2013 Realtek Semiconductor Corp. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. */ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include <stdio.h> #include <errno.h> #include <unistd.h> #include <stdlib.h> #include <termios.h> #include <time.h> #include <sys/time.h> #include <sys/types.h> #include <sys/param.h> #include <sys/ioctl.h> #include <sys/socket.h> #include <sys/uio.h> #include <sys/stat.h> #include <fcntl.h> #include <signal.h> #include <stdint.h> #include <string.h> #include <endian.h> #include <byteswap.h> #include <netinet/in.h> #include <poll.h> #include <sys/timerfd.h> #include <sys/epoll.h> #include "rtb_fwc.h" #include "hciattach.h" #include "hciattach_h4.h" #define RTK_VERSION "3.1.bca84ed.20190715-143612" #define TIMESTAMP_PR #define MAX_EVENTS 10 /* #define SERIAL_NONBLOCK_READ */ #ifdef SERIAL_NONBLOCK_READ #define FD_BLOCK 0 #define FD_NONBLOCK 1 #endif /* #define RTL_8703A_SUPPORT */ /* #define RTL8723DSH4_UART_HWFLOWC */ /* 8723DS H4 special */ uint8_t DBG_ON = 1; #define HCI_EVENT_HDR_SIZE 2 /* #define RTK_PATCH_LENGTH_MAX 24576 */ //24*1024 #define RTB_PATCH_LENGTH_MAX (40 * 1024) #define PATCH_DATA_FIELD_MAX_SIZE 252 #define HCI_CMD_READ_BD_ADDR 0x1009 #define HCI_VENDOR_CHANGE_BAUD 0xfc17 #define HCI_VENDOR_READ_ROM_VER 0xfc6d #define HCI_CMD_READ_LOCAL_VER 0x1001 #define HCI_VENDOR_READ_CHIP_TYPE 0xfc61 #define HCI_CMD_RESET 0x0c03 /* HCI data types */ #define H5_ACK_PKT 0x00 #define HCI_COMMAND_PKT 0x01 #define HCI_ACLDATA_PKT 0x02 #define HCI_SCODATA_PKT 0x03 #define HCI_EVENT_PKT 0x04 #define H5_VDRSPEC_PKT 0x0E #define H5_LINK_CTL_PKT 0x0F #define H5_HDR_SEQ(hdr) ((hdr)[0] & 0x07) #define H5_HDR_ACK(hdr) (((hdr)[0] >> 3) & 0x07) #define H5_HDR_CRC(hdr) (((hdr)[0] >> 6) & 0x01) #define H5_HDR_RELIABLE(hdr) (((hdr)[0] >> 7) & 0x01) #define H5_HDR_PKT_TYPE(hdr) ((hdr)[1] & 0x0f) #define H5_HDR_LEN(hdr) ((((hdr)[1] >> 4) & 0xff) + ((hdr)[2] << 4)) #define H5_HDR_SIZE 4 struct sk_buff { uint32_t max_len; uint32_t data_len; uint8_t *data; }; struct hci_ev_cmd_complete { uint8_t ncmd; uint16_t opcode; } __attribute__ ((packed)); #define OP_H5_SYNC 0x01 #define OP_H5_CONFIG 0x02 #define OP_ROM_VER ((1 << 24) | HCI_VENDOR_READ_ROM_VER) #define OP_LMP_VER ((1 << 24) | HCI_CMD_READ_LOCAL_VER) #define OP_CHIP_TYPE ((1 << 24) | HCI_VENDOR_READ_CHIP_TYPE) #define OP_SET_BAUD ((1 << 24) | HCI_VENDOR_CHANGE_BAUD) #define OP_HCI_RESET ((1 << 24) | HCI_CMD_RESET) struct rtb_struct rtb_cfg; /* bite reverse in bytes * 00000001 -> 10000000 * 00000100 -> 00100000 */ const uint8_t byte_rev_table[256] = { 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0, 0x08, 0x88, 0x48, 0xc8, 0x28, 0xa8, 0x68, 0xe8, 0x18, 0x98, 0x58, 0xd8, 0x38, 0xb8, 0x78, 0xf8, 0x04, 0x84, 0x44, 0xc4, 0x24, 0xa4, 0x64, 0xe4, 0x14, 0x94, 0x54, 0xd4, 0x34, 0xb4, 0x74, 0xf4, 0x0c, 0x8c, 0x4c, 0xcc, 0x2c, 0xac, 0x6c, 0xec, 0x1c, 0x9c, 0x5c, 0xdc, 0x3c, 0xbc, 0x7c, 0xfc, 0x02, 0x82, 0x42, 0xc2, 0x22, 0xa2, 0x62, 0xe2, 0x12, 0x92, 0x52, 0xd2, 0x32, 0xb2, 0x72, 0xf2, 0x0a, 0x8a, 0x4a, 0xca, 0x2a, 0xaa, 0x6a, 
0xea, 0x1a, 0x9a, 0x5a, 0xda, 0x3a, 0xba, 0x7a, 0xfa, 0x06, 0x86, 0x46, 0xc6, 0x26, 0xa6, 0x66, 0xe6, 0x16, 0x96, 0x56, 0xd6, 0x36, 0xb6, 0x76, 0xf6, 0x0e, 0x8e, 0x4e, 0xce, 0x2e, 0xae, 0x6e, 0xee, 0x1e, 0x9e, 0x5e, 0xde, 0x3e, 0xbe, 0x7e, 0xfe, 0x01, 0x81, 0x41, 0xc1, 0x21, 0xa1, 0x61, 0xe1, 0x11, 0x91, 0x51, 0xd1, 0x31, 0xb1, 0x71, 0xf1, 0x09, 0x89, 0x49, 0xc9, 0x29, 0xa9, 0x69, 0xe9, 0x19, 0x99, 0x59, 0xd9, 0x39, 0xb9, 0x79, 0xf9, 0x05, 0x85, 0x45, 0xc5, 0x25, 0xa5, 0x65, 0xe5, 0x15, 0x95, 0x55, 0xd5, 0x35, 0xb5, 0x75, 0xf5, 0x0d, 0x8d, 0x4d, 0xcd, 0x2d, 0xad, 0x6d, 0xed, 0x1d, 0x9d, 0x5d, 0xdd, 0x3d, 0xbd, 0x7d, 0xfd, 0x03, 0x83, 0x43, 0xc3, 0x23, 0xa3, 0x63, 0xe3, 0x13, 0x93, 0x53, 0xd3, 0x33, 0xb3, 0x73, 0xf3, 0x0b, 0x8b, 0x4b, 0xcb, 0x2b, 0xab, 0x6b, 0xeb, 0x1b, 0x9b, 0x5b, 0xdb, 0x3b, 0xbb, 0x7b, 0xfb, 0x07, 0x87, 0x47, 0xc7, 0x27, 0xa7, 0x67, 0xe7, 0x17, 0x97, 0x57, 0xd7, 0x37, 0xb7, 0x77, 0xf7, 0x0f, 0x8f, 0x4f, 0xcf, 0x2f, 0xaf, 0x6f, 0xef, 0x1f, 0x9f, 0x5f, 0xdf, 0x3f, 0xbf, 0x7f, 0xff, }; static __inline uint8_t bit_rev8(uint8_t byte) { return byte_rev_table[byte]; } static __inline uint16_t bit_rev16(uint16_t x) { return (bit_rev8(x & 0xff) << 8) | bit_rev8(x >> 8); } static const uint16_t crc_table[] = { 0x0000, 0x1081, 0x2102, 0x3183, 0x4204, 0x5285, 0x6306, 0x7387, 0x8408, 0x9489, 0xa50a, 0xb58b, 0xc60c, 0xd68d, 0xe70e, 0xf78f }; /* Initialise the crc calculator */ #define H5_CRC_INIT(x) x = 0xffff static __inline struct sk_buff *skb_alloc(unsigned int len) { struct sk_buff *skb = NULL; if ((skb = malloc(len + sizeof(*skb)))) { skb->max_len = len; skb->data_len = 0; skb->data = ((uint8_t *)skb) + sizeof(*skb); } else { RS_ERR("Allocate skb fails!"); skb = NULL; return NULL; } memset(skb->data, 0, len); return skb; } static __inline void skb_free(struct sk_buff *skb) { free(skb); return; } /* * Add data to a buffer * This function extends the used data area of the buffer. */ static uint8_t *skb_put(struct sk_buff *skb, uint32_t len) { uint32_t old_len = skb->data_len; if ((skb->data_len + len) > (skb->max_len)) { RS_ERR("Buffer too small"); exit(EXIT_FAILURE); } skb->data_len += len; return (skb->data + old_len); } /* * Remove end from a buffer * Cut the length of a buffer down by removing data from the tail */ static void skb_trim(struct sk_buff *skb, uint32_t len) { if (skb->data_len > len) { skb->data_len = len; } else { RS_ERR("Trim error, data_len %u < len %u", skb->data_len, len); } } /* * Remove data from the start of a buffer * This function removes data from the start of a buffer. * A pointer to the next data in the buffer is returned */ static uint8_t *skb_pull(struct sk_buff *skb, uint32_t len) { if (len > skb->data_len) { RS_ERR("Pull error, data_len %u < len %u", skb->data_len, len); exit(EXIT_FAILURE); } skb->data_len -= len; skb->data += len; return skb->data; } /** * Add "d" into crc scope, caculate the new crc value * * @param crc crc data * @param d one byte data */ static void h5_crc_update(uint16_t * crc, uint8_t d) { uint16_t reg = *crc; reg = (reg >> 4) ^ crc_table[(reg ^ d) & 0x000f]; reg = (reg >> 4) ^ crc_table[(reg ^ (d >> 4)) & 0x000f]; *crc = reg; } struct __una_u16 { uint16_t x; }; static __inline uint16_t __get_unaligned_cpu16(const void *p) { const struct __una_u16 *ptr = (const struct __una_u16 *)p; return ptr->x; } static __inline uint16_t get_unaligned_be16(const void *p) { return __get_unaligned_cpu16((const uint8_t *)p); } /* * Get crc data. 
*/ static uint16_t h5_get_crc(struct rtb_struct * h5) { uint16_t crc = 0; uint8_t *data = h5->rx_skb->data + h5->rx_skb->data_len - 2; crc = data[1] + (data[0] << 8); return crc; /* return get_unaligned_be16(&h5->rx_skb->data[h5->rx_skb->data_len - 2]); */ } /* * Add 0xc0 to buffer. */ static void h5_slip_msgdelim(struct sk_buff *skb) { const char pkt_delim = 0xc0; memcpy(skb_put(skb, 1), &pkt_delim, 1); } /* * Encode one byte in h5 proto * 0xc0 -> 0xdb, 0xdc * 0xdb -> 0xdb, 0xdd * 0x11 -> 0xdb, 0xde * 0x13 -> 0xdb, 0xdf * others will not change */ static void h5_slip_one_byte(struct sk_buff *skb, uint8_t c) { const uint8_t esc_c0[2] = { 0xdb, 0xdc }; const uint8_t esc_db[2] = { 0xdb, 0xdd }; const uint8_t esc_11[2] = { 0xdb, 0xde }; const uint8_t esc_13[2] = { 0xdb, 0xdf }; switch (c) { case 0xc0: memcpy(skb_put(skb, 2), &esc_c0, 2); break; case 0xdb: memcpy(skb_put(skb, 2), &esc_db, 2); break; case 0x11: memcpy(skb_put(skb, 2), &esc_11, 2); break; case 0x13: memcpy(skb_put(skb, 2), &esc_13, 2); break; default: memcpy(skb_put(skb, 1), &c, 1); break; } } /* * Decode one byte in h5 proto * 0xdb, 0xdc -> 0xc0 * 0xdb, 0xdd -> 0xdb * 0xdb, 0xde -> 0x11 * 0xdb, 0xdf -> 0x13 * others will not change */ static void h5_unslip_one_byte(struct rtb_struct * h5, unsigned char byte) { const uint8_t c0 = 0xc0, db = 0xdb; const uint8_t oof1 = 0x11, oof2 = 0x13; if (H5_ESCSTATE_NOESC == h5->rx_esc_state) { if (0xdb == byte) { h5->rx_esc_state = H5_ESCSTATE_ESC; } else { memcpy(skb_put(h5->rx_skb, 1), &byte, 1); /* Check Pkt Header's CRC enable bit */ if ((h5->rx_skb->data[0] & 0x40) != 0 && h5->rx_state != H5_W4_CRC) { h5_crc_update(&h5->message_crc, byte); } h5->rx_count--; } } else if (H5_ESCSTATE_ESC == h5->rx_esc_state) { switch (byte) { case 0xdc: memcpy(skb_put(h5->rx_skb, 1), &c0, 1); if ((h5->rx_skb->data[0] & 0x40) != 0 && h5->rx_state != H5_W4_CRC) h5_crc_update(&h5->message_crc, 0xc0); h5->rx_esc_state = H5_ESCSTATE_NOESC; h5->rx_count--; break; case 0xdd: memcpy(skb_put(h5->rx_skb, 1), &db, 1); if ((h5->rx_skb->data[0] & 0x40) != 0 && h5->rx_state != H5_W4_CRC) h5_crc_update(&h5->message_crc, 0xdb); h5->rx_esc_state = H5_ESCSTATE_NOESC; h5->rx_count--; break; case 0xde: memcpy(skb_put(h5->rx_skb, 1), &oof1, 1); if ((h5->rx_skb->data[0] & 0x40) != 0 && h5->rx_state != H5_W4_CRC) h5_crc_update(&h5->message_crc, oof1); h5->rx_esc_state = H5_ESCSTATE_NOESC; h5->rx_count--; break; case 0xdf: memcpy(skb_put(h5->rx_skb, 1), &oof2, 1); if ((h5->rx_skb->data[0] & 0x40) != 0 && h5->rx_state != H5_W4_CRC) h5_crc_update(&h5->message_crc, oof2); h5->rx_esc_state = H5_ESCSTATE_NOESC; h5->rx_count--; break; default: RS_ERR("Error: Invalid byte %02x after esc byte", byte); skb_free(h5->rx_skb); h5->rx_skb = NULL; h5->rx_state = H5_W4_PKT_DELIMITER; h5->rx_count = 0; break; } } } /* * Prepare h5 packet * Refer to Core Spec Vol 4, Part D * Three-wire UART Transport Layer: 4 PACKET HEADER */ static struct sk_buff *h5_prepare_pkt(struct rtb_struct * h5, uint8_t *data, int len, int pkt_type) { struct sk_buff *nskb; uint8_t hdr[4]; uint16_t H5_CRC_INIT(h5_txmsg_crc); int rel, i; switch (pkt_type) { case HCI_ACLDATA_PKT: case HCI_COMMAND_PKT: case HCI_EVENT_PKT: rel = 1; /* reliable */ break; case H5_ACK_PKT: case H5_VDRSPEC_PKT: case H5_LINK_CTL_PKT: rel = 0; /* unreliable */ break; default: RS_ERR("Unknown packet type"); return NULL; } /* Max len of packet: (len + 4(h5 hdr) + 2(crc))*2 * Because bytes 0xc0 and 0xdb are escaped, worst case is that the * packet is only made of 0xc0 and 0xdb * The additional 2-octets 
are 0xc0 delimiters at start and end of each * packet. */ nskb = skb_alloc((len + 6) * 2 + 2); if (!nskb) return NULL; /* Add SLIP start byte: 0xc0 */ h5_slip_msgdelim(nskb); /* Set ack number in SLIP header */ hdr[0] = h5->rxseq_txack << 3; h5->is_txack_req = 0; /* RS_DBG("Request packet no(%u) to card", h5->rxseq_txack); */ /* RS_DBG("Sending packet with seqno %u and wait %u", h5->msgq_txseq, * h5->rxseq_txack); */ if (rel) { /* Set reliable bit and seq number */ hdr[0] |= 0x80 + h5->msgq_txseq; /* RS_DBG("Sending packet with seqno(%u)", h5->msgq_txseq); */ ++(h5->msgq_txseq); h5->msgq_txseq = (h5->msgq_txseq) & 0x07; } /* Set DIC Present bit */ if (h5->use_crc) hdr[0] |= 0x40; /* Set packet type and payload length */ hdr[1] = ((len << 4) & 0xff) | pkt_type; hdr[2] = (uint8_t) (len >> 4); /* Set header checksum */ hdr[3] = ~(hdr[0] + hdr[1] + hdr[2]); /* Encode h5 header */ for (i = 0; i < 4; i++) { h5_slip_one_byte(nskb, hdr[i]); if (h5->use_crc) h5_crc_update(&h5_txmsg_crc, hdr[i]); } /* Encode payload */ for (i = 0; i < len; i++) { h5_slip_one_byte(nskb, data[i]); if (h5->use_crc) h5_crc_update(&h5_txmsg_crc, data[i]); } /* Encode CRC */ if (h5->use_crc) { h5_txmsg_crc = bit_rev16(h5_txmsg_crc); h5_slip_one_byte(nskb, (uint8_t) ((h5_txmsg_crc >> 8) & 0x00ff)); h5_slip_one_byte(nskb, (uint8_t) (h5_txmsg_crc & 0x00ff)); } /* Add 0xc0 at the end of the packet */ h5_slip_msgdelim(nskb); return nskb; } /* * Remove controller acked packet from host unacked lists */ /* static void h5_remove_acked_pkt(struct rtb_struct * h5) * { * int pkts_to_be_removed = 0; * int seqno = 0; * int i = 0; * * seqno = h5->msgq_txseq; * // pkts_to_be_removed = GetListLength(h5->unacked); * * while (pkts_to_be_removed) { * if (h5->rxack == seqno) * break; * * pkts_to_be_removed--; * seqno = (seqno - 1) & 0x07; * } * * if (h5->rxack != seqno) { * RS_DBG("Peer acked invalid packet"); * } * // skb_queue_walk_safe(&h5->unack, skb, tmp) * // remove ack'ed packet from h5->unack queue * for (i = 0; i < 5; ++i) { * if (i >= pkts_to_be_removed) * break; * i++; * //__skb_unlink(skb, &h5->unack); * //skb_free(skb); * } * * // if (skb_queue_empty(&h5->unack)) * // del_timer(&h5->th5); * // spin_unlock_irqrestore(&h5->unack.lock, flags); * * if (i != pkts_to_be_removed) * RS_DBG("Removed only (%u) out of (%u) pkts", i, * pkts_to_be_removed); * } */ /* * Send host ack. */ static void rtb_send_ack(int fd) { int len; struct sk_buff *nskb = h5_prepare_pkt(&rtb_cfg, NULL, 0, H5_ACK_PKT); len = write(fd, nskb->data, nskb->data_len); if (len != nskb->data_len) RS_ERR("Write pure ack fails"); skb_free(nskb); return; } /* * Parse hci command complete event in h5 init state. 
*/ static void h5_init_hci_cc(struct sk_buff *skb) { struct hci_ev_cmd_complete *ev = NULL; uint16_t opcode = 0; uint8_t status = 0; skb_pull(skb, HCI_EVENT_HDR_SIZE); ev = (struct hci_ev_cmd_complete *)skb->data; opcode = le16_to_cpu(ev->opcode); RS_DBG("Receive cmd complete event of command: %04x", opcode); skb_pull(skb, sizeof(struct hci_ev_cmd_complete)); status = skb->data[0]; if (status) { RS_ERR("status is %u for cmd %04x", status, opcode); return; } if (rtb_cfg.cmd_state.opcode != opcode) { RS_ERR("%s: Received unexpected cc for cmd %04x, %04x of cc", __func__, rtb_cfg.cmd_state.opcode, opcode); return; } rtb_cfg.cmd_state.state = CMD_STATE_SUCCESS; switch (opcode) { case HCI_VENDOR_CHANGE_BAUD: RS_INFO("Received cc of vendor change baud"); break; case HCI_CMD_READ_BD_ADDR: RS_INFO("BD Address: %02x:%02x:%02x:%02x:%02x:%02x", skb->data[5], skb->data[4], skb->data[3], skb->data[2], skb->data[1], skb->data[0]); break; case HCI_CMD_READ_LOCAL_VER: rtb_cfg.hci_ver = skb->data[1]; rtb_cfg.hci_rev = (skb->data[2] | skb->data[3] << 8); rtb_cfg.lmp_subver = (skb->data[7] | (skb->data[8] << 8)); RS_INFO("HCI Version 0x%02x", rtb_cfg.hci_ver); RS_INFO("HCI Revision 0x%04x", rtb_cfg.hci_rev); RS_INFO("LMP Subversion 0x%04x", rtb_cfg.lmp_subver); break; case HCI_VENDOR_READ_ROM_VER: rtb_cfg.eversion = skb->data[1]; RS_INFO("Read ROM version %02x", rtb_cfg.eversion); break; case HCI_VENDOR_READ_CHIP_TYPE: rtb_cfg.chip_type = (skb->data[1] & 0x0f); RS_INFO("Read chip type %02x", rtb_cfg.chip_type); break; default: return; } /* Count the cmd num for makeing the seq number aligned */ rtb_cfg.num_of_cmd_sent++; } /* * Parse hci command complete event in h5 post state. */ static void h5_post_hci_cc(struct sk_buff *skb) { struct hci_ev_cmd_complete *ev = NULL; uint16_t opcode = 0; uint8_t status = 0; skb_pull(skb, HCI_EVENT_HDR_SIZE); ev = (struct hci_ev_cmd_complete *)skb->data; opcode = le16_to_cpu(ev->opcode); RS_DBG("Receive cmd complete event of command: %04x", opcode); skb_pull(skb, sizeof(struct hci_ev_cmd_complete)); status = skb->data[0]; if (status) { RS_ERR("status is %u for cmd %04x", status, opcode); return; } if (rtb_cfg.cmd_state.opcode != opcode) { RS_ERR("%s: Received unexpected cc for cmd %04x, %04x of cc", __func__, rtb_cfg.cmd_state.opcode, opcode); return; } rtb_cfg.cmd_state.state = CMD_STATE_SUCCESS; switch (opcode) { case HCI_CMD_RESET: RS_INFO("Received cc of hci reset cmd"); rtb_cfg.link_estab_state = H5_ACTIVE; break; default: break; } } /* * Process a hci frame */ static void hci_recv_frame(struct sk_buff *skb) { if (rtb_cfg.link_estab_state == H5_INIT) { if (skb->data[0] == 0x0e) h5_init_hci_cc(skb); /* * rtb_send_ack(rtb_cfg.serial_fd); * usleep(10000); * rtb_send_ack(rtb_cfg.serial_fd); */ } else if (rtb_cfg.link_estab_state == H5_PATCH) { if (skb->data[0] != 0x0e) { RS_INFO("Received event 0x%x during download patch", skb->data[0]); return; } rtb_cfg.rx_index = skb->data[6]; /* RS_INFO("rx_index %d", rtb_cfg.rx_index); */ /* Download fw/config done */ if (rtb_cfg.rx_index & 0x80) { rtb_cfg.rx_index &= ~0x80; rtb_cfg.link_estab_state = H5_HCI_RESET; } } else if (rtb_cfg.link_estab_state == H5_HCI_RESET) { if (skb->data[0] == 0x0e) h5_post_hci_cc(skb); } else { RS_ERR("receive packets in active state"); } } static void h5_handle_internal_rx(struct sk_buff *skb) { int len; uint8_t sync_req[2] = { 0x01, 0x7E }; uint8_t sync_resp[2] = { 0x02, 0x7D }; uint8_t sync_resp_pkt[0x8] = { 0xc0, 0x00, 0x2F, 0x00, 0xD0, 0x02, 0x7D, 0xc0 }; uint8_t conf_req[2] = { 0x03, 0xFC }; 
uint8_t conf_resp[2] = { 0x04, 0x7B }; uint8_t conf_resp_pkt[0x8] = { 0xc0, 0x00, 0x2F, 0x00, 0xD0, 0x04, 0x7B, 0xc0 }; if (rtb_cfg.link_estab_state == H5_SYNC) { if (!memcmp(skb->data, sync_req, 2)) { RS_INFO("[SYNC] Get SYNC Pkt\n"); len = write(rtb_cfg.serial_fd, sync_resp_pkt, 0x8); if (len != 0x08) RS_ERR("Send h5 sync resp error, %s", strerror(errno)); } else if (!memcmp(skb->data, sync_resp, 2)) { RS_INFO("[SYNC] Get SYNC Resp Pkt"); rtb_cfg.link_estab_state = H5_CONFIG; } } else if (rtb_cfg.link_estab_state == H5_CONFIG) { if (!memcmp(skb->data, sync_req, 0x2)) { RS_INFO("[CONFIG] Get SYNC pkt"); len = write(rtb_cfg.serial_fd, sync_resp_pkt, 0x8); if (len != 0x08) RS_ERR("Send h5 sync resp error, %s", strerror(errno)); } else if (!memcmp(skb->data, conf_req, 0x2)) { RS_INFO("[CONFIG] Get CONFG pkt"); len = write(rtb_cfg.serial_fd, conf_resp_pkt, 0x8); if (len != 0x08) RS_ERR("Send h5 sync resp to ctl error, %s", strerror(errno)); } else if (!memcmp(skb->data, conf_resp, 0x2)) { RS_INFO("[CONFIG] Get CONFG resp pkt"); /* Change state to H5_INIT after receiving a conf resp */ rtb_cfg.link_estab_state = H5_INIT; if (skb->data_len > 2) { rtb_cfg.use_crc = ((skb->data[2]) >> 4) & 0x01; RS_INFO("dic is %u, cfg field 0x%02x", rtb_cfg.use_crc, skb->data[2]); } } else { RS_WARN("[CONFIG] Get unknown pkt"); rtb_send_ack(rtb_cfg.serial_fd); } } } /* * Process the received complete h5 packet */ static void h5_complete_rx_pkt(struct rtb_struct *h5) { int pass_up = 1; uint8_t *h5_hdr = NULL; h5_hdr = (uint8_t *) (h5->rx_skb->data); if (H5_HDR_RELIABLE(h5_hdr)) { /* RS_DBG("Received reliable seqno %u from card", h5->rxseq_txack); */ h5->rxseq_txack = H5_HDR_SEQ(h5_hdr) + 1; /* h5->rxseq_txack %= 8; */ h5->rxseq_txack &= 0x07; h5->is_txack_req = 1; } h5->rxack = H5_HDR_ACK(h5_hdr); switch (H5_HDR_PKT_TYPE(h5_hdr)) { case HCI_ACLDATA_PKT: case HCI_EVENT_PKT: case HCI_COMMAND_PKT: /* h5_remove_acked_pkt(h5); */ pass_up = 1; break; case HCI_SCODATA_PKT: pass_up = 1; break; case H5_LINK_CTL_PKT: pass_up = 0; skb_pull(h5->rx_skb, H5_HDR_SIZE); h5_handle_internal_rx(h5->rx_skb); break; default: /* Pure ack or other unexpected pkt */ pass_up = 0; break; } if (pass_up) { skb_pull(h5->rx_skb, H5_HDR_SIZE); hci_recv_frame(h5->rx_skb); } if (h5->is_txack_req) { rtb_send_ack(rtb_cfg.serial_fd); h5->is_txack_req = 0; } skb_free(h5->rx_skb); h5->rx_state = H5_W4_PKT_DELIMITER; h5->rx_skb = NULL; } /* * Parse the receive data in h5 proto. 
*/ static int h5_recv(struct rtb_struct *h5, void *data, int count) { unsigned char *ptr; ptr = (unsigned char *)data; while (count) { if (h5->rx_count) { if (*ptr == 0xc0) { RS_ERR("Short h5 packet"); skb_free(h5->rx_skb); h5->rx_state = H5_W4_PKT_START; h5->rx_count = 0; } else h5_unslip_one_byte(h5, *ptr); ptr++; count--; continue; } switch (h5->rx_state) { case H5_W4_HDR: /* Check header checksum */ if ((0xff & (uint8_t)~(h5->rx_skb->data[0] + h5->rx_skb->data[1] + h5->rx_skb->data[2])) != h5->rx_skb->data[3]) { RS_ERR("h5 hdr checksum error"); skb_free(h5->rx_skb); h5->rx_state = H5_W4_PKT_DELIMITER; h5->rx_count = 0; continue; } /* The received seq number is unexpected */ if (h5->rx_skb->data[0] & 0x80 && (h5->rx_skb->data[0] & 0x07) != h5->rxseq_txack) { uint8_t rxseq_txack = (h5->rx_skb->data[0] & 0x07); RS_ERR("Out-of-order packet arrived, got(%u)expected(%u)", h5->rx_skb->data[0] & 0x07, h5->rxseq_txack); h5->is_txack_req = 1; skb_free(h5->rx_skb); h5->rx_state = H5_W4_PKT_DELIMITER; h5->rx_count = 0; /* Depend on whether Controller will reset ack * number or not */ if (rtb_cfg.link_estab_state == H5_PATCH && rtb_cfg.tx_index == rtb_cfg.total_num) rtb_cfg.rxseq_txack = rxseq_txack; continue; } h5->rx_state = H5_W4_DATA; h5->rx_count = (h5->rx_skb->data[1] >> 4) + (h5->rx_skb->data[2] << 4); continue; case H5_W4_DATA: /* Packet with crc */ if (h5->rx_skb->data[0] & 0x40) { h5->rx_state = H5_W4_CRC; h5->rx_count = 2; } else { h5_complete_rx_pkt(h5); } continue; case H5_W4_CRC: if (bit_rev16(h5->message_crc) != h5_get_crc(h5)) { RS_ERR("Checksum failed, computed %04x received %04x", bit_rev16(h5->message_crc), h5_get_crc(h5)); skb_free(h5->rx_skb); h5->rx_state = H5_W4_PKT_DELIMITER; h5->rx_count = 0; continue; } skb_trim(h5->rx_skb, h5->rx_skb->data_len - 2); h5_complete_rx_pkt(h5); continue; case H5_W4_PKT_DELIMITER: switch (*ptr) { case 0xc0: h5->rx_state = H5_W4_PKT_START; break; default: break; } ptr++; count--; break; case H5_W4_PKT_START: switch (*ptr) { case 0xc0: ptr++; count--; break; default: h5->rx_state = H5_W4_HDR; h5->rx_count = 4; h5->rx_esc_state = H5_ESCSTATE_NOESC; H5_CRC_INIT(h5->message_crc); /* Do not increment ptr or decrement count * Allocate packet. 
Max len of a H5 pkt= * 0xFFF (payload) +4 (header) +2 (crc) */ h5->rx_skb = skb_alloc(0x1005); if (!h5->rx_skb) { RS_ERR("Can't alloc skb for new pkt"); h5->rx_state = H5_W4_PKT_DELIMITER; h5->rx_count = 0; return 0; } break; } break; default: break; } } return count; } static const char *op_string(uint32_t op) { switch (op) { case OP_SET_BAUD: return "OP_SET_BAUD"; case OP_H5_SYNC: return "OP_H5_SYNC"; case OP_H5_CONFIG: return "OP_H5_CONFIG"; case OP_HCI_RESET: return "OP_HCI_RESET"; case OP_CHIP_TYPE: return "OP_CHIP_TYPE"; case OP_ROM_VER: return "OP_ROM_VER"; case OP_LMP_VER: return "OP_LMP_VER"; default: return "OP_UNKNOWN"; } } static int start_transmit_wait(int fd, struct sk_buff *skb, uint32_t op, unsigned int msec, int retry) { unsigned char buf[128]; ssize_t result; struct iovec iov; ssize_t ret; uint8_t *data; int len; int op_result = -1; uint64_t expired; int n; struct epoll_event events[MAX_EVENTS]; int nfds; uint16_t opcode = 0; if (fd == -1 || !skb) { RS_ERR("Invalid parameter"); return -1; } data = skb->data; len = skb->data_len; if (op & (1 << 24)) { opcode = (op & 0xffff); if (opcode != rtb_cfg.cmd_state.opcode || rtb_cfg.cmd_state.state != CMD_STATE_UNKNOWN) { RS_ERR("Invalid opcode or cmd state"); return -1; } } iov.iov_base = data; iov.iov_len = len; do { ret = writev(fd, &iov, 1); if (ret != len) RS_WARN("Writev partially, ret %d", (int)ret); } while (ret < 0 && errno == EINTR); if (ret < 0) { RS_ERR("Call writev error, %s", strerror(errno)); return -errno; } /* Set timeout */ if (rtb_cfg.timerfd > 0) timeout_set(rtb_cfg.timerfd, msec); do { nfds = epoll_wait(rtb_cfg.epollfd, events, MAX_EVENTS, msec); if (nfds == -1) { RS_ERR("epoll_wait, %s (%d)", strerror(errno), errno); exit(EXIT_FAILURE); } for (n = 0; n < nfds; ++n) { if (events[n].data.fd == rtb_cfg.serial_fd) { if (events[n].events & (EPOLLERR | EPOLLHUP | EPOLLRDHUP)) { RS_ERR("%s: Error happens on serial fd", __func__); exit(EXIT_FAILURE); } result = read(events[n].data.fd, buf, sizeof(buf)); if (result <= 0) { RS_ERR("Read serial error, %s", strerror(errno)); continue; } else { h5_recv(&rtb_cfg, buf, result); } } else if (events[n].data.fd == rtb_cfg.timerfd) { if (events[n].events & (EPOLLERR | EPOLLHUP | EPOLLRDHUP)) { RS_ERR("%s: Error happens on timer fd", __func__); exit(EXIT_FAILURE); } RS_WARN("%s Transmission timeout", op_string(op)); result = read(events[n].data.fd, &expired, sizeof(expired)); if (result != sizeof(expired)) { RS_ERR("Skip retransmit"); break; } if (retry <= 0) { RS_ERR("Retransmission exhausts"); tcflush(fd, TCIOFLUSH); exit(EXIT_FAILURE); } iov.iov_base = data; iov.iov_len = len; do { ret = writev(fd, &iov, 1); if (ret != len) RS_WARN("Writev partial, %d", (int)ret); } while (ret < 0 && errno == EINTR); if (ret < 0) { RS_ERR("ReCall writev error, %s", strerror(errno)); return -errno; } retry--; timeout_set(rtb_cfg.timerfd, msec); } } if (!(op & (1 << 24))) { /* h5 sync or config */ if (op == OP_H5_SYNC && rtb_cfg.link_estab_state == H5_CONFIG) { op_result = 0; break; } if (op == OP_H5_CONFIG && rtb_cfg.link_estab_state == H5_INIT) { op_result = 0; break; } continue; } if (rtb_cfg.cmd_state.opcode == opcode && rtb_cfg.cmd_state.state == CMD_STATE_SUCCESS) { op_result = 0; break; } } while (1); /* Disarms timer */ timeout_set(rtb_cfg.timerfd, 0); return op_result; } static int h5_download_patch(int dd, int index, uint8_t *data, int len, struct termios *ti) { unsigned char buf[64]; int retlen; struct iovec iov; ssize_t ret; int nfds; struct epoll_event events[MAX_EVENTS]; int n; int 
timeout; uint64_t expired; int retry = 3; struct sk_buff *nskb; uint8_t hci_patch[PATCH_DATA_FIELD_MAX_SIZE + 4]; if (index & 0x80) { rtb_cfg.tx_index = index & 0x7f; timeout = 1000; } else { rtb_cfg.tx_index = index; timeout = 800; } /* download cmd: 0xfc20 */ hci_patch[0] = 0x20; hci_patch[1] = 0xfc; hci_patch[2] = len + 1; hci_patch[3] = (uint8_t)index; if (data) memcpy(&hci_patch[4], data, len); /* length: 2-byte opcode + 1-byte len + 1-byte index + payload */ nskb = h5_prepare_pkt(&rtb_cfg, hci_patch, len + 4, HCI_COMMAND_PKT); if (!nskb) { RS_ERR("Prepare command packet for download"); return -1; } /* Save pkt address and length for re-transmission */ len = nskb->data_len; data = nskb->data; iov.iov_base = nskb->data; iov.iov_len = nskb->data_len; do { ret = writev(dd, &iov, 1); if (ret != len) RS_WARN("Writev partially, ret %d", (int)ret); } while (ret < 0 && errno == EINTR); if (ret < 0) { RS_ERR("Call writev error, %s", strerror(errno)); skb_free(nskb); return -errno; } /* RS_INFO("%s: tx_index %d, rx_index %d", __func__, * rtb_cfg.tx_index, rtb_cfg.rx_index); */ if (index & 0x80) { /* For the last pkt, wait for its complete */ tcdrain(dd); if (rtb_cfg.uart_flow_ctrl) { RS_INFO("Enable host hw flow control"); ti->c_cflag |= CRTSCTS; } else { RS_INFO("Disable host hw flow control"); ti->c_cflag &= ~CRTSCTS; } if (tcsetattr(dd, TCSANOW, ti) < 0) { RS_ERR("Can't set port settings"); skb_free(nskb); return -1; } /* RS_INFO("Change baud to %d", rtb_cfg.final_speed); * if (set_speed(dd, ti, rtb_cfg.final_speed) < 0) { * RS_ERR("Set final speed %d error", * rtb_cfg.final_speed); * } */ } if (rtb_cfg.timerfd > 0) timeout_set(rtb_cfg.timerfd, timeout); do { nfds = epoll_wait(rtb_cfg.epollfd, events, MAX_EVENTS, -1); if (nfds == -1) { RS_ERR("epoll_wait, %s (%d)", strerror(errno), errno); exit(EXIT_FAILURE); } for (n = 0; n < nfds; ++n) { if (events[n].data.fd == dd) { if (events[n].events & (EPOLLERR | EPOLLHUP | EPOLLRDHUP)) { RS_ERR("%s: Error happens on serial fd", __func__); exit(EXIT_FAILURE); } retlen = read(dd, buf, sizeof(buf)); if (retlen <= 0) { RS_ERR("Read serial error, %s", strerror(errno)); continue; } else { h5_recv(&rtb_cfg, buf, retlen); } } else if (events[n].data.fd == rtb_cfg.timerfd) { int fd = events[n].data.fd; if (events[n].events & (EPOLLERR | EPOLLHUP | EPOLLRDHUP)) { RS_ERR("%s: Error happens on timer fd", __func__); exit(EXIT_FAILURE); } RS_WARN("Patch pkt trans timeout, re-trans"); ret = read(fd, &expired, sizeof(expired)); if (ret != sizeof(expired)) { RS_ERR("Read expired info error"); exit(EXIT_FAILURE); } if (retry <= 0) { RS_ERR("%s: Retransmission exhausts", __func__); tcflush(fd, TCIOFLUSH); exit(EXIT_FAILURE); } iov.iov_base = data; iov.iov_len = len; do { ret = writev(dd, &iov, 1); if (ret != len) RS_WARN("Writev partial, %d", (int)ret); } while (ret < 0 && errno == EINTR); if (ret < 0) { RS_ERR("ReCall writev error, %s", strerror(errno)); skb_free(nskb); return -errno; } retry--; timeout_set(fd, timeout); } } } while (rtb_cfg.rx_index != rtb_cfg.tx_index); /* Disarms timer */ if (rtb_cfg.timerfd > 0) timeout_set(rtb_cfg.timerfd, 0); skb_free(nskb); return 0; } /* * Change the Controller's UART speed. 
*/ int h5_vendor_change_speed(int fd, uint32_t baudrate) { struct sk_buff *nskb = NULL; unsigned char cmd[16] = { 0 }; int result; cmd[0] = 0x17; cmd[1] = 0xfc; cmd[2] = 4; baudrate = cpu_to_le32(baudrate); #ifdef BAUDRATE_4BYTES memcpy((uint16_t *) & cmd[3], &baudrate, 4); #else memcpy((uint16_t *) & cmd[3], &baudrate, 2); cmd[5] = 0; cmd[6] = 0; #endif RS_DBG("baudrate in change speed command: 0x%02x 0x%02x 0x%02x 0x%02x", cmd[3], cmd[4], cmd[5], cmd[6]); nskb = h5_prepare_pkt(&rtb_cfg, cmd, 7, HCI_COMMAND_PKT); if (!nskb) { RS_ERR("Prepare command packet for change speed fail"); return -1; } rtb_cfg.cmd_state.opcode = HCI_VENDOR_CHANGE_BAUD;; rtb_cfg.cmd_state.state = CMD_STATE_UNKNOWN; result = start_transmit_wait(fd, nskb, OP_SET_BAUD, 1000, 0); skb_free(nskb); if (result < 0) { RS_ERR("OP_SET_BAUD Transmission error"); return result; } return 0; } /* * Init realtek Bluetooth h5 proto. * There are two steps: h5 sync and h5 config. */ int rtb_init_h5(int fd, struct termios *ti) { struct sk_buff *nskb; unsigned char h5sync[2] = { 0x01, 0x7E }; /* 16-bit CCITT CRC may be used and the sliding win size is 4 */ unsigned char h5conf[3] = { 0x03, 0xFC, 0x14 }; int result; /* Disable CRTSCTS by default */ ti->c_cflag &= ~CRTSCTS; /* set even parity */ ti->c_cflag |= PARENB; ti->c_cflag &= ~(PARODD); if (tcsetattr(fd, TCSANOW, ti) < 0) { RS_ERR("Can't set port settings"); return -1; } /* h5 sync */ rtb_cfg.link_estab_state = H5_SYNC; nskb = h5_prepare_pkt(&rtb_cfg, h5sync, sizeof(h5sync), H5_LINK_CTL_PKT); result = start_transmit_wait(fd, nskb, OP_H5_SYNC, 500, 10); skb_free(nskb); if (result < 0) { RS_ERR("OP_H5_SYNC Transmission error"); return -1; } /* h5 config */ nskb = h5_prepare_pkt(&rtb_cfg, h5conf, sizeof(h5conf), H5_LINK_CTL_PKT); result = start_transmit_wait(fd, nskb, OP_H5_CONFIG, 500, 10); skb_free(nskb); if (result < 0) { RS_ERR("OP_H5_CONFIG Transmission error"); return -1; } rtb_send_ack(fd); RS_DBG("H5 init finished\n"); rtb_cfg.cmd_state.state = CMD_STATE_UNKNOWN; return 0; } static int h5_hci_reset(int fd) { uint8_t cmd[3] = { 0x03, 0x0c, 0x00}; struct sk_buff *nskb; int result; RS_INFO("%s: Issue hci reset cmd", __func__); nskb = h5_prepare_pkt(&rtb_cfg, cmd, sizeof(cmd), HCI_COMMAND_PKT); if (!nskb) { RS_ERR("%s: Failed to alloc mem for hci reset skb", __func__); return -1; } rtb_cfg.cmd_state.opcode = HCI_CMD_RESET; rtb_cfg.cmd_state.state = CMD_STATE_UNKNOWN; result = start_transmit_wait(fd, nskb, OP_HCI_RESET, 1500, 1); skb_free(nskb); if (result < 0) RS_ERR("hci reset failed"); return result; } #ifdef SERIAL_NONBLOCK_READ static int set_fd_nonblock(int fd) { long arg; int old_fl; arg = fcntl(fd, F_GETFL); if (arg < 0) return -errno; /* Return if already nonblock */ if (arg & O_NONBLOCK) return FD_NONBLOCK; old_fl = FD_BLOCK; arg |= O_NONBLOCK; if (fcntl(fd, F_SETFL, arg) < 0) return -errno; return old_fl; } static int set_fd_block(int fd) { long arg; arg = fcntl(fd, F_GETFL); if (arg < 0) return -errno; /* Return if already block */ if (!(arg & O_NONBLOCK)) return 0; arg &= ~O_NONBLOCK; if (fcntl(fd, F_SETFL, arg) < 0) return -errno; return 0; } #endif /* * Download Realtek Firmware and Config */ static int rtb_download_fwc(int fd, uint8_t *buf, int size, int proto, struct termios *ti) { uint8_t curr_idx = 0; uint8_t curr_len = 0; uint8_t lp_len = 0; uint8_t add_pkts = 0; uint16_t end_idx = 0; uint16_t total_idx = 0; uint16_t num; unsigned char *pkt_buf; uint16_t i, j; int result; #ifdef SERIAL_NONBLOCK_READ int old_fl; #endif end_idx = (uint16_t)((size - 1) / 
PATCH_DATA_FIELD_MAX_SIZE); lp_len = size % PATCH_DATA_FIELD_MAX_SIZE; num = rtb_cfg.num_of_cmd_sent; num += end_idx + 1; add_pkts = num % 8 ? (8 - num % 8) : 0; #ifdef SERIAL_NONBLOCK_READ old_fl = set_fd_nonblock(fd); if (old_fl < 0) { RS_ERR("Set fd nonblock error, %s", strerror(errno)); } if (old_fl == FD_BLOCK) RS_INFO("old fd state is block"); #endif /* Make sure the next seqno is zero after download patch and * hci reset */ if (proto == HCI_UART_3WIRE) { if (add_pkts) add_pkts -= 1; else add_pkts += 7; } else add_pkts = 0; /* No additional packets need */ total_idx = add_pkts + end_idx; rtb_cfg.total_num = total_idx; RS_INFO("end_idx: %u, lp_len: %u, additional pkts: %u\n", end_idx, lp_len, add_pkts); RS_INFO("Start downloading..."); if (lp_len == 0) lp_len = PATCH_DATA_FIELD_MAX_SIZE; pkt_buf = buf; for (i = 0; i <= total_idx; i++) { /* Index will roll over when it reaches 0x80 * 0, 1, 2, 3, ..., 126, 127(7f), 1, 2, 3, ... */ if (i > 0x7f) j = (i & 0x7f) + 1; else j = i; if (i < end_idx) { curr_idx = j; curr_len = PATCH_DATA_FIELD_MAX_SIZE; } else if (i == end_idx) { /* Send last data packets */ if (i == total_idx) curr_idx = j | 0x80; else curr_idx = j; curr_len = lp_len; } else if (i < total_idx) { /* Send additional packets */ curr_idx = j; pkt_buf = NULL; curr_len = 0; RS_INFO("Send additional packet %u", curr_idx); } else { /* Send last packet */ curr_idx = j | 0x80; pkt_buf = NULL; curr_len = 0; RS_INFO("Last packet %u", curr_idx); } if (curr_idx & 0x80) RS_INFO("Send last pkt"); if (proto == HCI_UART_H4) { curr_idx = h4_download_patch(fd, curr_idx, pkt_buf, curr_len); if (curr_idx != j && i != total_idx) { RS_ERR("Index mismatch %u, curr_idx %u", j, curr_idx); return -1; } } else if (proto == HCI_UART_3WIRE) { if (h5_download_patch(fd, curr_idx, pkt_buf, curr_len, ti) < 0) return -1; } if (curr_idx < end_idx) { pkt_buf += PATCH_DATA_FIELD_MAX_SIZE; } } /* Make hci reset after Controller applies the Firmware and Config */ if (proto == HCI_UART_H4) result = h4_hci_reset(fd); else result = h5_hci_reset(fd); if (proto == HCI_UART_3WIRE) { /* Make sure the last pure ack is sent */ tcdrain(fd); } if (result) return result; #ifdef SERIAL_NONBLOCK_READ if (old_fl == FD_BLOCK) set_fd_block(fd); #endif return 0; } #define ARRAY_SIZE(a) (sizeof(a)/sizeof(a[0]) ) struct rtb_baud { uint32_t rtb_speed; int uart_speed; }; #ifdef BAUDRATE_4BYTES struct rtb_baud baudrates[] = { #ifdef RTL_8703A_SUPPORT {0x00004003, 1500000}, /* for rtl8703as */ #endif {0x0252C014, 115200}, {0x0252C00A, 230400}, {0x05F75004, 921600}, {0x00005004, 1000000}, {0x04928002, 1500000}, {0x01128002, 1500000}, //8761AT {0x00005002, 2000000}, {0x0000B001, 2500000}, {0x04928001, 3000000}, {0x052A6001, 3500000}, {0x00005001, 4000000}, }; #else struct rtb_baud baudrates[] = { {0x701d, 115200} {0x6004, 921600}, {0x4003, 1500000}, {0x5002, 2000000}, {0x8001, 3000000}, {0x9001, 3000000}, {0x7001, 3500000}, {0x5001, 4000000}, }; #endif static void vendor_speed_to_std(uint32_t rtb_speed, uint32_t *uart_speed) { *uart_speed = 115200; unsigned int i; for (i = 0; i < ARRAY_SIZE(baudrates); i++) { if (baudrates[i].rtb_speed == rtb_speed) { *uart_speed = baudrates[i].uart_speed; return; } } return; } static inline void std_speed_to_vendor(int uart_speed, uint32_t *rtb_speed) { *rtb_speed = 0x701D; unsigned int i; for (i = 0; i < ARRAY_SIZE(baudrates); i++) { if (baudrates[i].uart_speed == uart_speed) { *rtb_speed = baudrates[i].rtb_speed; return; } } return; } void rtb_read_chip_type(int dd) { /* 0xB000A094 */ unsigned char 
cmd_buff[] = { 0x61, 0xfc, 0x05, 0x00, 0x94, 0xa0, 0x00, 0xb0 }; struct sk_buff *nskb; int result; nskb = h5_prepare_pkt(&rtb_cfg, cmd_buff, sizeof(cmd_buff), HCI_COMMAND_PKT); if (!nskb) { RS_ERR("Alloc chip type cmd skb buff error"); exit(EXIT_FAILURE); } rtb_cfg.cmd_state.opcode = HCI_VENDOR_READ_CHIP_TYPE; rtb_cfg.cmd_state.state = CMD_STATE_UNKNOWN; result = start_transmit_wait(dd, nskb, OP_CHIP_TYPE, 250, 3); skb_free(nskb); if (result < 0) RS_ERR("OP_CHIP_TYPE Transmission error"); return; } /* * Read ECO version with vendor cmd 0xfc65 */ void rtb_read_eversion(int dd) { int result; unsigned char cmd_buf[3] = { 0x6d, 0xfc, 0x00 }; struct sk_buff *nskb; nskb= h5_prepare_pkt(&rtb_cfg, cmd_buf, 3, HCI_COMMAND_PKT); if (!nskb) { RS_ERR("Alloc eversion cmd skb buff error"); exit(EXIT_FAILURE); } rtb_cfg.cmd_state.opcode = HCI_VENDOR_READ_ROM_VER; rtb_cfg.cmd_state.state = CMD_STATE_UNKNOWN; result = start_transmit_wait(dd, nskb, OP_ROM_VER, 500, 3); skb_free(nskb); if (result < 0) { RS_ERR("OP_ROM_VER Transmit error"); } return; } void rtb_read_local_version(int dd) { int result; unsigned char cmd_buf[3] = { 0x01, 0x10, 0x00 }; struct sk_buff *nskb; nskb = h5_prepare_pkt(&rtb_cfg, cmd_buf, 3, HCI_COMMAND_PKT); if (!nskb) { RS_ERR("Alloc local ver cmd skb buff error"); exit(EXIT_FAILURE); } rtb_cfg.cmd_state.state = CMD_STATE_UNKNOWN; rtb_cfg.cmd_state.opcode = HCI_CMD_READ_LOCAL_VER; result = start_transmit_wait(dd, nskb, OP_LMP_VER, 500, 3); skb_free(nskb); if (result < 0) { RS_ERR("OP_LMP_VER Transmit error"); } return; } /* * Config Realtek Bluetooth. * Config parameters are got from Realtek Config file and FW. * * speed is the init_speed in uart struct * Returns 0 on success */ static int rtb_config(int fd, int proto, int speed, struct termios *ti) { int final_speed = 0; int ret = 0; rtb_cfg.proto = proto; /* Read Local Version Information and RTK ROM version */ if (proto == HCI_UART_3WIRE) { RS_INFO("Realtek H5 IC"); rtb_read_local_version(fd); rtb_read_eversion(fd); } else { RS_INFO("Realtek H4 IC"); /* The following set is for special requirement that enables * flow control before initializing */ #ifdef RTL8723DSH4_UART_HWFLOWC ti->c_cflag &= ~PARENB; ti->c_cflag |= CRTSCTS; if (tcsetattr(fd, TCSANOW, ti) < 0) { RS_ERR("H4 Can't enable RTSCTS"); return -1; } usleep(20 * 1000); #endif h4_read_local_ver(fd); h4_vendor_read_rom_ver(fd); if (rtb_cfg.lmp_subver == ROM_LMP_8761btc) { /* 8761B Test Chip */ rtb_cfg.chip_type = CHIP_8761BTC; rtb_cfg.uart_flow_ctrl = 1; /* TODO: Change to different uart baud */ std_speed_to_vendor(1500000, &rtb_cfg.vendor_baud); goto change_baud; } else if (rtb_cfg.lmp_subver == ROM_LMP_8761a) { if (rtb_cfg.hci_rev == 0x000b) { /* 8761B Test Chip without download */ rtb_cfg.chip_type = CHIP_8761BH4; /* rtb_cfg.uart_flow_ctrl = 1; */ /* TODO: Change to different uart baud */ /* std_speed_to_vendor(1500000, &rtb_cfg.vendor_baud); * goto change_baud; */ } else if (rtb_cfg.hci_rev == 0x000a) { if (rtb_cfg.eversion == 3) rtb_cfg.chip_type = CHIP_8761ATF; else if (rtb_cfg.eversion == 2) rtb_cfg.chip_type = CHIP_8761AT; else rtb_cfg.chip_type = CHIP_UNKNOWN; } } else if (rtb_cfg.lmp_subver == ROM_LMP_8723b) { if (rtb_cfg.hci_ver == 0x08 && rtb_cfg.hci_rev == 0x000d) { rtb_cfg.chip_type = CHIP_8723DS; } else if (rtb_cfg.hci_ver == 0x06 && rtb_cfg.hci_rev == 0x000b) { rtb_cfg.chip_type = CHIP_8723BS; } else { RS_ERR("H4: unknown chip"); return -1; } } } RS_INFO("LMP Subversion 0x%04x", rtb_cfg.lmp_subver); RS_INFO("EVersion %u", rtb_cfg.eversion); switch 
(rtb_cfg.lmp_subver) { case ROM_LMP_8723a: break; case ROM_LMP_8723b: #ifdef RTL_8703A_SUPPORT /* Set chip type for matching fw/config entry */ rtl->chip_type = CHIP_8703AS; #endif break; case ROM_LMP_8821a: break; case ROM_LMP_8761a: break; case ROM_LMP_8703b: rtb_read_chip_type(fd); break; } rtb_cfg.patch_ent = get_patch_entry(&rtb_cfg); if (rtb_cfg.patch_ent) { RS_INFO("IC: %s", rtb_cfg.patch_ent->ic_name); RS_INFO("Firmware/config: %s, %s", rtb_cfg.patch_ent->patch_file, rtb_cfg.patch_ent->config_file); } else { RS_ERR("Can not find firmware/config entry\n"); return -1; } rtb_cfg.config_buf = rtb_read_config(&rtb_cfg, &rtb_cfg.config_len); if (!rtb_cfg.config_buf) { RS_ERR("Read Config file error, use eFuse settings"); rtb_cfg.config_len = 0; } rtb_cfg.fw_buf = rtb_read_firmware(&rtb_cfg, &rtb_cfg.fw_len); if (!rtb_cfg.fw_buf) { RS_ERR("Read Bluetooth firmware error"); rtb_cfg.fw_len = 0; /* Free config buf */ if (rtb_cfg.config_buf) { free(rtb_cfg.config_buf); rtb_cfg.config_buf = NULL; rtb_cfg.config_len = 0; } return -1; } else { rtb_cfg.total_buf = rtb_get_final_patch(fd, proto, &rtb_cfg.total_len); /* If the above function executes successfully, the Config and * patch were copied to the total buf */ /* Free config buf */ if (rtb_cfg.config_buf) { free(rtb_cfg.config_buf); rtb_cfg.config_buf = NULL; } /* Free the fw buf */ free(rtb_cfg.fw_buf); rtb_cfg.fw_buf = NULL; rtb_cfg.fw_len = 0; if (!rtb_cfg.total_buf) { RS_ERR("Failed to get the final patch"); exit(EXIT_FAILURE); } } if (rtb_cfg.total_len > RTB_PATCH_LENGTH_MAX) { RS_ERR("Total length of fwc is larger than allowed"); goto buf_free; } RS_INFO("Total len %d for fwc", rtb_cfg.total_len); /* rtl8723ds h4 */ if (rtb_cfg.chip_type == CHIP_8723DS && rtb_cfg.proto == HCI_UART_H4) { if (rtb_cfg.parenb) { /* set parity */ ti->c_cflag |= PARENB; if (rtb_cfg.pareven) ti->c_cflag &= ~(PARODD); else ti->c_cflag |= PARODD; if (tcsetattr(fd, TCSANOW, ti) < 0) { RS_ERR("8723DSH4 Can't set parity"); goto buf_free; } } } change_baud: /* change baudrate if needed * rtb_cfg.vendor_baud is a __u32/__u16 vendor-specific variable * parsed from config file * */ if (rtb_cfg.vendor_baud == 0) { /* No baud setting in Config file */ std_speed_to_vendor(speed, &rtb_cfg.vendor_baud); RS_INFO("No baud from Config file, set baudrate: %d, 0x%08x", speed, rtb_cfg.vendor_baud); goto start_download; } else vendor_speed_to_std(rtb_cfg.vendor_baud, (uint32_t *)&(rtb_cfg.final_speed)); if (rtb_cfg.final_speed == 115200) { RS_INFO("Final speed is %d, no baud change needs", rtb_cfg.final_speed); goto start_download; } if (proto == HCI_UART_3WIRE) h5_vendor_change_speed(fd, rtb_cfg.vendor_baud); else h4_vendor_change_speed(fd, rtb_cfg.vendor_baud); /* Make sure the ack for cmd complete event is transmitted */ tcdrain(fd); usleep(50000); /* The same value as before */ final_speed = rtb_cfg.final_speed ? 
rtb_cfg.final_speed : speed; RS_INFO("Final speed %d", final_speed); if (set_speed(fd, ti, final_speed) < 0) { RS_ERR("Can't set baud rate: %d, %d, %d", final_speed, rtb_cfg.final_speed, speed); goto buf_free; } start_download: /* For 8761B Test chip, no patch to download */ if (rtb_cfg.chip_type == CHIP_8761BTC) goto done; if (rtb_cfg.total_len > 0 && rtb_cfg.dl_fw_flag) { rtb_cfg.link_estab_state = H5_PATCH; rtb_cfg.rx_index = -1; ret = rtb_download_fwc(fd, rtb_cfg.total_buf, rtb_cfg.total_len, proto, ti); free(rtb_cfg.total_buf); if (ret < 0) return ret; } done: RS_DBG("Init Process finished"); return 0; buf_free: free(rtb_cfg.total_buf); return -1; } int rtb_init(int fd, int proto, int speed, struct termios *ti) { struct epoll_event ev; int result; RS_INFO("Realtek hciattach version %s \n", RTK_VERSION); memset(&rtb_cfg, 0, sizeof(rtb_cfg)); rtb_cfg.serial_fd = fd; rtb_cfg.dl_fw_flag = 1; rtb_cfg.epollfd = epoll_create(64); if (rtb_cfg.epollfd == -1) { RS_ERR("epoll_create1, %s (%d)", strerror(errno), errno); exit(EXIT_FAILURE); } ev.events = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLRDHUP; ev.data.fd = fd; if (epoll_ctl(rtb_cfg.epollfd, EPOLL_CTL_ADD, fd, &ev) == -1) { RS_ERR("epoll_ctl: epoll ctl add, %s (%d)", strerror(errno), errno); exit(EXIT_FAILURE); } rtb_cfg.timerfd = timerfd_create(CLOCK_MONOTONIC, 0); if (rtb_cfg.timerfd == -1) { RS_ERR("timerfd_create error, %s (%d)", strerror(errno), errno); return -1; } if (rtb_cfg.timerfd > 0) { ev.events = EPOLLIN | EPOLLERR | EPOLLHUP | EPOLLRDHUP; ev.data.fd = rtb_cfg.timerfd; if (epoll_ctl(rtb_cfg.epollfd, EPOLL_CTL_ADD, rtb_cfg.timerfd, &ev) == -1) { RS_ERR("epoll_ctl: epoll ctl add, %s (%d)", strerror(errno), errno); exit(EXIT_FAILURE); } } RS_INFO("Use epoll"); if (proto == HCI_UART_3WIRE) { if (rtb_init_h5(fd, ti) < 0) return -1;; } result = rtb_config(fd, proto, speed, ti); epoll_ctl(rtb_cfg.epollfd, EPOLL_CTL_DEL, fd, NULL); epoll_ctl(rtb_cfg.epollfd, EPOLL_CTL_DEL, rtb_cfg.timerfd, NULL); close(rtb_cfg.timerfd); rtb_cfg.timerfd = -1; return result; } int rtb_post(int fd, int proto, struct termios *ti) { /* No need to change baudrate */ /* if (rtb_cfg.final_speed) * return set_speed(fd, ti, rtb_cfg.final_speed); */ return 0; }
24,361
983
package org.xm.classification; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xm.tokenizer.Tokenizer; import org.xm.tokenizer.Word; import java.io.*; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.stream.Collectors; /** * 代表一个文档实例 */ public class Instance { private static Logger logger = LoggerFactory.getLogger(Instance.class); /** * 文档类别 */ private String category; /** * 文档内容 */ private final Set<String> bag = new HashSet<>(); public Instance(String category, File f, String encoding) { this.category = category; String line = null; try (BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(f), encoding))) { while ((line = in.readLine()) != null) { List<Word> words = Tokenizer.segment(line); bag.addAll(words .stream() .filter(w -> w.getPos().endsWith("adj") || w.getPos().startsWith("n") || w.getPos().startsWith("v")) .map(Word::getName) .collect(Collectors.toList()) ); } } catch (IOException e) { logger.error("current file:{},current line:{}", f.getAbsolutePath(), line); e.printStackTrace(); } } public String getCategory() { return category; } public void setCategory(String category) { this.category = category; } public Set<String> getWords() { return bag; } }
753
742
#!/usr/bin/env python # Software License Agreement (BSD License) # # Copyright (c) 2009, <NAME>, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of <NAME>, Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. import os import signal import sys import time import unittest import rospy import std_msgs.msg import rostest from subprocess import Popen, PIPE, check_call, call def run_for(cmd, secs): popen = Popen(cmd, stdout=PIPE, stderr=PIPE, close_fds=True) timeout_t = time.time() + secs while time.time() < timeout_t: time.sleep(0.1) os.kill(popen.pid, signal.SIGKILL) class TestRosnodeOnline(unittest.TestCase): def setUp(self): self.vals = set() self.msgs = {} def callback(self, msg, val): self.vals.add(val) self.msgs[val] = msg def test_rosnode(self): topics = ['/chatter', '/foo/chatter', '/bar/chatter'] # wait for network to initialize rospy.init_node('test') nodes = ['/talker', '/foo/talker', '/bar/talker', rospy.get_caller_id()] for i, t in enumerate(topics): rospy.Subscriber(t, std_msgs.msg.String, self.callback, i) all = set(range(0, len(topics))) timeout_t = time.time() + 10. 
while time.time() < timeout_t and self.vals != all: time.sleep(0.1) self.assertEquals(self.vals, all, "failed to initialize graph correctly") # network is initialized cmd = 'rosnode' # list # - we aren't matching against the core services as those can make the test suites brittle output = Popen([cmd, 'list'], stdout=PIPE).communicate()[0] output = output.decode() l = set(output.split()) for t in nodes: self.assert_(t in l, "%s not in %s"%(t, l)) output = Popen([cmd, 'list', '-a'], stdout=PIPE).communicate()[0] output = output.decode() l = set(output.split()) for t in nodes: for e in l: if t in e: break else: self.fail("did not find [%s] in list [%s]"%(t, l)) output = Popen([cmd, 'list', '-u'], stdout=PIPE).communicate()[0] output = output.decode() l = set(output.split()) self.assert_(len(l), "list -u is empty") for e in l: self.assert_(e.startswith('http://')) for name in nodes: # type output = Popen([cmd, 'info', name], stdout=PIPE).communicate()[0] output = output.decode() # not really validating output as much as making sure it's not broken self.assert_(name in output) self.assert_('chatter' in output) self.assert_('Publications' in output) self.assert_('Subscriptions' in output) if 0: #ping stdout, stderr = run_for([cmd, 'ping', name], 3.) PKG = 'test_rosnode' NAME = 'test_rosnode_command_line_online' if __name__ == '__main__': rostest.run(PKG, NAME, TestRosnodeOnline, sys.argv)
1,898
707
// Copyright (c) FIRST and other WPILib contributors. // Open Source Software; you can modify and/or share it under the terms of // the WPILib BSD license file in the root directory of this project. #pragma once #include <string_view> namespace frc { // Maintainer note: this enum is mirrored in WPILibJ and in Shuffleboard // Modifying the enum or enum strings requires a corresponding change to the // Java enum and the enum in Shuffleboard enum ShuffleboardEventImportance { kTrivial, kLow, kNormal, kHigh, kCritical }; inline std::string_view ShuffleboardEventImportanceName( ShuffleboardEventImportance importance) { switch (importance) { case kTrivial: return "TRIVIAL"; case kLow: return "LOW"; case kNormal: return "NORMAL"; case kHigh: return "HIGH"; case kCritical: return "CRITICAL"; default: return "NORMAL"; } } } // namespace frc
314
2,724
<filename>learners/uniform_quantization/utils.py # Tencent is pleased to support the open source community by making PocketFlow available. # # Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved. # # Licensed under the BSD 3-Clause License (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """ Util fnctions for Uniform Quantization """ import tensorflow as tf from tensorflow.contrib import graph_editor as ge # import pdb def prefix_filter(prefix): """ filter out the variable_scope """ ind = prefix.index('/') return prefix[ind+1:] class UniformQuantization: # pylint: disable=too-many-instance-attributes """ Class of uniform quantization """ def __init__(self, sess, bucket_size=0, use_buckets=False, bucket_type='split'): self.sess = sess self.use_buckets = use_buckets self.bucket_size = bucket_size self.bucket_type = bucket_type self.matmul_ops = [] self.activation_ops = [] self.quantized_matmul_ops = [] self.quantized_activation_ops = [] self.bucket_storage = tf.constant(0, dtype=tf.int32) # bits self.__safe_check() # TODO: add more types of activations and matmuls self.support_act_types = ['Relu', 'Relu6', 'Crelu', 'Elu', 'Selu', 'Softplus',\ 'Softsign', 'Sigmoid', 'Tanh'] self.support_mul_types = ['Conv2D', 'MatMul', 'DepthwiseConv2dNative'] def insert_quant_op_for_activations(self, act_bit_dict): """ Insert quantization operation for activation Args: * act_bit_dict: A dict with (key: act_op_name, value: act_bits) """ activation_fn = {'Relu': tf.nn.relu, 'Tanh': tf.nn.tanh, 'Softplus': tf.nn.softplus, 'Sigmoid': tf.nn.sigmoid, 'Relu6': tf.nn.relu6} for op in self.activation_ops: old_sgv = ge.sgv(op) input_ = old_sgv.inputs[0] if op.type in self.support_act_types: try: tmp_input_ = activation_fn[op.type](input_) except KeyError: raise NotImplementedError("The activation_fn needs to include %s manually" % op.type) prefix = prefix_filter(op.name) qa = self.__uniform_quantize(tmp_input_, act_bit_dict[op.name], 'activation', prefix) new_sgv = ge.sgv(qa.op) ge.reroute_outputs(new_sgv, old_sgv) self.quantized_activation_ops.append(qa.op) else: raise ValueError("Unknown activation mode, you may add it manually here") def insert_quant_op_for_weights(self, w_bit_dict): """Insert quantization operation for weights Args: * wewight_bit_dict: A dict with (key: matmul_op_name, value: quant_bits) """ for op in self.matmul_ops: w = op.inputs[1] prefix = prefix_filter(op.name) qw = self.__uniform_quantize(w, w_bit_dict[op.name], 'weight', prefix) weight_fn = {'MatMul': tf.matmul, 'Conv2D': tf.nn.conv2d, 'DepthwiseConv2dNative': tf.nn.depthwise_conv2d} is_conv_fn = lambda x: 'Conv' in x.type try: if is_conv_fn(op): strides = op.get_attr('strides') padding = op.get_attr('padding') qw_op = weight_fn[op.type](op.inputs[0], qw, strides, padding).op else: # fc layers qw_op = weight_fn[op.type](op.inputs[0], qw).op self.quantized_matmul_ops.append(qw_op) except KeyError: raise NotImplementedError("Unrecognied Mul op, \ try to add it into matmul_typs for quantization") # replace 
input for wop, qwop in zip(self.matmul_ops, self.quantized_matmul_ops): old_sgv = ge.sgv(wop) new_sgv = ge.sgv(qwop) ge.reroute_inputs(new_sgv, old_sgv) def search_matmul_op(self, quantize_all_layers): """ search matmul or Conv2D operations in graph for quantization""" is_student_fn = lambda x: 'distilled' not in x.name for op in self.sess.graph.get_operations(): if op.type in self.support_mul_types and is_student_fn(op): self.matmul_ops.append(op) if not quantize_all_layers: self.matmul_ops = self.matmul_ops[1:-1] # remain full precision for first and last layer return self.matmul_ops def search_activation_op(self): """ search activation operation in graph for quantization """ is_student_fn = lambda x: 'distilled' not in x.name for op in self.sess.graph.get_operations(): if op.type in self.support_act_types and is_student_fn(op): self.activation_ops.append(op) return self.activation_ops def get_layerwise_tune_op(self, var, lrn_rate=1e-3): """ Get the layerwise fine-tuning ops Returns: * A list of ops for fine-tuning with len(matmul_ops) elements """ layerwise_diff = [] tune_ops = [] for (v, q_op) in zip(var, self.quantized_matmul_ops): inputs = q_op.inputs[0] quant_outputs = q_op.outputs[0] # TODO: wrap it into a function, as also used # above. if 'MatMul' in q_op.type: fp_outputs = tf.matmul(inputs, v) elif 'Conv2D' in q_op.type: strides = q_op.get_attr('strides') padding = q_op.get_attr('padding') fp_outputs = tf.nn.conv2d(inputs, v, strides, padding) else: raise ValueError("Unrecognized Mul Op") diff = tf.reduce_mean(tf.square(quant_outputs - fp_outputs)) tune_ops.append(tf.train.AdamOptimizer(lrn_rate).minimize(diff, var_list=v)) layerwise_diff.append(diff) return tune_ops, layerwise_diff def __uniform_quantize(self, x, mbits, mode, prefix=''): """Uniform quantization function Args: * x: A Tensor (weights or activation output) * mbits: A scalar Tensor, tf.int64, spicifying number of bit for quantization * mode: A string, 'weight' or 'activation', where to quantize * prefix: A string, the prefix of scope name Returns: * A Tensor, uniform quantized value """ with tf.variable_scope(prefix + '/quantize'): if self.use_buckets and mode == 'weight': orig_shape = x.get_shape() if self.bucket_type == 'split': x, bucket_num, padded_num = self.__split_bucket(x) elif self.bucket_type == 'channel': x, bucket_num, padded_num = self.__channel_bucket(x) x_normalized, alpha, beta = self.__scale(x, mode) g = self.sess.graph k = tf.cast(2 ** mbits - 1, tf.float32) with g.gradient_override_map({'Round': 'Identity'}): qw = tf.round(x_normalized * k) / k qw = self.__inv_scale(qw, alpha, beta) if self.use_buckets and mode == 'weight': # Reshape w back to the original shape qw = tf.reshape(qw, [-1]) if padded_num != 0: qw = tf.reshape(qw[:-padded_num], orig_shape) else: qw = tf.reshape(qw, orig_shape) # Update bucket storage if use buckets. self.__updt_bucket_storage(bucket_num) print("Quantized: " + tf.get_variable_scope().name) return qw def __scale(self, w, mode): """linear scale function Args: * w: A Tensor (weights or activation output), the shape is [bucket_size, bucekt_num] if use_buckets else the original size. * mode: A string, 'weight' or 'activation' Returns: * A Tensor, the normalized weights * A Tensor, alpha, scalar if activation mode else a vector [bucket_num]. * A Tensor, beta, scalar if activation mode else a vector [bucket_num]. 
""" if mode == 'weight': if self.use_buckets: axis = 0 else: axis = None elif mode == 'activation': axis = None else: raise ValueError("Unknown mode for scalling") w_max = tf.stop_gradient(tf.reduce_max(w, axis=axis)) w_min = tf.stop_gradient(tf.reduce_min(w, axis=axis)) eps = tf.constant(value=1e-10, dtype=tf.float32) alpha = w_max - w_min + eps beta = w_min w = (w - beta) / alpha return w, alpha, beta def __inv_scale(self, w, alpha, beta): """Inversed linear scale function Args: * w: A Tensor (weights or activation output) * alpha: A float value, scale factor * bete: A float value, scale bias Returns: * A Tensor, inversed scale value1 """ return alpha * w + beta def __split_bucket(self, w): """Create bucket Args: * w: A Tensor (weights) Returns: * A Tensor, with shape [bucket_size, multiple] * An integer: the number of buckets * An integer, the number of padded elements """ flat_w = tf.reshape(w, [-1]) num_w = flat_w.get_shape()[0].value # use the last value to fill fill_value = flat_w[-1] multiple, rest = divmod(num_w, self.bucket_size) if rest != 0: values_to_add = tf.ones(self.bucket_size - rest) * fill_value # add the fill_value to make the tensor into a multiple of the bucket size. flat_w = tf.concat([flat_w, values_to_add], axis=0) multiple += 1 flat_w = tf.reshape(flat_w, [self.bucket_size, -1]) padded_num = (self.bucket_size - rest) if rest != 0 else 0 return flat_w, multiple, padded_num def __channel_bucket(self, w): """ reshape weights according to bucket for 'channel' type. Note that for fc layers, buckets are created row-wisely. Args: w: A Tensor (weights) Returns: A Tensor shape [bucket_size, bucket_num], bucket_size = h*w*cin for conv or cin for fc A integer: the number of buckets A integer (0), zero padded elements """ cout = w.get_shape()[-1].value folded_w = tf.reshape(w, [-1, cout]) return folded_w, cout, 0 def __safe_check(self): """ TODO: Check the name of bucket_type, the value of bucket_size """ if self.bucket_size < 0: raise ValueError("Bucket size must be a postive integer") if self.bucket_type != 'split' and self.bucket_type != 'channel': raise ValueError("Unrecognized bucket type, must be 'weight' or 'channel'.") def __updt_bucket_storage(self, bucket_num): """ Calculate extra storage for the bucket scalling factors Args: * bucket_num: a Tensor, the number of buckets, and 2*bucket_num scalling factors * alpha: a Tensor, the scalling factor """ self.bucket_storage += bucket_num * 32 * 2 # both alpha and beta, so *2
4,455
849
/* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.libraries.remixer; import com.google.android.libraries.remixer.serialization.SerializedColor; import com.google.android.libraries.remixer.serialization.ValueConverter; import com.google.android.libraries.remixer.serialization.converters.BooleanValueConverter; import com.google.android.libraries.remixer.serialization.converters.ColorValueConverter; import com.google.android.libraries.remixer.serialization.converters.FloatValueConverter; import com.google.android.libraries.remixer.serialization.converters.StringValueConverter; import java.util.HashMap; import java.util.Locale; import java.util.Map; /** * The data type for each RemixerItem. The data type is used to determine default layoutIDs and to * help serialization. * * @param <RuntimeT> The type to use during runtime to represent variables of this DataType * @param <SerializableT> The type to use to serialize variables of this type. */ public class DataType<RuntimeT, SerializableT> { /** * The serializable, unique name for this data type. */ private final String name; /** * The runtime class of the values contained by this variable. */ private final Class<RuntimeT> runtimeType; /** * The serializable class of the values contained by this variable. */ private final Class<SerializableT> serializableType; /** * The value converter that aids in the serialization process. */ private final ValueConverter<RuntimeT, SerializableT> converter; /** * Map of default layout ids for this datatype when used with a specific RemixerItem class. * * <p>The key for this map is the specific RemixerItem subclass, and the value is the default * layout to use when a RemixerItem of the specific subclass has this data type. */ private final Map<Class<? extends Variable>, Integer> layoutIdForVariableType = new HashMap<>(); /** * Constructs a datatype with the given {@code name}, that takes values of type * {@code runtimeType} and uses {@code converter} to serialize. * * <p>Note {@code converter} has a {@link ValueConverter#dataType} field that must be initialized * to the same as {@code name}. 
*/ public DataType( String name, Class<RuntimeT> runtimeType, Class<SerializableT> serializableType, ValueConverter<RuntimeT, SerializableT> converter) { this.name = name; this.runtimeType = runtimeType; this.serializableType = serializableType; this.converter = converter; if (!name.equals(converter.getDataType())) { throw new AssertionError(String.format( Locale.getDefault(), "The data type %s has a converter whose data type doesn't match, %s", name, converter.getDataType())); } } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } DataType dataType = (DataType) obj; if (!name.equals(dataType.name)) { return false; } return runtimeType.equals(dataType.runtimeType); } @Override public int hashCode() { int result = name.hashCode(); result = 31 * result + runtimeType.hashCode(); return result; } public void setLayoutIdForVariableType(Class<? extends Variable> clazz, int layoutId) { layoutIdForVariableType.put(clazz, layoutId); } public int getLayoutIdForVariableType(Class<? extends Variable> clazz) { return layoutIdForVariableType.get(clazz); } public String getName() { return name; } public Class<RuntimeT> getRuntimeType() { return runtimeType; } public Class<SerializableT> getSerializableType() { return serializableType; } public ValueConverter<RuntimeT, SerializableT> getConverter() { return converter; } // ======= Default data types defined here. private static final String KEY_BOOLEAN = "__DataTypeBoolean__"; private static final String KEY_COLOR = "__DataTypeColor__"; private static final String KEY_NUMBER = "__DataTypeNumber__"; private static final String KEY_STRING = "__DataTypeString__"; public static final DataType<Boolean, Boolean> BOOLEAN = new DataType<>( KEY_BOOLEAN, Boolean.class, Boolean.class, new BooleanValueConverter(KEY_BOOLEAN)); public static final DataType<Integer, SerializedColor> COLOR = new DataType<>( KEY_COLOR, Integer.class, SerializedColor.class, new ColorValueConverter(KEY_COLOR)); public static final DataType<Float, Float> NUMBER = new DataType<>( KEY_NUMBER, Float.class, Float.class, new FloatValueConverter(KEY_NUMBER)); public static final DataType<String, String> STRING = new DataType<>( KEY_STRING, String.class, String.class, new StringValueConverter(KEY_STRING)); }
1,698
2,023
def num_in_base(val, base, min_digits=1, complement=False, digits="0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"): """Convert number to string in specified base If minimum number of digits is specified, pads result to at least that length. If complement is True, prints negative numbers in complement format based on the specified number of digits. Non-standard digits can be used. This can also allow bases greater than 36. """ if base < 2: raise ValueError("Minimum base is 2") if base > len(digits): raise ValueError("Not enough digits for base") # Deal with negative numbers negative = val < 0 val = abs(val) if complement: sign = "" max = base**min_digits if (val > max) or (not negative and val == max): raise ValueError("Value out of range for complemented format") if negative: val = (max - val) else: sign = "-" * negative # Calculate digits val_digits = [] while val: val, digit = divmod(val, base) val_digits.append(digits[digit]) result = "".join(reversed(val_digits)) leading_digits = (digits[0] * (min_digits - len(result))) return sign + leading_digits + result if __name__ == "__main__": # Quick sanity check for base in range(2, 37): for val in range(-1000, 1000): assert val == int(num_in_base(val, base), base) # Quick sanity check of complemented format def comp(val, base, digits): return num_in_base(val, base, digits, complement = True) for base in range(2, 37): for digits in range(1, 11): limit = base ** digits for val in range(-min(limit, 1000), 0): assert limit + val == int(comp(val, base, digits), base) for val in range(0, min(limit, 1000)): assert val == int(comp(val, base, digits), base)
793
856
<reponame>Project-Xtended/external_armnn<filename>src/backends/backendsCommon/test/layerTests/NegTestImpl.cpp // // Copyright © 2020 Arm Ltd. All rights reserved. // SPDX-License-Identifier: MIT // #include "NegTestImpl.hpp" #include "ElementwiseUnaryTestImpl.hpp" template<armnn::DataType ArmnnType, typename T> LayerTestResult<T, 2> Neg2dTest( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory) { const unsigned int inputShape[] = { 2, 2 }; std::vector<float> inputValues { 1.f, 1.f, 2.f, 25.f }; std::vector<float> expectedOutputValues { -1.f, -1.f, -2.f, -25.f }; return ElementwiseUnaryTestHelper<2, ArmnnType>( workloadFactory, memoryManager, armnn::UnaryOperation::Neg, inputShape, inputValues, inputShape, expectedOutputValues, tensorHandleFactory); } template<armnn::DataType ArmnnType, typename T> LayerTestResult<T, 3> Neg3dTest( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory) { const unsigned int inputShape[] = { 3, 1, 2 }; std::vector<float> inputValues { 1.f, 0.f, 3.f, 25.f, 64.f, 100.f }; std::vector<float> expectedOutputValues { -1.f, 0.f, -3.f, -25.f, -64.f, -100.f }; return ElementwiseUnaryTestHelper<3, ArmnnType>( workloadFactory, memoryManager, armnn::UnaryOperation::Neg, inputShape, inputValues, inputShape, expectedOutputValues, tensorHandleFactory); } template<armnn::DataType ArmnnType, typename T> LayerTestResult<T, 2> NegZeroTest( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory) { const unsigned int inputShape[] = { 1, 2 }; std::vector<float> inputValues { 0.f, 0.f }; std::vector<float> expectedOutputValues { 0.f, 0.f }; return ElementwiseUnaryTestHelper<2, ArmnnType>( workloadFactory, memoryManager, armnn::UnaryOperation::Neg, inputShape, inputValues, inputShape, expectedOutputValues, tensorHandleFactory); } template<armnn::DataType ArmnnType, typename T> LayerTestResult<T, 2> NegNegativeTest( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory) { const unsigned int inputShape[] = { 1, 2 }; std::vector<float> inputValues { -25.f, -16.f }; std::vector<float> expectedOutputValues { 25.f, 16.f }; return ElementwiseUnaryTestHelper<2, ArmnnType>( workloadFactory, memoryManager, armnn::UnaryOperation::Neg, inputShape, inputValues, inputShape, expectedOutputValues, tensorHandleFactory); } // // Explicit template specializations // template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 2> Neg2dTest<armnn::DataType::Float32>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 2> Neg2dTest<armnn::DataType::Float16>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmS8>, 2> Neg2dTest<armnn::DataType::QAsymmS8>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const 
armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 2> Neg2dTest<armnn::DataType::QAsymmU8>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 2> Neg2dTest<armnn::DataType::QSymmS16>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 3> Neg3dTest<armnn::DataType::Float32>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::Float16>, 3> Neg3dTest<armnn::DataType::Float16>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmS8>, 3> Neg3dTest<armnn::DataType::QAsymmS8>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::QAsymmU8>, 3> Neg3dTest<armnn::DataType::QAsymmU8>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::QSymmS16>, 3> Neg3dTest<armnn::DataType::QSymmS16>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 2> NegZeroTest<armnn::DataType::Float32>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory); template LayerTestResult<armnn::ResolveType<armnn::DataType::Float32>, 2> NegNegativeTest<armnn::DataType::Float32>( armnn::IWorkloadFactory& workloadFactory, const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager, const armnn::ITensorHandleFactory& tensorHandleFactory);
2,621
334
<filename>qiime2/core/type/tests/test_primitive.py # ---------------------------------------------------------------------------- # Copyright (c) 2016-2021, QIIME 2 development team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. # ---------------------------------------------------------------------------- import unittest import pandas as pd import qiime2.metadata as metadata import qiime2.core.type.primitive as primitive import qiime2.core.type.grammar as grammar class TestIntersectTwoRanges(unittest.TestCase): def assertIntersectEqual(self, a, b, exp): r1 = a & b r2 = b & a self.assertEqual(r1, r2) self.assertEqual(r1, exp) def test_overlap_simple(self): a = primitive.Range(0, 10) b = primitive.Range(3, 7) self.assertIntersectEqual(a, b, b) def test_overlap_inclusive_point(self): a = primitive.Range(0, 5, inclusive_end=True) b = primitive.Range(5, 10) exp = primitive.Range(5, 5, inclusive_start=True, inclusive_end=True) self.assertIntersectEqual(a, b, exp) def test_disjoint_far(self): a = primitive.Range(-10, -5) b = primitive.Range(5, 10) self.assertIntersectEqual(a, b, grammar.UnionExp()) def test_disjoint_exclusive_point(self): a = primitive.Range(0, 5, inclusive_end=False) b = primitive.Range(5, 9, inclusive_start=False) self.assertIntersectEqual(a, b, grammar.UnionExp()) class TestMetadataColumn(unittest.TestCase): def test_decode_categorical_value(self): value = pd.Series({'a': 'a', 'b': 'b', 'c': 'c'}, name='foo') value.index.name = 'id' cat_md = metadata.CategoricalMetadataColumn(value) res = primitive.MetadataColumn[primitive.Categorical].decode(cat_md) self.assertIs(res, cat_md) def test_decode_numeric_value(self): value = pd.Series({'a': 1, 'b': 2, 'c': 3}, name='foo') value.index.name = 'id' num_md = metadata.NumericMetadataColumn(value) res = primitive.MetadataColumn[primitive.Categorical].decode(num_md) self.assertIs(res, num_md) def test_decode_other(self): with self.assertRaisesRegex(TypeError, 'provided.*directly'): primitive.MetadataColumn[primitive.Categorical].decode( "<metadata>") if __name__ == '__main__': unittest.main()
999
787
/* * Java Genetic Algorithm Library (@__identifier__@). * Copyright (c) @__year__@ <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Author: * <NAME> (<EMAIL>) */ package io.jenetics.ext.util; import static java.util.Objects.requireNonNull; import java.util.function.Function; import io.jenetics.ext.internal.Escaper; /** * Helper methods for creating parentheses tree strings. * * @author <a href="mailto:<EMAIL>"><NAME></a> * @version 4.3 * @since 4.3 */ final class ParenthesesTrees { private ParenthesesTrees() {} private static final char[] PROTECTED_CHARS = { '(', ')', ',' }; static final char ESCAPE_CHAR = '\\'; private static final Escaper ESCAPER = new Escaper(ESCAPE_CHAR, PROTECTED_CHARS); static String escape(final CharSequence value) { return ESCAPER.escape(value); } static String unescape(final CharSequence value) { return ESCAPER.unescape(value); } /* ************************************************************************* * To string methods. **************************************************************************/ /** * Return a compact string representation of the given tree. * <pre> * mul(div(cos(1.0), cos(π)), sin(mul(1.0, z))) * </pre> * * @param tree the input tree * @param mapper the string mapper function * @return the string representation of the given tree */ static <V> String toString( final Tree<V, ?> tree, final Function<? super V, ? extends CharSequence> mapper ) { requireNonNull(mapper); if (tree != null) { final StringBuilder out = new StringBuilder(); toString(out, tree, mapper); return out.toString(); } else { return "null"; } } private static <V> void toString( final StringBuilder out, final Tree<V, ?> tree, final Function<? super V, ? extends CharSequence> mapper ) { out.append(escape(mapper.apply(tree.value()))); if (!tree.isLeaf()) { out.append("("); toString(out, tree.childAt(0), mapper); for (int i = 1; i < tree.childCount(); ++i) { out.append(","); toString(out, tree.childAt(i), mapper); } out.append(")"); } } }
867
786
package org.cellocad.adaptors.ucfwriters.ucf_writers_Eco2C2G2T2; import org.cellocad.MIT.dnacompiler.Util; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; public class collection_writer_gates extends collection_writer { @Override public ArrayList<Map> getObjects() { ArrayList<Map> objects = new ArrayList<>(); ArrayList<ArrayList<String>> gates_tokens = Util.fileTokenizer(getRootPath() + "/resources/csv_gate_libraries/gates_Eco2C2G2T2.csv"); int counter = 0; for(ArrayList<String> gate_tokens: gates_tokens) { if(counter == 0) { counter++; continue; } //System.out.println(gate_tokens.toString()); Map obj = new LinkedHashMap(); obj.put("collection", "gates"); String promoter_name = gate_tokens.get(1); String promoter_seq = gate_tokens.get(2); String sgRNA_name = gate_tokens.get(3); String sgRNA_seq = gate_tokens.get(4); String terminator_name = gate_tokens.get(5); String terminator_seq = gate_tokens.get(6); String equation = gate_tokens.get(7); String a = gate_tokens.get(8); String b = gate_tokens.get(9); String group_name = sgRNA_name.substring(0, 8); System.out.println(group_name); obj.put("regulator", sgRNA_name); obj.put("group_name", group_name); obj.put("gate_name", sgRNA_name); obj.put("gate_type", "NOR"); obj.put("system", "CRISPRi"); objects.add(obj); } return objects; } }
838
1,444
<gh_stars>1000+ package mage.cards.s; import java.util.UUID; import mage.MageInt; import mage.ObjectColor; import mage.abilities.Ability; import mage.abilities.common.SimpleActivatedAbility; import mage.abilities.costs.common.TapSourceCost; import mage.abilities.effects.common.DamageAllEffect; import mage.abilities.keyword.FlyingAbility; import mage.abilities.keyword.ProtectionAbility; import mage.constants.SubType; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.Zone; import mage.filter.common.FilterCreaturePermanent; import mage.filter.predicate.Predicates; import mage.filter.predicate.mageobject.AbilityPredicate; /** * * @author TheElk801 */ public final class SubterraneanSpirit extends CardImpl { private static final FilterCreaturePermanent filter = new FilterCreaturePermanent("creature without flying"); static { filter.add(Predicates.not(new AbilityPredicate(FlyingAbility.class))); } public SubterraneanSpirit(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{3}{R}{R}"); this.subtype.add(SubType.ELEMENTAL); this.subtype.add(SubType.SPIRIT); this.power = new MageInt(3); this.toughness = new MageInt(3); // Protection from red this.addAbility(ProtectionAbility.from(ObjectColor.RED)); // {tap}: Subterranean Spirit deals 1 damage to each creature without flying. Ability ability = new SimpleActivatedAbility(Zone.BATTLEFIELD, new DamageAllEffect(1, filter), new TapSourceCost()); this.addAbility(ability); } private SubterraneanSpirit(final SubterraneanSpirit card) { super(card); } @Override public SubterraneanSpirit copy() { return new SubterraneanSpirit(this); } }
634
792
<filename>iPokeGo/LicensesViewController.h // // LicensesViewController.h // iPokeGo // // Created by <NAME> on 15/08/2016. // Copyright © 2016 <NAME>. All rights reserved. // #import <UIKit/UIKit.h> @interface LicensesViewController : UIViewController @property(weak, nonatomic) IBOutlet UIWebView *licensesWebView; @end
116
355
<reponame>joshy56/helper /* * This file is part of helper, licensed under the MIT License. * * Copyright (c) lucko (Luck) <<EMAIL>> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package me.lucko.helper.bucket.factory; import me.lucko.helper.bucket.Bucket; import me.lucko.helper.bucket.partitioning.PartitioningStrategy; import java.util.Set; import java.util.function.Supplier; /** * A set of methods for creating {@link Bucket}s. */ public final class BucketFactory { public static <E> Bucket<E> newBucket(int size, PartitioningStrategy<E> strategy, Supplier<Set<E>> setSupplier) { return new SetSuppliedBucket<>(size, strategy, setSupplier); } public static <E> Bucket<E> newHashSetBucket(int size, PartitioningStrategy<E> strategy) { return new HashSetBucket<>(size, strategy); } public static <E> Bucket<E> newSynchronizedHashSetBucket(int size, PartitioningStrategy<E> strategy) { return new SynchronizedHashSetBucket<>(size, strategy); } public static <E> Bucket<E> newConcurrentBucket(int size, PartitioningStrategy<E> strategy) { return new ConcurrentBucket<>(size, strategy); } private BucketFactory() { throw new UnsupportedOperationException("This class cannot be instantiated"); } }
740
829
<reponame>robert0714/flowing-retail package io.flowing.retail.inventory.messages; import java.io.IOException; import java.util.Arrays; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.cloud.stream.annotation.EnableBinding; import org.springframework.cloud.stream.annotation.StreamListener; import org.springframework.cloud.stream.messaging.Sink; import org.springframework.stereotype.Component; import org.springframework.transaction.annotation.Transactional; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import io.flowing.retail.inventory.application.InventoryService; import io.flowing.retail.inventory.domain.Item; @Component @EnableBinding(Sink.class) public class MessageListener { @Autowired private MessageSender messageSender; @Autowired private InventoryService inventoryService; @Autowired private ObjectMapper objectMapper; @StreamListener(target = Sink.INPUT, condition="(headers['type']?:'')=='PaymentReceivedEvent'") @Transactional public void paymentReceived(String messageJson) throws JsonParseException, JsonMappingException, IOException { Message<JsonNode> message = objectMapper.readValue(messageJson, new TypeReference<Message<JsonNode>>(){}); ObjectNode payload = (ObjectNode) message.getData(); Item[] items = objectMapper.treeToValue(payload.get("items"), Item[].class); String pickId = inventoryService.pickItems( // Arrays.asList(items), "order", payload.get("orderId").asText()); // as in payment - we have to keep the whole order in the payload // as the data flows through this service payload.put("pickId", pickId); messageSender.send( // new Message<JsonNode>( // "GoodsFetchedEvent", // message.getTraceid(), // payload)); } }
814
494
package app.hanks.com.conquer.util; import android.content.Context; import android.graphics.PixelFormat; import android.graphics.drawable.AnimationDrawable; import android.media.MediaPlayer; import android.media.MediaPlayer.OnCompletionListener; import android.media.MediaPlayer.OnPreparedListener; import android.net.Uri; import android.util.DisplayMetrics; import android.util.Log; import android.view.Gravity; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.view.WindowManager; import android.widget.ImageButton; import android.widget.ImageView; import android.widget.ProgressBar; import android.widget.TextView; import com.facebook.drawee.view.SimpleDraweeView; import com.nostra13.universalimageloader.core.ImageLoader; import java.util.Date; import java.util.List; import java.util.Timer; import java.util.TimerTask; import app.hanks.com.conquer.CustomApplication; import app.hanks.com.conquer.R; import app.hanks.com.conquer.bean.Card; import cn.bmob.im.BmobUserManager; import cn.bmob.im.bean.BmobChatUser; import cn.bmob.im.bean.BmobInvitation; import cn.bmob.im.db.BmobDB; import cn.bmob.v3.listener.FindListener; import cn.bmob.v3.listener.SaveListener; import cn.bmob.v3.listener.UpdateListener; public class NotifyUtils { private static MediaPlayer player; private static Timer timer_play; private static int curPosition = 0; /** * 有人提醒任务时弹出的卡片 * @param context * @param card */ public static void showZixiAlertToast(final Context context, final Card card) { final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); final View view = View.inflate(context, R.layout.toast_alert_notify, null); ImageView iv_bell = (ImageView) view.findViewById(R.id.iv_bell); ImageView iv_photo = (ImageView) view.findViewById(R.id.iv_photo); TextView tv_type = (TextView) view.findViewById(R.id.tv_type); TextView tv_from = (TextView) view.findViewById(R.id.tv_from); TextView tv_zixitime = (TextView) view.findViewById(R.id.tv_zixitime); TextView tv_zixiname = (TextView) view.findViewById(R.id.tv_zixiname); TextView tv_content = (TextView) view.findViewById(R.id.tv_content); ViewGroup ll_audio = (ViewGroup) view.findViewById(R.id.ll_audio); final ImageButton ib_play = (ImageButton) view.findViewById(R.id.ib_play); final ProgressBar pb = (ProgressBar) view.findViewById(R.id.pb); ImageLoader.getInstance().displayImage(card.getFavatar(), iv_photo, ImageLoadOptions.getOptions()); tv_type.setText("任务提醒"); tv_from.setText("来自:" + card.getFnick()); tv_zixitime.setText(TaskUtil.getZixiTimeS(card.getTime()) + " " + TaskUtil.getZixiDateS(card.getTime())); tv_zixiname.setText(card.getZixiName()); tv_content.setText(card.getContent()); ll_audio.setVisibility(card.getAudioUrl() != null ? 
View.VISIBLE : View.GONE); // 设置任务已提醒,不需要本地系统提醒了 TaskUtil.setZixiHasAlerted(context, card.getZixiId()); ib_play.setTag("play"); ib_play.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { ib_play.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (ib_play.getTag().equals("play")) { ib_play.setImageResource(R.drawable.pause_audio); ib_play.setTag("pause"); palyAudio(context, ib_play, pb, card.getAudioUrl()); } else { ib_play.setTag("play"); ib_play.setImageResource(R.drawable.play_audio); pauseAudio(ib_play); } } }); } }); // 铃铛动画 iv_bell.setBackgroundResource(R.drawable.alert_bell_anim); AnimationDrawable draw = (AnimationDrawable) iv_bell.getBackground(); draw.start(); final WindowManager.LayoutParams params = new WindowManager.LayoutParams(); DisplayMetrics metrics = new DisplayMetrics(); wm.getDefaultDisplay().getMetrics(metrics); params.height = WindowManager.LayoutParams.MATCH_PARENT; params.width = WindowManager.LayoutParams.MATCH_PARENT; params.gravity = Gravity.BOTTOM; params.flags = WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; // params.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | // WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE // | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; params.format = PixelFormat.TRANSLUCENT; params.type = WindowManager.LayoutParams.TYPE_PHONE; params.windowAnimations = android.R.style.Animation_InputMethod; wm.addView(view, params); // 知道了按钮 view.findViewById(R.id.ll_save).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { removeMyToast(wm, view); saveCard(context, card); } }); } /** * 播放音频 * @param ib_play 播放按钮 * @param pb 进度条 * @param path 音频路径 */ public static void palyAudio(final Context context, final ImageButton ib_play, final ProgressBar pb, String path) { // 播放录音 if (path == null) { T.show(context, "找不到录音文件"); return; } if (player == null) player = new MediaPlayer(); player.reset(); try { player.setDataSource(path); player.prepareAsync(); player.setOnPreparedListener(new OnPreparedListener() { @Override public void onPrepared(MediaPlayer mp) { player.seekTo(curPosition); pb.setMax(player.getDuration()); player.start(); // 秒++ if (timer_play == null) timer_play = new Timer(); timer_play.schedule(new TimerTask() { @Override public void run() { curPosition += 1000; Log.e(" timer_play.schedule", curPosition + ""); pb.setProgress(curPosition); } }, new Date(), 1000); } }); } catch (Exception e) { e.printStackTrace(); T.show(context, "播放出错"); } player.setOnCompletionListener(new OnCompletionListener() { @Override public void onCompletion(MediaPlayer mp) { Log.i("player", "OnCompletionListener"); if (timer_play != null) { timer_play.cancel(); timer_play = null; } ib_play.setImageResource(R.drawable.play_audio); ib_play.setTag("play"); pb.setProgress(0); curPosition = 0; } }); } public static void pauseAudio(ImageButton ib_play) { // 暂停播放,保存播放进度 if (player != null && player.isPlaying()) { curPosition = player.getCurrentPosition(); player.pause(); if (timer_play != null) { timer_play.cancel(); timer_play = null; } if (ib_play != null) { ib_play.setImageResource(R.drawable.play_audio); ib_play.setTag("play"); } } } /** * 有人看到你的任务时,想与你一起上任务是发过来的卡片 * @param context * @param card */ public static void showGoudaToast(final Context context, final Card card) { /** 接受到该类消息本地用户就会就由User变为BmobChatUser,丢失一下信息,还没解决 */ final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); final View view = View.inflate(context, 
R.layout.toast_gouda_notify, null); ImageView iv_bell = (ImageView) view.findViewById(R.id.iv_bell); TextView tv_type = (TextView) view.findViewById(R.id.tv_type); TextView tv_from = (TextView) view.findViewById(R.id.tv_from); SimpleDraweeView iv_avatar = (SimpleDraweeView) view.findViewById(R.id.iv_avatar); TextView tv_zixitime = (TextView) view.findViewById(R.id.tv_zixitime); TextView tv_zixiname = (TextView) view.findViewById(R.id.tv_zixiname); TextView tv_content = (TextView) view.findViewById(R.id.tv_content); tv_type.setText("勾搭任务"); tv_from.setText("来自:"+card.getFnick()); tv_zixitime.setText(TaskUtil.getZixiDateS(card.getTime()) + " " + TaskUtil.getZixiTimeS(card.getTime())); tv_zixiname.setText(card.getZixiName()); tv_content.setText(card.getContent()); iv_avatar.setImageURI(Uri.parse(card.getFavatar())); // 铃铛动画 iv_bell.setBackgroundResource(R.drawable.alert_bell_anim); AnimationDrawable draw = (AnimationDrawable) iv_bell.getBackground(); draw.start(); final WindowManager.LayoutParams params = new WindowManager.LayoutParams(); DisplayMetrics metrics = new DisplayMetrics(); wm.getDefaultDisplay().getMetrics(metrics); params.height = metrics.heightPixels / 2; params.width = WindowManager.LayoutParams.MATCH_PARENT; params.gravity = Gravity.BOTTOM; params.flags = WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; // params.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | // WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE // | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; params.format = PixelFormat.TRANSLUCENT; params.type = WindowManager.LayoutParams.TYPE_PHONE; params.windowAnimations = android.R.style.Animation_InputMethod; wm.addView(view, params); // 监听点击事件 // 忽略按钮 view.findViewById(R.id.ib_decline).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { removeMyToast(wm, view); } }); // 拉黑按钮 view.findViewById(R.id.iv_add_black).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { removeMyToast(wm, view); BmobUserManager.getInstance(context).addBlack(card.getFusername(), new UpdateListener() { @Override public void onSuccess() { T.show(context, "黑名单添加成功!"); // 重新设置下内存中保存的好友列表 CustomApplication.getInstance().setContactList(CollectionUtils.list2map(BmobDB.create(context).getContactList())); BmobDB.create(context).addBlack(card.getFusername()); } @Override public void onFailure(int arg0, String arg1) { T.show(context, "黑名单添加失败:" + arg1); } }); } }); // 同意按钮 view.findViewById(R.id.iv_accept).setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { removeMyToast(wm, view); final BmobInvitation invitation = new BmobInvitation(card.getFid(), card.getFusername(), "", "", System.currentTimeMillis(), 1); BmobUserManager.getInstance(context).agreeAddContact(invitation, new UpdateListener() { @Override public void onSuccess() { saveCard(context, card); BmobUserManager.getInstance(context).queryCurrentContactList(new FindListener<BmobChatUser>() { @Override public void onError(int arg0, String arg1) { L.i("查询好友列表失败:" + arg1); } @Override public void onSuccess(List<BmobChatUser> arg0) { T.show(context, "已将" + card.getFnick() + "添加为陪友"); // 保存到application中方便比较 CustomApplication.getInstance().setContactList(CollectionUtils.list2map(arg0)); } }); // BmobDB.create(context).saveContact(invitation); // CustomApplication.getInstance().setContactList(CollectionUtils.list2map(BmobDB.create(context).getContactList())); } @Override public void onFailure(int arg0, String arg1) { T.show(context, "同意添加好友失敗:" + arg1); } 
}); } }); } /** * 保存卡片到云 * @param context * @param card */ private static void saveCard(Context context, Card card) { card.save(context, new SaveListener() { @Override public void onSuccess() { L.i("Card保存成功"); } @Override public void onFailure(int arg0, String arg1) { L.i("Card保存失败" + arg0 + arg1); } }); } /** * 移除卡片 * @param wm * @param view */ private static void removeMyToast(final WindowManager wm, final View view) { // params.windowAnimations = android.R.style.Animation_Toast; // wm.updateViewLayout(view, params); pauseAudio(null); if (view != null) wm.removeView(view); } }
4,937
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.security.keyvault.keys.models; import com.azure.core.annotation.Fluent; import java.time.OffsetDateTime; /** * Represents the configurable options to import a key. */ @Fluent public class ImportKeyOptions extends KeyProperties { /** * The JSON Web Key to import. */ private final JsonWebKey key; /** * The hardware protected indicator for the key. */ private Boolean hardwareProtected; /** * Creates instance of {@link ImportKeyOptions}. * * @param name The name of the key. * @param key The key material to import. */ public ImportKeyOptions(String name, JsonWebKey key) { super.name = name; this.key = key; } /** * Set whether the key being imported is of HSM type or not. * * @param hardwareProtected The HSM value to set. * * @return The {@link ImportKeyOptions} object itself. */ public ImportKeyOptions setHardwareProtected(Boolean hardwareProtected) { this.hardwareProtected = hardwareProtected; return this; } /** * Get the HSM value of the key being imported. * * @return The HSM value. */ public Boolean isHardwareProtected() { return this.hardwareProtected; } /** * Set a value that indicates if the key is enabled. * * @param enabled The enabled value to set. * * @return The updated {@link ImportKeyOptions} object. */ @Override public ImportKeyOptions setEnabled(Boolean enabled) { this.enabled = enabled; return this; } /** * Get the key material of the key being imported. * * @return The key material. */ public JsonWebKey getKey() { return key; } /** * Set the {@link OffsetDateTime key expiration time} in UTC. * * @param expiresOn The {@link OffsetDateTime key expiration time} in UTC. * * @return The updated {@link ImportKeyOptions} object. */ @Override public ImportKeyOptions setExpiresOn(OffsetDateTime expiresOn) { this.expiresOn = expiresOn; return this; } /** * Set the {@link OffsetDateTime key's notBefore time} in UTC. * * @param notBefore The {@link OffsetDateTime key's notBefore time} in UTC. * * @return The updated {@link ImportKeyOptions} object. */ @Override public ImportKeyOptions setNotBefore(OffsetDateTime notBefore) { this.notBefore = notBefore; return this; } }
1,001
3,508
<reponame>Luv8436/Leetcode
package com.fishercoder.solutions;

import java.util.HashMap;
import java.util.Map;

public class _91 {

    /**
     * Credit: https://discuss.leetcode.com/topic/35840/java-clean-dp-solution-with-explanation
     * I used a dp array of size n + 1 to save subproblem solutions.
     * dp[0] means an empty string will have one way to decode,
     * dp[1] means the way to decode a string of size 1.
     * I then check one digit and two digit combination and save the results along the way.
     * In the end, dp[n] will be the end result.
     */
    public static class Solution1 {
        public int numDecodings(String s) {
            if (s == null || s.length() == 0) {
                return 0;
            }
            int[] dp = new int[s.length() + 1];
            dp[0] = 1;
            dp[1] = (s.charAt(0) != '0') ? 1 : 0;
            for (int i = 2; i <= s.length(); i++) {
                int first = Integer.valueOf(s.substring(i - 1, i));
                int second = Integer.valueOf(s.substring(i - 2, i));
                if (first > 0 && first <= 9) {
                    dp[i] += dp[i - 1];
                }
                if (second >= 10 && second <= 26) {
                    dp[i] += dp[i - 2];
                }
            }
            return dp[s.length()];
        }
    }

    public static class Solution2 {
        /**
         * credit: https://leetcode.com/problems/decode-ways/solution/
         * Approach 1: Recursive Approach with Memoization
         *
         * The actual code goes from the right most character to the left side to build out the dp cache map.
         * And this HashMap uses index as its key instead of a substring.
         */
        public int numDecodings(String s) {
            return dp(new HashMap<>(), s, 0);
        }

        private int dp(Map<Integer, Integer> cache, String s, int index) {
            if (cache.containsKey(index)) {
                return cache.get(index);
            }
            if (index == s.length()) {
                // this means we reached the end of the string, so return 1 as success
                return 1;
            }
            if (s.charAt(index) == '0') {
                // this means this string cannot be decoded
                return 0;
            }
            if (index == s.length() - 1) {
                return 1;
            }
            int ways = dp(cache, s, index + 1);
            if (Integer.parseInt(s.substring(index, index + 2)) <= 26) {
                ways += dp(cache, s, index + 2);
            }
            cache.put(index, ways);
            return cache.get(index);
        }
    }
}
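// Illustrative usage sketch (not part of the original file above): shows how the two nested
// solution classes can be called. It assumes it lives in the same package as _91; the input
// strings are arbitrary examples, not test data from the repository.
class _91UsageExample {
    public static void main(String[] args) {
        _91.Solution1 dpSolution = new _91.Solution1();
        _91.Solution2 memoSolution = new _91.Solution2();
        // "226" decodes as (2 2 6), (22 6) or (2 26) -> 3 ways
        System.out.println(dpSolution.numDecodings("226"));   // 3
        System.out.println(memoSolution.numDecodings("226")); // 3
        // a leading '0' cannot be decoded at all
        System.out.println(dpSolution.numDecodings("06"));    // 0
    }
}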
1,340
338
<gh_stars>100-1000 /* * Copyright 2008-2012 NVIDIA Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #pragma once #include <thrust/detail/config.h> #include <thrust/iterator/iterator_traits.h> #include <thrust/iterator/detail/minimum_system.h> #include <thrust/detail/copy.h> #include <thrust/detail/temporary_array.h> #include <thrust/system/cpp/detail/execution_policy.h> namespace thrust { namespace detail { template<typename InputIterator, typename OutputIterator> OutputIterator sequential_copy(InputIterator first, InputIterator last, OutputIterator result) { for(; first != last; ++first, ++result) { *result = *first; } // end for return result; } // end sequential_copy() template<typename BidirectionalIterator1, typename BidirectionalIterator2> BidirectionalIterator2 sequential_copy_backward(BidirectionalIterator1 first, BidirectionalIterator1 last, BidirectionalIterator2 result) { // yes, we preincrement // the ranges are open on the right, i.e. [first, last) while(first != last) { *--result = *--last; } // end while return result; } // end sequential_copy_backward() namespace dispatch { template<typename DerivedPolicy, typename RandomAccessIterator1, typename RandomAccessIterator2> RandomAccessIterator2 overlapped_copy(thrust::system::cpp::detail::execution_policy<DerivedPolicy> &, RandomAccessIterator1 first, RandomAccessIterator1 last, RandomAccessIterator2 result) { if(first < last && first <= result && result < last) { // result lies in [first, last) // it's safe to use std::copy_backward here thrust::detail::sequential_copy_backward(first, last, result + (last - first)); result += (last - first); } // end if else { // result + (last - first) lies in [first, last) // it's safe to use sequential_copy here result = thrust::detail::sequential_copy(first, last, result); } // end else return result; } // end overlapped_copy() template<typename DerivedPolicy, typename RandomAccessIterator1, typename RandomAccessIterator2> RandomAccessIterator2 overlapped_copy(thrust::execution_policy<DerivedPolicy> &exec, RandomAccessIterator1 first, RandomAccessIterator1 last, RandomAccessIterator2 result) { typedef typename thrust::iterator_value<RandomAccessIterator1>::type value_type; // make a temporary copy of [first,last), and copy into it first thrust::detail::temporary_array<value_type, DerivedPolicy> temp(exec, first, last); return thrust::copy(exec, temp.begin(), temp.end(), result); } // end overlapped_copy() } // end dispatch template<typename RandomAccessIterator1, typename RandomAccessIterator2> RandomAccessIterator2 overlapped_copy(RandomAccessIterator1 first, RandomAccessIterator1 last, RandomAccessIterator2 result) { typedef typename thrust::iterator_system<RandomAccessIterator2>::type System1; typedef typename thrust::iterator_system<RandomAccessIterator2>::type System2; typedef typename thrust::detail::minimum_system<System1, System2>::type System; // XXX presumes System is default constructible System system; return thrust::detail::dispatch::overlapped_copy(system, first, last, result); 
} // end overlapped_copy() } // end detail } // end thrust
1,662
5,193
/* * Copyright (C) 2016 AriaLyy(https://github.com/AriaLyy/Aria) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.arialyy.aria.core.download; import android.text.TextUtils; import com.arialyy.aria.core.common.RequestEnum; import com.arialyy.aria.core.common.controller.FeatureController; import com.arialyy.aria.core.inf.ICheckEntityUtil; import com.arialyy.aria.core.inf.IOptionConstant; import com.arialyy.aria.orm.DbEntity; import com.arialyy.aria.util.ALog; import com.arialyy.aria.util.CheckUtil; import com.arialyy.aria.util.FileUtil; import com.arialyy.aria.util.RecordUtil; import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; public class CheckDGEntityUtil implements ICheckEntityUtil { private final String TAG = "CheckDGEntityUtil"; private DGTaskWrapper mWrapper; private DownloadGroupEntity mEntity; /** * 是否需要修改路径 */ private boolean needModifyPath = false; private int action; /** * @param action {@link FeatureController#ACTION_CREATE} */ public static CheckDGEntityUtil newInstance(DGTaskWrapper wrapper, int action) { return new CheckDGEntityUtil(wrapper, action); } private CheckDGEntityUtil(DGTaskWrapper wrapper, int action) { this.action = action; mWrapper = wrapper; mEntity = mWrapper.getEntity(); } /** * 检查并设置文件夹路径 * * @return {@code true} 合法 */ private boolean checkDirPath() { String dirPath = mWrapper.getDirPathTemp(); if (TextUtils.isEmpty(dirPath)) { ALog.e(TAG, "文件夹路径不能为null"); return false; } File file = new File(dirPath); if (!FileUtil.canWrite(file.getParent()) && !FileUtil.canWrite(dirPath)) { ALog.e(TAG, String.format("路径【%s】不可写", dirPath)); return false; } if (!dirPath.startsWith("/")) { ALog.e(TAG, String.format("文件夹路径【%s】错误", dirPath)); return false; } if (file.isFile()) { ALog.e(TAG, String.format("路径【%s】是文件,请设置文件夹路径", dirPath)); return false; } // 检查路径冲突 if (mWrapper.isNewTask() && !CheckUtil.checkDGPathConflicts(mWrapper.isIgnoreFilePathOccupy(), dirPath)) { return false; } if (TextUtils.isEmpty(mEntity.getDirPath()) || !mEntity.getDirPath() .equals(dirPath)) { if (!file.exists()) { file.mkdirs(); } needModifyPath = true; mEntity.setDirPath(dirPath); ALog.i(TAG, String.format("文件夹路径改变,将更新文件夹路径为:%s", dirPath)); } return true; } /** * 改变任务组文件夹路径,修改文件夹路径会将子任务所有路径更换 * * @param newDirPath 新的文件夹路径 */ private void reChangeDirPath(String newDirPath) { ALog.d(TAG, String.format("修改新路径为:%s", newDirPath)); List<DTaskWrapper> subTasks = mWrapper.getSubTaskWrapper(); if (subTasks != null && !subTasks.isEmpty()) { for (DTaskWrapper dte : subTasks) { DownloadEntity de = dte.getEntity(); String oldPath = de.getFilePath(); String newPath = newDirPath + "/" + de.getFileName(); File file = new File(oldPath); if (file.exists()) { file.renameTo(new File(newPath)); } de.setFilePath(newPath); } } } /** * 检查和处理组合任务的路径冲突 * * @param isIgnoreTaskOccupy true,如果hash冲突,将删除其它任务的记录的 * @param groupHash 组任务hash * @return false 任务不再执行,true 任务继续执行 */ private boolean checkGroupHash(boolean isIgnoreTaskOccupy, String 
groupHash) { DownloadGroupEntity dge = DbEntity.findFirst(DownloadGroupEntity.class, "groupHash=?", groupHash); if (dge != null && dge.getGroupHash().equals(mEntity.getGroupHash())) { mEntity.rowID = dge.rowID; return true; } if (DbEntity.checkDataExist(DownloadGroupEntity.class, "groupHash=?", groupHash)) { if (!isIgnoreTaskOccupy) { ALog.e(TAG, String.format("下载失败,数据库中已存在相同的url的组任务,groupHash = %s", groupHash)); return false; } else { ALog.w(TAG, String.format("数据库中已存在相同的url的组任务,将删除groupHash = %s 的旧任务", groupHash)); RecordUtil.delGroupTaskRecordByHash(groupHash, true); return true; } } return true; } @Override public boolean checkEntity() { if (mWrapper.getErrorEvent() != null) { ALog.e(TAG, String.format("任务操作失败,%s", mWrapper.getErrorEvent().errorMsg)); return false; } if ((action == FeatureController.ACTION_CREATE || action == FeatureController.ACTION_ADD) && !checkGroupHash(mWrapper.isIgnoreTaskOccupy(), mEntity.getGroupHash())) { return false; } if (!checkDirPath()) { return false; } if (!checkSubName()) { return false; } if (!checkUrls()) { return false; } if (action != FeatureController.ACTION_CANCEL && !mWrapper.isUnknownSize() && mEntity.getFileSize() == 0) { ALog.e(TAG, "组合任务必须设置文件文件大小,默认需要强制设置文件大小。如果无法获取到总长度,请调用#unknownSize()来标志该组合任务"); return false; } if (mWrapper.getOptionParams().getParam(IOptionConstant.requestEnum) == RequestEnum.POST) { for (DTaskWrapper subWrapper : mWrapper.getSubTaskWrapper()) { subWrapper.getOptionParams().setParams(IOptionConstant.requestEnum, RequestEnum.POST); } } if (needModifyPath) { reChangeDirPath(mWrapper.getDirPathTemp()); } if (!mWrapper.getSubNameTemp().isEmpty()) { updateSingleSubFileName(); } saveEntity(); return true; } private void saveEntity() { mEntity.save(); DbEntity.saveAll(mEntity.getSubEntities()); } /** * 更新所有改动的子任务文件名 */ private void updateSingleSubFileName() { List<DTaskWrapper> entities = mWrapper.getSubTaskWrapper(); int i = 0; for (DTaskWrapper taskWrapper : entities) { if (i < mWrapper.getSubNameTemp().size()) { String newName = mWrapper.getSubNameTemp().get(i); DownloadEntity entity = taskWrapper.getEntity(); if (!newName.equals(entity.getFileName())) { String oldPath = mEntity.getDirPath() + "/" + entity.getFileName(); String newPath = mEntity.getDirPath() + "/" + newName; if (DbEntity.checkDataExist(DownloadEntity.class, "downloadPath=?", newPath)) { ALog.w(TAG, String.format("更新文件名失败,路径【%s】被其它任务占用", newPath)); return; } RecordUtil.modifyTaskRecord(oldPath, newPath, mEntity.getTaskType()); entity.setFilePath(newPath); entity.setFileName(newName); } } i++; } } /** * 检查urls是否合法,并删除不合法的子任务 * * @return {@code true} 合法 */ private boolean checkUrls() { if (mEntity.getUrls().isEmpty()) { ALog.e(TAG, "操作失败,子任务下载列表为null"); return false; } Set<String> repeated = new HashSet<>(); List<String> results = new ArrayList<>(); for (String url : mEntity.getUrls()) { if (!repeated.add(url)) { results.add(url); } } if (!results.isEmpty()) { ALog.e(TAG, String.format("组合任务中有url重复,重复的url:%s", Arrays.toString(results.toArray()))); return false; } Set<Integer> delItem = new HashSet<>(); int i = 0; for (String url : mEntity.getUrls()) { if (TextUtils.isEmpty(url)) { ALog.e(TAG, "子任务url为null,即将删除该子任务。"); delItem.add(i); continue; } else if (!url.startsWith("http")) { ALog.e(TAG, "子任务url【" + url + "】错误,即将删除该子任务。"); delItem.add(i); continue; } int index = url.indexOf("://"); if (index == -1) { ALog.e(TAG, "子任务url【" + url + "】不合法,即将删除该子任务。"); delItem.add(i); continue; } i++; } for (int index : delItem) { mEntity.getUrls().remove(index); if 
(mWrapper.getSubNameTemp() != null && !mWrapper.getSubNameTemp().isEmpty()) { mWrapper.getSubNameTemp().remove(index); } } return true; } /** * 如果用户设置了子任务文件名,检查子任务文件名 * * @return {@code true} 合法 */ private boolean checkSubName() { if (mWrapper.getSubNameTemp() == null || mWrapper.getSubNameTemp().isEmpty()) { return true; } if (mEntity.getUrls().size() != mWrapper.getSubNameTemp().size()) { ALog.e(TAG, "子任务文件名必须和子任务数量一致"); return false; } return true; } }
4,411
837
<gh_stars>100-1000
package me.saket.dank.ui.submission.events;

import com.google.auto.value.AutoValue;

import me.saket.dank.ui.UiEvent;
import me.saket.dank.utils.DankSubmissionRequest;

@AutoValue
public abstract class SubmissionRequestChanged implements UiEvent {

  public abstract DankSubmissionRequest request();

  public static SubmissionRequestChanged create(DankSubmissionRequest request) {
    return new AutoValue_SubmissionRequestChanged(request);
  }
}
144
835
<reponame>pristem/fo-dicom // // (C) CharLS Team 2014, all rights reserved. See the accompanying "License.txt" for licensed use. // #include "jpegmarkersegment.h" #include "jpegmarkercode.h" #include "util.h" #include <vector> #include <cstdint> using namespace std; using namespace charls; unique_ptr<JpegMarkerSegment> JpegMarkerSegment::CreateStartOfFrameSegment(int width, int height, int bitsPerSample, int componentCount) { ASSERT(width >= 0 && width <= UINT16_MAX); ASSERT(height >= 0 && height <= UINT16_MAX); ASSERT(bitsPerSample > 0 && bitsPerSample <= UINT8_MAX); ASSERT(componentCount > 0 && componentCount <= (UINT8_MAX - 1)); // Create a Frame Header as defined in T.87, C.2.2 and T.81, B.2.2 vector<uint8_t> content; content.push_back(static_cast<uint8_t>(bitsPerSample)); // P = Sample precision push_back(content, static_cast<uint16_t>(height)); // Y = Number of lines push_back(content, static_cast<uint16_t>(width)); // X = Number of samples per line // Components content.push_back(static_cast<uint8_t>(componentCount)); // Nf = Number of image components in frame for (auto component = 0; component < componentCount; ++component) { // Component Specification parameters content.push_back(static_cast<uint8_t>(component + 1)); // Ci = Component identifier content.push_back(0x11); // Hi + Vi = Horizontal sampling factor + Vertical sampling factor content.push_back(0); // Tqi = Quantization table destination selector (reserved for JPEG-LS, should be set to 0) } return make_unique<JpegMarkerSegment>(JpegMarkerCode::StartOfFrameJpegLS, move(content)); } unique_ptr<JpegMarkerSegment> JpegMarkerSegment::CreateJpegFileInterchangeFormatSegment(const JfifParameters& params) { ASSERT(params.units == 0 || params.units == 1 || params.units == 2); ASSERT(params.Xdensity > 0); ASSERT(params.Ydensity > 0); ASSERT(params.Xthumbnail >= 0 && params.Xthumbnail < 256); ASSERT(params.Ythumbnail >= 0 && params.Ythumbnail < 256); // Create a JPEG APP0 segment in the JPEG File Interchange Format (JFIF), v1.02 vector<uint8_t> content { 'J', 'F', 'I', 'F', '\0' }; push_back(content, static_cast<uint16_t>(params.version)); content.push_back(static_cast<uint8_t>(params.units)); push_back(content, static_cast<uint16_t>(params.Xdensity)); push_back(content, static_cast<uint16_t>(params.Ydensity)); // thumbnail content.push_back(static_cast<uint8_t>(params.Xthumbnail)); content.push_back(static_cast<uint8_t>(params.Ythumbnail)); if (params.Xthumbnail > 0) { if (params.thumbnail) throw charls_error(ApiResult::InvalidJlsParameters, "params.Xthumbnail is > 0 but params.thumbnail == null_ptr"); content.insert(content.end(), static_cast<uint8_t*>(params.thumbnail), static_cast<uint8_t*>(params.thumbnail) + 3 * params.Xthumbnail * params.Ythumbnail); } return make_unique<JpegMarkerSegment>(JpegMarkerCode::ApplicationData0, move(content)); } unique_ptr<JpegMarkerSegment> JpegMarkerSegment::CreateJpegLSPresetParametersSegment(const JpegLSPresetCodingParameters& params) { vector<uint8_t> content; // Parameter ID. 0x01 = JPEG-LS preset coding parameters. 
content.push_back(1); push_back(content, static_cast<uint16_t>(params.MaximumSampleValue)); push_back(content, static_cast<uint16_t>(params.Threshold1)); push_back(content, static_cast<uint16_t>(params.Threshold2)); push_back(content, static_cast<uint16_t>(params.Threshold3)); push_back(content, static_cast<uint16_t>(params.ResetValue)); return make_unique<JpegMarkerSegment>(JpegMarkerCode::JpegLSPresetParameters, move(content)); } unique_ptr<JpegMarkerSegment> JpegMarkerSegment::CreateColorTransformSegment(ColorTransformation transformation) { return make_unique<JpegMarkerSegment>( JpegMarkerCode::ApplicationData8, vector<uint8_t> { 'm', 'r', 'f', 'x', static_cast<uint8_t>(transformation) }); } unique_ptr<JpegMarkerSegment> JpegMarkerSegment::CreateStartOfScanSegment(int componentIndex, int componentCount, int allowedLossyError, InterleaveMode interleaveMode) { ASSERT(componentIndex >= 0); ASSERT(componentCount > 0); // Create a Scan Header as defined in T.87, C.2.3 and T.81, B.2.3 vector<uint8_t> content; content.push_back(static_cast<uint8_t>(componentCount)); for (auto i = 0; i < componentCount; ++i) { content.push_back(static_cast<uint8_t>(componentIndex + i)); content.push_back(0); // Mapping table selector (0 = no table) } content.push_back(static_cast<uint8_t>(allowedLossyError)); // NEAR parameter content.push_back(static_cast<uint8_t>(interleaveMode)); // ILV parameter content.push_back(0); // transformation return make_unique<JpegMarkerSegment>(JpegMarkerCode::StartOfScan, move(content)); }
1,878
303
<reponame>thomasleveil/docker-HTPC-Manager<filename>modules/transmission.py #!/usr/bin/env python # -*- coding: utf-8 -*- import cherrypy import htpc import urllib2 import base64 from json import loads, dumps import logging from cherrypy.lib.auth2 import require class Transmission: # Transmission Session ID sessionId = '' def __init__(self): self.logger = logging.getLogger('modules.transmission') htpc.MODULES.append({ 'name': 'Transmission', 'id': 'transmission', 'test': htpc.WEBDIR + 'transmission/ping', 'fields': [ {'type': 'bool', 'label': 'Enable', 'name': 'transmission_enable'}, {'type': 'text', 'label': 'Menu name', 'name': 'transmission_name'}, {'type': 'text', 'label': 'IP / Host', 'placeholder': 'localhost', 'name': 'transmission_host'}, {'type': 'text', 'label': 'Port', 'placeholder': '9091', 'name': 'transmission_port'}, {'type': 'text', 'label': 'Reverse Proxy', 'placeholder': '', 'name': 'transmission_revproxy'}, {'type': 'text', 'label': 'Rpc url', 'placeholder': '', 'name': 'transmission_rpcbasepath'}, {'type': 'text', 'label': 'Username', 'name': 'transmission_username'}, {'type': 'password', 'label': 'Password', 'name': 'transmission_password'} ] }) @cherrypy.expose() @require() def index(self): return htpc.LOOKUP.get_template('transmission.html').render(scriptname='transmission') @cherrypy.expose() @require() @cherrypy.tools.json_out() def queue(self): fields = ['id', 'name', 'status', 'comment', 'downloadDir', 'downloadDir', 'percentDone', 'isFinished', 'eta', 'rateDownload', 'rateUpload', 'uploadRatio'] return self.fetch('torrent-get', {'fields': fields}) @cherrypy.expose() @require() @cherrypy.tools.json_out() def stats(self): return self.fetch('session-stats') @cherrypy.expose() @require() @cherrypy.tools.json_out() def ping(self, **kwargs): """ Test connection to Transmission """ host = kwargs["transmission_host"] port = kwargs["transmission_port"] username = kwargs["transmission_username"] password = kwargs["transmission_password"] basepath = kwargs["transmission_rpcbasepath"] if basepath: if not basepath.startswith('/'): basepath = '/%s' % basepath if not basepath.endswith('/'): basepath += '/' else: # Default basepath is transmission basepath = '/transmission/' url = 'http://' + host + ':' + str(port) + basepath + 'rpc/' # format post data data = {'method': 'session-get'} data = dumps(data) # Set Header header = { 'X-Transmission-Session-Id': self.sessionId, 'Content-Type': 'json; charset=UTF-8' } # Add authentication if username and password: authentication = base64.encodestring('%s:%s' % (username, password)).replace('\n', '') header['Authorization'] = "Basic %s" % authentication try: request = urllib2.Request(url, data=data, headers=header) response = urllib2.urlopen(request).read() return loads(response) except urllib2.HTTPError, e: # Fetching url failed Maybe Transmission session must be renewed if (e.getcode() == 409 and e.headers['X-Transmission-Session-Id']): self.logger.debug("Setting new session ID provided by Transmission") # If response is 409 re-set session id from header self.sessionId = e.headers['X-Transmission-Session-Id'] self.logger.debug("Retry Transmission api with new session id.") try: header['X-Transmission-Session-Id'] = self.sessionId req = urllib2.Request(url, data=data, headers=header) response = urllib2.urlopen(req).read() return loads(response) except: self.logger.error("Unable access Transmission api with new session id.") return except Exception: self.logger.error("Unable to fetch information from: " + url) return @cherrypy.expose() 
@cherrypy.tools.json_out() def session(self): return self.fetch('session-get') @cherrypy.expose() @require() @cherrypy.tools.json_out() def start(self, torrentId=False): if torrentId is False: return self.fetch('torrent-start-now') try: torrentId = int(torrentId) except ValueError: return False return self.fetch('torrent-start-now', {'ids': torrentId}) @cherrypy.expose() @require() @cherrypy.tools.json_out() def stop(self, torrentId=False): if torrentId is False: return self.fetch('torrent-stop') try: torrentId = int(torrentId) except ValueError: return False return self.fetch('torrent-stop', {'ids': torrentId}) @cherrypy.expose() @require() @cherrypy.tools.json_out() def Add(self, filename): return self.fetch('torrent-add', {'filename': filename}) @cherrypy.expose() @require() @cherrypy.tools.json_out() def remove(self, torrentId): try: torrentId = int(torrentId) except ValueError: return False return self.fetch('torrent-remove', {'ids': torrentId}) # Wrapper to access the Transmission Api # If the first call fails, there probably is no valid Session ID so we try it again def fetch(self, method, arguments=''): """ Do request to Transmission api """ self.logger.debug("Request transmission method: " + method) host = htpc.settings.get('transmission_host', '') port = str(htpc.settings.get('transmission_port', '')) # Default basepath is transmission basepath = htpc.settings.get('transmission_rpcbasepath', '/transmission/') if basepath: if not basepath.startswith('/'): basepath = '/%s' % basepath if not basepath.endswith('/'): basepath += '/' else: basepath = '/transmission/' url = 'http://' + host + ':' + str(port) + basepath + 'rpc/' # format post data data = {'method': method} if arguments: data['arguments'] = arguments data = dumps(data) # Set Header header = { 'X-Transmission-Session-Id': self.sessionId, 'Content-Type': 'json; charset=UTF-8' } # Add authentication authentication = self.auth() if authentication: header['Authorization'] = "Basic %s" % authentication try: request = urllib2.Request(url, data=data, headers=header) response = urllib2.urlopen(request).read() return loads(response) except urllib2.HTTPError, e: # Fetching url failed Maybe Transmission session must be renewed if (e.getcode() == 409 and e.headers['X-Transmission-Session-Id']): self.logger.debug("Setting new session ID provided by Transmission") # If response is 409 re-set session id from header self.sessionId = e.headers['X-Transmission-Session-Id'] self.logger.debug("Retry Transmission api with new session id.") try: header['X-Transmission-Session-Id'] = self.sessionId req = urllib2.Request(url, data=data, headers=header) response = urllib2.urlopen(req).read() return loads(response) except: self.logger.error("Unable access Transmission api with new session id.") return except Exception: self.logger.error("Unable to fetch information from: " + url) return # Construct url with login details def auth(self): """ Generate a base64 HTTP auth string based on settings """ self.logger.debug("Generating authentication string for transmission") password = htpc.settings.get('transmission_password', '') username = htpc.settings.get('transmission_username', '') if username and password: return base64.encodestring('%s:%s' % (username, password)).replace('\n', '') return False
3,944
457
package denominator.dynect; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; import javax.inject.Inject; import javax.inject.Provider; import javax.inject.Singleton; import denominator.CheckConnection; import denominator.Credentials; import denominator.dynect.DynECT.Data; import feign.Body; import feign.Headers; import feign.Param; import feign.RequestLine; import static denominator.common.Preconditions.checkNotNull; /** * gets the last auth token, expiring if the url or credentials changed */ // similar to guava MemoizingSupplier @Singleton class InvalidatableTokenProvider implements Provider<String>, CheckConnection { private final denominator.Provider provider; private final Session session; private final Provider<Credentials> credentials; private final AtomicReference<Boolean> sessionValid; transient volatile String lastUrl; transient volatile int lastCredentialsHashCode; // "value" does not need to be volatile; visibility piggy-backs // on above transient String value; @Inject InvalidatableTokenProvider(denominator.Provider provider, Session session, Provider<Credentials> credentials, AtomicReference<Boolean> sessionValid) { this.provider = provider; this.session = session; this.credentials = credentials; this.sessionValid = sessionValid; // for toString this.lastUrl = provider.url(); } @Override public boolean ok() { try { session.check(get()); return true; } catch (RuntimeException e) { e.printStackTrace(); sessionValid.set(false); return false; } } @Override public String get() { String currentUrl = provider.url(); Credentials currentCreds = credentials.get(); if (needsRefresh(currentUrl, currentCreds)) { synchronized (this) { if (needsRefresh(currentUrl, currentCreds)) { lastCredentialsHashCode = currentCreds.hashCode(); lastUrl = currentUrl; String t = auth(currentCreds); value = t; sessionValid.set(true); return t; } } } return value; } private boolean needsRefresh(String currentUrl, Credentials currentCreds) { return !sessionValid.get() || currentCreds.hashCode() != lastCredentialsHashCode || !currentUrl .equals(lastUrl); } private String auth(Credentials currentCreds) { String customer; String username; String password; if (currentCreds instanceof List) { @SuppressWarnings("unchecked") List<Object> listCreds = (List<Object>) currentCreds; customer = listCreds.get(0).toString(); username = listCreds.get(1).toString(); password = listCreds.get(2).toString(); } else if (currentCreds instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> mapCreds = (Map<String, Object>) currentCreds; customer = checkNotNull(mapCreds.get("customer"), "customer").toString(); username = checkNotNull(mapCreds.get("username"), "username").toString(); password = checkNotNull(mapCreds.get("password"), "password").toString(); } else { throw new IllegalArgumentException("Unsupported credential type: " + currentCreds); } return session.login(customer, username, password).data; } @Override public String toString() { return "InvalidatableTokenSupplier(" + lastUrl + ")"; } interface Session { @RequestLine("POST /Session") @Body("%7B\"customer_name\":\"{customer_name}\",\"user_name\":\"{user_name}\",\"password\":\"{password}\"%7D") Data<String> login(@Param("customer_name") String customer, @Param("user_name") String user, @Param("password") String password); @RequestLine("GET /Session") @Headers("Auth-Token: {Auth-Token}") void check(@Param("Auth-Token") String token); } }
1,422
400
<reponame>tradeshift-zihe/ofdrw
package org.ofdrw.layout.element;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.*;

class DivTest {

    @Test
    void split() {
        Div div = new Div()
                .setWidth(400d)
                .setHeight(800d)
                .setBackgroundColor(0, 0, 0)
                .setBorder(1d)
                .setMargin(15d)
                .setPadding(8d);
        Div[] sDivs = div.split(100);
        assertEquals(sDivs.length, 2);
        Div d1 = sDivs[0];
        Div d2 = sDivs[1];
        int h1 = 100 - 1 - 15 - 8;
        assertEquals(d1.getHeight(), h1);
        assertEquals(d2.getHeight(), 800d - h1);

        assertEquals(d1.getPaddingBottom(), 0);
        assertEquals(d1.getBorderBottom(), 0);
        assertEquals(d1.getMarginBottom(), 0);

        assertEquals(d2.getPaddingTop(), 0);
        assertEquals(d2.getBorderTop(), 0);
        assertEquals(d2.getMarginTop(), 0);

        div = new Div()
                .setBackgroundColor(0, 0, 0)
                .setWidth(300d)
                .setHeight(100d)
                .setPadding(6d)
                .setBorder(8d)
                .setMargin(10d);
        Div[] sp = div.split(5);
        assertEquals(sp[0].getMarginTop(), 5);
        assertEquals(sp[1].getMarginTop(), 5);

        sp = div.split(10);
        assertEquals(sp[0].getMarginTop(), 10);
        assertEquals(sp[1].getMarginTop(), 0);

        sp = div.split(14);
        assertEquals(sp[0].getMarginTop(), 10);
        assertEquals(sp[0].getBorderTop(), 4);
        assertEquals(sp[1].getMarginTop(), 0);
        assertEquals(sp[1].getBorderTop(), 4);

        sp = div.split(21);
        assertEquals(sp[0].getMarginTop(), 10);
        assertEquals(sp[0].getBorderTop(), 8);
        assertEquals(sp[0].getPaddingTop(), 3);
        assertEquals(sp[1].getMarginTop(), 0);
        assertEquals(sp[1].getBorderTop(), 0);
        assertEquals(sp[1].getPaddingTop(), 3);
    }
}
1,075
965
// adds three strings with one call
lpszStrings = _T("String 1\0String 2\0String 3\0");
30
351
<gh_stars>100-1000 /* * cen64_wrapper.cpp - CEN64-specific CPU interfaces * @author <NAME> <EMAIL> * Copyright 2019 MIPT-MIPS */ #include "cen64_rcp_interrupt_mask.h" #include "cen64_wrapper.h" static const constexpr uint64 MI_REGS_BASE_ADDRESS = 0x04300000; int vr4300::init( std::shared_ptr<FuncMemory> m) { set_memory( std::move( m)); set_pc( 0x1fc00000ULL); mi_regs[MI_VERSION_REG] = 0x01010101U; mi_regs[MI_INIT_MODE_REG] = 0x80U; return 0; } void vr4300::apply_mask_to_cause( uint64 mask) { uint64 cause = read_cause_register(); write_cause_register( cause | mask); } void vr4300::reset_mask_to_cause( uint64 mask) { uint64 cause = read_cause_register(); write_cause_register( cause & ~mask); } // Checks for interrupts, possibly sets the cause bit. void vr4300::check_for_interrupts() { if ((mi_regs[MI_INTR_REG] & mi_regs[MI_INTR_MASK_REG]) != 0) apply_mask_to_cause( 0x400); else reset_mask_to_cause( 0x400); } void vr4300::clear_rcp_interrupt(uint32 mask) { mi_regs[MI_INTR_REG] &= ~mask; check_for_interrupts(); } void vr4300::signal_rcp_interrupt(uint32 mask) { mi_regs[MI_INTR_REG] |= mask; check_for_interrupts(); } int vr4300::read_mi_regs( uint32_t address, uint32_t* word) const { auto reg = MiRegister{ ( address - MI_REGS_BASE_ADDRESS) / 4}; *word = mi_regs.at( reg); return 0; } int vr4300::write_mi_regs( uint32 address, uint32 word, uint32 dqm) { auto reg = MiRegister{ ( address - MI_REGS_BASE_ADDRESS) / 4}; switch (reg) { case MI_INIT_MODE_REG: write_mi_init_mode_reg( word); break; case MI_INTR_MASK_REG: write_mi_intr_mask_reg( word); break; default: mi_regs.at( reg) = ( mi_regs.at( reg) & ~dqm) | word; break; } return 0; } void vr4300::write_mi_init_mode_reg( uint32 word) { static const constexpr uint64 MI_EBUS_TEST_MODE = 0x0080; static const constexpr uint64 MI_INIT_MODE = 0x0100; static const constexpr uint64 MI_RDRAM_REG_MODE = 0x0200; uint32 result = word & 0x3FFU; if ((word & 0x0080U) != 0) result &= ~MI_INIT_MODE; else if ((word & 0x0100U) != 0) result |= MI_INIT_MODE; if ((word & 0x0200U) != 0) result &= ~MI_EBUS_TEST_MODE; else if ((word & 0x0400U) != 0) result |= MI_EBUS_TEST_MODE; if ((word & 0x0800U) != 0) { mi_regs[MI_INTR_REG] &= ~MI_INTR_DP; check_for_interrupts(); } if ((word & 0x1000U) != 0) result &= ~MI_RDRAM_REG_MODE; else if ((word & 0x2000U) != 0) result |= MI_RDRAM_REG_MODE; mi_regs[MI_INIT_MODE_REG] = result; } void vr4300::write_mi_intr_mask_reg( uint32 word) { if ((word & 0x0001U) != 0) mi_regs[MI_INTR_MASK_REG] &= ~MI_INTR_SP; else if ((word & 0x0002U) != 0) mi_regs[MI_INTR_MASK_REG] |= MI_INTR_SP; if ((word & 0x0004U) != 0) mi_regs[MI_INTR_MASK_REG] &= ~MI_INTR_SI; else if ((word & 0x0008U) != 0) mi_regs[MI_INTR_MASK_REG] |= MI_INTR_SI; if ((word & 0x0010U) != 0) mi_regs[MI_INTR_MASK_REG] &= ~MI_INTR_AI; else if ((word & 0x0020U) != 0) mi_regs[MI_INTR_MASK_REG] |= MI_INTR_AI; if ((word & 0x0040U) != 0) mi_regs[MI_INTR_MASK_REG] &= ~MI_INTR_VI; else if ((word & 0x0080U) != 0) mi_regs[MI_INTR_MASK_REG] |= MI_INTR_VI; if ((word & 0x0100U) != 0) mi_regs[MI_INTR_MASK_REG] &= ~MI_INTR_PI; else if ((word & 0x0200U) != 0) mi_regs[MI_INTR_MASK_REG] |= MI_INTR_PI; if ((word & 0x0400U) != 0) mi_regs[MI_INTR_MASK_REG] &= ~MI_INTR_DP; else if ((word & 0x0800U) != 0) mi_regs[MI_INTR_MASK_REG] |= MI_INTR_DP; check_for_interrupts(); }
1,871
887
<filename>deps/fcl/include/fcl/interval_tree.h /* * Software License Agreement (BSD License) * * Copyright (c) 2011, <NAME>, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of <NAME>, Inc. nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** \author <NAME> */ #ifndef FCL_INTERVAL_TREE_H #define FCL_INTERVAL_TREE_H #include <deque> #include <limits> /** \brief Main namespace */ namespace fcl { /** \brief Interval trees implemented using red-black-trees as described in * the book Introduction_To_Algorithms_ by Cormen, Leisserson, and Rivest. * Can be replaced in part by boost::icl::interval_set, which is only supported after boost 1.46 and does not support delete node routine. */ struct Interval { public: Interval() {} virtual ~Interval() {} virtual void print() {} double low, high; }; class IntervalTreeNode { friend class IntervalTree; public: /** \brief Print the interval node information: set left = nil and right = root */ void print(IntervalTreeNode* left, IntervalTreeNode* right) const; IntervalTreeNode(); IntervalTreeNode(Interval* new_interval); ~IntervalTreeNode(); protected: Interval* stored_interval; double key; double high; double max_high; bool red; /* if red = false then the node is black */ IntervalTreeNode* left; IntervalTreeNode* right; IntervalTreeNode* parent; }; /** \brief Class describes the information needed when we take the * right branch in searching for intervals but possibly come back * and check the left branch as well. 
*/ struct it_recursion_node { public: IntervalTreeNode* start_node; unsigned int parent_index; bool try_right_branch; }; /** \brief Interval tree */ class IntervalTree { public: IntervalTree(); ~IntervalTree(); /** \brief Print the whole interval tree */ void print() const; /** \brief Delete one node of the interval tree */ Interval* deleteNode(IntervalTreeNode* node); /** \brief Insert one node of the interval tree */ IntervalTreeNode* insert(Interval* new_interval); /** \brief get the predecessor of a given node */ IntervalTreeNode* getPredecessor(IntervalTreeNode* node) const; /** \brief Get the successor of a given node */ IntervalTreeNode* getSuccessor(IntervalTreeNode* node) const; /** \brief Return result for a given query */ std::deque<Interval*> query(double low, double high); protected: IntervalTreeNode* root; IntervalTreeNode* nil; /** \brief left rotation of tree node */ void leftRotate(IntervalTreeNode* node); /** \brief right rotation of tree node */ void rightRotate(IntervalTreeNode* node); /** \brief recursively insert a node */ void recursiveInsert(IntervalTreeNode* node); /** \brief recursively print a subtree */ void recursivePrint(IntervalTreeNode* node) const; /** \brief Travels up to the root fixing the max_high fields after an insertion or deletion */ void fixupMaxHigh(IntervalTreeNode* node); void deleteFixup(IntervalTreeNode* node); private: unsigned int recursion_node_stack_size; it_recursion_node* recursion_node_stack; unsigned int current_parent; unsigned int recursion_node_stack_top; }; } #endif
1,403
453
package com.waylau.spring.boot.blog.repository;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;

import com.waylau.spring.boot.blog.domain.User;

/**
 * User repository.
 *
 * @since 1.0.0 March 2, 2017
 * @author <a href="https://waylau.com"><NAME></a>
 */
public interface UserRepository extends JpaRepository<User, Long> {

    /**
     * Queries the user list by user name, with pagination.
     * @param name
     * @param pageable
     * @return
     */
    Page<User> findByNameLike(String name, Pageable pageable);

    User findByUsername(String username);
}
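// Illustrative only (not part of the original file above): a hypothetical caller of this
// repository. "UserServiceSketch", the page size of 10, and PageRequest.of(...) (Spring Data 2.x
// API) are assumptions for the sketch, not code from the blog project itself.
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;

import com.waylau.spring.boot.blog.domain.User;

public class UserServiceSketch {

    private final UserRepository userRepository;

    public UserServiceSketch(UserRepository userRepository) {
        this.userRepository = userRepository;
    }

    public Page<User> searchByName(String keyword, int pageIndex) {
        // the derived "Like" query expects the caller to supply the SQL wildcards
        return userRepository.findByNameLike("%" + keyword + "%", PageRequest.of(pageIndex, 10));
    }
}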
279
2,151
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package com.squareup.leakcanary;

// Normally generated by Gradle.
class BuildConfig {
  static final String LIBRARY_VERSION = "1.4-beta1 (chromium)";
  static final String GIT_SHA = "unknown"; // Unlikely to be kept up-to-date, so don't try.
}
126
1,093
/**
 * Provides classes for configuration - parsers, namespace handlers, factory beans.
 */
package org.springframework.integration.ip.config;
34
1,144
<filename>backend/de.metas.adempiere.adempiere/client/src/main/java/org/adempiere/ui/IContextMenuAction.java<gh_stars>1000+ package org.adempiere.ui; /* * #%L * de.metas.adempiere.adempiere.client * %% * Copyright (C) 2015 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ import java.util.List; import javax.swing.KeyStroke; import org.compiere.grid.ed.VEditor; /** * Context menu action (i.e. displayed on {@link VEditor}'s right click * * @author tsa */ public interface IContextMenuAction extends Runnable { /** * Set evaluation context. * * NOTE: don't call directly, this method is called by API once, when the context menu is initialized. * * @param menuCtx */ void setContext(final IContextMenuActionContext menuCtx); /** @return action's title, already translated */ String getTitle(); /** * Return the filename of the icon to use, <b>without the file ending</b>. Or return <code>null</code> if their shall be no icon. * * @return */ String getIcon(); KeyStroke getKeyStroke(); /** * @return True if the action should exist in the context menu. */ boolean isAvailable(); /** * @return True if the action can be run in the context menu. */ boolean isRunnable(); /** * Returns how the action shall be displayed when not runnable. * * @return <ul> * <li>true if this action shall be hidden when not runnable * <li>false if this action shall be displayed but grayed when not runnable * </ul> */ boolean isHideWhenNotRunnable(); /** * @return child {@link IContextMenuAction} */ List<IContextMenuAction> getChildren(); @Override void run(); /** * If is a long operation, menu renderer will: * <ul> * <li>show waiting cursor * </ul> * * @return true if executing this action can take a while. */ boolean isLongOperation(); }
817
1,350
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.cognitiveservices.language.luis.authoring.models; import java.util.List; import com.fasterxml.jackson.annotation.JsonProperty; /** * The HierarchicalModelV2 model. */ public class HierarchicalModelV2 { /** * The name property. */ @JsonProperty(value = "name") private String name; /** * The children property. */ @JsonProperty(value = "children") private List<String> children; /** * The inherits property. */ @JsonProperty(value = "inherits") private PrebuiltDomainObject inherits; /** * The roles property. */ @JsonProperty(value = "roles") private List<String> roles; /** * Get the name value. * * @return the name value */ public String name() { return this.name; } /** * Set the name value. * * @param name the name value to set * @return the HierarchicalModelV2 object itself. */ public HierarchicalModelV2 withName(String name) { this.name = name; return this; } /** * Get the children value. * * @return the children value */ public List<String> children() { return this.children; } /** * Set the children value. * * @param children the children value to set * @return the HierarchicalModelV2 object itself. */ public HierarchicalModelV2 withChildren(List<String> children) { this.children = children; return this; } /** * Get the inherits value. * * @return the inherits value */ public PrebuiltDomainObject inherits() { return this.inherits; } /** * Set the inherits value. * * @param inherits the inherits value to set * @return the HierarchicalModelV2 object itself. */ public HierarchicalModelV2 withInherits(PrebuiltDomainObject inherits) { this.inherits = inherits; return this; } /** * Get the roles value. * * @return the roles value */ public List<String> roles() { return this.roles; } /** * Set the roles value. * * @param roles the roles value to set * @return the HierarchicalModelV2 object itself. */ public HierarchicalModelV2 withRoles(List<String> roles) { this.roles = roles; return this; } }
1,054
1,139
package com.journaldev.xml.sax;

import java.util.ArrayList;
import java.util.List;

import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;

import com.journaldev.xml.Employee;

public class MyHandler extends DefaultHandler {

    // List to hold Employees object
    private List<Employee> empList = null;
    private Employee emp = null;
    private StringBuilder data = null;

    // getter method for employee list
    public List<Employee> getEmpList() {
        return empList;
    }

    boolean bAge = false;
    boolean bName = false;
    boolean bGender = false;
    boolean bRole = false;

    @Override
    public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
        if (qName.equalsIgnoreCase("Employee")) {
            // create a new Employee and put it in Map
            String id = attributes.getValue("id");
            // initialize Employee object and set id attribute
            emp = new Employee();
            emp.setId(Integer.parseInt(id));
            // initialize list
            if (empList == null)
                empList = new ArrayList<>();
        } else if (qName.equalsIgnoreCase("name")) {
            // set boolean values for fields, will be used in setting Employee variables
            bName = true;
        } else if (qName.equalsIgnoreCase("age")) {
            bAge = true;
        } else if (qName.equalsIgnoreCase("gender")) {
            bGender = true;
        } else if (qName.equalsIgnoreCase("role")) {
            bRole = true;
        }
        // create the data container
        data = new StringBuilder();
    }

    @Override
    public void endElement(String uri, String localName, String qName) throws SAXException {
        if (bAge) {
            // age element, set Employee age
            emp.setAge(Integer.parseInt(data.toString()));
            bAge = false;
        } else if (bName) {
            emp.setName(data.toString());
            bName = false;
        } else if (bRole) {
            emp.setRole(data.toString());
            bRole = false;
        } else if (bGender) {
            emp.setGender(data.toString());
            bGender = false;
        }
        if (qName.equalsIgnoreCase("Employee")) {
            // add Employee object to list
            empList.add(emp);
        }
    }

    @Override
    public void characters(char ch[], int start, int length) throws SAXException {
        data.append(new String(ch, start, length));
    }
}
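// Illustrative only (not part of the original file above): a minimal driver showing how
// MyHandler is typically wired into a SAX parse. The "employees.xml" path is a placeholder
// assumption; the parse() call drives the startElement/characters/endElement callbacks above.
import java.io.File;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;

public class MyHandlerUsageExample {
    public static void main(String[] args) throws Exception {
        SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
        MyHandler handler = new MyHandler();
        parser.parse(new File("employees.xml"), handler);
        // after parsing, the handler exposes the accumulated Employee objects
        System.out.println(handler.getEmpList().size() + " employees parsed");
    }
}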
786
3,212
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.security.util.crypto;

import org.apache.nifi.security.util.KeyDerivationFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * <p> Provides a factory for SecureHasher implementations. Will return Argon2 by default if no algorithm parameter is given.
 * Algorithm parameter should align with the below registered secure hasher names (PBKDF2, BCRYPT, SCRYPT, ARGON2).
 */
public class SecureHasherFactory {

    private static final Logger LOGGER = LoggerFactory.getLogger(SecureHasherFactory.class);

    private static Map<KeyDerivationFunction, Class<? extends SecureHasher>> registeredSecureHashers;

    private static final Class<? extends SecureHasher> DEFAULT_SECURE_HASHER_CLASS = Argon2SecureHasher.class;

    static {
        registeredSecureHashers = new HashMap<>();
        registeredSecureHashers.put(KeyDerivationFunction.PBKDF2, PBKDF2SecureHasher.class);
        registeredSecureHashers.put(KeyDerivationFunction.BCRYPT, BcryptSecureHasher.class);
        registeredSecureHashers.put(KeyDerivationFunction.SCRYPT, ScryptSecureHasher.class);
        registeredSecureHashers.put(KeyDerivationFunction.ARGON2, Argon2SecureHasher.class);
    }

    public static SecureHasher getSecureHasher(final String algorithm) {
        Class<? extends SecureHasher> secureHasherClass = DEFAULT_SECURE_HASHER_CLASS;
        final String algorithmPattern = algorithm.toUpperCase();
        try {
            for (KeyDerivationFunction keyDerivationFunction : registeredSecureHashers.keySet()) {
                final String functionName = keyDerivationFunction.getKdfName().toUpperCase();
                if (algorithmPattern.contains(functionName)) {
                    secureHasherClass = registeredSecureHashers.get(keyDerivationFunction);
                }
            }
            LOGGER.debug("Creating SecureHasher [{}] for algorithm [{}]", secureHasherClass.getName(), algorithm);
            return secureHasherClass.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new SecureHasherException(String.format("Failed to create SecureHasher for algorithm [%s]", algorithm), e);
        }
    }
}
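// Illustrative only (not part of the original file above): a hypothetical caller of the factory.
// The algorithm strings are example values -- per the code above, the factory only requires the
// string to contain one of the registered KDF names (PBKDF2, BCRYPT, SCRYPT, ARGON2) and
// otherwise falls back to the Argon2 default.
public class SecureHasherFactoryUsageExample {
    public static void main(String[] args) {
        SecureHasher scryptHasher = SecureHasherFactory.getSecureHasher("NIFI_SCRYPT_AES_GCM_256");
        SecureHasher defaultHasher = SecureHasherFactory.getSecureHasher("UNKNOWN_ALGORITHM"); // Argon2 fallback
        System.out.println(scryptHasher.getClass().getSimpleName());
        System.out.println(defaultHasher.getClass().getSimpleName());
    }
}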
977
1,144
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 98, 99, 2000 by <NAME>
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 */
#ifndef _ASM_POSIX_TYPES_H
#define _ASM_POSIX_TYPES_H

#include <asm/sgidefs.h>

/*
 * This file is generally used by user-level software, so you need to
 * be a little careful about namespace pollution etc. Also, we cannot
 * assume GCC is being used.
 */

typedef long __kernel_daddr_t;
#define __kernel_daddr_t __kernel_daddr_t

#if (_MIPS_SZLONG == 32)
typedef struct {
	long	val[2];
} __kernel_fsid_t;
#define __kernel_fsid_t __kernel_fsid_t
#endif

#include <asm-generic/posix_types.h>

#endif /* _ASM_POSIX_TYPES_H */
298
1,800
package com.limpoxe.fairy.core.exception; /** * Created by cailiming on 16/11/18. */ public class PluginNotFoundError extends Error { public PluginNotFoundError(String detailMessage) { super(detailMessage); } public PluginNotFoundError(String detailMessage, Throwable throwable) { super(detailMessage, throwable); } public PluginNotFoundError(Throwable throwable) { super(throwable); } }
157
1,144
package de.metas.handlingunits; /* * #%L * de.metas.handlingunits.base * %% * Copyright (C) 2015 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ import org.adempiere.exceptions.AdempiereException; import org.adempiere.util.lang.IMutable; import de.metas.handlingunits.model.I_M_HU; import de.metas.handlingunits.model.I_M_HU_Item; import de.metas.handlingunits.storage.IHUItemStorage; import de.metas.util.Check; /** * {@link IHUIteratorListener} adapter, can be used if there is an existing listener implementation <b>"A"</b> and you want to use <b>most</b> most of <b>"A"</b>'s method implementations. In that case * you can create a subclass of <code>this</code> class, call its constructor with <b>"A"</b> as parameter and only override the methods you need a different implementation for. * * @author tsa * */ public class HUIteratorListenerDelegateAdapter implements IHUIteratorListener { private final IHUIteratorListener delegate; public HUIteratorListenerDelegateAdapter(final IHUIteratorListener delegate) { super(); Check.assumeNotNull(delegate, "delegate not null"); this.delegate = delegate; } @Override public String toString() { return getClass().getSimpleName() + "[" + delegate + "]"; } @Override public void setHUIterator(final IHUIterator iterator) { delegate.setHUIterator(iterator); } protected IHUIterator getHUIterator() { if (delegate instanceof HUIteratorListenerAdapter) { return ((HUIteratorListenerAdapter)delegate).getHUIterator(); } else { throw new AdempiereException("Cannot get HUIterator from delegate: " + delegate); } } @Override public Result beforeHU(final IMutable<I_M_HU> hu) { return delegate.beforeHU(hu); } @Override public Result afterHU(final I_M_HU hu) { return delegate.afterHU(hu); } @Override public Result beforeHUItem(final IMutable<I_M_HU_Item> item) { return delegate.beforeHUItem(item); } @Override public Result afterHUItem(final I_M_HU_Item item) { return delegate.afterHUItem(item); } @Override public Result beforeHUItemStorage(final IMutable<IHUItemStorage> itemStorage) { return delegate.beforeHUItemStorage(itemStorage); } @Override public Result afterHUItemStorage(final IHUItemStorage itemStorage) { return delegate.afterHUItemStorage(itemStorage); } }
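The Javadoc above spells out the intended pattern: subclass the adapter, pass the existing listener to the constructor, and override only the callbacks that need different behavior. Below is a minimal sketch of such a subclass; the HU-counting logic is invented for illustration, and it assumes Result resolves the same way it does in the adapter above (i.e. the result type declared by IHUIteratorListener).

package de.metas.handlingunits;

import de.metas.handlingunits.model.I_M_HU;

/**
 * Illustrative subclass: reuses all of the wrapped listener's behavior and only
 * hooks into afterHU(). Only the delegation pattern itself comes from the adapter above.
 */
public class CountingHUIteratorListener extends HUIteratorListenerDelegateAdapter
{
	private int husSeen = 0;

	public CountingHUIteratorListener(final IHUIteratorListener delegate)
	{
		super(delegate);
	}

	@Override
	public Result afterHU(final I_M_HU hu)
	{
		husSeen++;
		// every other callback keeps the delegate's behavior untouched
		return super.afterHU(hu);
	}

	public int getHUsSeen()
	{
		return husSeen;
	}
}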
997
1,992
<reponame>samwestmoreland/please
// please_sandbox is a very small binary to implement sandboxing
// of tests (and possibly other build actions) via cgroups.
// Essentially this is a very lightweight replacement for Docker
// where we would use it for tests to avoid port clashes etc.
//
// Note that this is a no-op on non-Linux OSs because they will not
// support namespaces / cgroups. We still behave similarly otherwise
// in order for it to be transparent to the rest of the system.

#include <stdbool.h>  // for bool (harmless if sandbox.h already provides it)
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "tools/sandbox/sandbox.h"

int main(int argc, char* argv[]) {
    if (argc < 2) {
        fputs("please_sandbox implements sandboxing for Please.\n", stderr);
        fputs("It takes no flags, it simply executes the command given as arguments.\n", stderr);
        fputs("Usage: please_sandbox command args...\n", stderr);
        return 1;
    }
    // Network namespace is sandboxed by default but it can be opted out if `SHARE_NETWORK=1` env is set
    const char* share_network_env = getenv("SHARE_NETWORK");
    const bool unshare_network = share_network_env == NULL || strcmp(share_network_env, "1") != 0;

    // Mount namespace is sandboxed by default but it can be opted out if `SHARE_MOUNT=1` env is set
    const char* share_mount_env = getenv("SHARE_MOUNT");
    const bool unshare_mount = share_mount_env == NULL || strcmp(share_mount_env, "1") != 0;

    return contain(&argv[1], unshare_network, unshare_mount);
}
482
778
/*
 * Copyright (C) 2020 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 *
 */

#pragma once

namespace L0 {
// Method called by global factory enabler
template <typename Type>
void populateFactoryTable();
} // namespace L0
74
2,151
/* This Java source file was generated by test-to-java.xsl and is a derived work from the source document. The source document contained the following notice: Copyright (c) 2001-2004 World Wide Web Consortium, (Massachusetts Institute of Technology, Institut National de Recherche en Informatique et en Automatique, Keio University). All Rights Reserved. This program is distributed under the W3C's Software Intellectual Property License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See W3C License http://www.w3.org/Consortium/Legal/ for more details. */ package org.w3c.domts.level1.core; import org.w3c.dom.*; import org.w3c.domts.DOMTestCase; import org.w3c.domts.DOMTestDocumentBuilderFactory; /** * The "createCDATASection(data)" method creates a new * CDATASection node whose value is the specified string. * Retrieve the entire DOM document and invoke its * "createCDATASection(data)" method. It should create a * new CDATASection node whose "data" is the specified * string. The content, name and type are retrieved and * output. * @author NIST * @author <NAME> * @see <a href="http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#ID-D26C0AF8">http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#ID-D26C0AF8</a> */ public final class documentcreatecdatasection extends DOMTestCase { /** * Constructor. * @param factory document factory, may not be null * @throws org.w3c.domts.DOMTestIncompatibleException Thrown if test is not compatible with parser configuration */ public documentcreatecdatasection(final DOMTestDocumentBuilderFactory factory) throws org.w3c.domts.DOMTestIncompatibleException { super(factory); // // check if loaded documents are supported for content type // String contentType = getContentType(); preload(contentType, "staff", true); } /** * Runs the test case. * @throws Throwable Any uncaught exception causes test to fail */ public void runTest() throws Throwable { Document doc; CDATASection newCDATASectionNode; String newCDATASectionValue; String newCDATASectionName; int newCDATASectionType; doc = (Document) load("staff", true); newCDATASectionNode = doc.createCDATASection("This is a new CDATASection node"); newCDATASectionValue = newCDATASectionNode.getNodeValue(); assertEquals("nodeValue", "This is a new CDATASection node", newCDATASectionValue); newCDATASectionName = newCDATASectionNode.getNodeName(); assertEquals("nodeName", "#cdata-section", newCDATASectionName); newCDATASectionType = (int) newCDATASectionNode.getNodeType(); assertEquals("nodeType", 4, newCDATASectionType); } /** * Gets URI that identifies the test. * @return uri identifier of test */ public String getTargetURI() { return "http://www.w3.org/2001/DOM-Test-Suite/level1/core/documentcreatecdatasection"; } /** * Runs this test from the command line. * @param args command line arguments */ public static void main(final String[] args) { DOMTestCase.doMain(documentcreatecdatasection.class, args); } }
1,141
1,056
<reponame>timfel/netbeans<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.xsl.utils; import java.io.*; import java.net.*; import java.beans.PropertyVetoException; import junit.framework.*; import org.netbeans.junit.*; import org.xml.sax.*; import javax.xml.parsers.*; import javax.xml.transform.*; import javax.xml.transform.sax.*; import javax.xml.transform.stream.*; import org.openide.filesystems.*; import org.openide.loaders.*; import org.netbeans.api.xml.cookies.*; /** * * @author <NAME> */ public class TransformUtilTest extends NbTestCase { public TransformUtilTest(java.lang.String testName) { super(testName); } public static void main(java.lang.String[] args) { junit.textui.TestRunner.run(suite()); } public static Test suite() { TestSuite suite = new NbTestSuite(TransformUtilTest.class); return suite; } public void testIsXSLTransformation () throws Exception { System.out.println("testIsXSLTransformation"); assertTrue (".xml document must NOT pass!", false==TransformUtil.isXSLTransformation (getDataObject ("doc.xml"))); assertTrue (".xsl document MUST pass!", TransformUtil.isXSLTransformation (getDataObject ("doc2xhtml.xsl"))); } public void testGetURLName () throws Exception { System.out.println("testGetURLName"); FileObject docXML = getFileObject("doc.xml"); String docXMLName = TransformUtil.getURLName(docXML); System.out.println(" docXML: " + docXML + " => '" + docXMLName + "'"); assertTrue ("URL should not contain nbsf://!",-1==docXMLName.indexOf("nbfs")); } public void testCreateURL () throws Exception { System.out.println("testCreateURL"); URL dataURL = getClass().getResource("data/"); URL docXMLURL = getClass().getResource("data/doc.xml"); URL docDTDURL = getClass().getResource("data/doc.dtd"); assertTrue ("Both URLs must be same!", docXMLURL.sameFile (TransformUtil.createURL (dataURL, "doc.xml"))); assertTrue ("Both URLs must be same!", docXMLURL.sameFile (TransformUtil.createURL (docDTDURL, "doc.xml"))); assertTrue ("Both URLs must be same!", docXMLURL.sameFile (TransformUtil.createURL (docDTDURL, "../data/doc.xml"))); assertTrue ("Both URLs must NOT be same!", false==docXMLURL.sameFile (TransformUtil.createURL (docDTDURL, "data/doc.xml"))); assertTrue ("Both URLs must be same!", false==docXMLURL.sameFile (TransformUtil.createURL (docDTDURL, docDTDURL.toExternalForm()))); } public void testGetAssociatedStylesheet () throws Exception { System.out.println("testGetAssociatedStylesheet"); URL docXMLURL = getClass().getResource("data/doc.xml"); URL invalidDocXMLURL = getClass().getResource("data/InvalidDocument.xml"); // assertTrue ("doc.xml does NOT have associated stylesheet", null==TransformUtil.getAssociatedStylesheet(docXMLURL)); // FAILS probably because bug in org.apache.xml.utils.URI => // 
"org.apache.xml.utils.URI$MalformedURIException: Path contains invalid character: [" if it is nbfs: URL! //assertTrue ("InvalidDocument.xml DOES have associated stylesheet", null!=TransformUtil.getAssociatedStylesheet(invalidDocXMLURL)); // Same URL converted to file: format. FileObject FO = URLMapper.findFileObjects (invalidDocXMLURL)[0]; URL url = URLMapper.findURL(FO, URLMapper.EXTERNAL); assertTrue ("InvalidDocument.xml DOES have associated stylesheet", null!=TransformUtil.getAssociatedStylesheet (url)); } public void testGuessOutputExt () throws Exception { System.out.println("testGuessOutputExt"); URL doc2htmlURL = getClass().getResource("data/doc2html.xsl"); URL doc2textURL = getClass().getResource("data/doc2text.xsl"); URL doc2xhtmlURL = getClass().getResource("data/doc2xhtml.xsl"); assertTrue ("doc2html.xsl produces HTML output!", "html".equals (TransformUtil.guessOutputExt (getSource (doc2htmlURL)))); assertTrue ("doc2text.xsl produces TXT output!", "txt".equals (TransformUtil.guessOutputExt (getSource (doc2textURL)))); assertTrue ("doc2xhtml.xsl produces XML output!", "xml".equals (TransformUtil.guessOutputExt (getSource (doc2xhtmlURL)))); } public void testTransform () throws Exception { System.out.println("testTransform"); assertTrue ("Correct XML and correct XSLT must pass!", transform ("data/doc.xml", "data/doc2xhtml.xsl")); assertTrue ("Incorrect XML and correct XSLT must not pass!", false==transform ("data/InvalidDocument.xml", "data/doc2xhtml.xsl")); assertTrue ("Correct XML and incorrect XSLT must not pass!", false==transform ("data/doc.xml", "data/InvalidDocument.xml")); assertTrue ("Incrrect XML and incorrect XSLT must not pass!", false==transform ("data/InvalidDocument.xml", "data/InvalidDocument.xml")); } private boolean transform (String xml, String xslt) { URL xmlURL = getClass().getResource(xml); URL xsltURL = getClass().getResource(xslt); Source xmlSource = new SAXSource (new InputSource (xmlURL.toExternalForm())); Source xsltSource = new SAXSource (new InputSource (xsltURL.toExternalForm())); Result outputResult = new StreamResult (new StringWriter()); Observer observer = new Observer(); // not yet used boolean exceptionThrown = false; try { TransformUtil.transform (xmlSource, null, xsltSource, outputResult, observer); } catch (TransformerException exc) { System.err.println("!!! " + exc); exceptionThrown = true; } System.out.println(xml + " & " + xslt + " => " + ( exceptionThrown ? 
"WRONG" : "OK" )); return exceptionThrown==false; } // // utils // private FileObject getFileObject (String name) throws PropertyVetoException, IOException { URL url = getClass().getResource("data/" + name); /* FileSystem FS = getDataFileSystem(); FileObject FO = FS.findResource (name); return FO;*/ FileObject[] fos = URLMapper.findFileObjects (url); return fos[0]; } private DataObject getDataObject (String name) throws PropertyVetoException, IOException, DataObjectNotFoundException { FileObject FO = getFileObject (name); DataObject DO = DataObject.find (FO); return DO; } /* private FileSystem getDataFileSystem () throws PropertyVetoException, IOException { URL dataURL = getClass().getResource("data"); String dataSysName = dataURL.toExternalForm(); Repository repository = Repository.getDefault(); FileSystem dataFS = repository.findFileSystem (dataSysName); if ( dataFS == null ) { LocalFileSystem locFS = new LocalFileSystem(); locFS.setRootDirectory (new File (dataSysName)); dataFS = locFS; } return dataFS; }*/ private Source getSource (URL url) throws ParserConfigurationException, SAXException { XMLReader reader = TransformUtil.newXMLReader(); reader.setEntityResolver (TransformUtil.getEntityResolver()); Source source = new SAXSource (reader, new InputSource (url.toExternalForm())); return source; } // // class Observer // private static class Observer implements CookieObserver { private int receives; private int warnings; public void receive(CookieMessage msg) { receives++; if (msg.getLevel() >= msg.WARNING_LEVEL) { warnings++; } } public int getWarnings() { return warnings; } } // class Observer }
3,424
1,602
<gh_stars>1000+
#include <stdio.h>  /* needed for printf */

typedef struct _R {
    int x;
} R;

static void foo(R r) {
    printf("foo got: %d\n", r.x);
}

static void bar(R* r) {
    printf("bar got: %d\n", r->x);
}
82
1,603
package com.linkedin.datahub.graphql.types.container.mappers; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StringMapMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import javax.annotation.Nullable; import static com.linkedin.metadata.Constants.*; public class ContainerMapper { @Nullable public static Container map(final EntityResponse entityResponse) { final Container result = new Container(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); result.setUrn(entityUrn.toString()); result.setType(EntityType.CONTAINER); final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } final EnvelopedAspect envelopedContainerProperties = aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); if (envelopedContainerProperties != null) { result.setProperties(mapContainerProperties(new ContainerProperties(envelopedContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedEditableContainerProperties = aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); if (envelopedEditableContainerProperties != null) { result.setEditableProperties(mapContainerEditableProperties(new EditableContainerProperties(envelopedEditableContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()))); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data())); 
result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()))); } final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()))); } final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { result.setSubTypes(mapSubTypes(new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); if (envelopedContainer != null) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(envelopedContainer.getValue().data()); result.setContainer(Container .builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); } final EnvelopedAspect envelopedDomains = aspects.get(Constants.DOMAINS_ASPECT_NAME); if (envelopedDomains != null) { final Domains domains = new Domains(envelopedDomains.getValue().data()); // Currently we only take the first domain if it exists. if (domains.getDomains().size() > 0) { result.setDomain(Domain.builder() .setType(EntityType.DOMAIN) .setUrn(domains.getDomains().get(0).toString()).build()); } } final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { result.setDeprecation(DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); } return result; } private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties(final ContainerProperties gmsProperties) { final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = new com.linkedin.datahub.graphql.generated.ContainerProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { propertiesResult.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { propertiesResult.setCustomProperties(StringMapMapper.map(gmsProperties.getCustomProperties())); } return propertiesResult; } private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties mapContainerEditableProperties( final EditableContainerProperties gmsProperties) { final com.linkedin.datahub.graphql.generated.ContainerEditableProperties editableContainerProperties = new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); editableContainerProperties.setDescription(gmsProperties.getDescription()); return editableContainerProperties; } private static com.linkedin.datahub.graphql.generated.SubTypes mapSubTypes(final SubTypes gmsSubTypes) { final com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); subTypes.setTypeNames(gmsSubTypes.getTypeNames()); return subTypes; } private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) { // Set dummy platform to be resolved. final DataPlatform dummyPlatform = new DataPlatform(); dummyPlatform.setUrn(platformInstance.getPlatform().toString()); return dummyPlatform; } private ContainerMapper() { } }
2,553
1,475
<reponame>mhansonp/geode /* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management; import static java.util.concurrent.TimeUnit.MINUTES; import static org.apache.geode.cache.Region.SEPARATOR; import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER; import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_PORT; import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_START; import static org.apache.geode.distributed.ConfigurationProperties.OFF_HEAP_MEMORY_SIZE; import static org.apache.geode.test.awaitility.GeodeAwaitility.await; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.fail; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Properties; import javax.management.Attribute; import javax.management.AttributeList; import javax.management.JMException; import javax.management.MBeanServer; import javax.management.Notification; import javax.management.NotificationListener; import javax.management.ObjectName; import com.google.common.base.Stopwatch; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.OutOfOffHeapMemoryException; import org.apache.geode.cache.Cache; import org.apache.geode.cache.DataPolicy; import org.apache.geode.cache.Region; import org.apache.geode.cache30.CacheTestCase; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.internal.offheap.OffHeapStorage; import org.apache.geode.internal.offheap.OffHeapStoredObject; import org.apache.geode.management.internal.MBeanJMXAdapter; import org.apache.geode.test.dunit.Host; import org.apache.geode.test.dunit.VM; import org.apache.geode.test.dunit.rules.DistributedRestoreSystemProperties; import org.apache.geode.test.junit.categories.JMXTest; /** * Tests the off-heap additions to the RegionMXBean and MemberMXBean JMX interfaces. */ @Category({JMXTest.class}) @SuppressWarnings("serial") public class OffHeapManagementDUnitTest extends CacheTestCase { /** * Specified assertion operations. */ private enum ASSERT_OP { EQUAL, GREATER_THAN, GREATER_THAN_OR_EQUAL, LESS_THAN } /** * Name of off-heap test region. */ private static final String OFF_HEAP_REGION_NAME = "offHeapRegion"; /** * Path of off-heap test region. */ private static final String OFF_HEAP_REGION_PATH = SEPARATOR + OFF_HEAP_REGION_NAME; /** * Expected total off-heap reserved memory (1 megabyte). */ private static final int TOTAL_MEMORY = 1048576; /** * Half of expected memory total. 
*/ private static final int HALF_TOTAL_MEMORY = (int) (TOTAL_MEMORY / 2); /** * An arbitrary array size. */ private static final int ALLOCATION_SIZE = 100000; /** * A non-arbitrary array size. */ private static final int NEW_ALLOCATION_SIZE = 400000; /** * Java object serialization overhead. */ private static final int OBJECT_OVERHEAD = 8; /** * A region entry key. */ private static final String KEY = "key"; /** * Another region entry key. */ private static final String KEY2 = "key2"; /** * Yet another region entry key. */ private static final String KEY3 = "key3"; /** * A region entry value. */ private static final byte[] VALUE = "Proin lobortis enim vel sem congue ut condimentum leo rhoncus. In turpis lorem, rhoncus nec rutrum vel, sodales vitae lacus. Etiam nunc ligula, scelerisque id egestas vitae, gravida non enim. Donec ac ligula purus. Mauris gravida ligula sit amet mi ornare blandit. Aliquam at velit ac enim varius malesuada ut eu tortor. Quisque diam nisi, fermentum vel accumsan at, commodo et velit." .getBytes(); /** * The expected size of the region entry value in off-heap memory. */ private static final int OBJECT_SIZE = VALUE.length + OBJECT_OVERHEAD; /** * Listens for off-heap JMX notifications. */ private static final OffHeapNotificationListener notificationListener = new OffHeapNotificationListener(); /** * Local MBeanServer. */ private static MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer(); private VM vm; @Rule public DistributedRestoreSystemProperties restoreSystemProperties = new DistributedRestoreSystemProperties(); @Before public void setUp() throws Exception { vm = Host.getHost(0).getVM(0); vm.invoke(() -> { System.setProperty(OffHeapStorage.STAY_CONNECTED_ON_OUTOFOFFHEAPMEMORY_PROPERTY, "true"); }); } @After public void tearDown() throws Exception { doCleanupOnVm(vm); } /** * Returns off-heap system properties for enabling off-heap and the JMX system. */ @Override public Properties getDistributedSystemProperties() { Properties config = new Properties(); config.setProperty(OFF_HEAP_MEMORY_SIZE, "1m"); config.setProperty(JMX_MANAGER, "true"); config.setProperty(JMX_MANAGER_START, "true"); config.setProperty(JMX_MANAGER_PORT, "0"); return config; } /** * Tests off-heap additions to the RegionMXBean and MemberMXBean interfaces. */ @Test public void testOffHeapMBeanAttributesAndStats() throws Exception { // Setup off-heap memory for cache setSystemPropertiesOnVm(vm, true, getDistributedSystemProperties()); // Create our off-heap region assertThat(createOffHeapRegionOnVm(vm, OFF_HEAP_REGION_NAME, DataPolicy.REPLICATE)).isNotNull(); // Make sure our off-heap region has off-heap enabled. 
assertOffHeapRegionAttributesOnVm(vm); // Make sure our starting off heap stats are correct assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); /* * Perform ops on the off-heap region and assert that the off-heap metrics correctly reflect the * ops */ doPutOnVm(vm, KEY, VALUE, OFF_HEAP_REGION_NAME, false); assertOffHeapMetricsOnVm(vm, (TOTAL_MEMORY - OBJECT_SIZE), OBJECT_SIZE, 1, 0); doPutOnVm(vm, KEY2, VALUE, OFF_HEAP_REGION_NAME, false); assertOffHeapMetricsOnVm(vm, (TOTAL_MEMORY - (2 * OBJECT_SIZE)), (2 * OBJECT_SIZE), 2, 0); doPutOnVm(vm, KEY3, VALUE, OFF_HEAP_REGION_NAME, false); assertOffHeapMetricsOnVm(vm, (TOTAL_MEMORY - (3 * OBJECT_SIZE)), (3 * OBJECT_SIZE), 3, 0); doDestroyOnVm(vm, KEY3, OFF_HEAP_REGION_NAME); assertOffHeapMetricsOnVm(vm, (TOTAL_MEMORY - (2 * OBJECT_SIZE)), (2 * OBJECT_SIZE), 2, 0); doDestroyOnVm(vm, KEY2, OFF_HEAP_REGION_NAME); assertOffHeapMetricsOnVm(vm, (TOTAL_MEMORY - OBJECT_SIZE), OBJECT_SIZE, 1, 0); doDestroyOnVm(vm, KEY, OFF_HEAP_REGION_NAME); assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); } /** * Tests the fragmentation statistic for off-heap memory. */ @Test public void testFragmentationStat() throws Exception { // Setup off-heap memory for cache setSystemPropertiesOnVm(vm, true, getDistributedSystemProperties()); // Create our off-heap region assertThat(createOffHeapRegionOnVm(vm, OFF_HEAP_REGION_NAME, DataPolicy.REPLICATE)).isNotNull(); vm.invoke(() -> { Region region = getCache().getRegion(OFF_HEAP_REGION_NAME); assertThat(region).isNotNull(); }); // Make sure our off-heap region has off-heap enabled. assertOffHeapRegionAttributesOnVm(vm); // Make sure our starting off heap stats are correct assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); // After allocating large chunk (equal to total memory) // we should still have no fragmentation int largeChunk = (int) TOTAL_MEMORY - OffHeapStoredObject.HEADER_SIZE; doPutOnVm(vm, KEY, new byte[largeChunk], OFF_HEAP_REGION_NAME, false); // No compaction has run, so fragmentation should be zero assertFragmentationStatOnVm(vm, 0, ASSERT_OP.EQUAL); // Allocate more memory to trigger compaction doPutOnVm(vm, KEY, new byte[ALLOCATION_SIZE], OFF_HEAP_REGION_NAME, true); // When total memory is used no fragmentation assertFragmentationStatOnVm(vm, 0, ASSERT_OP.EQUAL); // After freeing all memory we should have no fragmentation doDestroyOnVm(vm, KEY, OFF_HEAP_REGION_NAME); assertFragmentationStatOnVm(vm, 0, ASSERT_OP.EQUAL); // Allocate HALF_TOTAL_MEMORY twice and release one to create one fragment int halfChunk = HALF_TOTAL_MEMORY - OffHeapStoredObject.HEADER_SIZE; doPutOnVm(vm, KEY + "0", new byte[halfChunk], OFF_HEAP_REGION_NAME, false); doPutOnVm(vm, KEY + "1", new byte[halfChunk], OFF_HEAP_REGION_NAME, false); doDestroyOnVm(vm, KEY + "0", OFF_HEAP_REGION_NAME); // Allocate largeChunk to trigger compaction and fragmentation should be zero // as all free memory is available as one fragment doPutOnVm(vm, KEY + "1", new byte[largeChunk], OFF_HEAP_REGION_NAME, true); assertFragmentationStatOnVm(vm, 0, ASSERT_OP.EQUAL); // Consume the available fragment as below // [16][262120][16][262120][16] = [524288] (HALF_TOTAL_MEMORY) int smallChunk = OffHeapStoredObject.MIN_CHUNK_SIZE - OffHeapStoredObject.HEADER_SIZE; int mediumChunk = 262112; // (262120 - ObjectChunk.OFF_HEAP_HEADER_SIZE) doPutOnVm(vm, KEY + "S1", new byte[smallChunk], OFF_HEAP_REGION_NAME, false); doPutOnVm(vm, KEY + "M1", new byte[mediumChunk], OFF_HEAP_REGION_NAME, false); doPutOnVm(vm, KEY + "S2", new byte[smallChunk], OFF_HEAP_REGION_NAME, false); 
doPutOnVm(vm, KEY + "M2", new byte[mediumChunk], OFF_HEAP_REGION_NAME, false); doPutOnVm(vm, KEY + "S3", new byte[smallChunk], OFF_HEAP_REGION_NAME, false); // free small chunks to create gaps doDestroyOnVm(vm, KEY + "S1", OFF_HEAP_REGION_NAME); doDestroyOnVm(vm, KEY + "S2", OFF_HEAP_REGION_NAME); doDestroyOnVm(vm, KEY + "S3", OFF_HEAP_REGION_NAME); // Now free memory should be 48 so allocate a 40 byte object doPutOnVm(vm, KEY + "newKey", new byte[40], OFF_HEAP_REGION_NAME, true); /* * Setup a fragmentation attribute monitor */ setupOffHeapMonitorOnVm(vm, "OffHeapFragmentation", 0, 0); clearNotificationListenerOnVm(vm); // Make sure we have some fragmentation assertFragmentationStatOnVm(vm, 100, ASSERT_OP.EQUAL); // Make sure our fragmentation monitor was triggered waitForNotificationListenerOnVm(vm, 5000); } /** * Tests the compaction time statistic for off-heap memory. */ @Test public void testCompactionTimeStat() throws Exception { // Setup off-heap memory for cache setSystemPropertiesOnVm(vm, true, getDistributedSystemProperties()); // Create our off-heap region assertThat(createOffHeapRegionOnVm(vm, OFF_HEAP_REGION_NAME, DataPolicy.REPLICATE)).isNotNull(); // Make sure our off-heap region has off-heap enabled. assertOffHeapRegionAttributesOnVm(vm); // Make sure our starting off heap stats are correct assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); // After allocating large chunck we should still have no compaction time doPutOnVm(vm, KEY, new byte[HALF_TOTAL_MEMORY], OFF_HEAP_REGION_NAME, false); assertCompactionTimeStatOnVm(vm, 0, ASSERT_OP.EQUAL); // After freeing all memory we should have no compaction time doDestroyOnVm(vm, KEY, OFF_HEAP_REGION_NAME); assertCompactionTimeStatOnVm(vm, 0, ASSERT_OP.EQUAL); // Consume all off-heap memory using an allocation size int numAllocations = doConsumeOffHeapMemoryOnVm(vm, ALLOCATION_SIZE); assertThat(numAllocations > 0).isTrue(); // Randomly free 3 allocations to produce off-heap gaps doFreeOffHeapMemoryOnVm(vm, numAllocations, 3); /* * Setup a compaction time attribute monitor */ setupOffHeapMonitorOnVm(vm, "OffHeapCompactionTime", 0, 0); clearNotificationListenerOnVm(vm); // Allocate enough memory to force compaction which will update compaction time stat doPutOnVm(vm, KEY, new byte[NEW_ALLOCATION_SIZE], OFF_HEAP_REGION_NAME, true); // Make sure our compaction time monitor was triggered waitForNotificationListenerOnVm(vm, 5000); /* * Make sure we have some compaction time. In some environments the compaction time is reported * as 0 due to time sample granularity and compaction speed. */ assertCompactionTimeStatOnVm(vm, 0, ASSERT_OP.GREATER_THAN_OR_EQUAL); } /** * Asserts that a monitor assigned to the OffHeapObjects attribute is triggered. */ @Test public void testOffHeapObjectsMonitoring() throws Exception { // Setup off-heap memory for cache setSystemPropertiesOnVm(vm, true, getDistributedSystemProperties()); // Create our off-heap region assertThat(createOffHeapRegionOnVm(vm, OFF_HEAP_REGION_NAME, DataPolicy.REPLICATE)).isNotNull(); // Make sure our off-heap region has off-heap enabled. 
assertOffHeapRegionAttributesOnVm(vm); // Make sure our starting off heap stats are correct assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); /* * Tests off-heap objects notification */ setupOffHeapMonitorOnVm(vm, "OffHeapObjects", 0, -1); clearNotificationListenerOnVm(vm); doPutOnVm(vm, KEY, VALUE, OFF_HEAP_REGION_NAME, false); waitForNotificationListenerOnVm(vm, 5000); } /** * Asserts that a monitor assigned to the OffHeapFreeSize attribute is triggered. */ @Test public void testOffHeapFreeSizeMonitoring() throws Exception { // Setup off-heap memory for cache setSystemPropertiesOnVm(vm, true, getDistributedSystemProperties()); // Create our off-heap region assertThat(createOffHeapRegionOnVm(vm, OFF_HEAP_REGION_NAME, DataPolicy.REPLICATE)).isNotNull(); // Make sure our off-heap region has off-heap enabled. assertOffHeapRegionAttributesOnVm(vm); // Make sure our starting off heap stats are correct assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); /* * Tests off-heap objects notification */ setupOffHeapMonitorOnVm(vm, "OffHeapFreeSize", TOTAL_MEMORY, TOTAL_MEMORY); clearNotificationListenerOnVm(vm); doPutOnVm(vm, KEY, VALUE, OFF_HEAP_REGION_NAME, false); waitForNotificationListenerOnVm(vm, 5000); } /** * Asserts that a monitor assigned to the OffHeapAllocatedSize attribute is triggered. */ @Test public void testOffHeapAllocatedSizeMonitoring() throws Exception { // Setup off-heap memory for cache setSystemPropertiesOnVm(vm, true, getDistributedSystemProperties()); // Create our off-heap region assertThat(createOffHeapRegionOnVm(vm, OFF_HEAP_REGION_NAME, DataPolicy.REPLICATE)).isNotNull(); // Make sure our off-heap region has off-heap enabled. assertOffHeapRegionAttributesOnVm(vm); // Make sure our starting off heap stats are correct assertOffHeapMetricsOnVm(vm, TOTAL_MEMORY, 0, 0, 0); /* * Tests off-heap objects notification */ setupOffHeapMonitorOnVm(vm, "OffHeapAllocatedSize", 0, OBJECT_SIZE); clearNotificationListenerOnVm(vm); doPutOnVm(vm, KEY, VALUE, OFF_HEAP_REGION_NAME, false); waitForNotificationListenerOnVm(vm, 5000); } /** * Destroys a number of entries previously allocated. * * @param vm a virtual machine * @param numAllocations the number of previous off-heap allocations * @param numDestroys the number of destroys to perform */ private void doFreeOffHeapMemoryOnVm(final VM vm, final int numAllocations, final int numDestroys) { vm.invoke(() -> { doFreeOffHeapMemory(numAllocations, numDestroys); }); } /** * Performs some destroys to free off-heap allocations. * * @param numAllocations the number of previous off-heap allocations * @param numDestroys the number of destroys to perform */ private void doFreeOffHeapMemory(final int numAllocations, final int numDestroys) { assertThat(numDestroys <= numAllocations).isTrue(); Region region = getCache().getRegion(OFF_HEAP_REGION_NAME); assertThat(region).isNotNull(); assertThat(numDestroys <= region.size()).isTrue(); String key = "KEY0"; Object value = key; int destroyed = 0; while (destroyed < numDestroys) { key = "KEY" + ((int) (Math.random() * numAllocations)); value = region.get(key); if (null != value) { region.destroy(key); ++destroyed; } } } /** * Consumes off off-heap memory until the allocation size cannot be satisfied. 
* * @param vm a virtual machine * @param allocationSize the number of bytes for each allocation * * @return the number of successful puts */ private int doConsumeOffHeapMemoryOnVm(final VM vm, final int allocationSize) { return vm.invoke(() -> doConsumeOffHeapMemory(allocationSize)); } /** * Consumes off off-heap memory until the allocation size cannot be satisfied. * * @param allocationSize the number of bytes for each allocation * * @return the number of successful puts */ private int doConsumeOffHeapMemory(final int allocationSize) { int i = 0; // Loop until we fail try { Stopwatch stopwatch = Stopwatch.createStarted(); while (stopwatch.elapsed(MINUTES) < 2) { doPut("KEY" + (i++), new byte[allocationSize], OFF_HEAP_REGION_NAME, false); } } catch (OutOfOffHeapMemoryException e) { } return i; } /** * Asserts that the compactionTime stat is available and satisfies an assert operation. * * @param vm a virtual machine. * @param compactionTime total off heap compaction time. * @param op an assert operation. */ private void assertCompactionTimeStatOnVm(final VM vm, final long compactionTime, final ASSERT_OP op) { vm.invoke(() -> assertCompactionTimeStat(compactionTime, op)); } /** * Asserts that the compactionTime stat is available and satisfies an assert operation. * * @param compactionTime total off heap compaction time. * @param op an assert operation. */ private void assertCompactionTimeStat(final long compactionTime, final ASSERT_OP op) { ManagementService service = ManagementService.getExistingManagementService(getCache()); assertThat(service).isNotNull(); assertThat(service.isManager()).isTrue(); MemberMXBean memberBean = service.getMemberMXBean(); assertThat(memberBean).isNotNull(); switch (op) { case EQUAL: assertThat(memberBean.getOffHeapCompactionTime()).isEqualTo(compactionTime); break; case GREATER_THAN: assertThat(compactionTime < memberBean.getOffHeapCompactionTime()).isTrue(); break; case GREATER_THAN_OR_EQUAL: assertThat(compactionTime <= memberBean.getOffHeapCompactionTime()).isTrue(); break; case LESS_THAN: assertThat(compactionTime > memberBean.getOffHeapCompactionTime()).isTrue(); break; } } /** * Asserts that the fragmentation stat is available and satisfies an assert operation. * * @param vm a virtual machine * @param fragmentation a fragmentation percentage * @param op an assertion operation */ private void assertFragmentationStatOnVm(final VM vm, final int fragmentation, final ASSERT_OP op) { vm.invoke(() -> assertFragmentationStat(fragmentation, op)); } /** * Asserts that the fragmentation stat is available and satisfies an assert operation. * * @param fragmentation a fragmentation percentage * @param op an assertion operation */ private void assertFragmentationStat(final int fragmentation, final ASSERT_OP op) { ManagementService service = ManagementService.getExistingManagementService(getCache()); assertThat(service).isNotNull(); assertThat(service.isManager()).isTrue(); MemberMXBean memberBean = service.getMemberMXBean(); assertThat(memberBean).isNotNull(); switch (op) { case EQUAL: assertThat(memberBean.getOffHeapFragmentation()).isEqualTo(fragmentation); break; case GREATER_THAN: assertThat(fragmentation < memberBean.getOffHeapFragmentation()).isTrue(); break; case LESS_THAN: assertThat(fragmentation > memberBean.getOffHeapFragmentation()).isTrue(); break; } } /** * Removes off heap region on vm and disconnects. * * @param vm a virtual machine. */ private void doCleanupOnVm(final VM vm) { vm.invoke(() -> cleanup()); } /** * Removes off-heap region and disconnects. 
*/ protected void cleanup() { Cache existingCache = basicGetCache(); if (null != existingCache && !existingCache.isClosed()) { Region region = getCache().getRegion(OFF_HEAP_REGION_NAME); if (null != region) { region.destroyRegion(); } } disconnectFromDS(); } /** * Asserts that the off heap region data is available and enabled for a VM. */ private void assertOffHeapRegionAttributesOnVm(final VM vm) { vm.invoke(() -> assertOffHeapRegionAttributes()); } /** * Asserts that the off heap region data is available and enabled. */ private void assertOffHeapRegionAttributes() { ManagementService service = ManagementService.getExistingManagementService(getCache()); assertThat(service).isNotNull(); assertThat(service.isManager()).isTrue(); RegionMXBean regionBean = service.getLocalRegionMBean(OFF_HEAP_REGION_PATH); assertThat(regionBean).isNotNull(); RegionAttributesData regionData = regionBean.listRegionAttributes(); assertThat(regionData).isNotNull(); assertThat(regionData.getOffHeap()).isTrue(); } /** * Asserts that OffHeapMetrics match input parameters for a VM. * * @param vm a virtual machine. * @param freeMemory total off-heap free memory in bytes. * @param allocatedMemory allocated (or used) off-heap memory in bytes. * @param objects number of objects stored in off-heap memory. * @param fragmentation the fragmentation percentage. */ private void assertOffHeapMetricsOnVm(final VM vm, final int freeMemory, final int allocatedMemory, final int objects, final int fragmentation) { vm.invoke(() -> assertOffHeapMetrics(freeMemory, allocatedMemory, objects, fragmentation)); } /** * Asserts that OffHeapMetrics match input parameters. * * @param freeMemory total off-heap free memory in bytes. * @param allocatedMemory allocated (or used) off-heap memory in bytes. * @param objects number of objects stored in off-heap memory. * @param fragmentation the fragmentation percentage. */ private void assertOffHeapMetrics(final int freeMemory, final int allocatedMemory, final int objects, final int fragmentation) { ManagementService service = ManagementService.getExistingManagementService(getCache()); assertThat(service).isNotNull(); assertThat(service.isManager()).isTrue(); MemberMXBean memberBean = service.getMemberMXBean(); assertThat(memberBean).isNotNull(); assertThat(memberBean.getOffHeapFreeMemory()).isEqualTo(freeMemory); assertThat(memberBean.getOffHeapUsedMemory()).isEqualTo(allocatedMemory); assertThat(memberBean.getOffHeapObjects()).isEqualTo(objects); assertThat(memberBean.getOffHeapFragmentation()).isEqualTo(fragmentation); } /** * Creates an off-heap region on a vm. * * @param vm a virtual machine. * @param name a region name. * @param dataPolicy a data policy. * * @return true if successful. */ private boolean createOffHeapRegionOnVm(final VM vm, final String name, final DataPolicy dataPolicy) { return vm.invoke(() -> null != createOffHeapRegion(name, dataPolicy)); } /** * Creates an off-heap region. * * @param name a region name. * @param dataPolicy a data policy. * * @return the newly created region. */ private Region createOffHeapRegion(final String name, final DataPolicy dataPolicy) { return getCache().createRegionFactory().setOffHeap(true).setDataPolicy(dataPolicy).create(name); } /** * Sets the distributed system properties for a vm. * * @param vm a virtual machine. * @param management starts the ManagementService when true. * @param props distributed system properties. 
*/ private void setSystemPropertiesOnVm(final VM vm, final boolean management, final Properties props) { vm.invoke(() -> setSystemProperties(management, props)); } /** * Sets the distributed system properties. * * @param management starts the ManagementService when true. * @param props distributed system properties. */ private void setSystemProperties(final boolean management, final Properties props) { getSystem(props); if (management) { ManagementService service = ManagementService.getManagementService(getCache()); if (!service.isManager()) { service.startManager(); } } } /** * Performs a destroy operation on a vm. * * @param vm a virtual machine. * @param key the region entry to destroy. * @param regionName a region name. */ private void doDestroyOnVm(final VM vm, final Object key, final String regionName) { vm.invoke(() -> doDestroy(key, regionName)); } /** * Performs a destroy operation. * * @param key the region entry to destroy. * @param regionName a region name. */ private void doDestroy(final Object key, final String regionName) { Region region = getCache().getRegion(regionName); assertThat(region).isNotNull(); region.destroy(key); } /** * Performs a put operation on a vm. * * @param vm a virtual machine. * @param key region entry key. * @param value region entry value. * @param regionName a region name. */ private void doPutOnVm(final VM vm, final Object key, final Object value, final String regionName, final boolean expectException) { vm.invoke(() -> doPut(key, value, regionName, expectException)); } /** * Performs a put operation. * * @param key region entry key. * @param value region entry value. * @param regionName a region name. */ private void doPut(final Object key, final Object value, final String regionName, final boolean expectException) { Region region = getCache().getRegion(regionName); assertThat(region).isNotNull(); try { region.put(key, value); if (expectException) { fail("Expected OutOfOffHeapMemoryException"); } } catch (OutOfOffHeapMemoryException e) { if (!expectException) { throw e; } } } /** * Creates and adds a generic GaugeMonitor for an attribute of the MemberMXBean on a VM. * * @param vm a virtual machine. * @param attribute the attribute to monitor. * @param highThreshold the high threshold trigger. * @param lowThreshold the low threshold trigger. */ private void setupOffHeapMonitorOnVm(final VM vm, final String attribute, final int highThreshold, final int lowThreshold) { vm.invoke(() -> setupOffHeapMonitor(attribute, highThreshold, lowThreshold)); } /** * Creates and adds a generic GaugeMonitor for an attribute of the MemberMXBean. * * @param attribute the attribute to monitor. * @param highThreshold the high threshold trigger. * @param lowThreshold the low threshold trigger. 
*/ private void setupOffHeapMonitor(final String attribute, final int highThreshold, final int lowThreshold) throws JMException { ObjectName memberMBeanObjectName = MBeanJMXAdapter.getMemberMBeanName( InternalDistributedSystem.getConnectedInstance().getDistributedMember()); assertThat(memberMBeanObjectName).isNotNull(); ObjectName offHeapMonitorName = new ObjectName("monitors:type=Gauge,attr=" + attribute); mbeanServer.createMBean("javax.management.monitor.GaugeMonitor", offHeapMonitorName); AttributeList al = new AttributeList(); al.add(new Attribute("ObservedObject", memberMBeanObjectName)); al.add(new Attribute("GranularityPeriod", 500)); al.add(new Attribute("ObservedAttribute", attribute)); al.add(new Attribute("Notify", true)); al.add(new Attribute("NotifyHigh", true)); al.add(new Attribute("NotifyLow", true)); al.add(new Attribute("HighTheshold", highThreshold)); al.add(new Attribute("LowThreshold", lowThreshold)); mbeanServer.setAttributes(offHeapMonitorName, al); mbeanServer.addNotificationListener(offHeapMonitorName, notificationListener, null, null); mbeanServer.invoke(offHeapMonitorName, "start", new Object[] {}, new String[] {}); } /** * Waits to receive MBean notifications. * * @param vm a virtual machine. * @param wait how long to wait for in millis. */ private void waitForNotificationListenerOnVm(final VM vm, final long wait) { vm.invoke(() -> await("Awaiting Notification Listener") .untilAsserted(() -> assertThat(notificationListener.getNotificationSize() > 0).isTrue())); } /** * Clears received notifications. * * @param vm a virtual machine. */ private void clearNotificationListenerOnVm(final VM vm) { vm.invoke(() -> notificationListener.clear()); } /** * Collects MBean Notifications. */ private static class OffHeapNotificationListener implements NotificationListener { private List<Notification> notificationList = Collections.synchronizedList(new ArrayList<Notification>()); @Override public void handleNotification(final Notification notification, final Object handback) { notificationList.add(notification); } void clear() { notificationList.clear(); } int getNotificationSize() { return notificationList.size(); } } }
10,506
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.xml.wsdl.validator.visitor; import java.util.Properties; /** * Provides configuration for the validate visitor of BPEL and WSDL documents. * * @author ed.wong * @version */ public class ValidateConfiguration extends Properties { /** * */ private static final long serialVersionUID = -5171902287808106628L; /** BPEL document type */ public static final String BPEL = "bpel"; /** WSDL document type */ public static final String WSDL = "wsdl"; // ============================================================= /** Syntax check */ public static final String SYNTAX = ".syntax"; /** Semantics check */ public static final String SEMANTICS = ".semantics"; /** Consistency check */ public static final String CONSISTENCY = ".consistency"; // ============================================================= /** Attribute type */ public static final String ATTRIB = ".attrib"; /** Element type */ public static final String ELEM = ".elem"; // ============================================================= /** Required check */ public static final String REQUIRED = ".required"; /** QName check */ public static final String QNAME = ".qname"; /** NCName check */ public static final String NCNAME = ".ncname"; /** Boolean check */ public static final String BOOLEAN = ".boolean"; /** Enumerated check */ public static final String ENUMERATED = ".enumerated"; /** CreateInstance check */ public static final String CREATE_INSTANCE = ".createInstance"; /** Match catch to fault check */ public static final String MATCH_CATCH = ".matchCatch"; /** Minimum check */ public static final String MIN = ".min"; // ============================================================= /** BPEL SYNTAX ATTRIB REQUIRED check */ public static final String BPEL_SYNTAX_ATTRIB_REQUIRED = BPEL + SYNTAX + ATTRIB + REQUIRED; /** BPEL SYNTAX ATTRIB QNAME check */ public static final String BPEL_SYNTAX_ATTRIB_QNAME = BPEL + SYNTAX + ATTRIB + QNAME; /** BPEL SYNTAX ATTRIB NCNAME check */ public static final String BPEL_SYNTAX_ATTRIB_NCNAME = BPEL + SYNTAX + ATTRIB + NCNAME; /** BPEL SYNTAX ATTRIB BOOLEAN check */ public static final String BPEL_SYNTAX_ATTRIB_BOOLEAN = BPEL + SYNTAX + ATTRIB + BOOLEAN; /** BPEL SYNTAX ATTRIB ENUMERATED check */ public static final String BPEL_SYNTAX_ATTRIB_ENUMERATED = BPEL + SYNTAX + ATTRIB + ENUMERATED; /** BPEL SYNTAX ELEM MIN check */ public static final String BPEL_SYNTAX_ELEM_MIN = BPEL + SYNTAX + ELEM + MIN; /** BPEL SYNTAX ELEM REQUIRED check */ public static final String BPEL_SYNTAX_ELEM_REQUIRED = BPEL + SYNTAX + ELEM + REQUIRED; /** BPEL SEMANTICS CREATE_INSTANCE check */ public static final String BPEL_SEMANTICS_CREATE_INSTANCE = BPEL + SEMANTICS + CREATE_INSTANCE; /** BPEL CONSISTENCY MATCH_CATCH check */ public 
static final String BPEL_CONSISTENCY_MATCH_CATCH = BPEL + CONSISTENCY + MATCH_CATCH; // ============================================================= /** WSDL SYNTAX ATTRIB REQUIRED check */ public static final String WSDL_SYNTAX_ATTRIB_REQUIRED = WSDL + SYNTAX + ATTRIB + REQUIRED; /** WSDL SYNTAX ATTRIB QNAME check */ public static final String WSDL_SYNTAX_ATTRIB_QNAME = WSDL + SYNTAX + ATTRIB + QNAME; /** WSDL SYNTAX ATTRIB NCNAME check */ public static final String WSDL_SYNTAX_ATTRIB_NCNAME = WSDL + SYNTAX + ATTRIB + NCNAME; /** WSDL SYNTAX ATTRIB BOOLEAN check */ public static final String WSDL_SYNTAX_ATTRIB_BOOLEAN = WSDL + SYNTAX + ATTRIB + BOOLEAN; /** WSDL SYNTAX ATTRIB ENUMERATED check */ public static final String WSDL_SYNTAX_ATTRIB_ENUMERATED = WSDL + SYNTAX + ATTRIB + ENUMERATED; /** WSDL SYNTAX ELEM MIN check */ public static final String WSDL_SYNTAX_ELEM_MIN = WSDL + SYNTAX + ELEM + MIN; /** WSDL SYNTAX ELEM REQUIRED check */ public static final String WSDL_SYNTAX_ELEM_REQUIRED = WSDL + SYNTAX + ELEM + REQUIRED; /** Creates a new instance of ValidateConfiguration */ public ValidateConfiguration() { super(); } /** Creates a new instance of ValidateConfiguration * @param defaults Defaults to use. */ public ValidateConfiguration(Properties defaults) { super(defaults); } /** Gets the boolean property. If the key doesn't exist, it's assumed <code>true</code> * since this yields a stricter validation. * * @param key Key for the property. * @return <code>boolean</code> value for the property. */ public boolean getBooleanProperty(String key) { String val = getProperty(key); return (null == val ? true : Boolean.valueOf(val).booleanValue()); } /** Gets the integer property. If the key doesn't exist, it's assumed <code>1</code> * since this yields a stricter validation. * * @param key Key for the property. * @return <code>int</code> value for the property. */ public int getIntegerProperty(String key) { String val = getProperty(key); return (null == val ? 1 : Integer.parseInt(val)); } }
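A short sketch of the strict-by-default accessors above, using two of the keys defined in the class. Missing keys fall back to true / 1, and an explicit entry overrides that; the class is used exactly like any other java.util.Properties instance.

package org.netbeans.modules.xml.wsdl.validator.visitor;

public class ValidateConfigurationUsageSketch {

    public static void main(String[] args) {
        ValidateConfiguration config = new ValidateConfiguration();

        // Nothing set yet: missing keys fall back to the strict defaults (true / 1).
        System.out.println(config.getBooleanProperty(ValidateConfiguration.WSDL_SYNTAX_ATTRIB_QNAME)); // true
        System.out.println(config.getIntegerProperty(ValidateConfiguration.WSDL_SYNTAX_ELEM_MIN));     // 1

        // Explicitly relaxing one check.
        config.setProperty(ValidateConfiguration.WSDL_SYNTAX_ATTRIB_QNAME, "false");
        System.out.println(config.getBooleanProperty(ValidateConfiguration.WSDL_SYNTAX_ATTRIB_QNAME)); // false
    }
}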
2,289
479
import windows
import windows.generated_def as gdef

evtlogmgr = windows.system.event_log
print("Event log Manager is: {0}".format(evtlogmgr))
print("There are <{0}> channels".format(len(list(evtlogmgr.channels))))
print("There are <{0}> publishers".format(len(list(evtlogmgr.publishers))))

FIREWALL_CHANNEL = "Microsoft-Windows-Windows Firewall With Advanced Security/Firewall"
print("Opening channel <{0}>".format(FIREWALL_CHANNEL))
evtchan = evtlogmgr[FIREWALL_CHANNEL]
print("Channel is {0}".format(evtchan))
# Note that `evtchan.events` is an alias for `evtchan.query().all()`
print("The channel contains <{0}> events".format(len(evtchan.events)))
print("")

EVT_QUERY = "Event/EventData[Data='C:\\WINDOWS\\System32\\svchost.exe'] and Event/System[EventID=2006]"
print("""Querying "{0}">""".format(EVT_QUERY))
query = evtchan.query(EVT_QUERY)
print("Query is {0}".format(query))
event_list = list(query)
print("List contains {0} event".format(len(event_list)))
event = event_list[0]
print("")
print("First event is {0}".format(event))
print("System values:")
print(" * ID: {0}".format(event.id))
print(" * version: {0}".format(event.version))
print(" * level: {0}".format(event.level))
print(" * opcode: {0}".format(event.opcode))
print(" * time_created: {0}".format(event.time_created))
print(" * ID: {0}".format(event.id))
print("Event specific values:")
for name, value in event.data.items():
    print(" * <{0}> -> <{1}>".format(name, value))
print("")

evtmeta = event.metadata
print("Event metadata is {0}".format(evtmeta))
print(" * id : {0}".format(evtmeta.id))
print(" * channel_id : {0}".format(evtmeta.channel_id))
print(" * message_id : {0}".format(evtmeta.message_id))
print(" * event_data : {0}".format(evtmeta.event_data))
print(" * EventData template :\n{0}".format(evtmeta.template.replace("\r\n", "\n")))
print("")

print("Exploring complex Evt types:")
print("Channel is still {0}".format(evtchan))
print("Channel config is {0}".format(evtchan.config))
publisher = evtchan.config.publisher
print("Channel publisher is {0}".format(publisher))
print("Channel publisher metadata is {0}".format(publisher.metadata))
print("Publisher's channels are:")
for chan in publisher.metadata.channels:
    print(" * {0}".format(chan))
print("Some publisher's event metadata are:")
for evtmeta in list(publisher.metadata.events_metadata)[:3]:
    print(" * {0}: id={1}".format(evtmeta, evtmeta.id))
877
344
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ // Do not edit this file. It is machine generated. { "schema.json": "使用架构描述 JSON 文件。参见 json-schema.org 了解详细信息。", "schema.json.$schema": "验证此文档的架构", "schema.json.additionalItems": "对于数组,仅适用于项目被设置为一个数组。如果是一个架构,则在项目数组指定项目后,由此架构进行验证。如果为 false,则其他项目将导致验证失败。", "schema.json.additionalProperties": "架构或布尔。如果是架构,则将用于验证与 \"properties\" 或 \"patternProperties\" 不匹配的所有属性。如果是 false,则与两者均不匹配的任何属性都将导致此架构失败。", "schema.json.allOf": "架构的数组,全部都必须匹配。", "schema.json.anyOf": "架构的数组,必须至少有一个匹配。", "schema.json.default": "默认值。由建议使用。", "schema.json.definitions": "不用于验证。将你希望使用 $ref 内嵌引用的子架构放在此处", "schema.json.dependencies": "属性名称到属性名称数组或架构的映射。属性名称数组指的是键中的属性名称。这取决于为保证有效而显示在对象中的数组中的属性。如果该值是一个架构,则该架构仅应用于该对象(如果键中的属性存在于对象上)。", "schema.json.description": "元素的详细描述。用于悬停菜单和建议。", "schema.json.enum": "一组有效的文字值", "schema.json.exclusiveMaximum": "使最大的属性成为专有属性。", "schema.json.exclusiveMininum": "使最小的属性成为专有属性。", "schema.json.format": "描述值应采用的格式。", "schema.json.id": "架构的唯一标识符。", "schema.json.items": "用于数组。可以是一个用于验证每个元素的架构,或按顺序验证每个项目的架构数组(第一个架构将验证第一个元素,第二个架构将验证第二个元素,依此类推)。", "schema.json.maxItems": "一个数组内可以包含的项目的最大数量。包含。", "schema.json.maxLength": "字符串的最大长度。", "schema.json.maxProperties": "一个对象可以拥有的属性的最大数量。包含。", "schema.json.maximum": "最大数值,默认包含。", "schema.json.minItems": "一个数组内可以包含的项目的最小数量。包含。", "schema.json.minLength": "字符串的最小长度。", "schema.json.minProperties": "一个对象可以拥有的属性的最小数量。包含。", "schema.json.minimum": "最小数值,默认包含。", "schema.json.multipleOf": "一个可以除尽当前值的数 (即,没有余数)", "schema.json.not": "必须不能匹配的架构。", "schema.json.oneOf": "架构的数组,正好有一个必须匹配。", "schema.json.pattern": "匹配字符串的正则表达式。不是隐含固定的。", "schema.json.patternProperties": "属性名称的正则表达式与架构的映射,用于匹配属性。", "schema.json.properties": "属性名称与每个属性架构的映射。", "schema.json.required": "字符串的数组,列出了此对象需要的所有属性的名称。", "schema.json.title": "元素的描述性标题", "schema.json.type": "一个基本架构类型(数字、整数、null、数组、对象、布尔值、字符串)的字符串或一个指定这些类型子集的字符串的数组。", "schema.json.uniqueItems": "数组中所有项目是否必须唯一。默认为 false。" }
2,133
653
//==------- assert_happened.hpp - Assert signalling structure --------------==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#pragma once

#include <CL/sycl/detail/defines_elementary.hpp>

#ifdef __SYCL_DEVICE_ONLY__
// Reads Flag of AssertHappened on device
SYCL_EXTERNAL __attribute__((weak)) extern "C" void __devicelib_assert_read(void *);
#endif

__SYCL_INLINE_NAMESPACE(cl) {
namespace sycl {
namespace detail {
// NOTE Layout of this structure should be aligned with the one in
// libdevice/include/assert-happened.hpp
struct AssertHappened {
  int Flag = 0; // set to non-zero upon assert failure

  char Expr[256 + 1] = "";
  char File[256 + 1] = "";
  char Func[128 + 1] = "";

  int32_t Line = 0;

  uint64_t GID0 = 0;
  uint64_t GID1 = 0;
  uint64_t GID2 = 0;

  uint64_t LID0 = 0;
  uint64_t LID1 = 0;
  uint64_t LID2 = 0;
};
} // namespace detail
} // namespace sycl
} // __SYCL_INLINE_NAMESPACE(cl)
418
303
<reponame>mr-c/LightZone<gh_stars>100-1000
/* Copyright (C) 2005-2011 <NAME> */

package com.lightcrafts.utils.cache;

/**
 * A <code>CacheBlock</code> contains information about a contiguous block in
 * the cache: its position and size.
 *
 * @author <NAME> [<EMAIL>]
 */
public final class CacheBlock {

    ////////// public /////////////////////////////////////////////////////////

    /**
     * Construct a <code>CacheBlock</code>.
     *
     * @param pos The block's position.
     * @param size The block's size.
     */
    public CacheBlock( long pos, int size ) {
        m_pos = pos;
        m_size = size;
    }

    /**
     * Gets the block's position.
     * @return Returns said position.
     */
    public long getPosition() {
        return m_pos;
    }

    /**
     * Gets the block's size.
     * @return Returns said size.
     */
    public int getSize() {
        return m_size;
    }

    /**
     * Sets the block's position.
     * @param newPos The new position.
     */
    public void setPosition( long newPos ) {
        m_pos = newPos;
    }

    /**
     * Sets the block's size.
     * @param newSize The new size.
     */
    public void setSize( int newSize ) {
        m_size = newSize;
    }

    ////////// private ////////////////////////////////////////////////////////

    private long m_pos;
    private int m_size;
}
/* vim:set et sw=4 ts=4: */
539
32,544
<reponame>DBatOWL/tutorials<filename>core-java-modules/core-java-lang-oop-methods/src/test/java/com/baeldung/utilities/StringUtilsUnitTest.java
package com.baeldung.utilities;

import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.*;

class StringUtilsUnitTest {

    @Test
    void givenAnEmptyString_whenCallingIsEmpty_thenResultIsTrue() {
        assertThat(StringUtils.isEmpty("")).isTrue();
    }

    @Test
    void givenNonEmptyString_whenCallingIsEmpty_thenResultIsFalse() {
        assertThat(StringUtils.isEmpty("asd")).isFalse();
    }

    @Test
    void givenAnEmptyString_whenCallingWrap_thenResultIsAnEmptyString() {
        assertThat(StringUtils.wrap("", "wrapper")).isEmpty();
    }

    @Test
    void givenNonEmptyString_whenCallingWrap_thenResultIsWrappedString() {
        assertThat(StringUtils.wrap("asd", "wrapper")).isEqualTo("wrapperasdwrapper");
    }
}
352
3,469
import csv import pandas as pd from get_addr_longitude_latitude import get_addr_longitude_latitude import threading import time import os if not os.path.exists('./csv/全国工业园区企业简要信息_addr.csv'): header = ['province', 'city', 'county', 'park', 'parkaddr', 'parkxy', 'parkx', 'parky', 'area', 'numcop', 'company', 'person', 'capital', 'settime', 'email', 'phone', 'address', 'addressxy', 'addressx', 'addressy', 'state', 'url'] with open('./csv/全国工业园区企业简要信息_addr.csv', 'a', newline='', encoding='utf-8') as f: writer = csv.writer(f) writer.writerow(header) file_name = './csv/去重_全国工业园区企业简要信息.csv' csv_data = pd.read_csv(file_name) csv_data_ori = pd.read_csv('./csv/全国工业园区信息_addr.csv') length = len(csv_data) length_ori = len(csv_data_ori) print(length, length_ori) # csv_data['parkaddr'] = '' # csv_data['parkxy'] = '' # csv_data['parkx'] = '' # csv_data['parky'] = '' # csv_data['addressxy'] = '' # csv_data['addressx'] = '' # csv_data['addressy'] = '' t_start = time.time() ListTask = [] for i in range(length): ListTask.append(i) def thread_task(): try: while True: num = ListTask.pop(0) park = csv_data.loc[num, 'park'] for i in range(length_ori): if csv_data_ori.loc[i, 'park'] == park: parkaddr = csv_data_ori.loc[i, 'parkaddr'] parkxy = csv_data_ori.loc[i, 'parkxy'] parkx = csv_data_ori.loc[i, 'parkx'] parky = csv_data_ori.loc[i, 'parky'] break address = csv_data.loc[num, 'address'] company = csv_data.loc[num, 'company'] # print(address, company) for i in range(5): L = get_addr_longitude_latitude(address) if L != ['', '', '']: addressx = L[1] addressy = L[2] addressxy = L[1]+','+L[2] print('已完成:\t{} / {} {} {}'.format(num, length, company, L)) break elif i == 4: addressx = '' addressy = '' addressxy = ',' print('\t无数据:\t{} / {} {} {}'.format(num, length, company, L)) else: time.sleep(0.1) # print(2) # province,city,county,park,area,numcop,company,person,capital,settime,email,phone,address,state,url List = [ csv_data.loc[num, 'province'], csv_data.loc[num, 'city'], csv_data.loc[num, 'county'], csv_data.loc[num, 'park'], parkaddr, parkxy, parkx, parky, csv_data.loc[num, 'area'], csv_data.loc[num, 'numcop'], csv_data.loc[num, 'company'], csv_data.loc[num, 'person'], csv_data.loc[num, 'capital'], csv_data.loc[num, 'settime'], csv_data.loc[num, 'email'], csv_data.loc[num, 'phone'], csv_data.loc[num, 'address'], addressxy, addressx, addressy, csv_data.loc[num, 'state'], csv_data.loc[num, 'url'] ] # print(List) with open('./csv/全国工业园区企业简要信息_addr.csv', 'a', newline='', encoding='utf-8') as f: writer = csv.writer(f) writer.writerow(List) except Exception as e: pass threads = [] for i in range(80): thread = threading.Thread(target=thread_task, args=()) threads.append(thread) # 启动多线程 for t in threads: t.start() print('开启线程:\t'+t.name) for t in threads: t.join() print('关闭线程:\t'+t.name) t_end = time.time() print('\n运行时间:', t_end-t_start)
2,308
335
<gh_stars>100-1000
{
  "word": "Aside",
  "definitions": [
    "A remark or passage in a play that is intended to be heard by the audience but unheard by the other characters in the play.",
    "A remark not intended to be heard by everyone present.",
    "A remark that is not directly related to the main topic of discussion."
  ],
  "parts-of-speech": "Noun"
}
129
447
#include <stdio.h>
#include <cstdlib>
#include <sstream>
#include <dlfcn.h>
#include <limits.h>
#include <string>
#include <cstring>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>

#include "coreruncommon.h"
#include "binding.hpp"

static const char* serverGcVar = "CORECLR_SERVER_GC";
const char* useServerGc;

void* coreclrLib;
coreclr_initialize_ptr initialize_core_clr;
coreclr_execute_assembly_ptr execute_assembly;
coreclr_shutdown_ptr shutdown_core_clr;
coreclr_create_delegate_ptr create_delegate;

{{ .Impls }}
211
887
package org.javers.core.diff.custom;

import java.util.Objects;

@FunctionalInterface
public interface CustomValueToStringTemplate<T> extends CustomValueComparator<T> {

    @Override
    default boolean equals(T a, T b) {
        return Objects.equals(a, b);
    }
}
96
5,169
<reponame>morizotter/Specs<filename>Specs/BlurAnimation/0.0.2/BlurAnimation.podspec.json
{
  "name": "BlurAnimation",
  "version": "0.0.2",
  "summary": "A subclass of UIImageView that applies blur and animations. Blur effect using StackBlur",
  "description": " The BlurAnimation creates a spyglass or inspection feel by bluring out \n the base image and then selectively showing the original image through\n a porthole cutour.\n",
  "homepage": "https://github.com/mymichellle/BlurAnimation",
  "license": {
    "type": "MIT",
    "file": "LICENSE"
  },
  "authors": "<NAME>",
  "social_media_url": "https://github.com/mymichellle",
  "platforms": {
    "ios": "6.0"
  },
  "source": {
    "git": "https://github.com/mymichellle/BlurAnimation.git",
    "tag": "0.0.2"
  },
  "source_files": "BlurAnimation/BlurImageView.*",
  "requires_arc": true,
  "dependencies": {
    "StackBluriOS": [
      "~> 0.0.1"
    ]
  }
}
410
1,144
<gh_stars>1000+ package de.metas.adempiere.gui.search.impl; /* * #%L * de.metas.handlingunits.base * %% * Copyright (C) 2015 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ import java.math.BigDecimal; import org.adempiere.model.InterfaceWrapperHelper; import de.metas.adempiere.gui.search.IHUPackingAware; import de.metas.handlingunits.model.I_C_OrderLine; import de.metas.order.IOrderLineBL; import de.metas.util.Check; import de.metas.util.Services; /** * Wraps an {@link I_C_OrderLine} and makes it behave like an {@link IHUPackingAware}. * * @author tsa * */ public class OrderLineHUPackingAware implements IHUPackingAware { public static final OrderLineHUPackingAware of(final org.compiere.model.I_C_OrderLine orderLine) { return new OrderLineHUPackingAware(InterfaceWrapperHelper.create(orderLine, I_C_OrderLine.class)); } private final I_C_OrderLine orderLine; /** * Plain delegate for fields which are not available in order line */ private final PlainHUPackingAware values = new PlainHUPackingAware(); public OrderLineHUPackingAware(final I_C_OrderLine orderLine) { super(); Check.assumeNotNull(orderLine, "orderLine not null"); this.orderLine = orderLine; } @Override public int getM_Product_ID() { return orderLine.getM_Product_ID(); } @Override public void setM_Product_ID(final int productId) { orderLine.setM_Product_ID(productId); values.setM_Product_ID(productId); } @Override public void setQty(final BigDecimal qty) { orderLine.setQtyEntered(qty); final IOrderLineBL orderLineBL = Services.get(IOrderLineBL.class); orderLine.setQtyOrdered(orderLineBL.convertQtyEnteredToStockUOM(orderLine).toBigDecimal()); values.setQty(qty); } /** * @return QtyEntered of the wrapped order line. Note that qtyEntered is the qty that corresponds the UOM returned by {@link #getC_UOM()}. 
*/ @Override public BigDecimal getQty() { return orderLine.getQtyEntered(); } @Override public int getM_HU_PI_Item_Product_ID() { return orderLine.getM_HU_PI_Item_Product_ID(); } @Override public void setM_HU_PI_Item_Product_ID(final int huPiItemProductId) { orderLine.setM_HU_PI_Item_Product_ID(huPiItemProductId); values.setM_HU_PI_Item_Product_ID(huPiItemProductId); } @Override public int getM_AttributeSetInstance_ID() { return orderLine.getM_AttributeSetInstance_ID(); } @Override public void setM_AttributeSetInstance_ID(final int M_AttributeSetInstance_ID) { orderLine.setM_AttributeSetInstance_ID(M_AttributeSetInstance_ID); values.setM_AttributeSetInstance_ID(M_AttributeSetInstance_ID); } @Override public int getC_UOM_ID() { return orderLine.getC_UOM_ID(); } @Override public void setC_UOM_ID(final int uomId) { values.setC_UOM_ID(uomId); // NOTE: uom is mandatory // we assume orderLine's UOM is correct if (uomId > 0) { orderLine.setC_UOM_ID(uomId); } } @Override public BigDecimal getQtyTU() { return orderLine.getQtyEnteredTU(); } @Override public void setQtyTU(final BigDecimal qtyPacks) { orderLine.setQtyEnteredTU(qtyPacks); values.setQtyTU(qtyPacks); } @Override public int getC_BPartner_ID() { return orderLine.getC_BPartner_ID(); } @Override public void setC_BPartner_ID(final int bpartnerId) { orderLine.setC_BPartner_ID(bpartnerId); values.setC_BPartner_ID(bpartnerId); } @Override public boolean isInDispute() { // order line has no IsInDispute flag return values.isInDispute(); } @Override public void setInDispute(final boolean inDispute) { values.setInDispute(inDispute); } @Override public String toString() { return String .format("OrderLineHUPackingAware [orderLine=%s, getM_Product_ID()=%s, getM_Product()=%s, getQty()=%s, getM_HU_PI_Item_Product()=%s, getM_AttributeSetInstance_ID()=%s, getC_UOM()=%s, getQtyPacks()=%s, getC_BPartner()=%s, getM_HU_PI_Item_Product_ID()=%s, isInDispute()=%s]", orderLine, getM_Product_ID(), getM_Product_ID(), getQty(), getM_HU_PI_Item_Product_ID(), getM_AttributeSetInstance_ID(), getC_UOM_ID(), getQtyTU(), getC_BPartner_ID(), getM_HU_PI_Item_Product_ID(), isInDispute()); } }
1,837
2,209
#!/usr/bin/env python2
"""
signal_def.py
"""
from __future__ import print_function

import signal

from typing import List, Dict, Tuple


def _MakeSignals():
  # type: () -> Dict[str, int]
  """Piggy-back on CPython to get a list of portable signals.

  When Oil is ported to C, we might want to do something like bash/dash.
  """
  names = {}  # type: Dict[str, int]
  for name in dir(signal):
    # don't want SIG_DFL or SIG_IGN
    if name.startswith('SIG') and not name.startswith('SIG_'):
      int_val = getattr(signal, name)
      abbrev = name[3:]
      names[abbrev] = int_val
  return names


def GetNumber(sig_spec):
  # type: (str) -> int
  return _SIGNAL_NAMES.get(sig_spec)


_SIGNAL_NAMES = _MakeSignals()

_BY_NUMBER = _SIGNAL_NAMES.items()
_BY_NUMBER.sort(key=lambda x: x[1])


def AllNames():
  # type: () -> List[Tuple[str, int]]
  return _BY_NUMBER
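A small usage sketch for the module above, assuming it is importable as `signal_def`; the module name and the 'INT' lookup are illustrative, not part of the original file.

import signal

import signal_def  # assumed import path for the file shown above

# GetNumber() maps an abbreviated name to the CPython signal constant.
assert signal_def.GetNumber('INT') == signal.SIGINT

# AllNames() returns (abbrev, number) pairs sorted by signal number.
for abbrev, number in signal_def.AllNames():
    print('%s -> %d' % (abbrev, number))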
347
852
#include "RecoJets/JetProducers/plugins/CastorJetIDProducer.h" #include "DataFormats/JetReco/interface/CastorJetID.h" #include <vector> // // constants, enums and typedefs // // // static data member definitions // // // constructors and destructor // CastorJetIDProducer::CastorJetIDProducer(const edm::ParameterSet& iConfig) : src_(iConfig.getParameter<edm::InputTag>("src")), helper_() { produces<reco::CastorJetIDValueMap>(); input_jet_token_ = consumes<edm::View<reco::BasicJet> >(src_); } CastorJetIDProducer::~CastorJetIDProducer() {} // // member functions // // ------------ method called to produce the data ------------ void CastorJetIDProducer::produce(edm::Event& iEvent, const edm::EventSetup& iSetup) { // get the input jets edm::Handle<edm::View<reco::BasicJet> > h_jets; iEvent.getByToken(input_jet_token_, h_jets); // allocate the jet--->jetid value map auto castorjetIdValueMap = std::make_unique<reco::CastorJetIDValueMap>(); // instantiate the filler with the map reco::CastorJetIDValueMap::Filler filler(*castorjetIdValueMap); // allocate the vector of ids size_t njets = h_jets->size(); std::vector<reco::CastorJetID> ids(njets); // loop over the jets for (edm::View<reco::BasicJet>::const_iterator jetsBegin = h_jets->begin(), jetsEnd = h_jets->end(), ijet = jetsBegin; ijet != jetsEnd; ++ijet) { // get the id from each jet helper_.calculate(iEvent, *ijet); ids[ijet - jetsBegin].emEnergy = helper_.emEnergy(); ids[ijet - jetsBegin].hadEnergy = helper_.hadEnergy(); ids[ijet - jetsBegin].fem = helper_.fem(); ids[ijet - jetsBegin].depth = helper_.depth(); ids[ijet - jetsBegin].width = helper_.width(); ids[ijet - jetsBegin].fhot = helper_.fhot(); ids[ijet - jetsBegin].sigmaz = helper_.sigmaz(); ids[ijet - jetsBegin].nTowers = helper_.nTowers(); } // set up the map filler.insert(h_jets, ids.begin(), ids.end()); // fill the vals filler.fill(); // write map to the event iEvent.put(std::move(castorjetIdValueMap)); } //define this as a plug-in DEFINE_FWK_MODULE(CastorJetIDProducer);
800
771
# Copyright 2019 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Firebase Exceptions module. This module defines the base types for exceptions and the platform-wide error codes as outlined in https://cloud.google.com/apis/design/errors. :class:`FirebaseError` is the parent class of all exceptions raised by the Admin SDK. It contains the ``code``, ``http_response`` and ``cause`` properties common to all Firebase exception types. Each exception also carries a message that outlines what went wrong. This can be logged for audit or debugging purposes. When calling an Admin SDK API, developers can catch the parent ``FirebaseError`` and inspect its ``code`` to implement fine-grained error handling. Alternatively, developers can catch one or more subtypes of ``FirebaseError``. Under normal conditions, any given API can raise only a small subset of the available exception subtypes. However, the SDK also exposes rare error conditions like connection timeouts and other I/O errors as instances of ``FirebaseError``. Therefore it is always a good idea to have a handler specified for ``FirebaseError``, after all the subtype error handlers. """ #: Error code for ``InvalidArgumentError`` type. INVALID_ARGUMENT = 'INVALID_ARGUMENT' #: Error code for ``FailedPreconditionError`` type. FAILED_PRECONDITION = 'FAILED_PRECONDITION' #: Error code for ``OutOfRangeError`` type. OUT_OF_RANGE = 'OUT_OF_RANGE' #: Error code for ``UnauthenticatedError`` type. UNAUTHENTICATED = 'UNAUTHENTICATED' #: Error code for ``PermissionDeniedError`` type. PERMISSION_DENIED = 'PERMISSION_DENIED' #: Error code for ``NotFoundError`` type. NOT_FOUND = 'NOT_FOUND' #: Error code for ``ConflictError`` type. CONFLICT = 'CONFLICT' #: Error code for ``AbortedError`` type. ABORTED = 'ABORTED' #: Error code for ``AlreadyExistsError`` type. ALREADY_EXISTS = 'ALREADY_EXISTS' #: Error code for ``ResourceExhaustedError`` type. RESOURCE_EXHAUSTED = 'RESOURCE_EXHAUSTED' #: Error code for ``CancelledError`` type. CANCELLED = 'CANCELLED' #: Error code for ``DataLossError`` type. DATA_LOSS = 'DATA_LOSS' #: Error code for ``UnknownError`` type. UNKNOWN = 'UNKNOWN' #: Error code for ``InternalError`` type. INTERNAL = 'INTERNAL' #: Error code for ``UnavailableError`` type. UNAVAILABLE = 'UNAVAILABLE' #: Error code for ``DeadlineExceededError`` type. DEADLINE_EXCEEDED = 'DEADLINE_EXCEEDED' class FirebaseError(Exception): """Base class for all errors raised by the Admin SDK. Args: code: A string error code that represents the type of the exception. Possible error codes are defined in https://cloud.google.com/apis/design/errors#handling_errors. message: A human-readable error message string. cause: The exception that caused this error (optional). http_response: If this error was caused by an HTTP error response, this property is set to the ``requests.Response`` object that represents the HTTP response (optional). See https://2.python-requests.org/en/master/api/#requests.Response for details of this object. 
""" def __init__(self, code, message, cause=None, http_response=None): Exception.__init__(self, message) self._code = code self._cause = cause self._http_response = http_response @property def code(self): return self._code @property def cause(self): return self._cause @property def http_response(self): return self._http_response class InvalidArgumentError(FirebaseError): """Client specified an invalid argument.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, INVALID_ARGUMENT, message, cause, http_response) class FailedPreconditionError(FirebaseError): """Request can not be executed in the current system state, such as deleting a non-empty directory.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, FAILED_PRECONDITION, message, cause, http_response) class OutOfRangeError(FirebaseError): """Client specified an invalid range.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, OUT_OF_RANGE, message, cause, http_response) class UnauthenticatedError(FirebaseError): """Request not authenticated due to missing, invalid, or expired OAuth token.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, UNAUTHENTICATED, message, cause, http_response) class PermissionDeniedError(FirebaseError): """Client does not have sufficient permission. This can happen because the OAuth token does not have the right scopes, the client doesn't have permission, or the API has not been enabled for the client project. """ def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, PERMISSION_DENIED, message, cause, http_response) class NotFoundError(FirebaseError): """A specified resource is not found, or the request is rejected by undisclosed reasons, such as whitelisting.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, NOT_FOUND, message, cause, http_response) class ConflictError(FirebaseError): """Concurrency conflict, such as read-modify-write conflict.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, CONFLICT, message, cause, http_response) class AbortedError(FirebaseError): """Concurrency conflict, such as read-modify-write conflict.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, ABORTED, message, cause, http_response) class AlreadyExistsError(FirebaseError): """The resource that a client tried to create already exists.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, ALREADY_EXISTS, message, cause, http_response) class ResourceExhaustedError(FirebaseError): """Either out of resource quota or reaching rate limiting.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, RESOURCE_EXHAUSTED, message, cause, http_response) class CancelledError(FirebaseError): """Request cancelled by the client.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, CANCELLED, message, cause, http_response) class DataLossError(FirebaseError): """Unrecoverable data loss or data corruption.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, DATA_LOSS, message, cause, http_response) class UnknownError(FirebaseError): """Unknown server error.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, UNKNOWN, message, 
cause, http_response) class InternalError(FirebaseError): """Internal server error.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, INTERNAL, message, cause, http_response) class UnavailableError(FirebaseError): """Service unavailable. Typically the server is down.""" def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, UNAVAILABLE, message, cause, http_response) class DeadlineExceededError(FirebaseError): """Request deadline exceeded. This will happen only if the caller sets a deadline that is shorter than the method's default deadline (i.e. requested deadline is not enough for the server to process the request) and the request did not finish within the deadline. """ def __init__(self, message, cause=None, http_response=None): FirebaseError.__init__(self, DEADLINE_EXCEEDED, message, cause, http_response)
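A minimal sketch of the error-handling pattern the module docstring above recommends: catch the specific subtypes first and the parent FirebaseError last. The `do_admin_call` name is a placeholder for any Admin SDK operation, and the import path assumes this module ships as firebase_admin.exceptions.

from firebase_admin import exceptions

def call_with_handling(do_admin_call):
    # `do_admin_call` stands in for any Admin SDK call (see the module docstring above).
    try:
        return do_admin_call()
    except exceptions.NotFoundError as error:
        # Fine-grained handling for one specific error condition.
        print('resource not found:', error)
    except exceptions.FirebaseError as error:
        # Parent handler last: inspect the platform-wide error code for everything else.
        print('operation failed with code', error.code)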
2,730
1,565
<gh_stars>1000+ /* * This file is part of LuckPerms, licensed under the MIT License. * * Copyright (c) lucko (Luck) <<EMAIL>> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package net.luckperms.api.model.group; import net.luckperms.api.node.HeldNode; import net.luckperms.api.node.Node; import net.luckperms.api.node.matcher.NodeMatcher; import org.checkerframework.checker.nullness.qual.NonNull; import org.checkerframework.checker.nullness.qual.Nullable; import org.jetbrains.annotations.Unmodifiable; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; import java.util.function.Consumer; /** * Represents the object responsible for managing {@link Group} instances. * * <p>All blocking methods return {@link CompletableFuture}s, which will be * populated with the result once the data has been loaded/saved asynchronously. * Care should be taken when using such methods to ensure that the main server * thread is not blocked.</p> * * <p>Methods such as {@link CompletableFuture#get()} and equivalent should * <strong>not</strong> be called on the main server thread. If you need to use * the result of these operations on the main server thread, register a * callback using {@link CompletableFuture#thenAcceptAsync(Consumer, Executor)}.</p> */ public interface GroupManager { /** * Creates a new group in the plugin's storage provider and then loads it * into memory. * * <p>If a group by the same name already exists, it will be loaded.</p> * * @param name the name of the group * @return the resultant group * @throws NullPointerException if the name is null */ @NonNull CompletableFuture<Group> createAndLoadGroup(@NonNull String name); /** * Loads a group from the plugin's storage provider into memory. * * <p>Returns an {@link Optional#empty() empty optional} if the group does * not exist.</p> * * @param name the name of the group * @return the resultant group * @throws NullPointerException if the name is null */ @NonNull CompletableFuture<Optional<Group>> loadGroup(@NonNull String name); /** * Saves a group's data back to the plugin's storage provider. * * <p>You should call this after you make any changes to a group.</p> * * @param group the group to save * @return a future to encapsulate the operation. 
* @throws NullPointerException if group is null * @throws IllegalStateException if the group instance was not obtained from LuckPerms. */ @NonNull CompletableFuture<Void> saveGroup(@NonNull Group group); /** * Permanently deletes a group from the plugin's storage provider. * * @param group the group to delete * @return a future to encapsulate the operation. * @throws NullPointerException if group is null * @throws IllegalStateException if the group instance was not obtained from LuckPerms. */ @NonNull CompletableFuture<Void> deleteGroup(@NonNull Group group); /** * Loads (or creates) a group from the plugin's storage provider, applies the given * {@code action}, then saves the group's data back to storage. * * <p>This method effectively calls {@link #createAndLoadGroup(String)}, followed by the * {@code action}, then {@link #saveGroup(Group)}, and returns an encapsulation of the whole * process as a {@link CompletableFuture}. </p> * * @param name the name of the group * @param action the action to apply to the group * @return a future to encapsulate the operation * @since 5.1 */ default @NonNull CompletableFuture<Void> modifyGroup(@NonNull String name, @NonNull Consumer<? super Group> action) { /* This default method is overridden in the implementation, and is just here to demonstrate what this method does in the API sources. */ return createAndLoadGroup(name) .thenApplyAsync(group -> { action.accept(group); return group; }) .thenCompose(this::saveGroup); } /** * Loads all groups into memory. * * @return a future to encapsulate the operation. */ @NonNull CompletableFuture<Void> loadAllGroups(); /** * Searches the {@link Group#data() normal node maps} of all known {@link Group}s for {@link Node} * entries matching the given {@link NodeMatcher matcher}. * * @param matcher the matcher * @return the entries which matched * @since 5.1 */ <T extends Node> @NonNull CompletableFuture<@Unmodifiable Map<String, Collection<T>>> searchAll(@NonNull NodeMatcher<? extends T> matcher); /** * Searches for a list of groups with a given permission. * * @param permission the permission to search for * @return a list of held permissions, or null if the operation failed * @throws NullPointerException if the permission is null * @deprecated Use {@link #searchAll(NodeMatcher)} instead */ @Deprecated @NonNull CompletableFuture<@Unmodifiable List<HeldNode<String>>> getWithPermission(@NonNull String permission); /** * Gets a loaded group. * * @param name the name of the group to get * @return a {@link Group} object, if one matching the name exists, or null if not * @throws NullPointerException if the name is null */ @Nullable Group getGroup(@NonNull String name); /** * Gets a set of all loaded groups. * * @return a {@link Set} of {@link Group} objects */ @NonNull @Unmodifiable Set<Group> getLoadedGroups(); /** * Check if a group is loaded in memory * * @param name the name to check for * @return true if the group is loaded * @throws NullPointerException if the name is null */ boolean isLoaded(@NonNull String name); }
2,313
1,338
/* * Copyright 2011, <NAME>, <EMAIL>. * Distributed under the terms of the MIT License. */ #ifndef HID_WRITER_H #define HID_WRITER_H #include "HIDDataTypes.h" class HIDWriter { public: HIDWriter(size_t blockSize = 20); ~HIDWriter(); // High Level status_t DefineInputPadding(uint8 count, uint8 bitLength); status_t DefineInputData(uint8 count, uint8 bitLength, main_item_data data, uint32 logicalMinimum, uint32 logicalMaximum, uint16 usagePage, uint16 usageMinimum, uint16 usageMaximum = 0xffff); status_t BeginCollection(uint8 collectionType, uint16 usagePage, uint16 usageID); status_t EndCollection(); // Low Level status_t SetUsagePage(uint16 usagePage); status_t SetLogicalMinimum(uint32 logicalMinimum); status_t SetLogicalMaximum(uint32 logicalMaximum); status_t SetReportSize(uint8 reportSize); status_t SetReportID(uint8 reportID); status_t SetReportCount(uint8 reportCount); status_t LocalSetUsageID(uint16 usageID); status_t LocalSetUsageMinimum(uint16 usageMinimum); status_t LocalSetUsageMaximum(uint16 usageMaximum); status_t BeginCollection(uint8 collectionType); status_t Input(main_item_data data); status_t Output(main_item_data data); status_t Feature(main_item_data data); // Generic status_t WriteShortItem(uint8 type, uint8 tag, uint32 value); status_t Write(const void *data, size_t length); size_t BufferLength() { return fBufferUsed; }; const uint8 * Buffer() { return fBuffer; }; void Reset(); private: size_t fBlockSize; size_t fBufferAllocated; size_t fBufferUsed; uint8 * fBuffer; status_t fStatus; }; #endif // HID_WRITER_H
767
4,772
package example.service;

import example.repo.Customer303Repository;

import org.springframework.stereotype.Service;

@Service
public class Customer303Service {

    public Customer303Service(Customer303Repository repo) {}
}
60
410
<reponame>caryll/node-tin #include "dep/json-builder.h" #include "otfcc/sfnt.h" #include "otfcc/font.h" #include "aliases.h" #include "platform.h" #include "stopwatch.h" #ifndef MAIN_VER #define MAIN_VER 0 #endif #ifndef SECONDARY_VER #define SECONDARY_VER 0 #endif #ifndef PATCH_VER #define PATCH_VER 0 #endif void printInfo() { fprintf(stdout, "This is Polymorphic otfccdump, version %d.%d.%d.\n", MAIN_VER, SECONDARY_VER, PATCH_VER); } void printHelp() { fprintf(stdout, "\n" "Usage : otfccdump [OPTIONS] input.[otf|ttf|ttc]\n\n" " -h, --help : Display this help message and exit.\n" " -v, --version : Display version information and exit.\n" " -o <file> : Set output file path to <file>. When absent the dump\n" " will be written to STDOUT.\n" " -n <n>, --ttc-index <n> : Use the <n>th subfont within the input font.\n" " --pretty : Prettify the output JSON.\n" " --ugly : Force uglify the output JSON.\n" " --verbose : Show more information when building.\n" " -q, --quiet : Be silent when building.\n\n" " --ignore-glyph-order : Do not export glyph order information.\n" " --glyph-name-prefix pfx : Add a prefix to the glyph names.\n" " --ignore-hints : Do not export hinting information.\n" " --decimal-cmap : Export 'cmap' keys as decimal number.\n" " --hex-cmap : Export 'cmap' keys as hex number (U+FFFF).\n" " --name-by-hash : Name glyphs using its hash value.\n" " --name-by-gid : Name glyphs using its glyph id.\n" " --add-bom : Add BOM mark in the output. (It is default on Windows\n" " when redirecting to another program. Use --no-bom to\n" " turn it off.)\n" "\n"); } #ifdef _WIN32 int main() { int argc; char **argv; get_argv_utf8(&argc, &argv); #else int main(int argc, char *argv[]) { #endif bool show_help = false; bool show_version = false; bool show_pretty = false; bool show_ugly = false; bool add_bom = false; bool no_bom = false; uint32_t ttcindex = 0; struct option longopts[] = {{"version", no_argument, NULL, 'v'}, {"help", no_argument, NULL, 'h'}, {"pretty", no_argument, NULL, 'p'}, {"ugly", no_argument, NULL, 0}, {"time", no_argument, NULL, 0}, {"ignore-glyph-order", no_argument, NULL, 0}, {"ignore-hints", no_argument, NULL, 0}, {"hex-cmap", no_argument, NULL, 0}, {"decimal-cmap", no_argument, NULL, 0}, {"instr-as-bytes", no_argument, NULL, 0}, {"name-by-hash", no_argument, NULL, 0}, {"name-by-gid", no_argument, NULL, 0}, {"glyph-name-prefix", required_argument, NULL, 0}, {"verbose", no_argument, NULL, 0}, {"quiet", no_argument, NULL, 0}, {"add-bom", no_argument, NULL, 0}, {"no-bom", no_argument, NULL, 0}, {"output", required_argument, NULL, 'o'}, {"ttc-index", required_argument, NULL, 'n'}, {"debug-wait-on-start", no_argument, NULL, 0}, {0, 0, 0, 0}}; otfcc_Options *options = otfcc_newOptions(); options->logger = otfcc_newLogger(otfcc_newStdErrTarget()); options->logger->indent(options->logger, "otfccdump"); options->decimal_cmap = true; int option_index = 0; int c; sds outputPath = NULL; sds inPath = NULL; while ((c = getopt_long(argc, argv, "vhqpio:n:", longopts, &option_index)) != (-1)) { switch (c) { case 0: /* If this option set a flag, do nothing else now. 
*/ if (longopts[option_index].flag != 0) { break; } else if (strcmp(longopts[option_index].name, "ugly") == 0) { show_ugly = true; } else if (strcmp(longopts[option_index].name, "time") == 0) { } else if (strcmp(longopts[option_index].name, "add-bom") == 0) { add_bom = true; } else if (strcmp(longopts[option_index].name, "no-bom") == 0) { no_bom = true; } else if (strcmp(longopts[option_index].name, "ignore-glyph-order") == 0) { options->ignore_glyph_order = true; } else if (strcmp(longopts[option_index].name, "verbose") == 0) { options->verbose = true; } else if (strcmp(longopts[option_index].name, "quiet") == 0) { options->quiet = true; } else if (strcmp(longopts[option_index].name, "ignore-hints") == 0) { options->ignore_hints = true; } else if (strcmp(longopts[option_index].name, "decimal-cmap") == 0) { options->decimal_cmap = true; } else if (strcmp(longopts[option_index].name, "hex-cmap") == 0) { options->decimal_cmap = false; } else if (strcmp(longopts[option_index].name, "name-by-hash") == 0) { options->name_glyphs_by_hash = true; } else if (strcmp(longopts[option_index].name, "name-by-gid") == 0) { options->name_glyphs_by_gid = true; } else if (strcmp(longopts[option_index].name, "instr-as-bytes") == 0) { options->instr_as_bytes = true; } else if (strcmp(longopts[option_index].name, "glyph-name-prefix") == 0) { options->glyph_name_prefix = strdup(optarg); } else if (strcmp(longopts[option_index].name, "debug-wait-on-start") == 0) { options->debug_wait_on_start = true; } break; case 'v': show_version = true; break; case 'i': options->ignore_glyph_order = true; break; case 'h': show_help = true; break; case 'p': show_pretty = true; break; case 'o': outputPath = sdsnew(optarg); break; case 'q': options->quiet = true; break; case 'n': ttcindex = atoi(optarg); break; } } if (options->debug_wait_on_start) { getchar(); } options->logger->setVerbosity(options->logger, options->quiet ? 0 : options->verbose ? 0xFF : 1); if (show_help) { printInfo(); printHelp(); return 0; } if (show_version) { printInfo(); return 0; } if (optind >= argc) { logError("Expected argument for input file name.\n"); printHelp(); exit(EXIT_FAILURE); } else { inPath = sdsnew(argv[optind]); } struct timespec begin; time_now(&begin); otfcc_SplineFontContainer *sfnt; loggedStep("Read SFNT") { logProgress("From file %s", inPath); FILE *file = u8fopen(inPath, "rb"); sfnt = otfcc_readSFNT(file); if (!sfnt || sfnt->count == 0) { logError("Cannot read SFNT file \"%s\". Exit.\n", inPath); exit(EXIT_FAILURE); } if (ttcindex >= sfnt->count) { logError("Subfont index %d out of range for \"%s\" (0 -- %d). Exit.\n", ttcindex, inPath, (sfnt->count - 1)); exit(EXIT_FAILURE); } logStepTime; } otfcc_Font *font; loggedStep("Read Font") { otfcc_IFontBuilder *reader = otfcc_newOTFReader(); font = reader->read(sfnt, ttcindex, options); if (!font) { logError("Font structure broken or corrupted \"%s\". Exit.\n", inPath); exit(EXIT_FAILURE); } reader->free(reader); if (sfnt) otfcc_deleteSFNT(sfnt); logStepTime; } loggedStep("Consolidate") { otfcc_iFont.consolidate(font, options); logStepTime; } json_value *root; loggedStep("Dump") { otfcc_IFontSerializer *dumper = otfcc_newJsonWriter(); root = (json_value *)dumper->serialize(font, options); if (!root) { logError("Font structure broken or corrupted \"%s\". 
Exit.\n", inPath); exit(EXIT_FAILURE); } logStepTime; dumper->free(dumper); } char *buf; size_t buflen; loggedStep("Serialize to JSON") { json_serialize_opts jsonOptions; jsonOptions.mode = json_serialize_mode_packed; jsonOptions.opts = 0; jsonOptions.indent_size = 4; if (show_pretty || (!outputPath && isatty(fileno(stdout)))) { jsonOptions.mode = json_serialize_mode_multiline; } if (show_ugly) jsonOptions.mode = json_serialize_mode_packed; buflen = json_measure_ex(root, jsonOptions); buf = calloc(1, buflen); json_serialize_ex(buf, root, jsonOptions); logStepTime; } loggedStep("Output") { if (outputPath) { FILE *outputFile = u8fopen(outputPath, "wb"); if (!outputFile) { logError("Cannot write to file \"%s\". Exit.", outputPath); exit(EXIT_FAILURE); } if (add_bom) { fputc(0xEF, outputFile); fputc(0xBB, outputFile); fputc(0xBF, outputFile); } size_t actualLen = buflen - 1; while (!buf[actualLen]) actualLen -= 1; fwrite(buf, sizeof(char), actualLen + 1, outputFile); fclose(outputFile); } else { #ifdef WIN32 if (isatty(fileno(stdout))) { LPWSTR pwStr; DWORD dwNum = widen_utf8(buf, &pwStr); DWORD actual = 0; DWORD written = 0; const DWORD chunk = 0x10000; while (written < dwNum) { DWORD len = dwNum - written; if (len > chunk) len = chunk; WriteConsoleW(GetStdHandle(STD_OUTPUT_HANDLE), pwStr + written, len, &actual, NULL); written += len; } free(pwStr); } else { if (!no_bom) { fputc(0xEF, stdout); fputc(0xBB, stdout); fputc(0xBF, stdout); } fputs(buf, stdout); } #else if (add_bom) { fputc(0xEF, stdout); fputc(0xBB, stdout); fputc(0xBF, stdout); } fputs(buf, stdout); #endif } logStepTime; } loggedStep("Finalize") { free(buf); if (font) otfcc_iFont.free(font); if (root) json_builder_free(root); if (inPath) sdsfree(inPath); if (outputPath) sdsfree(outputPath); logStepTime; } otfcc_deleteOptions(options); return 0; }
4,934
2,540
<gh_stars>1000+
package bar;

/**
 * Foo
 */
public class Foo {

    /**
     * Perform request
     *
     * @param name user name
     * @param password <PASSWORD>
     */
    public void request(String name, String password) {
    }
}
92
435
{
  "copyright_text": "Standard YouTube License",
  "description": "Keynote: Data Science Workflow",
  "duration": 2806,
  "language": "eng",
  "recorded": "2017-11-27",
  "related_urls": [
    {
      "label": "schedule",
      "url": "https://pydata.org/nyc2017/schedule/"
    },
    {
      "label": "slides",
      "url": "https://www.slideshare.net/PyData/data-science-workflow"
    }
  ],
  "speakers": [
    "<NAME>"
  ],
  "tags": [
    "keynote"
  ],
  "thumbnail_url": "https://i.ytimg.com/vi/veiLCvcLIg8/maxresdefault.jpg",
  "title": "Data Science Workflow",
  "videos": [
    {
      "type": "youtube",
      "url": "https://www.youtube.com/watch?v=veiLCvcLIg8"
    }
  ]
}
314
3,508
<reponame>Anshul1507/Leetcode
package com.fishercoder.solutions;

public class _565 {
    public static class Solution1 {
        public int arrayNesting(int[] nums) {
            if (nums == null || nums.length == 0) {
                return 0;
            }
            boolean[] visited = new boolean[nums.length];
            int answer = 0;
            for (int i : nums) {
                int count = 0;
                int j = i;
                while (j >= 0 && j < nums.length && !visited[j]) {
                    count++;
                    visited[j] = true;
                    j = nums[j];
                }
                answer = Math.max(answer, count);
            }
            return answer;
        }
    }
}
427
1,401
# built-in
from math import isclose

# external
import pytest

# project
import textdistance


ALG = textdistance.MongeElkan


@pytest.mark.parametrize('left, right, expected', [
    (['Niall'], ['Neal'], .805),
    (['Niall'], ['Nigel'], 0.7866666666666667),
])
def test_similarity(left, right, expected):
    actual = ALG(qval=1, algorithm=textdistance.jaro_winkler).similarity(left, right)
    assert isclose(actual, expected)
158
348
{"nom":"Saulxures-lès-Vannes","dpt":"Meurthe-et-Moselle","inscrits":261,"abs":55,"votants":206,"blancs":17,"nuls":3,"exp":186,"res":[{"panneau":"2","voix":99},{"panneau":"1","voix":87}]}
84
575
<reponame>sarang-apps/darshan_browser<filename>chrome/browser/profiling_host/chrome_browser_main_extra_parts_profiling.h
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CHROME_BROWSER_PROFILING_HOST_CHROME_BROWSER_MAIN_EXTRA_PARTS_PROFILING_H_
#define CHROME_BROWSER_PROFILING_HOST_CHROME_BROWSER_MAIN_EXTRA_PARTS_PROFILING_H_

#include "base/compiler_specific.h"
#include "base/macros.h"
#include "chrome/browser/chrome_browser_main_extra_parts.h"

class ChromeBrowserMainExtraPartsProfiling : public ChromeBrowserMainExtraParts {
 public:
  ChromeBrowserMainExtraPartsProfiling();
  ~ChromeBrowserMainExtraPartsProfiling() override;

 private:
  // ChromeBrowserMainExtraParts overrides.
  void PostCreateThreads() override;

  DISALLOW_COPY_AND_ASSIGN(ChromeBrowserMainExtraPartsProfiling);
};

#endif  // CHROME_BROWSER_PROFILING_HOST_CHROME_BROWSER_MAIN_EXTRA_PARTS_PROFILING_H_
357
20,995
// Copyright 2021 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef INCLUDE_V8_DATA_H_ #define INCLUDE_V8_DATA_H_ #include "v8-local-handle.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory) namespace v8 { class Context; /** * The superclass of objects that can reside on V8's heap. */ class V8_EXPORT Data { public: /** * Returns true if this data is a |v8::Value|. */ bool IsValue() const; /** * Returns true if this data is a |v8::Module|. */ bool IsModule() const; /** * Returns true if this data is a |v8::Private|. */ bool IsPrivate() const; /** * Returns true if this data is a |v8::ObjectTemplate|. */ bool IsObjectTemplate() const; /** * Returns true if this data is a |v8::FunctionTemplate|. */ bool IsFunctionTemplate() const; /** * Returns true if this data is a |v8::Context|. */ bool IsContext() const; private: Data(); }; /** * A fixed-sized array with elements of type Data. */ class V8_EXPORT FixedArray : public Data { public: int Length() const; Local<Data> Get(Local<Context> context, int i) const; }; } // namespace v8 #endif // INCLUDE_V8_DATA_H_
474
2,151
/*
 * Copyright (C) 2015 The Android Open Source Project
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.databinding.testapp;

import android.databinding.testapp.databinding.IncludeNoVariablesBinding;
import android.test.UiThreadTest;
import android.view.ViewGroup;
import android.widget.TextView;

public class NoVariableIncludeTest extends BaseDataBinderTest<IncludeNoVariablesBinding> {

    public NoVariableIncludeTest() {
        super(IncludeNoVariablesBinding.class);
    }

    @UiThreadTest
    public void testInclude() {
        initBinder();
        mBinder.executePendingBindings();
        assertNotNull(mBinder.included);
        assertNotNull(mBinder.included.textView);
        String expectedValue = getActivity().getResources().getString(R.string.app_name);
        assertEquals(expectedValue, mBinder.included.textView.getText().toString());
        TextView noIdInclude = (TextView) ((ViewGroup) mBinder.getRoot()).getChildAt(1);
        assertEquals(expectedValue, noIdInclude.getText().toString());
    }
}
501
5,342
<filename>src/vp8/util/debug.cc<gh_stars>1000+ #include <sys/types.h> #ifdef _WIN32 #include <io.h> #include <fcntl.h> #else #include <unistd.h> #include <sys/fcntl.h> #endif #include <errno.h> #include "debug.hh" #include "memory.hh" namespace LeptonDebug{ int med_err; int amd_err; int avg_err; int ori_err; int loc_err; int luma_debug_width; int luma_debug_height; int chroma_debug_width; int chroma_debug_height; int getDebugWidth(int color){ return color == 0 ? luma_debug_width : chroma_debug_width; } int getDebugHeight(int color){ return color == 0 ? luma_debug_height : chroma_debug_height; } #if defined(DUMP_RAW_IMAGE) int load_raw_fd_output(const char *fname) { return open(fname, O_CREAT|O_TRUNC|O_WRONLY, S_IWUSR | S_IRUSR); } char * serialize_unsigned_int(unsigned int value, char *output, bool term = true) { int counter = value; char *end = output; do { ++end; counter /= 10; } while(counter); if (term) { *end = 0; } char *retval = end; do { *--end = '0' + value % 10; value /= 10; }while(value); return retval; } static ptrdiff_t write_full(int fd, unsigned char * data, size_t size) { size_t total_written = 0; ptrdiff_t written = 0; do { written = write(fd, data + total_written, size - total_written); if (written <= 0) { if (errno == EINTR) { continue; } return -1; } total_written += written; } while(total_written < size); return total_written; } void dumpDebugFile(int fd, int width, int height, unsigned char *data) { char header[256] = "P5\n"; char * width_end = serialize_unsigned_int(width, header + 3); *width_end = ' '; ++width_end; width_end = serialize_unsigned_int(height, width_end); *width_end = ' '; ++width_end; width_end = serialize_unsigned_int(255, width_end); *width_end = '\n'; ++width_end; *width_end = '\0'; write_full(fd, (unsigned char*)header, width_end - header); write_full(fd, data, width * height); } void dumpDebugData() { dumpDebugFile(raw_decoded_fp_Y, luma_debug_width, luma_debug_height, raw_YCbCr[0]); dumpDebugFile(raw_decoded_fp_Cb, chroma_debug_width, chroma_debug_height, raw_YCbCr[1]); dumpDebugFile(raw_decoded_fp_Cr, chroma_debug_width, chroma_debug_height, raw_YCbCr[2]); } void setupDebugData(int lumaWidth, int lumaHeight, int chromaWidth, int chromaHeight) { raw_YCbCr[0] = (unsigned char*)custom_calloc(lumaWidth * lumaHeight); raw_YCbCr[1] = (unsigned char*)custom_calloc(chromaWidth * chromaHeight); raw_YCbCr[2] = (unsigned char*)custom_calloc(chromaWidth * chromaHeight); luma_debug_width = lumaWidth; luma_debug_height = lumaHeight; chroma_debug_width = chromaWidth; chroma_debug_height = chromaHeight; } #else int load_raw_fd_output(const char * fname) { return -1; } void dumpDebugData(){ } void setupDebugData(int lumaWidth, int lumaHeight, int chromaWidth, int chromaHeight){ } #endif int raw_decoded_fp_Y = load_raw_fd_output("/tmp/raw_Y.pgm"); int raw_decoded_fp_Cb = load_raw_fd_output("/tmp/raw_Cb.pgm"); int raw_decoded_fp_Cr = load_raw_fd_output("/tmp/raw_Cr.pgm"); unsigned char *raw_YCbCr[4] = {nullptr, nullptr, nullptr, nullptr}; }
1,471
590
/*!
	@file
	@author		<NAME>
	@date		07/2008
*/

#include "Precompiled.h"
#include "ItemBoxWindow.h"

namespace demo
{

	ItemBoxWindow::ItemBoxWindow(const std::string& _layout) :
		BaseLayout(_layout)
	{
		assignBase(mItemBox, "box_Items");
	}

} // namespace demo
109
3,861
/*
 * proto.h -- function prototypes
 *
 * SOFTWARE RIGHTS
 *
 * We reserve no LEGAL rights to the Purdue Compiler Construction Tool
 * Set (PCCTS) -- PCCTS is in the public domain.  An individual or
 * company may do whatever they wish with source code distributed with
 * PCCTS or the code generated by PCCTS, including the incorporation of
 * PCCTS, or its output, into commercial software.
 *
 * We encourage users to develop software with PCCTS.  However, we do ask
 * that credit is given to us for developing PCCTS.  By "credit",
 * we mean that if you incorporate our source code into one of your
 * programs (commercial product, research project, or otherwise) that you
 * acknowledge this fact somewhere in the documentation, research report,
 * etc...  If you like PCCTS and have developed a nice tool with the
 * output, please mention that you developed it using PCCTS.  In
 * addition, we ask that this header remain intact in our source code.
 * As long as these guidelines are kept, we expect to continue enhancing
 * this system and expect to make other tools available as they are
 * completed.
 *
 * ANTLR 1.33
 * <NAME>
 * Parr Research Corporation
 * with Purdue University and AHPCRC, University of Minnesota
 * 1989-2001
 */

/*  V a r i a b l e s  */

extern int tp;
extern Junction *SynDiag;
extern char Version[];
extern char VersionDef[];

#ifdef __cplusplus
extern void (*fpPrint[])(...);
#else
extern void (*fpPrint[])();
#endif

#ifdef __cplusplus
extern struct _set (*fpReach[])(...);
#else
extern struct _set (*fpReach[])();
#endif

#ifdef __cplusplus
extern struct _tree *(*fpTraverse[])(...);
#else
extern struct _tree *(*fpTraverse[])();
#endif

#ifdef __cplusplus
extern void (**fpTrans)(...);
#else
extern void (**fpTrans)();
#endif

#ifdef __cplusplus
extern void (**fpJTrans)(...);
#else
extern void (**fpJTrans)();
#endif

#ifdef __cplusplus
extern void (*C_Trans[NumNodeTypes+1])(...);
#else
extern void (*C_Trans[])();
#endif

#ifdef __cplusplus
extern void (*C_JTrans[NumJuncTypes+1])(...);
#else
extern void (*C_JTrans[])();
#endif

extern int BlkLevel;
extern int CurFile;
extern char *CurPredName;
extern char *CurRule;
extern int CurRuleDebug;                          /* MR13 */
extern Junction *CurRuleBlk;
extern RuleEntry *CurRuleNode;
extern ListNode *CurElementLabels;
extern ListNode *CurAstLabelsInActions;           /* MR27 */
extern ListNode *ContextGuardPredicateList;       /* MR13 */
extern ListNode *CurActionLabels;
extern int numericActionLabel;                    /* MR10 << ... $1 ... >> or << ... $1 ... >>? */
extern ListNode *NumericPredLabels;               /* MR10 << ... $1 ... >>? ONLY */
extern char *FileStr[];
extern int NumFiles;
extern int EpToken;
extern int WildCardToken;
extern Entry **Tname, **Texpr, **Rname, **Fcache, **Tcache, **Elabel, **Sname, **Pname;  /* MR11 */
extern ListNode *ExprOrder;
extern ListNode **Cycles;
extern int TokenNum;
extern int LastTokenCounted;
extern ListNode *BeforeActions, *AfterActions, *LexActions;

/* MR1                                                                      */
/* MR1  11-Apr-97   Provide mechanism for inserting code into DLG class    */
/* MR1              via #lexmember <<....>> & #lexprefix <<...>>           */
/* MR1                                                                      */

extern ListNode *LexMemberActions;                /* MR1 */
extern ListNode *LexPrefixActions;                /* MR1 */

extern set *fset;      /* for constrained search */   /* MR11 */
extern int maxk;       /* for constrained search */   /* MR11 */

extern int Save_argc;                             /* MR10 */
extern char **Save_argv;                          /* MR10 */
extern ListNode *eclasses, *tclasses;
extern char *HdrAction;
extern char *FirstAction;                         /* MR11 */
extern FILE *ErrFile;
extern char *RemapFileName;
extern char *ErrFileName;
extern char *DlgFileName;
extern char *DefFileName;
extern char *ModeFileName;
extern char *StdMsgName;
extern int NumRules;
extern Junction **RulePtr;
extern int LL_k;
extern int CLL_k;
extern char *decodeJType[];
extern int PrintOut;
extern int PrintAnnotate;
extern int CodeGen;
extern int LexGen;
extern int esetnum;
extern int setnum;
extern int wordnum;
extern int GenAST;
extern int GenANSI;
extern int **FoStack;
extern int **FoTOS;
extern int GenExprSetsOpt;
extern FILE *DefFile;
extern int CannotContinue;
extern int GenCR;
extern int GenLineInfo;
extern int GenLineInfoMS;
extern int action_file, action_line;
extern int TraceGen;
extern int CurAmbigAlt1, CurAmbigAlt2, CurAmbigline, CurAmbigfile;
extern char *CurAmbigbtype;
extern int elevel;
extern int GenEClasseForRules;
extern FILE *input, *output;
extern char **TokenStr, **ExprStr;
extern int CurrentLexClass, NumLexClasses;
extern LClass lclass[];
extern char LexStartSymbol[];
extern char *CurRetDef;
extern char *CurParmDef;
extern int OutputLL_k;
extern int TreeResourceLimit;
extern int DemandLookahead;
extern char *RulePrefix;
extern int GenStdPccts;
extern char *stdpccts;
extern int ParseWithPredicates;
extern int ConstrainSearch;
extern int PURIFY;                                /* MR23 */
extern set MR_CompromisedRules;                   /* MR14 */
extern int MR_AmbSourceSearch;                    /* MR11 */
extern int MR_SuppressSearch;                     /* MR13 */
extern int MR_AmbSourceSearchGroup;               /* MR11 */
extern int MR_AmbSourceSearchChoice;              /* MR11 */
extern int MR_AmbSourceSearchLimit;               /* MR11 */
extern int MR_usingPredNames;                     /* MR11 */
extern int MR_ErrorSetComputationActive;          /* MR14 */
extern char *MR_AmbAidRule;                       /* MR11 */
extern int MR_AmbAidLine;                         /* MR11 */
extern int MR_AmbAidMultiple;                     /* MR11 */
extern int MR_AmbAidDepth;                        /* MR11 */
extern int MR_skipped_e3_report;                  /* MR11 */
extern int MR_matched_AmbAidRule;                 /* MR11 */
extern int MR_Inhibit_Tokens_h_Gen;               /* MR13 */
extern int NewAST;                                /* MR13 */
extern int tmakeInParser;                         /* MR23 */
extern int AlphaBetaTrace;                        /* MR14 */
extern int MR_BlkErr;                             /* MR21 */
extern int MR_AlphaBetaWarning;                   /* MR14 */
extern int MR_AlphaBetaMessageCount;              /* MR14 */
extern int MR_MaintainBackTrace;                  /* MR14 */
extern int MR_BadExprSets;                        /* MR13 */
extern int FoundGuessBlk;
extern int FoundException;
extern int FoundAtOperator;                       /* MR6 */
extern int FoundExceptionGroup;                   /* MR6 */
extern int WarningLevel;
extern int UseStdout;                             /* MR6 */
extern int TabWidth;                              /* MR6 */
extern int pLevel;
extern int pAlt1;
extern int pAlt2;
extern int AImode;
extern int HoistPredicateContext;
extern int MRhoisting;                            /* MR9 */
extern int MRhoistingk;                           /* MR13 */
extern int MR_debugGenRule;                       /* MR11 */
extern int GenCC;
extern char *ParserName;
extern char *StandardSymbols[];
extern char *ASTSymbols[];
extern set reserved_positions;
extern set all_tokens;
extern set imag_tokens;
extern set tokclasses;
extern ListNode *ForcedTokens;
extern int *TokenInd;
extern FILE *Parser_h, *Parser_c;
extern char CurrentClassName[];
extern int no_classes_found;
extern char Parser_h_Name[];
extern char Parser_c_Name[];
extern char MRinfoFile_Name[];                    /* MR10 */
extern FILE *MRinfoFile;                          /* MR10 */
extern int MRinfo;                                /* MR10 */
extern int MRinfoSeq;                             /* MR10 */
extern int InfoP;                                 /* MR10 */
extern int InfoT;                                 /* MR10 */
extern int InfoF;                                 /* MR10 */
extern int InfoM;                                 /* MR10 */
extern int InfoO;                                 /* MR12 */
extern int PotentialSuppression;                  /* MR10 */
extern int PotentialDummy;                        /* MR10 */
extern int TnodesInUse;                           /* MR10 */
extern int TnodesPeak;                            /* MR10 */
extern int TnodesReportThreshold;                 /* MR11 */
extern int TnodesAllocated;                       /* MR10 */
extern char *ClassDeclStuff;                      /* MR10 */
extern char *BaseClassName;                       /* MR22 */
extern ListNode *class_before_actions, *class_after_actions;
extern char *UserTokenDefsFile;
extern int UserDefdTokens;
extern ListNode *MetaTokenNodes;
extern char *OutputDirectory;
extern int DontCopyTokens;
extern int LTinTokenAction;                       /* MR23 */
extern set AST_nodes_refd_in_actions;
extern ListNode *CurExGroups;
extern int CurBlockID;
extern int CurAltNum;
extern Junction *CurAltStart;
extern Junction *OuterAltStart;                   /* chain exception groups MR7 */
extern ExceptionGroup *DefaultExGroup;
extern int NumSignals;
extern int ContextGuardTRAV;
extern Junction *MR_RuleBlkWithHalt;              /* MR10 */
extern PointerStack MR_BackTraceStack;            /* MR10 */
extern PointerStack MR_PredRuleRefStack;          /* MR10 */
extern PointerStack MR_RuleBlkWithHaltStack;      /* MR10 */

/*                                                                          */
/* MR1  10-Apr-97 MR1  Previously unable to put right shift operator       */
/* MR1                 in DLG action                                        */
/*                                                                          */

extern int tokenActionActive;                     /* MR1 */

extern char *PRED_OR_LIST;                        /* MR10 */
extern char *PRED_AND_LIST;                       /* MR10 */

#ifdef __VMS
#define STRICMP strcasecmp                        /* MR21 */
#else
#define STRICMP stricmp                           /* MR21 */
#endif

/* MR26 */
#ifdef PCCTS_USE_STDARG
extern Tree *tmake(Tree *root, ...);
#else
extern Tree *tmake();
#endif

#ifdef __USE_PROTOS
extern int STRICMP(const char*, const char*);
extern void istackreset(void);
extern int istacksize(void);
extern void pushint(int);
extern int popint( void );
extern int istackempty( void );
extern int topint( void );
extern void NewSetWd( void );
extern void DumpSetWd( void );
extern void DumpSetWdForC( void );
extern void DumpSetWdForCC( void );
extern void NewSet( void );
extern void FillSet( set );
extern void ComputeErrorSets( void );
extern void ComputeTokSets( void );
extern void SubstErrorClass( set * );
extern int DefErrSet( set *, int, char * );
extern int DefErrSetForC( set *, int, char * );
extern int DefErrSetForCC( set *, int, char * );
extern int DefErrSet1(int, set *, int, char *);                         /* MR21 */
extern int DefErrSetForC1(int, set *, int, char *, const char* );      /* MR21 */
extern int DefErrSetForCC1(int, set *, int, char *, const char* );     /* MR21 */
extern int DefErrSetWithSuffix(int, set *, int, char *, const char *); /* MR21 */
extern void GenErrHdr( void );
extern void dumpExpr( FILE *, char * );
extern void addParm( Node *, char * );
extern Graph buildAction( char *, int, int, int );
extern Graph buildToken( char * );
extern Graph buildWildCard( char * );
extern Graph buildRuleRef( char * );
extern Graph Or( Graph, Graph );
extern Graph Cat( Graph, Graph );
extern Graph makeOpt( Graph, int, char *);
extern Graph makeBlk( Graph, int, char *);
extern Graph makeLoop( Graph, int, char *);
extern Graph makePlus( Graph, int, char *);
extern Graph emptyAlt( void );
extern Graph emptyAlt3( void );
extern TokNode * newTokNode( void );
extern RuleRefNode * newRNode( void );
extern Junction * newJunction( void );
extern ActionNode * newActionNode( void );
extern char * makelocks( void );
extern void preorder( Tree * );
extern Tree * tnode( int );
extern void _Tfree( Tree * );
extern Tree * tdup( Tree * );
extern int is_single_tuple( Tree * );
extern Tree * tappend( Tree *, Tree * );
extern void Tfree( Tree * );
extern Tree * tlink( Tree *, Tree *, int );
extern Tree * tshrink( Tree * );
extern Tree * tflatten( Tree * );
extern Tree * tJunc( Junction *, int, set * );
extern Tree * tRuleRef( RuleRefNode *, int, set * );
extern Tree * tToken( TokNode *, int, set * );
extern Tree * tAction( ActionNode *, int, set * );
extern int tmember( Tree *, Tree * );
extern int tmember_constrained( Tree *, Tree * );
extern Tree * tleft_factor( Tree * );
extern Tree * trm_perm( Tree *, Tree * );
extern void tcvt( set *, Tree * );
extern Tree * permute( int, int );
extern Tree * VerifyAmbig( Junction *, Junction *, unsigned **, set *, Tree **, Tree **, int * );
extern set rJunc( Junction *, int, set * );
extern set rRuleRef( RuleRefNode *, int, set * );
extern set rToken( TokNode *, int, set * );
extern set rAction( ActionNode *, int, set * );
extern void HandleAmbiguity( Junction *, Junction *, Junction *, int );
extern set First( Junction *, int, int, int * );
extern void freeBlkFsets( Junction * );
extern void genAction( ActionNode * );
extern void genRuleRef( RuleRefNode * );
extern void genToken( TokNode * );
extern void genOptBlk( Junction * );
extern void genLoopBlk( Junction *, Junction *, Junction *, int );
extern void genLoopBegin( Junction * );
extern void genPlusBlk( Junction * );
extern void genSubBlk( Junction * );
extern void genRule( Junction * );
extern void genJunction( Junction * );
extern void genEndBlk( Junction * );
extern void genEndRule( Junction * );
extern void genHdr( int );
extern void genHdr1( int );
extern void dumpAction( char *, FILE *, int, int, int, int );
extern void dumpActionPlus(ActionNode*, char *, FILE *, int, int, int, int );  /* MR21 */
extern Entry ** newHashTable( void );
extern Entry * hash_add( Entry **, char *, Entry * );
extern Entry * hash_get( Entry **, char * );
extern void hashStat( Entry ** );
extern char * mystrdup( char * );
extern void genLexDescr( void );
extern void dumpLexClasses( FILE * );
extern void genDefFile( void );
extern void DumpListOfParmNames( char *, FILE *, int );  /* MR5 janm 26-May-97 */
extern int DumpNextNameInDef( char **, FILE * );
extern void DumpOldStyleParms( char *, FILE * );
extern void DumpType( char *, FILE * );
extern int strmember( char *, char * );
/* extern int HasComma( char * );  MR23 Replaced by hasMultipleOperands() */
extern void DumpRetValStruct( FILE *, char *, int );
extern char * StripQuotes( char * );
extern int main( int, char *[] );
extern void readDescr( void );
extern FILE * NextFile( void );
extern char * outnameX( char *, char *);
extern char * outname( char * );
extern void fatalFL( char *, char *, int );
extern void fatal_intern( char *, char *, int );
extern void cleanUp( void );
extern char * eMsg3( char *, char *, char *, char * );
extern char * eMsgd( char *, int );
extern char * eMsgd2( char *, int, int );
extern void s_fprT( FILE *, set );
extern char * TerminalString( int );
extern void lexclass( char * );
extern void lexmode( int );
extern int LexClassIndex( char * );
extern int hasAction( char * );
extern void setHasAction( char *, char * );
extern int addTname( char * );
extern int addTexpr( char * );
extern int Tnum( char * );
extern void Tklink( char *, char * );
extern Entry * newEntry( char *, int );
extern void list_add( ListNode **, void * );
extern void list_free( ListNode **, int freeData );  /* MR10 */
extern void list_apply( ListNode *, void (*)(void *) );
extern int list_search_cstring (ListNode *, char *);  /* MR27 */
extern char * Fkey( char *, int, int );
extern void FoPush( char *, int );
extern void FoPop( int );
extern void RegisterCycle( char *, int );
extern void ResolveFoCycles( int );
extern void pJunc( Junction * );
extern void pRuleRef( RuleRefNode * );
extern void pToken( TokNode * );
extern void pAction( ActionNode * );
extern void FoLink( Node * );
extern void addFoLink( Node *, char *, Junction * );
extern void GenCrossRef( Junction * );
extern void defErr( char *, long, long, long, long, long, long );
extern void genStdPCCTSIncludeFile(FILE *,char *);  /* MR10 */
extern char * pcctsBaseName(char *);  /* MR32 */
extern Predicate *find_predicates(Node *);  /* MR10 */
extern Predicate *MR_find_predicates_and_supp(Node *);  /* MR13 */
extern int predicateLookaheadDepth(ActionNode *);  /* MR10 */
extern void predicate_free(Predicate *);  /* MR10 */
extern Predicate * predicate_dup(Predicate *);  /* MR10 */
extern Predicate * predicate_dup_without_context(Predicate *);  /* MR11 */
extern void GenRulePrototypes(FILE *, Junction *);
extern Junction *first_item_is_guess_block(Junction *);
extern Junction *first_item_is_guess_block_extra(Junction * q);  /* MR30 */
extern Junction *analysis_point(Junction *);
extern Tree *make_tree_from_sets(set *, set *);
extern Tree *tdup_chain(Tree *);
extern Tree *tdif(Tree *, Predicate *, set *, set *);
extern set covered_set(Predicate *);
extern void AmbiguityDialog(Junction *, int, Junction *, Junction *, int *, int *);
extern void dumpAmbigMsg(set *, FILE *, int);
extern void GenRuleFuncRedefs(FILE *, Junction *);
extern void GenPredefinedSymbolRedefs(FILE *);
extern void GenASTSymbolRedefs(FILE *);
extern void GenRemapFile(void);
extern void GenSetRedefs(FILE *);
extern ForcedToken *newForcedToken(char *, int);
extern void RemapForcedTokens(void);
extern char *TokenOrExpr(int);
extern void setUpperRange(TokNode *, char *);
extern void GenParser_c_Hdr(void);
extern void GenParser_h_Hdr(void);
extern void GenRuleMemberDeclarationsForCC(FILE *, Junction *);
extern int addForcedTname( char *, int );
extern char *OutMetaName(char *);
extern void OutFirstSetSymbol(Junction *q, char *);  /* MR21 */
extern void warnNoFL(char *err);
extern void warnFL(char *err,char *f,int l);
extern void warn(char *err);
extern void warnNoCR( char *err );
extern void errNoFL(char *err);
extern void errFL(char *err,char *f,int l);
extern void err(char *err);
extern void errNoCR( char *err );
extern void genPredTree( Predicate *p, Node *j, int ,int);
extern UserAction *newUserAction(char *);
extern char *gate_symbol(char *name);
extern char *makeAltID(int blockid, int altnum);
extern void DumpRemainingTokSets(void);
extern void DumpANSIFunctionArgDef(FILE *f, Junction *q, int bInit);  /* MR23 */
extern void DumpFormals(FILE *, char *, int bInit);  /* MR23 */
extern char* hideDefaultArgs(const char* pdecl);  /* MR22 VHS */
extern Predicate *computePredFromContextGuard(Graph,int *msgDone);  /* MR21 */
extern void recomputeContextGuard(Predicate *);  /* MR13 */
extern Predicate *new_pred(void);
extern void chkGTFlag(void);
extern void leAdd(LabelEntry *);  /* MR7 */
extern void leFixup(void);  /* MR7 */
extern void egAdd(ExceptionGroup *);  /* MR7 */
extern void egFixup(void);  /* MR7 */
extern void altAdd(Junction *);  /* MR7 */
extern void altFixup(void);  /* MR7 */
extern Predicate * MR_find_in_aSubBlk(Junction *alt);  /* MR10 */
extern Predicate * MR_predFlatten(Predicate *p);  /* MR10 */
extern Predicate * MR_predSimplifyALL(Predicate *p);  /* MR10 */
extern Predicate * MR_predSimplifyALLX(Predicate *p,int skipPass3);  /* MR10 */
extern int MR_allPredLeaves(Predicate *p);  /* MR10 */
extern void MR_cleanup_pred_trees(Predicate *p);  /* MR10 */
extern int MR_predicate_context_completed(Predicate *p);  /* MR10 */
extern void MR_check_pred_too_long(Predicate *p,set completion);  /* MR10 */
extern Tree * MR_remove_epsilon_from_tree(Tree *t);  /* MR10 */
extern Tree * MR_computeTreeAND(Tree *l,Tree *r);  /* MR10 */
extern int MR_tree_equ(Tree *big, Tree *small);  /* MR10 */
extern set MR_First(int ck,Junction *j,set *incomplete);  /* MR10 */
extern set MR_compute_pred_set(Predicate *p);  /* MR10 */
extern Tree * MR_compute_pred_tree_context(Predicate *p);  /* MR10 */
extern int MR_pointerStackPush(PointerStack *,void *);  /* MR10 */
extern void * MR_pointerStackPop(PointerStack *);  /* MR10 */
extern void * MR_pointerStackTop(PointerStack *);  /* MR10 */
extern void MR_pointerStackReset(PointerStack *);  /* MR10 */
extern void MR_backTraceReport(void);  /* MR10 */
extern void MR_alphaBetaTraceReport(void);  /* MR14 */
extern void MR_dumpRuleSet(set);  /* MR14 */
extern void MR_predContextPresent(Predicate *p,int *,int *);  /* MR10 */
extern void MR_dumpPred(Predicate *p,int withContext);  /* MR10 */
extern void MR_dumpPred1(int,Predicate *p,int withContext);  /* MR10 */
extern void MR_xxxIndent(FILE *f,int depth);  /* MR11 */
extern void MR_outputIndent(int depth);  /* MR11 */
extern void MR_stderrIndent(int depth);  /* MR11 */
extern Junction * MR_ruleReferenced(RuleRefNode *rrn);  /* MR10 */
extern Junction * MR_nameToRuleBlk(char *);  /* MR10 */
extern void MR_releaseResourcesUsedInRule(Node *);  /* MR10 */
extern void MR_dumpTreeX(int depth,Tree *t,int across);  /* MR10 */
extern void MR_dumpTreeF(FILE *f,int depth,Tree *t,int across);  /* MR10 */
extern void DumpFcache(void);  /* MR10 */
extern void MR_dumpTokenSet(FILE *f,int depth,set s);  /* MR10 */
extern void MR_traceAmbSource(set *,Junction *,Junction *);  /* MR11 */
extern void MR_traceAmbSourceK(Tree *,Junction *a1,Junction *a2);  /* MR11 */
extern void MR_traceAmbSourceKclient(void);  /* MR20 */
extern Node *MR_advance(Node *);  /* MR11 */
extern int MR_offsetFromRule(Node *);  /* MR11 */
extern char *MR_ruleNamePlusOffset(Node *);  /* MR11 */
extern int MR_max_height_of_tree(Tree *);  /* MR11 */
extern int MR_all_leaves_same_height(Tree *,int);  /* MR11 */
extern void MR_projectTreeOntoSet(Tree *t,int k,set *);  /* MR11 */
extern Tree *MR_make_tree_from_set(set);  /* MR11 */
extern Predicate *MR_removeRedundantPredPass3(Predicate *);  /* MR11 */
extern void MR_pred_depth(Predicate *,int *);  /* MR11 */
extern int MR_comparePredicates(Predicate *,Predicate *);  /* MR11 */
extern Predicate * MR_unfold(Predicate *);  /* MR11 */
extern void MR_simplifyInverted(Predicate *,int);  /* MR11 */
extern int MR_secondPredicateUnreachable                /* MR11 */
                 (Predicate *first,Predicate *second);  /* MR11 */
extern void MR_clearPredEntry(Predicate *);  /* MR11 */
extern void MR_orphanRules(FILE *);  /* MR12 */
extern void MR_merge_contexts(Tree *);  /* MR12 */
extern int ci_strequ(char *,char *);  /* MR12 */
extern void MR_guardPred_plainSet(ActionNode *anode,Predicate *);  /* MR12c */
extern void MR_suppressSearchReport(void);  /* MR12c */
extern Predicate * MR_suppressK(Node *,Predicate *);  /* MR13 */
extern void MR_backTraceDumpItem(FILE *,int skip,Node *n);  /* MR13 */
extern void MR_backTraceDumpItemReset(void);  /* MR13 */
extern Junction * MR_junctionWithoutP2(Junction *);  /* MR13 */
extern void MR_setConstrainPointer(set *);  /* MR18 */
extern void BlockPreambleOption(Junction *q, char * pSymbol);  /* MR23 */
extern char* getInitializer(char *);  /* MR23 */
extern char *endFormal(char *pStart,        /* MR23 */
                       char **ppDataType,   /* MR23 */
                       char **ppSymbol,     /* MR23 */
                       char **ppEqualSign,  /* MR23 */
                       char **ppValue,      /* MR23 */
                       char **ppSeparator,  /* MR23 */
                       int *pNext);         /* MR23 */
extern char *strBetween(char *pStart,       /* MR23 */
                        char *pNext,        /* MR23 */
                        char *pStop);       /* MR23 */
extern int hasMultipleOperands(char *);  /* MR23 */
extern void DumpInitializers(FILE*, RuleEntry*, char*);  /* MR23 */
extern int isTermEntryTokClass(TermEntry *);  /* MR23 */
extern int isEmptyAlt(Node *, Node *);  /* MR23 */

#else

extern int STRICMP();
extern void istackreset();
extern int istacksize();
extern void pushint();
extern int popint();
extern int istackempty();
extern int topint();
extern void NewSetWd();
extern void DumpSetWd();
extern void DumpSetWdForC();
extern void DumpSetWdForCC();
extern void NewSet();
extern void FillSet();
extern void ComputeErrorSets();
extern void ComputeTokSets();
extern void SubstErrorClass();
extern int DefErrSet();
extern int DefErrSetForC();
extern int DefErrSetForCC();
extern int DefErrSet1();
extern int DefErrSetForC1();
extern int DefErrSetForCC1();
extern int DefErrSetWithSuffix();  /* MR21 */
extern void GenErrHdr();
extern void dumpExpr();
extern void addParm();
extern Graph buildAction();
extern Graph buildToken();
extern Graph buildWildCard();
extern Graph buildRuleRef();
extern Graph Or();
extern Graph Cat();
extern Graph makeOpt();
extern Graph makeBlk();
extern Graph makeLoop();
extern Graph makePlus();
extern Graph emptyAlt();
extern Graph emptyAlt3();
extern TokNode * newTokNode();
extern RuleRefNode * newRNode();
extern Junction * newJunction();
extern ActionNode * newActionNode();
extern char * makelocks();
extern void preorder();
extern Tree * tnode();
extern void _Tfree();
extern Tree * tdup();
extern int is_single_tuple();
extern Tree * tappend();
extern void Tfree();
extern Tree * tlink();
extern Tree * tshrink();
extern Tree * tflatten();
extern Tree * tJunc();
extern Tree * tRuleRef();
extern Tree * tToken();
extern Tree * tAction();
extern int tmember();
extern int tmember_constrained();
extern Tree * tleft_factor();
extern Tree * trm_perm();
extern void tcvt();
extern Tree * permute();
extern Tree * VerifyAmbig();
extern set rJunc();
extern set rRuleRef();
extern set rToken();
extern set rAction();
extern void HandleAmbiguity();
extern set First();
extern void freeBlkFsets();
extern void genAction();
extern void genRuleRef();
extern void genToken();
extern void genOptBlk();
extern void genLoopBlk();
extern void genLoopBegin();
extern void genPlusBlk();
extern void genSubBlk();
extern void genRule();
extern void genJunction();
extern void genEndBlk();
extern void genEndRule();
extern void genHdr();
extern void genHdr1();
extern void dumpAction();
extern void dumpActionPlus();  /* MR21 */
extern Entry ** newHashTable();
extern Entry * hash_add();
extern Entry * hash_get();
extern void hashStat();
extern char * mystrdup();
extern void genLexDescr();
extern void dumpLexClasses();
extern void genDefFile();
extern void DumpListOfParmNames();  /* MR5 janm 26-May-97 */
extern int DumpNextNameInDef();
extern void DumpOldStyleParms();
extern void DumpType();
extern int strmember();
/* extern int HasComma();  MR23 Replaced by hasMultipleOperands() */
extern void DumpRetValStruct();
extern char * StripQuotes();
extern int main();
extern void readDescr();
extern FILE * NextFile();
extern char * outnameX();
extern char * outname();
extern void fatalFL();
extern void fatal_intern();
extern void cleanUp();
extern char * eMsg3();
extern char * eMsgd();
extern char * eMsgd2();
extern void s_fprT();
extern char * TerminalString();
extern void lexclass();
extern void lexmode();
extern int LexClassIndex();
extern int hasAction();
extern void setHasAction();
extern int addTname();
extern int addTexpr();
extern int Tnum();
extern void Tklink();
extern Entry * newEntry();
extern void list_add();
extern void list_free();  /* MR10 */
extern void list_apply();
extern int list_search_cstring ();  /* MR27 */
extern char * Fkey();
extern void FoPush();
extern void FoPop();
extern void RegisterCycle();
extern void ResolveFoCycles();
extern void pJunc();
extern void pRuleRef();
extern void pToken();
extern void pAction();
extern void FoLink();
extern void addFoLink();
extern void GenCrossRef();
extern void defErr();
extern void genStdPCCTSIncludeFile();
extern char * pcctsBaseName();  /* MR32 */
extern Predicate *find_predicates();
extern Predicate *MR_find_predicates_and_supp();  /* MR13 */
extern int predicateLookaheadDepth();  /* MR10 */
extern void predicate_free();  /* MR10 */
extern Predicate * predicate_dup();  /* MR10 */
extern Predicate * predicate_dup_without_context();  /* MR11 */
extern void GenRulePrototypes();
extern Junction *first_item_is_guess_block();
extern Junction *first_item_is_guess_block_extra();  /* MR30 */
extern Junction *analysis_point();
extern Tree *make_tree_from_sets();
extern Tree *tdup_chain();
extern Tree *tdif();
extern set covered_set();
extern void AmbiguityDialog();
extern void dumpAmbigMsg();
extern void GenRuleFuncRedefs();
extern void GenPredefinedSymbolRedefs();
extern void GenASTSymbolRedefs();
extern void GenRemapFile();
extern void GenSetRedefs();
extern ForcedToken *newForcedToken();
extern void RemapForcedTokens();
extern char *TokenOrExpr();
extern void setUpperRange();
extern void GenParser_c_Hdr();
extern void GenParser_h_Hdr();
extern void GenRuleMemberDeclarationsForCC();
extern int addForcedTname();
extern char *OutMetaName();
extern void OutFirstSetSymbol();  /* MR21 */
extern void warnNoFL();
extern void warnFL();
extern void warn();
extern void warnNoCR();
extern void errNoFL();
extern void errFL();
extern void err();
extern void errNoCR();
extern void genPredTree();
extern UserAction *newUserAction();
extern char *gate_symbol();
extern char *makeAltID();
extern void DumpRemainingTokSets();
extern void DumpANSIFunctionArgDef();
extern void DumpFormals();  /* MR23 */
extern char* hideDefaultArgs();  /* MR22 VHS */
extern Predicate *computePredFromContextGuard();
extern void recomputeContextGuard();  /* MR13 */
extern Predicate *new_pred();
extern void chkGTFlag();
extern void leAdd();  /* MR7 */
extern void leFixup();  /* MR7 */
extern void egAdd();  /* MR7 */
extern void egFixup();  /* MR7 */
extern void altAdd();  /* MR7 */
extern void altFixup();  /* MR7 */
extern Predicate * MR_find_in_aSubBlk();  /* MR10 */
extern Predicate * MR_predFlatten();  /* MR10 */
extern Predicate * MR_predSimplifyALL();  /* MR10 */
extern Predicate * MR_predSimplifyALLX();  /* MR10 */
extern void MR_cleanup_pred_trees();  /* MR10 */
extern int MR_allPredLeaves();  /* MR10 */
extern int MR_predicate_context_completed();  /* MR10 */
extern void MR_check_pred_too_long();  /* MR10 */
extern Tree * MR_remove_epsilon_from_tree();  /* MR10 */
extern Tree * MR_computeTreeAND();  /* MR10 */
extern int MR_tree_equ();  /* MR10 */
extern set MR_First();  /* MR10 */
extern set MR_compute_pred_set();  /* MR10 */
extern Tree * MR_compute_pred_tree_context();  /* MR10 */
extern int MR_pointerStackPush();  /* MR10 */
extern void * MR_pointerStackPop();  /* MR10 */
extern void * MR_pointerStackTop();  /* MR10 */
extern void MR_pointerStackReset();  /* MR10 */
extern void MR_backTraceReport();  /* MR10 */
extern void MR_alphaBetaTraceReport();  /* MR14 */
extern void MR_dumpRuleSet();  /* MR14 */
extern void MR_predContextPresent();  /* MR10 */
extern void MR_dumpPred();  /* MR10 */
extern void MR_dumpPred1();  /* MR10 */
extern void MR_xxxIndent();  /* MR11 */
extern void MR_stderrIndent();  /* MR11 */
extern void MR_outputIndent();  /* MR11 */
extern Junction * MR_ruleReferenced();  /* MR10 */
extern void MR_releaseResourcesUsedInRule();  /* MR10 */
extern void MR_dumpTreeX();  /* MR10 */
extern void MR_dumpTreeF();  /* MR10 */
extern void DumpFcache();  /* MR10 */
extern void MR_dumpTokenSet();  /* MR10 */
extern void MR_traceAmbSource();  /* MR11 */
extern Node *MR_advance();  /* MR11 */
extern int MR_offsetFromRule();  /* MR11 */
extern char *MR_ruleNamePlusOffset();  /* MR11 */
extern void MR_traceAmbSourceK();  /* MR11 */
extern void MR_traceAmbSourceKclient();  /* [i_a] added */
extern int MR_max_height_of_tree();  /* MR11 */
extern int MR_all_leaves_same_height();  /* MR11 */
extern void MR_projectTreeOntoSet();  /* MR11 */
extern Tree *MR_make_tree_from_set();  /* MR11 */
extern Predicate *MR_removeRedundantPredPass3();  /* MR11 */
extern void MR_pred_depth();  /* MR11 */
extern int MR_comparePredicates();  /* MR11 */
extern Predicate * MR_unfold();  /* MR11 */
extern void MR_simplifyInverted();  /* MR11 */
extern int MR_secondPredicateUnreachable();  /* MR11 */
extern Junction * MR_nameToRuleBlk();  /* MR10 */
extern void MR_clearPredEntry();  /* MR11 */
extern void MR_orphanRules();  /* MR12 */
extern void MR_merge_contexts();  /* MR12 */
extern int ci_strequ();  /* MR12 */
extern void MR_guardPred_plainSet();  /* MR12c */
extern void MR_suppressSearchReport();  /* MR12c */
extern Predicate * MR_suppressK();  /* MR13 */
extern void MR_backTraceDumpItem();  /* MR13 */
extern void MR_backTraceDumpItemReset();  /* MR13 */
extern Junction * MR_junctionWithoutP2();  /* MR13 */
extern void MR_setConstrainPointer();  /* MR18 */
extern void BlockPreambleOption();  /* MR23 */
extern char* getInitializer();  /* MR23 */
extern int hasMultipleOperands();  /* MR23 */
extern char *endFormal();  /* MR23 */
extern char *strBetween();  /* MR23 */
extern void DumpInitializers();  /* MR23 */
extern int isTermEntryTokClass();  /* MR23 */
extern int isEmptyAlt();

#endif

#ifdef __USE_PROTOS
#include <stdlib.h>
#endif

/* MR20 <NAME> Create proper externs for dlg variables */

extern set attribsRefdFromAction;
extern int inAlt;
extern int UsedOldStyleAttrib;
extern int UsedNewStyleLabel;

#define MAX_BLK_LEVEL 100                     /* MR23 */
extern int CurBlockID_array[MAX_BLK_LEVEL];   /* MR23 */
extern int CurAltNum_array[MAX_BLK_LEVEL];    /* MR23 */