max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
398 | /*
Copyright 2013 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.chill.config;
/**
 * Checked exception signalling that a configuration value is missing,
 * malformed, or otherwise unusable.
 */
public class ConfigurationException extends Exception {
	/**
	 * @param message human-readable description of the configuration problem
	 */
	public ConfigurationException(String message) {
		super(message);
	}

	/**
	 * @param ex the underlying cause
	 */
	public ConfigurationException(Exception ex) {
		super(ex);
	}

	/**
	 * @param message human-readable description of the configuration problem
	 * @param ex      the underlying cause
	 */
	public ConfigurationException(String message, Exception ex) {
		super(message, ex);
	}
}
| 233 |
623 | <filename>Tools/IGCSInjector/IGCSInjector/CDataFile.h
//
// CDataFile Class Implementation
//
// The purpose of this class is to provide a simple, full featured means to
// store persistent data to a text file. It uses a simple key/value paradigm
// to achieve this. The class can read/write to standard Windows .ini files,
// and yet does not rely on any windows specific calls. It should work as
// well in a linux environment (with some minor adjustments) as it does in
// a Windows one.
//
// Written July, 2002 by <NAME> <<EMAIL>>
// If you use this class in your application, credit would be appreciated.
//
#pragma once
#include "stdafx.h"
#include <vector>
#include <fstream>
#include <string>
// NOTE(review): 'using namespace std' in a header leaks into every includer;
// consider qualifying names instead.
using namespace std;
// Globally defined structures, defines, & types
//////////////////////////////////////////////////////////////////////////////////
// AUTOCREATE_SECTIONS
// When set, this define will cause SetValue() to create a new section, if
// the requested section does not already exist.
#define AUTOCREATE_SECTIONS (1L<<1)
// AUTOCREATE_KEYS
// When set, this define causes SetValue() to create a new key, if the
// requested key does not already exist.
#define AUTOCREATE_KEYS (1L<<2)
// MAX_BUFFER_LEN
// Used simply as a max size of some internal buffers. Determines the maximum
// length of a line that will be read from or written to the file or the
// report output.
#define MAX_BUFFER_LEN 512
// eDebugLevel
// Used by our Report function to classify levels of reporting and severity
// of report.
enum e_DebugLevel
{
	// detailed programmatic informational messages used as an aid in
	// troubleshooting problems by programmers
	E_DEBUG = 0,
	// brief informative messages to use as an aid in troubleshooting
	// problems by production support and programmers
	E_INFO,
	// messages intended to notify help desk, production support and
	// programmers of possible issues with respect to the running application
	E_WARN,
	// messages that detail a programmatic error, these are typically
	// messages intended for help desk, production support, programmers and
	// occasionally users
	E_ERROR,
	// severe messages that are programmatic violations that will usually
	// result in application failure. These messages are intended for help
	// desk, production support, programmers and possibly users
	E_FATAL,
	// notice that all processing should be stopped immediately after the
	// log is written.
	E_CRITICAL
};
// Alias used throughout this header for the string type.
typedef std::string t_Str;
// CommentIndicators
// This constant contains the characters that we check for to determine if a
// line is a comment or not. Note that the first character in this constant is
// the one used when writing comments to disk (if the comment does not already
// contain an indicator)
const t_Str CommentIndicators = t_Str(";#");
// EqualIndicators
// This constant contains the characters that we check against to determine if
// a line contains an assignment ( key = value )
// Note that changing these from their defaults ("=:") WILL affect the
// ability of CDataFile to read/write to .ini files. Also, note that the
// first character in this constant is the one that is used when writing the
// values to the file. (EqualIndicators[0])
const t_Str EqualIndicators = t_Str("=:");
// WhiteSpace
// This constant contains the characters that the Trim() function removes from
// the head and tail of strings.
const t_Str WhiteSpace = t_Str(" \t\n\r");
// st_key
// This structure stores the definition of a key. A key is a named identifier
// that is associated with a value. It may or may not have a comment. All comments
// must PRECEDE the key on the line in the config file.
typedef struct st_key
{
t_Str szKey;
t_Str szValue;
t_Str szComment;
st_key()
{
szKey = t_Str("");
szValue = t_Str("");
szComment = t_Str("");
}
} t_Key;
typedef std::vector<t_Key> KeyList;
typedef KeyList::iterator KeyItor;
// st_section
// This structure stores the definition of a section. A section contains any number
// of keys (see st_keys), and may or may not have a comment. Like keys, all
// comments must precede the section.
typedef struct st_section
{
t_Str szName;
t_Str szComment;
KeyList Keys;
st_section()
{
szName = t_Str("");
szComment = t_Str("");
Keys.clear();
}
} t_Section;
typedef std::vector<t_Section> SectionList;
typedef SectionList::iterator SectionItor;
/// General Purpose Utility Functions ///////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////
// Report: printf-style diagnostic output, classified by e_DebugLevel.
void Report(e_DebugLevel DebugLevel, char *fmt, ...);
// GetNextWord: pops and returns the next whitespace-delimited token from CommandLine.
t_Str GetNextWord(t_Str& CommandLine);
// CompareNoCase: case-insensitive string comparison (strcmp-style result).
int CompareNoCase(t_Str str1, t_Str str2);
// Trim: strips WhiteSpace characters from both ends of szStr in place.
void Trim(t_Str& szStr);
// WriteLn: printf-style line output to an open stream; returns bytes written.
int WriteLn(fstream& stream, char* fmt, ...);
/// Class Definitions ///////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////
// CDataFile
// Simple key/value persistent storage backed by a Windows-style .ini text file.
class CDataFile
{
// Methods
public:
	// Constructors & Destructors
	/////////////////////////////////////////////////////////////////
	CDataFile();
	// Constructs and immediately loads the named file.
	CDataFile(t_Str szFileName);
	virtual ~CDataFile();
	// File handling methods
	/////////////////////////////////////////////////////////////////
	bool Load(t_Str szFileName);
	bool Save();
	// Data handling methods
	/////////////////////////////////////////////////////////////////
	// GetValue: Our default access method. Returns the raw t_Str value
	// Note that this returns keys specific to the given section only.
	t_Str GetValue(t_Str szKey, t_Str szSection = t_Str(""));
	// GetString: Returns the value as a t_Str
	t_Str GetString(t_Str szKey, t_Str szSection = t_Str(""));
	// GetFloat: Return the value as a float
	float GetFloat(t_Str szKey, t_Str szSection = t_Str(""));
	// GetInt: Return the value as an int
	int GetInt(t_Str szKey, t_Str szSection = t_Str(""));
	// GetBool: Return the value as a bool
	bool GetBool(t_Str szKey, t_Str szSection = t_Str(""));
	// SetValue: Sets the value of a given key. Will create the
	// key if it is not found and AUTOCREATE_KEYS is active.
	bool SetValue(t_Str szKey, t_Str szValue,
		t_Str szComment = t_Str(""), t_Str szSection = t_Str(""));
	// SetFloat: Sets the value of a given key. Will create the
	// key if it is not found and AUTOCREATE_KEYS is active.
	bool SetFloat(t_Str szKey, float fValue,
		t_Str szComment = t_Str(""), t_Str szSection = t_Str(""));
	// SetInt: Sets the value of a given key. Will create the
	// key if it is not found and AUTOCREATE_KEYS is active.
	bool SetInt(t_Str szKey, int nValue,
		t_Str szComment = t_Str(""), t_Str szSection = t_Str(""));
	// SetBool: Sets the value of a given key. Will create the
	// key if it is not found and AUTOCREATE_KEYS is active.
	bool SetBool(t_Str szKey, bool bValue,
		t_Str szComment = t_Str(""), t_Str szSection = t_Str(""));
	// Sets the comment for a given key.
	bool SetKeyComment(t_Str szKey, t_Str szComment, t_Str szSection = t_Str(""));
	// Sets the comment for a given section
	bool SetSectionComment(t_Str szSection, t_Str szComment);
	// DeleteKey: Deletes a given key from a specific section
	bool DeleteKey(t_Str szKey, t_Str szFromSection = t_Str(""));
	// DeleteSection: Deletes a given section.
	bool DeleteSection(t_Str szSection);
	// Key/Section handling methods
	/////////////////////////////////////////////////////////////////
	// CreateKey: Creates a new key in the requested section. The
	// Section will be created if it does not exist and the
	// AUTOCREATE_SECTIONS bit is set.
	bool CreateKey(t_Str szKey, t_Str szValue,
		t_Str szComment = t_Str(""), t_Str szSection = t_Str(""));
	// CreateSection: Creates the new section if it does not already
	// exist. Section is created with no keys.
	bool CreateSection(t_Str szSection, t_Str szComment = t_Str(""));
	// CreateSection: Creates the new section if it does not already
	// exist, and copies the keys passed into it into the new section.
	bool CreateSection(t_Str szSection, t_Str szComment, KeyList Keys);
	// Utility Methods
	/////////////////////////////////////////////////////////////////
	// SectionCount: Returns the number of valid sections in the database.
	int SectionCount();
	// KeyCount: Returns the total number of keys, across all sections.
	int KeyCount();
	// Clear: Initializes the member variables to their default states
	void Clear();
	// SetFileName: For use when creating the object by hand
	// initializes the file name so that it can be later saved.
	void SetFileName(t_Str szFileName);
	// CommentStr
	// Parses a string into a proper comment token/comment.
	t_Str CommentStr(t_Str szComment);
protected:
	// Note: I've tried to insulate the end user from the internal
	// data structures as much as possible. This is by design. Doing
	// so has caused some performance issues (multiple calls to a
	// GetSection() function that would otherwise not be necessary,etc).
	// But, I believe that doing so will provide a safer, more stable
	// environment. You'll notice that nothing returns a reference,
	// to modify the data values, you have to call member functions.
	// think carefully before changing this.
	// GetKey: Returns the requested key (if found) from the requested
	// Section. Returns NULL otherwise.
	t_Key* GetKey(t_Str szKey, t_Str szSection);
	// GetSection: Returns the requested section (if found), NULL otherwise.
	t_Section* GetSection(t_Str szSection);
// Data
public:
	long m_Flags;       // Our settings flags (AUTOCREATE_* bits).
protected:
	SectionList m_Sections; // Our list of sections
	t_Str m_szFileName;     // The filename to write to
	bool m_bDirty;          // Tracks whether or not data has changed.
};
| 3,136 |
660 | package quick.pager.shop.constants;
/**
 * Redis cache key constants for the platform module.
 * Currently empty; keys are expected to be added as needed.
 *
 * @author siguiyang
 */
public interface PlatformRedisKeys {
}
| 48 |
809 | <reponame>nikitavlaev/embox<filename>src/cmds/shell/diag_shell/console/screen.c<gh_stars>100-1000
/**
* @file
*
* @date 28.02.09
* @author <NAME>
*/
#include "screen.h"
#include "cmdline.h"
#include <stddef.h>
/**
 * Initialize a screen object with the given I/O backend.
 *
 * Delegates terminal setup to terminal_init() and marks the screen
 * as not running.
 *
 * @param scr screen to initialize (may be NULL)
 * @param io  I/O backend (may be NULL)
 * @return scr on success, NULL if an argument is NULL or terminal
 *         initialization fails
 */
SCREEN * screen_init(SCREEN *scr, SCREEN_IO *io) {
	if ((NULL == scr) || (NULL == io)) {
		return NULL;
	}
	if (NULL == terminal_init(scr->terminal, io)) {
		return NULL;
	}
	scr->running = false;
	return scr;
}
| 187 |
2,023 | <filename>recipes/Python/525484_Convenience_class_algorithm/recipe-525484.py<gh_stars>1000+
"""classes for timing during development
by <NAME>, 070722
"""
from time import clock, time
#--- timer decorator -----------------------------------
class Timer(object):
    """Decorator that records the wall-clock duration of every call.

    Each invocation's elapsed time (in seconds) is appended to
    ``self.times``; ``report()`` summarizes them in milliseconds and
    ``reset()`` clears them.
    """
    def __init__(self, f):
        self.__f = f
        # Preserve the wrapped function's identity; report() relies on __doc__.
        self.__name__ = f.__name__
        self.__doc__ = f.__doc__
        self.times = []

    def __call__(self, *args, **kwargs):
        start = time()
        result = self.__f(*args, **kwargs)
        stop = time()
        self.times.append(stop - start)
        # Bug fix: the original discarded the wrapped function's result,
        # making every decorated function return None.
        return result

    def report(self):
        """Return a one-line min/mean/max summary (milliseconds)."""
        results = {'min': min(self.times) * 1000,
                   'max': max(self.times) * 1000,
                   'mean': sum(self.times) / len(self.times) * 1000}
        return self.__doc__.ljust(70) + "\t%(mean)6.5fms [%(min)6.5f - %(max)6.5f]" % results

    def reset(self):
        """Forget all recorded timings."""
        self.times = []
#--- scenarios -------------------------------------------
class Scenario(object):
    # Runs each registered Timer-decorated function against shared data and
    # prints a timing report per scenario.
    # NOTE: Python 2 syntax (print statement, xrange) — kept as-is.
    def __init__(self, **kw):
        """
        pass each scenario as a kw={'desc':"", data:(), functions:()}
        for each scenario each function is applied to the data and results are printed
        """
        self.scenarios=kw
    def run(self, nr_iter):
        # For every scenario: apply each timed function nr_iter times to the
        # scenario's data, print its timing report, then clear its timings so
        # scenarios don't contaminate each other.
        for title, scenario in self.scenarios.items():
            print 50*'-' + "\n" + title.upper()
            print scenario['desc']
            for f in scenario['functions']:
                for x in xrange(nr_iter):
                    f(*scenario['data'])
                print "\t" + f.report()
                f.reset()
#--- example: validators------------------------------------------
# validator takes an iterable object and checks if each element
# in the object is part of a defined alphabet
@Timer
def validate1(s, a):
    """validate1(s, a): list comprehension with a.index"""
    # a.index() raises ValueError for elements outside the alphabet.
    # Catching only ValueError (instead of the original bare `except:`)
    # avoids swallowing unrelated errors such as KeyboardInterrupt.
    try:
        [a.index(x) for x in s]
        return True
    except ValueError:
        return False
@Timer
def validate2(s,a):
    """validate2(s, a): for loop with a.index"""
    # Probe each element with a.index(); the lookup raising means the
    # element is outside the alphabet.
    # NOTE(review): bare `except:` also swallows KeyboardInterrupt etc.
    for l in s:
        try:
            a.index(l)
        except:
            return False
    return True
@Timer
def validate3(s,a):
    """validate3(s, a): list comprehension with (l in a)"""
    # min() of booleans is False iff any element is missing.
    # NOTE(review): raises ValueError on an empty s (min of empty list).
    return min([(l in a) for l in s])
@Timer
def validate4(s,a):
    """validate4(s, a): for loop with generator and (l in a)"""
    # Short-circuits on the first element not in the alphabet.
    for x in ((l in a) for l in s):
        if not x:
            return False
    return True
@Timer
def validate5(s,a):
    """validate5(s,a): convert s to set and compare to set alphabet"""
    # Set containment: every distinct element of s must appear in a.
    return set(s).issubset(a)
#--- main-------------------------------------------------
if __name__=='__main__':
    # NOTE(review): `replications` is defined but the run() call below
    # hard-codes 1000 instead of using it.
    replications=1000
    # s_false contains "==" in the middle so validation must fail;
    # s_true contains only alphabet characters.
    s_false="GATCTTGACGATGCGATGCGATG"*40 + "==" + "GATCTTGACGATGCGATGCGATG"*40
    s_true=80*"GATCTTGACGATGCGATGCGATG" + "GA"
    # Nucleotide-style alphabet shared by all scenarios.
    a=['G','A','T','C','R','W','B','C','M','T','Q','X','.','-','~','U']
    testValidator = Scenario(scene_1={'desc':"Validator evaluates to true; alphabet is a list",
                                      'data':(s_true, a),
                                      'functions':(validate1, validate2, validate3, validate4, validate5)},
                             scene_2= {'desc':"Validator evaluates to false; alphabet is a list",
                                       'data':(s_false, a),
                                       'functions':(validate1, validate2, validate3, validate4, validate5)},
                             scene_3={'desc':"Validator evaluates to true; alphabet is a tuple",
                                      'data':(s_true, tuple(a)),
                                      'functions':( validate3, validate4, validate5)})
    testValidator.run(1000)
-----------------------------
| 1,734 |
1,781 | package org.nico.ratel.landlords.helper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.nico.ratel.landlords.entity.Poker;
import org.nico.ratel.landlords.entity.PokerSell;
import org.nico.ratel.landlords.enums.PokerLevel;
import org.nico.ratel.landlords.enums.PokerType;
import org.nico.ratel.landlords.enums.SellType;
import org.nico.ratel.landlords.utils.ListUtils;
public class PokerHelper {
	/**
	 * Print the type of poker style
	 * (0 = sharp-cornered boxes, 1 = rounded boxes, 2 = text with suits,
	 * 3 = text without suits; other values fall back to sharp boxes).
	 */
	public static int pokerPrinterType = 0;
	// NOTE(review): printPoker() only handles styles 0-3; confirm whether a
	// fifth style was intended.
	public static int totalPrinters = 5;
	/**
	 * The list of all pokers, by 54
	 */
	private static final List<Poker> basePokers = new ArrayList<>(54);
	// Orders cards by ascending level (rank); suits are ignored.
	private static final Comparator<Poker> pokerComparator = (o1, o2) -> o1.getLevel().getLevel() - o2.getLevel().getLevel();
	// Build the full 54-card deck once: every ordinary level in each real
	// suit, plus the two jokers which exist as single suitless (BLANK) cards.
	static {
		PokerLevel[] pokerLevels = PokerLevel.values();
		PokerType[] pokerTypes = PokerType.values();
		for (PokerLevel level : pokerLevels) {
			if (level == PokerLevel.LEVEL_BIG_KING) {
				basePokers.add(new Poker(level, PokerType.BLANK));
				continue;
			}
			if (level == PokerLevel.LEVEL_SMALL_KING) {
				basePokers.add(new Poker(level, PokerType.BLANK));
				continue;
			}
			// One card per non-BLANK suit for this level.
			for (PokerType type : pokerTypes) {
				if (type == PokerType.BLANK) {
					continue;
				}
				basePokers.add(new Poker(level, type));
			}
		}
	}
public static void sortPoker(List<Poker> pokers) {
pokers.sort(pokerComparator);
}
public static List<Poker> clonePokers(List<Poker> pokers){
List<Poker> newPokers = new ArrayList<Poker>(pokers.size());
for(Poker poker: pokers) {
newPokers.add(new Poker(poker.getLevel(), poker.getType()));
}
return newPokers;
}
	/**
	 * Compute every combination in {@code pokers} that may legally follow
	 * {@code lastPokerSell}.
	 *
	 * @param lastPokerSell the previous play, or null when leading (then every
	 *                      parsed sell is legal)
	 * @param pokers        the hand to search
	 * @return legal follow-ups: same-type sells with a strictly higher score
	 *         and the same card count, every king bomb, and — unless a bomb
	 *         was led — every ordinary bomb
	 */
	public static List<PokerSell> validSells(PokerSell lastPokerSell, List<Poker> pokers) {
		List<PokerSell> sells = PokerHelper.parsePokerSells(pokers);
		if(lastPokerSell == null) {
			return sells;
		}
		List<PokerSell> validSells = new ArrayList<PokerSell>();
		for(PokerSell sell: sells) {
			if(sell.getSellType() == lastPokerSell.getSellType()) {
				// Same category: must outrank and use the same number of cards.
				if(sell.getScore() > lastPokerSell.getScore() && sell.getSellPokers().size() == lastPokerSell.getSellPokers().size()) {
					validSells.add(sell);
				}
			}
			// The king bomb beats everything.
			if(sell.getSellType() == SellType.KING_BOMB) {
				validSells.add(sell);
			}
		}
		// Ordinary bombs beat any non-bomb lead (higher bombs over a bomb
		// lead were already added by the same-type branch above).
		if(lastPokerSell.getSellType() != SellType.BOMB) {
			for(PokerSell sell: sells) {
				if(sell.getSellType() == SellType.BOMB) {
					validSells.add(sell);
				}
			}
		}
		return validSells;
	}
	/**
	 * Map the characters a player typed to 1-based positions in the hand.
	 * Each option character consumes one matching card, so duplicated
	 * characters resolve to distinct positions.
	 *
	 * @param options typed card aliases
	 * @param pokers  the player's hand (not modified)
	 * @return sorted 1-based indexes, or null if any character matches no
	 *         remaining card
	 */
	public static int[] getIndexes(Character[] options, List<Poker> pokers) {
		// Work on a copy so consumed cards can be nulled out.
		List<Poker> copyList = new ArrayList<>(pokers.size());
		copyList.addAll(pokers);
		int[] indexes = new int[options.length];
		for (int index = 0; index < options.length; index++) {
			char option = options[index];
			boolean isTarget = false;
			for (int pi = 0; pi < copyList.size(); pi++) {
				Poker poker = copyList.get(pi);
				if (poker == null) {
					// Already consumed by an earlier option character.
					continue;
				}
				if (Arrays.asList(poker.getLevel().getAlias()).contains(option)) {
					isTarget = true;
					//Index start from 1, not 0
					indexes[index] = pi + 1;
					copyList.set(pi, null);
					break;
				}
			}
			if (!isTarget) {
				return null;
			}
		}
		Arrays.sort(indexes);
		return indexes;
	}

	/**
	 * Validate that every index is within [1, pokers.size()].
	 *
	 * @return true only for a non-empty index array whose entries are all in range
	 */
	public static boolean checkPokerIndex(int[] indexes, List<Poker> pokers) {
		if (indexes == null || indexes.length == 0) {
			return false;
		}
		for (int index : indexes) {
			if (index > pokers.size() || index < 1) {
				return false;
			}
		}
		return true;
	}
	/**
	 * Classify a set of cards as one of the playable combination types.
	 * Sorts {@code pokers} as a side effect, histograms the card levels,
	 * and pattern-matches the counts against each SellType.
	 *
	 * @param pokers the cards played (may be null/empty)
	 * @return the recognized PokerSell, or a sell with SellType.ILLEGAL when
	 *         the cards form no valid combination
	 */
	public static PokerSell checkPokerType(List<Poker> pokers) {
		if (pokers == null || pokers.isEmpty()) {
			return new PokerSell(SellType.ILLEGAL, null, -1);
		}
		sortPoker(pokers);
		// levelTable[level] = how many cards of that level were played.
		int[] levelTable = new int[20];
		for (Poker poker : pokers) {
			levelTable[poker.getLevel().getLevel()]++;
		}
		int startIndex = -1;       // lowest level present
		int endIndex = -1;         // highest level present
		int count = 0;             // number of distinct levels
		int singleCount = 0;       // levels appearing exactly once
		int doubleCount = 0;       // levels appearing exactly twice
		int threeCount = 0;        // levels appearing exactly three times
		int threeStartIndex = -1;
		int threeEndIndex = -1;
		int fourCount = 0;         // levels appearing exactly four times
		int fourStartIndex = -1;
		int fourEndIndex = -1;
		for (int index = 0; index < levelTable.length; index++) {
			int value = levelTable[index];
			if (value == 0) {
				continue;
			}
			endIndex = index;
			count++;
			if (startIndex == -1) {
				startIndex = index;
			}
			if (value == 1) {
				singleCount++;
			} else if (value == 2) {
				doubleCount++;
			} else if (value == 3) {
				if (threeStartIndex == -1) {
					threeStartIndex = index;
				}
				threeEndIndex = index;
				threeCount++;
			} else if (value == 4) {
				if (fourStartIndex == -1) {
					fourStartIndex = index;
				}
				fourEndIndex = index;
				fourCount++;
			}
		}
		// Exactly four of one level and nothing else: ordinary bomb.
		if (singleCount == doubleCount && singleCount == threeCount && singleCount == 0 && fourCount == 1) {
			return new PokerSell(SellType.BOMB, pokers, startIndex);
		}
		// Both jokers and nothing else: king bomb.
		if (singleCount == 2 && startIndex == PokerLevel.LEVEL_SMALL_KING.getLevel() && endIndex == PokerLevel.LEVEL_BIG_KING.getLevel()) {
			return new PokerSell(SellType.KING_BOMB, pokers, PokerLevel.LEVEL_SMALL_KING.getLevel());
		}
		// A single distinct level: single, pair, or triple.
		if (startIndex == endIndex) {
			if (levelTable[startIndex] == 1) {
				return new PokerSell(SellType.SINGLE, pokers, startIndex);
			} else if (levelTable[startIndex] == 2) {
				return new PokerSell(SellType.DOUBLE, pokers, startIndex);
			} else if (levelTable[startIndex] == 3) {
				return new PokerSell(SellType.THREE, pokers, startIndex);
			}
		}
		// Consecutive levels below 2: straights of singles/pairs/triples/quads.
		if (endIndex - startIndex == count - 1 && endIndex < PokerLevel.LEVEL_2.getLevel()) {
			if (levelTable[startIndex] == 1 && singleCount > 4 && doubleCount + threeCount + fourCount == 0) {
				return new PokerSell(SellType.SINGLE_STRAIGHT, pokers, endIndex);
			} else if (levelTable[startIndex] == 2 && doubleCount > 2 && singleCount + threeCount + fourCount == 0) {
				return new PokerSell(SellType.DOUBLE_STRAIGHT, pokers, endIndex);
			} else if (levelTable[startIndex] == 3 && threeCount > 1 && doubleCount + singleCount + fourCount == 0) {
				return new PokerSell(SellType.THREE_STRAIGHT, pokers, endIndex);
			} else if (levelTable[startIndex] == 4 && fourCount > 1 && doubleCount + threeCount + singleCount == 0) {
				return new PokerSell(SellType.FOUR_STRAIGHT, pokers, endIndex);
			}
		}
		// Triples with attached singles/pairs (and their straight variants).
		if (threeCount != 0) {
			if (singleCount != 0 && singleCount == threeCount && doubleCount == 0 && fourCount == 0) {
				if (threeCount == 1) {
					return new PokerSell(SellType.THREE_ZONES_SINGLE, pokers, threeEndIndex);
				}
				if (threeEndIndex - threeStartIndex + 1 == threeCount && threeEndIndex < PokerLevel.LEVEL_2.getLevel()) {
					return new PokerSell(SellType.THREE_STRAIGHT_WITH_SINGLE, pokers, threeEndIndex);
				}
			} else if (doubleCount != 0 && doubleCount == threeCount && singleCount == 0 && fourCount == 0) {
				if (threeCount == 1) {
					return new PokerSell(SellType.THREE_ZONES_DOUBLE, pokers, threeEndIndex);
				}
				if (threeEndIndex - threeStartIndex + 1 == threeCount && threeEndIndex < PokerLevel.LEVEL_2.getLevel()) {
					return new PokerSell(SellType.THREE_STRAIGHT_WITH_DOUBLE, pokers, threeEndIndex);
				}
			} else if (singleCount + doubleCount * 2 == threeCount && fourCount == 0) {
				// Mixed attachments counted as singles.
				return new PokerSell(SellType.THREE_STRAIGHT_WITH_SINGLE, pokers, threeEndIndex);
			}
		}
		// Quads with attached singles/pairs (and their straight variants).
		if (fourCount != 0) {
			if (singleCount != 0 && singleCount == fourCount * 2 && doubleCount == 0 && threeCount == 0) {
				if (fourCount == 1) {
					return new PokerSell(SellType.FOUR_ZONES_SINGLE, pokers, fourEndIndex);
				}
				if (fourEndIndex - fourStartIndex + 1 == fourCount && fourEndIndex < PokerLevel.LEVEL_2.getLevel()) {
					return new PokerSell(SellType.FOUR_STRAIGHT_WITH_SINGLE, pokers, fourEndIndex);
				}
			} else if (doubleCount != 0 && doubleCount == fourCount * 2 && singleCount == 0 && threeCount == 0) {
				if (fourCount == 1) {
					return new PokerSell(SellType.FOUR_ZONES_DOUBLE, pokers, fourEndIndex);
				}
				if (fourEndIndex - fourStartIndex + 1 == fourCount && fourEndIndex < PokerLevel.LEVEL_2.getLevel()) {
					return new PokerSell(SellType.FOUR_STRAIGHT_WITH_DOUBLE, pokers, fourEndIndex);
				}
			}
		}
		return new PokerSell(SellType.ILLEGAL, null, -1);
	}
public static int parseScore(SellType sellType, int level) {
if (sellType == SellType.BOMB) {
return level * 4 + 999;
} else if (sellType == SellType.KING_BOMB) {
return Integer.MAX_VALUE;
} else if (sellType == SellType.SINGLE || sellType == SellType.DOUBLE || sellType == SellType.THREE) {
return level;
} else if (sellType == SellType.SINGLE_STRAIGHT || sellType == SellType.DOUBLE_STRAIGHT || sellType == SellType.THREE_STRAIGHT || sellType == SellType.FOUR_STRAIGHT) {
return level;
} else if (sellType == SellType.THREE_ZONES_SINGLE || sellType == SellType.THREE_STRAIGHT_WITH_SINGLE || sellType == SellType.THREE_ZONES_DOUBLE || sellType == SellType.THREE_STRAIGHT_WITH_DOUBLE) {
return level;
} else if (sellType == SellType.FOUR_ZONES_SINGLE || sellType == SellType.FOUR_STRAIGHT_WITH_SINGLE || sellType == SellType.FOUR_ZONES_DOUBLE || sellType == SellType.FOUR_STRAIGHT_WITH_DOUBLE) {
return level;
}
return -1;
}
	/**
	 * Collect the cards at the given 1-based indexes, sorted by level.
	 */
	public static List<Poker> getPoker(int[] indexes, List<Poker> pokers) {
		List<Poker> resultPokers = new ArrayList<>(indexes.length);
		for (int index : indexes) {
			resultPokers.add(pokers.get(index - 1));
		}
		sortPoker(resultPokers);
		return resultPokers;
	}

	// NOTE(review): stub — always reports the current play as valid against
	// the previous one; confirm whether real comparison logic is pending.
	public static boolean comparePoker(List<Poker> pres, List<Poker> currents) {
		return true;
	}
public static List<List<Poker>> distributePoker() {
Collections.shuffle(basePokers);
List<List<Poker>> pokersList = new ArrayList<List<Poker>>();
List<Poker> pokers1 = new ArrayList<>(17);
pokers1.addAll(basePokers.subList(0, 17));
List<Poker> pokers2 = new ArrayList<>(17);
pokers2.addAll(basePokers.subList(17, 34));
List<Poker> pokers3 = new ArrayList<>(17);
pokers3.addAll(basePokers.subList(34, 51));
List<Poker> pokers4 = new ArrayList<>(3);
pokers4.addAll(basePokers.subList(51, 54));
pokersList.add(pokers1);
pokersList.add(pokers2);
pokersList.add(pokers3);
pokersList.add(pokers4);
for (List<Poker> pokers : pokersList) {
sortPoker(pokers);
}
return pokersList;
}
	/**
	 * Render a hand using the style selected by {@link #pokerPrinterType}.
	 * Sorts the hand in place as a side effect.
	 */
	public static String printPoker(List<Poker> pokers) {
		sortPoker(pokers);
		switch (pokerPrinterType) {
		case 0:
			return buildHandStringSharp(pokers);
		case 1:
			return buildHandStringRounded(pokers);
		case 2:
			return textOnly(pokers);
		case 3:
			return textOnlyNoType(pokers);
		default:
			// NOTE(review): totalPrinters claims 5 styles but only 0-3 exist;
			// any other value falls back to the sharp-box style.
			return buildHandStringSharp(pokers);
		}
	}
	/**
	 * Render the hand as ASCII-art cards with sharp corners. Each card shows
	 * its level name on the first row and its suit symbol on the second.
	 */
	private static String buildHandStringSharp(List<Poker> pokers) {
		StringBuilder builder = new StringBuilder();
		if (pokers != null && pokers.size() > 0) {
			// Top border.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("┌──┐");
				} else {
					builder.append("──┐");
				}
			}
			builder.append(System.lineSeparator());
			// Level row; pad one-character names to width 2.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("│");
				}
				String name = pokers.get(index).getLevel().getName();
				builder.append(name).append(name.length() == 1 ? " " : "").append("|");
			}
			builder.append(System.lineSeparator());
			// Suit row.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("│");
				}
				builder.append(pokers.get(index).getType().getName()).append(" |");
			}
			builder.append(System.lineSeparator());
			// Bottom border.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("└──┘");
				} else {
					builder.append("──┘");
				}
			}
		}
		return builder.toString();
	}

	/**
	 * Render the hand as ASCII-art cards with rounded corners; layout is
	 * identical to {@link #buildHandStringSharp(List)} except the corners.
	 */
	private static String buildHandStringRounded(List<Poker> pokers) {
		StringBuilder builder = new StringBuilder();
		if (pokers != null && pokers.size() > 0) {
			// Top border.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("┌──╮");
				} else {
					builder.append("──╮");
				}
			}
			builder.append(System.lineSeparator());
			// Level row; pad one-character names to width 2.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("│");
				}
				String name = pokers.get(index).getLevel().getName();
				builder.append(name).append(name.length() == 1 ? " " : "").append("|");
			}
			builder.append(System.lineSeparator());
			// Suit row.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("│");
				}
				builder.append(pokers.get(index).getType().getName()).append(" |");
			}
			builder.append(System.lineSeparator());
			// Bottom border.
			for (int index = 0; index < pokers.size(); index++) {
				if (index == 0) {
					builder.append("└──╯");
				} else {
					builder.append("──╯");
				}
			}
		}
		return builder.toString();
	}
private static String textOnly(List<Poker> pokers) {
StringBuilder builder = new StringBuilder();
if (pokers != null && pokers.size() > 0) {
for (Poker poker : pokers) {
String name = poker.getLevel().getName();
String type = poker.getType().getName();
builder.append(name).append(type);
}
}
return builder.toString();
}
public static String textOnlyNoType(List<Poker> pokers) {
StringBuilder builder = new StringBuilder();
if (pokers != null && pokers.size() > 0) {
for (Poker poker : pokers) {
String name = poker.getLevel().getName();
builder.append(name).append(" ");
}
}
return builder.toString();
}
	/**
	 * Heuristic strength score for a whole hand: rewards repeated levels,
	 * long runs (length > 4), and high cards (2s and jokers).
	 * Used for hand evaluation; higher means stronger.
	 */
	public static int parsePokerColligationScore(List<Poker> pokers) {
		int score = 0;
		int count = 0;      // run length of the current repeated level
		int increase = 0;   // length of the current consecutive-level run
		int lastLevel = -1;
		if (pokers != null && !pokers.isEmpty()) {
			for (int index = 0; index < pokers.size(); index++) {
				int level = pokers.get(index).getLevel().getLevel();
				if (lastLevel == -1) {
					// First card of the hand.
					increase++;
					count++;
					// NOTE(review): this adds lastLevel (which is -1 here)
					// rather than `level` — looks like an off-by-one in the
					// heuristic; confirm intent before changing.
					score += lastLevel;
				} else {
					if (level == lastLevel) {
						++count;
					} else {
						count = 1;
					}
					if (level < PokerLevel.LEVEL_2.getLevel() && level - 1 == lastLevel) {
						++increase;
					} else {
						increase = 1;
					}
					// Repeats always count; runs only once longer than 4.
					score += (count + (increase > 4 ? increase : 0)) * level;
				}
				// Bonus weight for 2s and an even larger one for jokers.
				if (level == PokerLevel.LEVEL_2.getLevel()) {
					score += level * 2;
				} else if (level > PokerLevel.LEVEL_2.getLevel()) {
					score += level * 3;
				}
				lastLevel = level;
			}
		}
		return score;
	}
	/**
	 * Enumerate every playable combination contained in a (sorted) hand:
	 * singles/pairs/triples/bombs per level, straights of each width,
	 * combinations with attached cards, and the king bomb.
	 *
	 * @param pokers the hand, expected sorted by level
	 * @return all candidate sells (may contain overlapping card sets)
	 */
	public static List<PokerSell> parsePokerSells(List<Poker> pokers) {
		List<PokerSell> pokerSells = new ArrayList<>();
		int size = pokers.size();
		//all single or double
		{
			// Walk the sorted hand; `count` tracks how many consecutive cards
			// share the current level, emitting SINGLE/DOUBLE/THREE/BOMB as
			// the run grows.
			int count = 0;
			int lastLevel = -1;
			List<Poker> sellPokers = new ArrayList<>(4);
			for (Poker poker : pokers) {
				int level = poker.getLevel().getLevel();
				if (lastLevel == -1) {
					++count;
				} else {
					if (level == lastLevel) {
						++count;
					} else {
						count = 1;
						sellPokers.clear();
					}
				}
				sellPokers.add(poker);
				if (count == 1) {
					pokerSells.add(new PokerSell(SellType.SINGLE, ListUtils.getList(sellPokers), poker.getLevel().getLevel()));
				} else if (count == 2) {
					pokerSells.add(new PokerSell(SellType.DOUBLE, ListUtils.getList(sellPokers), poker.getLevel().getLevel()));
				} else if (count == 3) {
					pokerSells.add(new PokerSell(SellType.THREE, ListUtils.getList(sellPokers), poker.getLevel().getLevel()));
				} else if (count == 4) {
					pokerSells.add(new PokerSell(SellType.BOMB, ListUtils.getList(sellPokers), poker.getLevel().getLevel()));
				}
				lastLevel = level;
			}
		}
		//Shunzi (straights of singles, pairs, triples and quads)
		{
			parsePokerSellStraight(pokerSells, SellType.SINGLE);
			parsePokerSellStraight(pokerSells, SellType.DOUBLE);
			parsePokerSellStraight(pokerSells, SellType.THREE);
			parsePokerSellStraight(pokerSells, SellType.BOMB);
		}
		//Shunzi with args (attach singles/pairs to triples, quads and straights)
		{
			for (int index = 0; index < pokerSells.size(); index++) {
				PokerSell sell = pokerSells.get(index);
				if (sell.getSellType() == SellType.THREE) {
					parseArgs(pokerSells, sell, 1, SellType.SINGLE, SellType.THREE_ZONES_SINGLE);
					parseArgs(pokerSells, sell, 1, SellType.DOUBLE, SellType.THREE_ZONES_DOUBLE);
				} else if (sell.getSellType() == SellType.BOMB) {
					parseArgs(pokerSells, sell, 2, SellType.SINGLE, SellType.FOUR_ZONES_SINGLE);
					parseArgs(pokerSells, sell, 2, SellType.DOUBLE, SellType.FOUR_ZONES_DOUBLE);
				} else if (sell.getSellType() == SellType.THREE_STRAIGHT) {
					// One attachment per triple in the straight.
					int count = sell.getSellPokers().size() / 3;
					parseArgs(pokerSells, sell, count, SellType.SINGLE, SellType.THREE_STRAIGHT_WITH_SINGLE);
					parseArgs(pokerSells, sell, count, SellType.DOUBLE, SellType.THREE_STRAIGHT_WITH_DOUBLE);
				} else if (sell.getSellType() == SellType.FOUR_STRAIGHT) {
					// Two attachments per quad in the straight.
					int count = (sell.getSellPokers().size() / 4) * 2;
					parseArgs(pokerSells, sell, count, SellType.SINGLE, SellType.FOUR_STRAIGHT_WITH_SINGLE);
					parseArgs(pokerSells, sell, count, SellType.DOUBLE, SellType.FOUR_STRAIGHT_WITH_DOUBLE);
				}
			}
		}
		//king boom: both jokers sit at the end of a sorted hand
		{
			if (size > 1) {
				if (pokers.get(size - 1).getLevel() == PokerLevel.LEVEL_BIG_KING && pokers.get(size - 2).getLevel() == PokerLevel.LEVEL_SMALL_KING) {
					pokerSells.add(new PokerSell(SellType.KING_BOMB, ListUtils.getList(new Poker[]{pokers.get(size - 2), pokers.get(size - 1)}), PokerLevel.LEVEL_BIG_KING.getLevel()));
				}
			}
		}
		return pokerSells;
	}
	/**
	 * Entry point for attachment expansion: seed the recursion with the
	 * levels already used by {@code pokerSell} so attachments never reuse
	 * the core cards' levels.
	 */
	private static void parseArgs(List<PokerSell> pokerSells, PokerSell pokerSell, int deep, SellType sellType, SellType targetSellType) {
		Set<Integer> existLevelSet = new HashSet<>();
		for (Poker p : pokerSell.getSellPokers()) {
			existLevelSet.add(p.getLevel().getLevel());
		}
		parseArgs(existLevelSet, pokerSells, new HashSet<>(), pokerSell, deep, sellType, targetSellType);
	}

	/**
	 * Recursive backtracking: pick {@code deep} distinct-level sells of type
	 * {@code sellType} as attachments, then emit a combined sell of type
	 * {@code targetSellType} keeping the core sell's level as its score basis.
	 *
	 * @param existLevelSet levels already consumed (core + chosen attachments)
	 * @param pokersList    attachment card groups chosen so far
	 */
	private static void parseArgs(Set<Integer> existLevelSet, List<PokerSell> pokerSells, Set<List<Poker>> pokersList, PokerSell pokerSell, int deep, SellType sellType, SellType targetSellType) {
		if (deep == 0) {
			// Enough attachments collected — materialize the combined sell.
			List<Poker> allPokers = new ArrayList<>(pokerSell.getSellPokers());
			for (List<Poker> ps : pokersList) {
				allPokers.addAll(ps);
			}
			pokerSells.add(new PokerSell(targetSellType, allPokers, pokerSell.getCoreLevel()));
			return;
		}
		for (int index = 0; index < pokerSells.size(); index++) {
			PokerSell subSell = pokerSells.get(index);
			if (subSell.getSellType() == sellType && !existLevelSet.contains(subSell.getCoreLevel())) {
				// Choose, recurse, then un-choose (classic backtracking).
				pokersList.add(subSell.getSellPokers());
				existLevelSet.add(subSell.getCoreLevel());
				parseArgs(existLevelSet, pokerSells, pokersList, pokerSell, deep - 1, sellType, targetSellType);
				existLevelSet.remove(subSell.getCoreLevel());
				pokersList.remove(subSell.getSellPokers());
			}
		}
	}
private static void parsePokerSellStraight(List<PokerSell> pokerSells, SellType sellType) {
    // Straight parameters depend on the unit being chained:
    // singles need a run of 5+, pairs 3+, triples 2+ and bombs 2+.
    int minLength = -1;
    int width = -1;
    SellType targetSellType = null;
    if (sellType == SellType.SINGLE) {
        minLength = 5;
        width = 1;
        targetSellType = SellType.SINGLE_STRAIGHT;
    } else if (sellType == SellType.DOUBLE) {
        minLength = 3;
        width = 2;
        targetSellType = SellType.DOUBLE_STRAIGHT;
    } else if (sellType == SellType.THREE) {
        minLength = 2;
        width = 3;
        targetSellType = SellType.THREE_STRAIGHT;
    } else if (sellType == SellType.BOMB) {
        minLength = 2;
        width = 4;
        targetSellType = SellType.FOUR_STRAIGHT;
    }
    // Scan the units in level order, growing a run while the levels stay
    // consecutive; a 2 can never extend a straight.
    int runLength = 0;
    int prevLevel = -1;
    List<Poker> runPokers = new ArrayList<>(4);
    // Index-based loop on purpose: addPokers() appends straights to
    // pokerSells while we iterate; the appended sells have targetSellType
    // and are skipped by the type check below.
    for (int i = 0; i < pokerSells.size(); i++) {
        PokerSell sell = pokerSells.get(i);
        if (sell.getSellType() != sellType) {
            continue;
        }
        int level = sell.getCoreLevel();
        if (prevLevel == -1) {
            ++runLength;
        } else if (level - 1 == prevLevel && level != PokerLevel.LEVEL_2.getLevel()) {
            ++runLength;
        } else {
            // Run broken: emit every straight hidden in it, then restart.
            addPokers(pokerSells, minLength, width, targetSellType, runLength, runPokers);
            runLength = 1;
        }
        runPokers.addAll(sell.getSellPokers());
        prevLevel = level;
    }
    // Flush the final run.
    addPokers(pokerSells, minLength, width, targetSellType, runLength, runPokers);
}
/**
 * Emits every straight of every admissible length contained in the given run
 * of consecutive units, then clears the run accumulator for the next run.
 * (Parameter renamed from the original's misspelled {@code minLenght}.)
 */
private static void addPokers(List<PokerSell> pokerSells, int minLength, int width, SellType targetSellType, int runLength, List<Poker> runPokers) {
    if (runLength >= minLength) {
        // Enumerate each length from the minimum up to the whole run ...
        for (int extra = 0; extra <= runLength - minLength; extra++) {
            int len = minLength + extra;
            // ... and each starting position that still fits.
            for (int start = 0; start <= runLength - len; start++) {
                List<Poker> pokers = ListUtils.getList(runPokers.subList(start * width, (start + len) * width));
                // The core level of a straight is the level of its highest card.
                pokerSells.add(new PokerSell(targetSellType, pokers, pokers.get(pokers.size() - 1).getLevel().getLevel()));
            }
        }
    }
    runPokers.clear();
}
}
| 8,652 |
1,041 | package org.tests.o2m.lazy;
import javax.persistence.*;
import java.util.ArrayList;
import java.util.List;
@Entity
@Table(name = "oml_bar")
public class OmlBar {

  /** Primary key (assigned externally; no generation strategy declared). */
  @Id
  private Long id;

  /** Child foos; cascaded so persisting a bar also persists its foos. */
  @OneToMany(mappedBy = "bar", cascade = CascadeType.ALL)
  private List<OmlFoo> fooList = new ArrayList<>();

  public Long getId() {
    return id;
  }

  public void setId(Long id) {
    this.id = id;
  }

  public List<OmlFoo> getFooList() {
    return fooList;
  }

  public void setFooList(List<OmlFoo> fooList) {
    this.fooList = fooList;
  }
}
| 225 |
1,144 | /*
* #%L
* de.metas.async
* %%
* Copyright (C) 2021 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
package de.metas.async.event;
import de.metas.async.QueueWorkPackageId;
import de.metas.util.JSONObjectMapper;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import java.util.UUID;
import static de.metas.async.event.WorkpackageProcessedEvent.Status.DONE;
class WorkpackageProcessedEventTest
{
@Test
void serializeDeserialize()
{
final WorkpackageProcessedEvent event = WorkpackageProcessedEvent.builder()
.status(DONE)
.correlationId(UUID.randomUUID())
.workPackageId(QueueWorkPackageId.ofRepoId(23)).build();
final JSONObjectMapper<WorkpackageProcessedEvent> objectMapper = JSONObjectMapper.forClass(WorkpackageProcessedEvent.class);
final String string = objectMapper.writeValueAsString(event);
final WorkpackageProcessedEvent event1 = objectMapper.readValue(string);
Assertions.assertThat(event1).isEqualTo(event);
}
} | 513 |
3,227 | <reponame>ffteja/cgal
namespace CGAL {

/*!
\ingroup PkgTriangulation3TraitsClasses

\deprecated The class is deprecated since \cgal 4.10, as the weighted point and the function
objects for weighted points are part of the concept `Kernel`. The class is kept for backward
compatibility, but ignores the template parameter `Weight`. New code should use the
kernel `K` directly as the traits class.

\tparam K must be a model of the `Kernel` concept.

\tparam Weight This template parameter is ignored, as `Kernel::Weighted_point_3`
uses the type `Kernel::FT`.

\cgalModels `RegularTriangulationTraits_3`

*/
template< typename K, typename Weight >
class Regular_triangulation_euclidean_traits_3 : public K {
public:
// Intentionally empty: every type and functor is inherited from K.

}; /* end Regular_triangulation_euclidean_traits_3 */
} /* end namespace CGAL */
| 244 |
2,023 | #!/usr/bin/env python
# -*- coding: utf8 -*-
__version__ = '$Id: binclock_bcd_curses.py 780 2010-10-19 10:33:34Z mn $'
# binary clock, bcd version
# author: <NAME>
import sys
import time
import curses
def bin_old(n):
    """bin() that works with Python 2.4

    Returns the binary representation of n without the '0b' prefix;
    any value below 1 (zero or negative) yields '0'.
    """
    if n < 1:
        return '0'
    digits = []
    while n:
        n, bit = divmod(n, 2)
        digits.append(str(bit))
    return ''.join(reversed(digits))
def bcd_digit(sn):
    """converts decimal digit char to 4 char binary 0 and 1 representation"""
    value = int(sn)
    try:
        bits = bin(value)[2:]
    except NameError:
        # Python < 2.6 has no built-in bin(); fall back to our own.
        bits = bin_old(value)
    # A decimal digit needs at most four bits; left-pad with zeros.
    return bits.zfill(4)
def add_bcd(n, digits):
    """add n binary digits to digits

    Splits the two-digit decimal rendering of n into one BCD nibble
    per decimal digit and appends both to the digits list.
    """
    two_digits = '%02d' % n
    digits.append(bcd_digit(two_digits[0]))
    digits.append(bcd_digit(two_digits[1]))
def get_stars(digits):
    """changes digits to vertical picture of clock with stars and dots

    Each input string becomes one column of the picture; bit 0 renders
    as '.' and bit 1 as '*'.
    """
    rows = []
    for row in range(len(digits[0])):
        rows.append(''.join(column[row] for column in digits))
    picture = '\n'.join(rows)
    return picture.replace('0', '.').replace('1', '*')
def main():
    """Draw the BCD clock fullscreen until Ctrl-C, then restore the terminal."""
    try:
        try:
            window = curses.initscr()
            while 1:
                digits = []
                window.clear()
                tm = time.localtime()
                # Two BCD nibbles per component: HH, MM, SS -> six columns.
                add_bcd(tm.tm_hour, digits)
                add_bcd(tm.tm_min, digits)
                add_bcd(tm.tm_sec, digits)
                stars = get_stars(digits)
                line_nr = 0
                for line in stars.split('\n'):
                    # Clamp to six columns in case of unexpected extra digits.
                    window.addstr(line_nr, 0, line[:6])
                    line_nr += 1
                window.refresh()
                time.sleep(0.5)
        except KeyboardInterrupt:
            pass
    finally:
        # reset terminal
        # NOTE(review): if initscr() itself fails, these calls run against an
        # uninitialized screen — presumably acceptable for this small tool.
        curses.nocbreak()
        curses.echo()
        curses.endwin()
# NOTE(review): '--version' short-circuits the __main__ check, so passing the
# flag prints the version and never starts the clock — presumably intentional.
if '--version' in sys.argv:
    print(__version__)
elif __name__ == '__main__':
    main()
| 805 |
14,668 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/one_shot_event.h"
#include "chrome/browser/extensions/extension_browsertest.h"
#include "content/public/test/browser_test.h"
#include "extensions/browser/extension_function_registry.h"
#include "extensions/browser/extension_system.h"
namespace extensions {
using ExtensionFunctionRegistrationTest = ExtensionBrowserTest;
// Test that all functions are registered with unique names, histogram values,
// and factories. This is a browser test (rather than a unit test) to (help)
// ensure that all the optional factories and services are indeed instantiated.
IN_PROC_BROWSER_TEST_F(ExtensionFunctionRegistrationTest,
                       CheckForDuplicateEntries) {
  // Verify the ExtensionSystem is ready (and thus all extension functions
  // registered) before checking.
  base::RunLoop run_loop;
  ExtensionSystem::Get(profile())->ready().Post(FROM_HERE,
                                                run_loop.QuitClosure());
  run_loop.Run();

  const ExtensionFunctionRegistry::FactoryMap& factories =
      ExtensionFunctionRegistry::GetInstance().GetFactoriesForTesting();
  // Sanity check: Many, many functions should have been registered.
  // (500 is an arbitrary lower bound well under the real count, chosen so the
  // check does not need updating as APIs come and go.)
  EXPECT_GT(factories.size(), 500u);

  // Every function must have a unique name and a unique histogram value.
  std::set<std::string> seen_names;
  std::set<functions::HistogramValue> seen_histograms;
  for (const auto& key_value : factories) {
    const ExtensionFunctionRegistry::FactoryEntry& entry = key_value.second;
    SCOPED_TRACE(entry.function_name_);
    EXPECT_TRUE(seen_names.insert(entry.function_name_).second);
    // NOTE: We explicitly don't check the factory here. On certain platforms
    // with enough compiler optimization, the templated factories are re-used
    // for different functions.
    // EXPECT_TRUE(seen_factories.insert(entry.factory_).second);

    // The chrome.test API uses an "unknown" histogram value, but should be the
    // only API that does.
    if (entry.histogram_value_ == functions::UNKNOWN) {
      EXPECT_TRUE(base::StartsWith(entry.function_name_, "test.",
                                   base::CompareCase::SENSITIVE));
    } else {
      EXPECT_TRUE(seen_histograms.insert(entry.histogram_value_).second);
    }
  }
}
} // namespace extensions
| 795 |
678 | <filename>WeChat-Headers/MMAlbum.h
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "MMObject.h"
#import "NSCopying.h"
// NOTE(review): declarations recovered by class-dump; method semantics are
// inferred from names only — verify against the implementation before use.
@interface MMAlbum : MMObject <NSCopying>
{
}

// NSCopying conformance.
- (id)copyWithZone:(struct _NSZone *)arg1;
// Presumably returns the latest asset within the given time window — TODO confirm.
- (id)lastAssetWithSecond:(double)arg1;
// Asynchronous accessors taking completion blocks (names misspell "retrieve").
- (void)retriveCoverImageWithAsyncBlock:(CDUnknownBlockType)arg1;
- (void)retrivePhotoCountWithAsyncBlock:(CDUnknownBlockType)arg1;
// Image-only indexing vs. indexing across all assets.
- (id)imageAssetOfIndex:(unsigned long long)arg1;
- (unsigned long long)imagesCount;
- (unsigned long long)indexInImagesForIndexInAll:(unsigned long long)arg1;
- (void)fetchImageWithNeedDistinguishGif:(_Bool)arg1;
- (void)stopICloudActivity;
- (id)assetOfIndex:(unsigned long long)arg1;
- (void)shouldIncludeVideoAsset:(_Bool)arg1;
// Album metadata.
- (id)coverImage;
- (unsigned long long)photosCount;
- (id)name;
- (id)albumId;
@end
| 341 |
1,495 | <filename>python/lesson02/solution/hello.py<gh_stars>1000+
import sys
import time
from lib.tracing import init_tracer
def say_hello(hello_to):
    """Wrap formatting and printing of the greeting in a 'say-hello' span."""
    with tracer.start_active_span('say-hello') as scope:
        scope.span.set_tag('hello-to', hello_to)
        message = format_string(hello_to)
        print_hello(message)
def format_string(hello_to):
    """Build the greeting inside a 'format' span and log the result."""
    with tracer.start_active_span('format') as scope:
        greeting = 'Hello, %s!' % hello_to
        scope.span.log_kv({'event': 'string-format', 'value': greeting})
        return greeting
def print_hello(hello_str):
    """Print the greeting inside a 'println' span."""
    with tracer.start_active_span('println') as scope:
        print(hello_str)
        scope.span.log_kv({'event': 'println'})
# main
# Usage: hello.py <name> — exactly one argument is required.
assert len(sys.argv) == 2

tracer = init_tracer('hello-world')

hello_to = sys.argv[1]
say_hello(hello_to)

# yield to IOLoop to flush the spans
time.sleep(2)
tracer.close()
348 | {"nom":"Romain-aux-Bois","circ":"4ème circonscription","dpt":"Vosges","inscrits":46,"abs":12,"votants":34,"blancs":1,"nuls":2,"exp":31,"res":[{"nuance":"LR","nom":"<NAME>","voix":21},{"nuance":"SOC","nom":"<NAME>","voix":10}]} | 94 |
348 | {"nom":"<NAME>","circ":"4ème circonscription","dpt":"Loire","inscrits":4802,"abs":3490,"votants":1312,"blancs":132,"nuls":62,"exp":1118,"res":[{"nuance":"REM","nom":"<NAME>","voix":639},{"nuance":"LR","nom":"<NAME>","voix":479}]} | 93 |
5,169 | <reponame>Gantios/Specs<filename>Specs/6/a/c/SCTE35/1.0.5/SCTE35.podspec.json
{
"name": "SCTE35",
"version": "1.0.5",
"summary": "SCTE Library for Swift.",
"description": "Converts hex strings and base64 strings into SCTE 35 Objects per the specifications at https://www.scte.org/SCTEDocs/Standards/SCTE%2035%202016.pdf",
"homepage": "https://realeyes.com",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/realeyes-media/scte35-swift",
"branch": "master",
"tag": "1.0.5"
},
"platforms": {
"ios": "10.0",
"tvos": "10.0"
},
"swift_versions": "5.0",
"requires_arc": true,
"source_files": [
"SCTE35-SwiftLibrary",
"SCTE35-SwiftLibrary/HelperConverter/*.swift",
"SCTE35-SwiftLibrary/**/*.{h,m,swift}",
"SharedResources/**/*.swift"
],
"swift_version": "5.0"
}
| 425 |
1,851 | //
// VROIKRig.h
// ViroRenderer
//
// Copyright © 2018 <NAME>. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#ifndef VROIKRig_h
#define VROIKRig_h
#include <memory>
#include <vector>
#include <map>
#include "VROMatrix4f.h"
#include "VROVector3f.h"
#include "VROQuaternion.h"
class VRONode;
class VROSkeleton;
/*
A joint in the Inverse Kinematic Rig used for performing and storing inverse kinematic
calculations.
*/
struct VROIKJoint {
    // Unique id representing this VROIKJoint
    int id;

    // Transforms representing this joint in world space.
    VROVector3f position;
    VROQuaternion rotation;
    VROVector3f scale;

    // Where solved transforms are written back: the VRONode for node-tree
    // rigs, or the bone index (syncBone) for skeletal rigs.
    std::shared_ptr<VRONode> syncNode;
    int syncBone;

    // True if this joint is a junction upon which multiple IKChains converge.
    // centroidSubLocations collects each converging chain's candidate position.
    bool isCentroidJoint;
    std::vector<VROVector3f> centroidSubLocations;

    // True if this joint is an intermediary effector (an effector that is not
    // a leaf of the joint tree).
    bool isIntermediaryEffector;

    // Pointers to parent and child IKJointNodes
    std::shared_ptr<VROIKJoint> parent;
    std::vector<std::shared_ptr<VROIKJoint>> children;

    // Intermediary joints between this IKJoint and its child that should be
    // 'locked' and left out of the IK calculation; their cached local
    // transforms are reapplied verbatim after each solve.
    std::vector<std::shared_ptr<VROIKJoint>> lockedJoints;
    std::vector<VROMatrix4f> lockedJointLocalTransforms;
};
/*
A chain containing a list of VROIKJoint in sequence, used for inverse kinematic calculations.
*/
struct VROIKChain {
    // The net length of all the bones in this chain.
    float totalLength;

    // A list of all VROIKJoints in this chain. Joints are placed in
    // order from the root to the leaf.
    std::vector<std::shared_ptr<VROIKJoint>> chainJoints;

    // A list of all intermediary bone lengths connecting the joints within this chain.
    std::vector<float> boneLengths;

    // Pointers to parent and child chains.
    std::shared_ptr<VROIKChain> parentChain;
    std::vector<std::shared_ptr<VROIKChain>> childChains;

    // True if an IK calculation pass has been performed on this chain
    // (used to avoid re-processing a chain within a single solve).
    bool processed;
};
/*
The VROIKRig contains a tree of VROIKJoints representing the node or skeletal hierarchy on which
to apply inverse kinematic calculations and constraints driven by end effectors.
*/
class VROIKRig {
public:
    // Constructor used for creating IK Rigs out of VRONode trees.
    VROIKRig(std::shared_ptr<VRONode> root,
             std::map<std::string, std::shared_ptr<VRONode>> endAffectors);

    // Constructor used for creating IK Rigs out of the 3D model's skeleton.
    VROIKRig(std::shared_ptr<VROSkeleton> skeleton,
             std::map<std::string, int> endEffectorBoneIndexMap);
    ~VROIKRig();

    /*
     Sets the position for an effector in world space.
     */
    void setPositionForEffector(std::string effectorId, VROVector3f pos);

    /*
     Called during a render pass to perform a full IK calculation on this rig and to
     sync the results back to the node or skeletal hierarchical tree.
     */
    void processRig();

private:
    /*
     The root IKJoint of this rig
     */
    std::shared_ptr<VROIKJoint> _rootJoint;

    /*
     A vec of all IK Joints in this rig
     */
    std::vector<std::shared_ptr<VROIKJoint>> _allKnownIKJoints;

    /*
     Map of keys to IK joint effectors in this rig (and the reverse lookup)
     */
    std::map<std::string, std::shared_ptr<VROIKJoint>> _keyToEffectorMap;
    std::map<std::shared_ptr<VROIKJoint>, std::string> _effectorTokeyMap;

    /*
     Map representing the desired world positions of IK joint effectors by their keys
     */
    std::map<std::string, VROVector3f> _effectorDesiredPositionMap;

    /*
     True if we need to initialize / re-invalidate the joint structure of this rig
     */
    bool _initializeRig;

    /*
     True if we have already processed this rig with the latest set map of effector positions
     */
    bool _processedNewEffectorPositions;

    /*
     A vec of all root IK chains in this rig
     */
    std::vector<std::shared_ptr<VROIKChain>> _rootChains;

    /*
     A map of VROIKJoint Ids mapped to VRORigChains representing the end
     effectors for this rig.
     */
    std::map<int, std::shared_ptr<VROIKChain>> _endEffectorIdToChains;

    /*
     A map of end effector VROIKJoint Ids and its local rotational transform.
     */
    std::map<int, VROMatrix4f> _endEffectorIdLocalRotation;

    /*
     A vec of all known chains of this rig.
     */
    std::vector<std::shared_ptr<VROIKChain>> _allKnownChains;

    /*
     A reference to the skeleton used to construct this IKRig, if any.
     */
    std::shared_ptr<VROSkeleton> _skeleton;
    VROMatrix4f _modelRootToRootJoint;

    /*
     Initializes / constructs the IKJoint and IKChain structure of this rig.
     */
    void initializeRig();

    /*
     Given the end effectors, construct the rig's IK tree by starting from each end effector
     node/bone and tracing them back towards the root effector node/bone. This will
     also help filter out redundant node subtrees that do not need to be a part of the
     kinematic calculation.
     */
    void createSkeletalRigFromNodeTree(std::shared_ptr<VRONode> currentNode);
    void createSkeletalRigFromSkeletalModel(int boneId);

    /*
     Iterates through the rig to bypass intermediary IKjoints in between joint effectors from
     being computed, thereby effectively "locking" them in place.
     */
    void flagLockedJoints(std::shared_ptr<VROIKJoint> referenceJoint,
                          std::shared_ptr<VROIKJoint> currentJoint);

    /*
     Returns the local transform of the given reference joint.
     */
    VROMatrix4f getJointLocalTransform(std::shared_ptr<VROIKJoint> referenceJoint);

    /*
     Removes the given IKJoint from _allKnownIKJoints and thus from being processed in the
     IKRig as a part of the FABRIK computation.
     */
    void detachIKJoint(std::shared_ptr<VROIKJoint> joint);

    /*
     Construct a tree of VROIKChains, each containing a sequence of IKJoints in this rig. This is
     is done by creating a new chain for every branching point in the IKJoint tree, starting
     from the root of the tree.
     */
    void formChains(std::shared_ptr<VROIKJoint> branchNodeStart,
                    std::shared_ptr<VROIKJoint> currentNode,
                    std::shared_ptr<VROIKChain> &currentChain);
    void formChainDependencies(std::shared_ptr<VROIKChain> &currentChain);

    /*
     Main kinematic functions for performing a FABRIK pass.
     */
    void processInverseKinematics();
    void processChainTreeTowardsRoot(std::shared_ptr<VROIKChain> &chain);
    void processChainTreeTowardsEffectors(std::shared_ptr<VROIKChain> &chain);
    void processFABRIKChainNode(std::shared_ptr<VROIKChain> &chain, bool reverse);
    bool hasEffectorsMetTarget();

    /*
     Functions for syncing the result of FABRIK calculations back into node / bone transforms.
     */
    void syncResultPositionOnly(std::shared_ptr<VROIKJoint> jointNode);
    void syncResultRotationOnly(std::shared_ptr<VROIKJoint> jointNode);
    void syncResultSkeleton(std::shared_ptr<VROIKJoint> jointNode);
    void syncLockedJoint(std::shared_ptr<VROIKJoint> jointNode, VROMatrix4f parentTrans);
};
#endif /* VROIKRig_h */
| 2,964 |
5,169 | {
"name": "ZJChangFont",
"version": "0.0.1",
"summary": "A changed font on view",
"description": "It is a changed font on view , which implement by Objective-C",
"homepage": "https://github.com/zhangjikuan/ZJChangFont",
"license": "MIT",
"authors": {
"zhangjikuan": "<EMAIL>"
},
"platforms": {
"ios": null
},
"source": {
"git": "https://github.com/zhangjikuan/ZJChangFont.git",
"tag": "0.0.1"
},
"source_files": "ChangeFont/*",
"requires_arc": true,
"frameworks": [
"Foundation",
"CoreGraphics",
"UIKit"
]
}
| 242 |
14,668 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <cups/cups.h>
#include <map>
#include <string>
#include <utility>
#include "base/bind.h"
#include "base/containers/contains.h"
#include "base/guid.h"
#include "base/memory/weak_ptr.h"
#include "base/test/task_environment.h"
#include "chrome/services/cups_proxy/fake_cups_proxy_service_delegate.h"
#include "chrome/services/cups_proxy/printer_installer.h"
#include "printing/backend/cups_ipp_helper.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace cups_proxy {
namespace {
using Printer = chromeos::Printer;
// Generated via base::GenerateGUID.
const char kGenericGUID[] = "fd4c5f2e-7549-43d5-b931-9bf4e4f1bf51";
// Faked delegate gives control over PrinterInstaller's printing stack
// dependencies.
class FakeServiceDelegate : public FakeCupsProxyServiceDelegate {
 public:
  FakeServiceDelegate() = default;
  ~FakeServiceDelegate() override = default;

  // Registers |printer| as known to the stack but not yet installed.
  void AddPrinter(const Printer& printer) {
    installed_printers_.insert({printer.id(), false});
  }

  // After this call, every SetupPrinter() request is rejected.
  void FailSetupPrinter() { fail_printer_setup_ = true; }

  // Service delegate overrides.
  bool IsPrinterInstalled(const Printer& printer) override {
    if (!base::Contains(installed_printers_, printer.id())) {
      return false;
    }
    return installed_printers_.at(printer.id());
  }

  void PrinterInstalled(const Printer& printer) override {
    DCHECK(base::Contains(installed_printers_, printer.id()));
    installed_printers_[printer.id()] = true;
  }

  absl::optional<Printer> GetPrinter(const std::string& id) override {
    if (!base::Contains(installed_printers_, id)) {
      return absl::nullopt;
    }

    return Printer(id);
  }

  void SetupPrinter(const Printer& printer,
                    SetupPrinterCallback callback) override {
    if (fail_printer_setup_) {
      return std::move(callback).Run(false);
    }

    // PrinterInstaller is expected to have checked if |printer| is already
    // installed before trying setup.
    if (IsPrinterInstalled(printer)) {
      return std::move(callback).Run(false);
    }

    // Install printer.
    return std::move(callback).Run(true);
  }

 private:
  // Maps printer id -> whether that printer has been installed.
  std::map<std::string, bool> installed_printers_;

  // Conditions whether calls to SetupPrinter succeed.
  bool fail_printer_setup_ = false;
};
class PrinterInstallerTest : public testing::Test {
 public:
  PrinterInstallerTest() : weak_factory_(this) {
    delegate_ = std::make_unique<FakeServiceDelegate>();
    printer_installer_ = std::make_unique<PrinterInstaller>(delegate_.get());
  }

  ~PrinterInstallerTest() override = default;

  // Runs InstallPrinter(|printer_id|) to completion and returns its result.
  InstallPrinterResult RunInstallPrinter(std::string printer_id) {
    InstallPrinterResult ret;

    base::RunLoop run_loop;
    printer_installer_->InstallPrinter(
        printer_id, base::BindOnce(&PrinterInstallerTest::OnRunInstallPrinter,
                                   weak_factory_.GetWeakPtr(),
                                   run_loop.QuitClosure(), &ret));
    run_loop.Run();

    return ret;
  }

 protected:
  base::test::TaskEnvironment task_environment_;

  // Stashes |result| into |ret| and unblocks RunInstallPrinter's run loop.
  void OnRunInstallPrinter(base::OnceClosure finish_cb,
                           InstallPrinterResult* ret,
                           InstallPrinterResult result) {
    *ret = result;
    std::move(finish_cb).Run();
  }

  // Backend fake driving the PrinterInstaller.
  std::unique_ptr<FakeServiceDelegate> delegate_;

  // The class being tested. This must be declared after the fakes, as its
  // initialization must come after that of the fakes.
  std::unique_ptr<PrinterInstaller> printer_installer_;

  base::WeakPtrFactory<PrinterInstallerTest> weak_factory_;
};
// Standard install known printer workflow.
// Installing a printer known to the delegate succeeds and marks it installed.
TEST_F(PrinterInstallerTest, SimpleSanityTest) {
  Printer printer(kGenericGUID);
  delegate_->AddPrinter(printer);

  auto result = RunInstallPrinter(kGenericGUID);
  EXPECT_EQ(result, InstallPrinterResult::kSuccess);
  EXPECT_TRUE(delegate_->IsPrinterInstalled(printer));
}
// Should fail to install an unknown(previously unseen) printer.
// Should fail to install an unknown(previously unseen) printer.
TEST_F(PrinterInstallerTest, UnknownPrinter) {
  Printer printer(kGenericGUID);
  // Deliberately NOT registered with the delegate.

  auto result = RunInstallPrinter(kGenericGUID);
  EXPECT_EQ(result, InstallPrinterResult::kUnknownPrinterFound);
  EXPECT_FALSE(delegate_->IsPrinterInstalled(printer));
}
// Ensure we never setup a printer that's already installed.
// Ensure we never setup a printer that's already installed.
TEST_F(PrinterInstallerTest, InstallPrinterTwice) {
  Printer printer(kGenericGUID);
  delegate_->AddPrinter(printer);

  auto first = RunInstallPrinter(kGenericGUID);
  EXPECT_EQ(first, InstallPrinterResult::kSuccess);

  // Second attempt: |printer_installer_| must notice the printer is already
  // installed and bail out. If it attempts setup, FakeServiceDelegate will
  // fail the request.
  auto second = RunInstallPrinter(kGenericGUID);
  EXPECT_EQ(second, InstallPrinterResult::kSuccess);
}
// Checks for correct response to failed SetupPrinter call.
// Checks for correct response to failed SetupPrinter call.
TEST_F(PrinterInstallerTest, SetupPrinterFailure) {
  Printer printer(kGenericGUID);
  delegate_->AddPrinter(printer);
  delegate_->FailSetupPrinter();

  auto result = RunInstallPrinter(kGenericGUID);
  EXPECT_EQ(result, InstallPrinterResult::kPrinterInstallationFailure);
  EXPECT_FALSE(delegate_->IsPrinterInstalled(printer));
}
} // namespace
} // namespace cups_proxy
| 1,896 |
559 | <filename>core/src/main/java/com/netflix/msl/util/NullAuthenticationUtils.java
/**
* Copyright (c) 2016 Netflix, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.msl.util;
import com.netflix.msl.entityauth.EntityAuthenticationScheme;
import com.netflix.msl.keyx.KeyExchangeScheme;
import com.netflix.msl.tokens.MslUser;
import com.netflix.msl.userauth.UserAuthenticationScheme;
/**
* <p>An authentication utilities implementation where all operations are
* permitted.</p>
*
* @author <NAME> <<EMAIL>>
*/
public class NullAuthenticationUtils implements AuthenticationUtils {
    /* (non-Javadoc)
     * @see com.netflix.msl.util.AuthenticationUtils#isEntityRevoked(java.lang.String)
     */
    @Override
    public boolean isEntityRevoked(final String identity) {
        // No entity is ever considered revoked.
        return false;
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.util.AuthenticationUtils#isSchemePermitted(java.lang.String, com.netflix.msl.entityauth.EntityAuthenticationScheme)
     */
    @Override
    public boolean isSchemePermitted(final String identity, final EntityAuthenticationScheme scheme) {
        // All entity authentication schemes are permitted.
        return true;
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.util.AuthenticationUtils#isSchemePermitted(java.lang.String, com.netflix.msl.userauth.UserAuthenticationScheme)
     */
    @Override
    public boolean isSchemePermitted(final String identity, final UserAuthenticationScheme scheme) {
        // All user authentication schemes are permitted.
        return true;
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.util.AuthenticationUtils#isSchemePermitted(java.lang.String, com.netflix.msl.tokens.MslUser, com.netflix.msl.userauth.UserAuthenticationScheme)
     */
    @Override
    public boolean isSchemePermitted(final String identity, final MslUser user, final UserAuthenticationScheme scheme) {
        // All user authentication schemes are permitted for any user.
        return true;
    }

    /* (non-Javadoc)
     * @see com.netflix.msl.util.AuthenticationUtils#isSchemePermitted(java.lang.String, com.netflix.msl.keyx.KeyExchangeScheme)
     */
    @Override
    public boolean isSchemePermitted(final String identity, final KeyExchangeScheme scheme) {
        // All key exchange schemes are permitted.
        return true;
    }
}
| 810 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Langoëlan","circ":"6ème circonscription","dpt":"Morbihan","inscrits":270,"abs":116,"votants":154,"blancs":16,"nuls":11,"exp":127,"res":[{"nuance":"UDI","nom":"<NAME>","voix":68},{"nuance":"REM","nom":"<NAME>","voix":59}]} | 111 |
2,209 | # test_multibytecodec_support.py
# Common Unittest Routines for CJK codecs
#
import codecs
import os
import re
import sys
import unittest
from httplib import HTTPException
from test import test_support
from StringIO import StringIO
class TestBase:
encoding = '' # codec name
codec = None # codec tuple (with 4 elements)
tstring = '' # string to test StreamReader
codectests = None # must set. codec test tuple
roundtriptest = 1 # set if roundtrip is possible with unicode
has_iso10646 = 0 # set if this encoding contains whole iso10646 map
xmlcharnametest = None # string to test xmlcharrefreplace
unmappedunicode = u'\udeee' # a unicode code point that is not mapped.
def setUp(self):
if self.codec is None:
self.codec = codecs.lookup(self.encoding)
self.encode = self.codec.encode
self.decode = self.codec.decode
self.reader = self.codec.streamreader
self.writer = self.codec.streamwriter
self.incrementalencoder = self.codec.incrementalencoder
self.incrementaldecoder = self.codec.incrementaldecoder
def test_chunkcoding(self):
for native, utf8 in zip(*[StringIO(f).readlines()
for f in self.tstring]):
u = self.decode(native)[0]
self.assertEqual(u, utf8.decode('utf-8'))
if self.roundtriptest:
self.assertEqual(native, self.encode(u)[0])
def test_errorhandle(self):
for source, scheme, expected in self.codectests:
if isinstance(source, bytes):
func = self.decode
else:
func = self.encode
if expected:
result = func(source, scheme)[0]
if func is self.decode:
self.assertTrue(type(result) is unicode, type(result))
self.assertEqual(result, expected,
'%r.decode(%r, %r)=%r != %r'
% (source, self.encoding, scheme, result,
expected))
else:
self.assertTrue(type(result) is bytes, type(result))
self.assertEqual(result, expected,
'%r.encode(%r, %r)=%r != %r'
% (source, self.encoding, scheme, result,
expected))
else:
self.assertRaises(UnicodeError, func, source, scheme)
def test_xmlcharrefreplace(self):
if self.has_iso10646:
self.skipTest('encoding contains full ISO 10646 map')
s = u"\u0b13\u0b23\u0b60 nd eggs"
self.assertEqual(
self.encode(s, "xmlcharrefreplace")[0],
"ଓଣୠ nd eggs"
)
def test_customreplace_encode(self):
if self.has_iso10646:
self.skipTest('encoding contains full ISO 10646 map')
from htmlentitydefs import codepoint2name
def xmlcharnamereplace(exc):
if not isinstance(exc, UnicodeEncodeError):
raise TypeError("don't know how to handle %r" % exc)
l = []
for c in exc.object[exc.start:exc.end]:
if ord(c) in codepoint2name:
l.append(u"&%s;" % codepoint2name[ord(c)])
else:
l.append(u"&#%d;" % ord(c))
return (u"".join(l), exc.end)
codecs.register_error("test.xmlcharnamereplace", xmlcharnamereplace)
if self.xmlcharnametest:
sin, sout = self.xmlcharnametest
else:
sin = u"\xab\u211c\xbb = \u2329\u1234\u232a"
sout = "«ℜ» = ⟨ሴ⟩"
self.assertEqual(self.encode(sin,
"test.xmlcharnamereplace")[0], sout)
def test_callback_wrong_objects(self):
def myreplace(exc):
return (ret, exc.end)
codecs.register_error("test.cjktest", myreplace)
for ret in ([1, 2, 3], [], None, object(), 'string', ''):
self.assertRaises(TypeError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_callback_long_index(self):
def myreplace(exc):
return (u'x', long(exc.end))
codecs.register_error("test.cjktest", myreplace)
self.assertEqual(self.encode(u'abcd' + self.unmappedunicode + u'efgh',
'test.cjktest'), ('abcdxefgh', 9))
def myreplace(exc):
return (u'x', sys.maxint + 1)
codecs.register_error("test.cjktest", myreplace)
self.assertRaises(IndexError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_callback_None_index(self):
def myreplace(exc):
return (u'x', None)
codecs.register_error("test.cjktest", myreplace)
self.assertRaises(TypeError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_callback_backward_index(self):
def myreplace(exc):
if myreplace.limit > 0:
myreplace.limit -= 1
return (u'REPLACED', 0)
else:
return (u'TERMINAL', exc.end)
myreplace.limit = 3
codecs.register_error("test.cjktest", myreplace)
self.assertEqual(self.encode(u'abcd' + self.unmappedunicode + u'efgh',
'test.cjktest'),
('abcdREPLACEDabcdREPLACEDabcdREPLACEDabcdTERMINALefgh', 9))
def test_callback_forward_index(self):
def myreplace(exc):
return (u'REPLACED', exc.end + 2)
codecs.register_error("test.cjktest", myreplace)
self.assertEqual(self.encode(u'abcd' + self.unmappedunicode + u'efgh',
'test.cjktest'), ('abcdREPLACEDgh', 9))
def test_callback_index_outofbound(self):
def myreplace(exc):
return (u'TERM', 100)
codecs.register_error("test.cjktest", myreplace)
self.assertRaises(IndexError, self.encode, self.unmappedunicode,
'test.cjktest')
def test_incrementalencoder(self):
UTF8Reader = codecs.getreader('utf-8')
for sizehint in [None] + range(1, 33) + \
[64, 128, 256, 512, 1024]:
istream = UTF8Reader(StringIO(self.tstring[1]))
ostream = StringIO()
encoder = self.incrementalencoder()
while 1:
if sizehint is not None:
data = istream.read(sizehint)
else:
data = istream.read()
if not data:
break
e = encoder.encode(data)
ostream.write(e)
self.assertEqual(ostream.getvalue(), self.tstring[0])
def test_incrementaldecoder(self):
UTF8Writer = codecs.getwriter('utf-8')
for sizehint in [None, -1] + range(1, 33) + \
[64, 128, 256, 512, 1024]:
istream = StringIO(self.tstring[0])
ostream = UTF8Writer(StringIO())
decoder = self.incrementaldecoder()
while 1:
data = istream.read(sizehint)
if not data:
break
else:
u = decoder.decode(data)
ostream.write(u)
self.assertEqual(ostream.getvalue(), self.tstring[1])
    def test_incrementalencoder_error_callback(self):
        # The `errors` attribute of an incremental encoder is mutable
        # between calls; every encode() must honour the setting current at
        # call time (strict -> raise, ignore -> drop, custom -> callback).
        inv = self.unmappedunicode
        e = self.incrementalencoder()
        self.assertRaises(UnicodeEncodeError, e.encode, inv, True)
        e.errors = 'ignore'
        self.assertEqual(e.encode(inv, True), '')
        e.reset()
        def tempreplace(exc):
            return (u'called', exc.end)
        codecs.register_error('test.incremental_error_callback', tempreplace)
        e.errors = 'test.incremental_error_callback'
        self.assertEqual(e.encode(inv, True), 'called')
        # again
        e.errors = 'ignore'
        self.assertEqual(e.encode(inv, True), '')
    def test_streamreader(self):
        # Exercise read/readline/readlines on the codec's StreamReader with
        # a wide range of size hints; the decoded stream must be identical
        # to the UTF-8 fixture no matter how reads are sized.
        UTF8Writer = codecs.getwriter('utf-8')
        for name in ["read", "readline", "readlines"]:
            for sizehint in [None, -1] + range(1, 33) + \
                            [64, 128, 256, 512, 1024]:
                istream = self.reader(StringIO(self.tstring[0]))
                ostream = UTF8Writer(StringIO())
                func = getattr(istream, name)
                while 1:
                    data = func(sizehint)
                    if not data:
                        break
                    if name == "readlines":
                        ostream.writelines(data)
                    else:
                        ostream.write(data)
                self.assertEqual(ostream.getvalue(), self.tstring[1])
    def test_streamwriter(self):
        # Pull the UTF-8 fixture through read/readline/readlines with many
        # size hints and push it into the codec's StreamWriter; the written
        # bytes must equal the natively encoded fixture in every case.
        readfuncs = ('read', 'readline', 'readlines')
        UTF8Reader = codecs.getreader('utf-8')
        for name in readfuncs:
            for sizehint in [None] + range(1, 33) + \
                            [64, 128, 256, 512, 1024]:
                istream = UTF8Reader(StringIO(self.tstring[1]))
                ostream = self.writer(StringIO())
                func = getattr(istream, name)
                while 1:
                    if sizehint is not None:
                        data = func(sizehint)
                    else:
                        data = func()
                    if not data:
                        break
                    if name == "readlines":
                        ostream.writelines(data)
                    else:
                        ostream.write(data)
                self.assertEqual(ostream.getvalue(), self.tstring[0])
class TestBase_Mapping(unittest.TestCase):
    # Round-trip tests driven by an external mapping table downloaded from
    # self.mapfileurl (supplied by concrete subclasses, together with
    # self.encoding).
    pass_enctest = []  # (bytes, unicode) pairs to skip in the encode check
    pass_dectest = []  # (bytes, unicode) pairs to skip in the decode check
    supmaps = []       # extra (bytes, unicode) pairs tested on top of the table
    codectests = []    # (input, error-scheme, expected-or-falsy) triples

    def __init__(self, *args, **kw):
        unittest.TestCase.__init__(self, *args, **kw)
        try:
            self.open_mapping_file().close() # test it to report the error early
        except (IOError, HTTPException):
            self.skipTest("Could not retrieve "+self.mapfileurl)

    def open_mapping_file(self):
        # Fetches (and caches) the mapping resource; may hit the network.
        return test_support.open_urlresource(self.mapfileurl)

    def test_mapping_file(self):
        # Dispatch on the table format: ICU-style .xml/.ucm vs plain text.
        if self.mapfileurl.endswith('.xml'):
            self._test_mapping_file_ucm()
        else:
            self._test_mapping_file_plain()

    def _test_mapping_file_plain(self):
        # Plain tables have lines of the form "0xAB 0xCDEF # comment" where
        # the second column may join several code points with '+'.
        _unichr = lambda c: eval("u'\\U%08x'" % int(c, 16))
        unichrs = lambda s: u''.join(_unichr(c) for c in s.split('+'))
        urt_wa = {}  # unicode round-trip "already seen" map
        with self.open_mapping_file() as f:
            for line in f:
                if not line:
                    break
                data = line.split('#')[0].strip().split()
                if len(data) != 2:
                    continue
                csetval = eval(data[0])
                # Rebuild the charset byte string from the integer value;
                # single bytes <= 0x7F pass through, wider values are split
                # big-endian into 2..4 bytes.
                if csetval <= 0x7F:
                    csetch = chr(csetval & 0xff)
                elif csetval >= 0x1000000:
                    csetch = chr(csetval >> 24) + chr((csetval >> 16) & 0xff) + \
                             chr((csetval >> 8) & 0xff) + chr(csetval & 0xff)
                elif csetval >= 0x10000:
                    csetch = chr(csetval >> 16) + \
                             chr((csetval >> 8) & 0xff) + chr(csetval & 0xff)
                elif csetval >= 0x100:
                    csetch = chr(csetval >> 8) + chr(csetval & 0xff)
                else:
                    continue
                unich = unichrs(data[1])
                # Skip replacement characters and duplicate unicode targets.
                if unich == u'\ufffd' or unich in urt_wa:
                    continue
                urt_wa[unich] = csetch
                self._testpoint(csetch, unich)

    def _test_mapping_file_ucm(self):
        # UCM/XML tables carry <a u="XXXX" b="AA BB"/> entries.
        with self.open_mapping_file() as f:
            ucmdata = f.read()
        uc = re.findall('<a u="([A-F0-9]{4})" b="([0-9A-F ]+)"/>', ucmdata)
        for uni, coded in uc:
            unich = unichr(int(uni, 16))
            codech = ''.join(chr(int(c, 16)) for c in coded.split())
            self._testpoint(codech, unich)

    def test_mapping_supplemental(self):
        for mapping in self.supmaps:
            self._testpoint(*mapping)

    def _testpoint(self, csetch, unich):
        # Verify one encode and one decode round trip, honouring the
        # per-subclass exclusion lists.
        if (csetch, unich) not in self.pass_enctest:
            try:
                self.assertEqual(unich.encode(self.encoding), csetch)
            except UnicodeError, exc:
                self.fail('Encoding failed while testing %s -> %s: %s' % (
                            repr(unich), repr(csetch), exc.reason))
        if (csetch, unich) not in self.pass_dectest:
            try:
                self.assertEqual(csetch.decode(self.encoding), unich)
            except UnicodeError, exc:
                self.fail('Decoding failed while testing %s -> %s: %s' % (
                            repr(csetch), repr(unich), exc.reason))

    def test_errorhandle(self):
        # Each codectests triple either pins an expected result for the
        # given error scheme or (falsy expected) demands a UnicodeError.
        for source, scheme, expected in self.codectests:
            if isinstance(source, bytes):
                func = source.decode
            else:
                func = source.encode
            if expected:
                if isinstance(source, bytes):
                    result = func(self.encoding, scheme)
                    self.assertTrue(type(result) is unicode, type(result))
                    self.assertEqual(result, expected,
                                     '%r.decode(%r, %r)=%r != %r'
                                     % (source, self.encoding, scheme, result,
                                        expected))
                else:
                    result = func(self.encoding, scheme)
                    self.assertTrue(type(result) is bytes, type(result))
                    self.assertEqual(result, expected,
                                     '%r.encode(%r, %r)=%r != %r'
                                     % (source, self.encoding, scheme, result,
                                        expected))
            else:
                self.assertRaises(UnicodeError, func, self.encoding, scheme)
def load_teststring(name):
    """Return the (natively-encoded, utf8-encoded) fixture pair for `name`."""
    fixture_dir = os.path.join(os.path.dirname(__file__), 'cjkencodings')
    with open(os.path.join(fixture_dir, name + '.txt'), 'rb') as native_file:
        encoded = native_file.read()
    with open(os.path.join(fixture_dir, name + '-utf8.txt'), 'rb') as utf8_file:
        utf8 = utf8_file.read()
    return encoded, utf8
| 7,986 |
801 | <filename>tests/layers/test_reshape.py
import numpy as np
from neupy import layers
from neupy.utils import asfloat, tf_utils
from neupy.exceptions import LayerConnectionError
from base import BaseTestCase
class ReshapeLayerTestCase(BaseTestCase):
    # Behavioural tests for layers.Reshape: default flattening, explicit
    # target shapes, -1 wildcards, and error reporting.

    def test_reshape_layer_1d_shape(self):
        # Default Reshape() flattens everything after the batch axis.
        x = np.random.random((5, 4, 3, 2, 1))
        network = layers.Input((4, 3, 2, 1)) >> layers.Reshape()
        y = self.eval(network.output(x))
        self.assertEqual(y.shape, (5, 4 * 3 * 2 * 1))

    def test_reshape_layer_2d_shape(self):
        # Explicit target shape (4, 5); the batch axis is preserved.
        x = np.random.random((5, 20))
        input_layer = layers.Input(20)
        reshape_layer = layers.Reshape((4, 5))
        # NOTE(review): `>` appears to be neupy's in-place connection
        # operator (cf. `>>` above) -- confirm against neupy docs.
        input_layer > reshape_layer
        y = self.eval(reshape_layer.output(x))
        self.assertEqual(y.shape, (5, 4, 5))
        self.assertShapesEqual(reshape_layer.output_shape, (None, 4, 5))

    def test_reshape_unknown_shape(self):
        # With an unknown time dimension the flattened size is also unknown
        # statically, but concrete inputs still reshape correctly.
        network = layers.join(
            layers.Input((None, 20)),
            layers.Reshape(),
        )
        self.assertShapesEqual(network.output_shape, (None, None))

        x = np.random.random((7, 12, 20))
        y = self.eval(network.output(x))
        self.assertEqual(y.shape, (7, 12 * 20))

    def test_reshape_with_negative_value(self):
        # -1 lets Reshape infer the remaining dimension (7*20/5 == 28).
        network = layers.join(
            layers.Input((7, 20)),
            layers.Reshape((5, -1)),
        )
        self.assertShapesEqual(network.output_shape, (None, 5, 28))

        x = np.random.random((11, 7, 20))
        y = self.eval(network.output(x))
        self.assertEqual(y.shape, (11, 5, 28))

    def test_reshape_with_negative_value_unknown_in_shape(self):
        # When the input shape is partially unknown the -1 dimension stays
        # unknown statically but resolves at run time.
        network = layers.join(
            layers.Input((7, None)),
            layers.Reshape([5, -1]),
        )
        self.assertShapesEqual(network.output_shape, (None, 5, None))

        x = np.random.random((11, 7, 10))
        y = self.eval(network.output(x))
        self.assertEqual(y.shape, (11, 5, 14))

    def test_reshape_exceptions(self):
        # At most one -1 wildcard; incompatible sizes must be rejected.
        with self.assertRaisesRegexp(ValueError, "Only single"):
            layers.Reshape([-1, -1])

        with self.assertRaisesRegexp(ValueError, "are incompatible"):
            layers.join(
                layers.Input(20),
                layers.Reshape((-1, 6)),
            )

    def test_reshape_repr(self):
        layer = layers.Reshape()
        self.assertEqual(
            "Reshape((-1,), name='reshape-1')",
            str(layer))

        layer = layers.Reshape((5, 2), name='reshape-layer')
        self.assertEqual(
            "Reshape((5, 2), name='reshape-layer')",
            str(layer))

    def test_partially_defined_input_shape(self):
        # Convolution output with unknown spatial dims still reshapes to
        # (-1, 5); 10x10 input -> 8x8 conv output -> 64 rows.
        network = layers.join(
            layers.Input((None, None, 3)),
            layers.Convolution((3, 3, 5)),
            layers.Reshape((-1, 5)),
        )
        self.assertShapesEqual(network.input_shape, (None, None, None, 3))
        self.assertShapesEqual(network.output_shape, (None, None, 5))

        x = network.inputs
        y = network.outputs

        session = tf_utils.tensorflow_session()
        images = np.random.random((2, 10, 10, 3))
        output = session.run(y, feed_dict={x: images})
        self.assertEqual(output.shape, (2, 64, 5))
class TransposeTestCase(BaseTestCase):
    # Behavioural tests for layers.Transpose: axis permutation, unknown
    # dimensions, rank validation, and repr formatting.

    def test_simple_transpose(self):
        network = layers.join(
            layers.Input((7, 11)),
            layers.Transpose((0, 2, 1)),
        )
        self.assertShapesEqual(network.output_shape, (None, 11, 7))

    def test_transpose_unknown_input_dim(self):
        # Unknown dims are permuted like any other; concrete batches of
        # differing sizes must both evaluate correctly.
        network = layers.join(
            layers.Input((None, 10, 20)),
            layers.Transpose((0, 2, 1, 3)),
        )
        self.assertShapesEqual(network.output_shape, (None, 10, None, 20))

        value = asfloat(np.random.random((12, 100, 10, 20)))
        output_value = self.eval(network.output(value))
        self.assertEqual(output_value.shape, (12, 10, 100, 20))

        value = asfloat(np.random.random((12, 33, 10, 20)))
        output_value = self.eval(network.output(value))
        self.assertEqual(output_value.shape, (12, 10, 33, 20))

    def test_transpose_exceptions(self):
        # A 3-axis permutation cannot be applied to 2-D (batch, 20) input.
        error_message = "Cannot apply transpose operation to the input"
        with self.assertRaisesRegexp(LayerConnectionError, error_message):
            layers.join(
                layers.Input(20),
                layers.Transpose((0, 2, 1)),
            )

    def test_transpose_repr(self):
        layer = layers.Transpose((0, 2, 1))
        self.assertEqual(
            "Transpose((0, 2, 1), name='transpose-1')",
            str(layer))

        layer = layers.Transpose((0, 2, 1), name='test')
        self.assertEqual(
            "Transpose((0, 2, 1), name='test')",
            str(layer))

    def test_transpose_undefined_input_shape(self):
        # Without a connected input, rank is inferred from the permutation
        # length and every dimension stays unknown.
        network = layers.Transpose((1, 0, 2))
        self.assertShapesEqual(network.input_shape, None)
        self.assertShapesEqual(network.output_shape, (None, None, None))

        network = layers.Transpose((1, 0))
        self.assertShapesEqual(network.input_shape, None)
        self.assertShapesEqual(network.output_shape, (None, None))
| 2,428 |
335 | {
"word": "Category",
"definitions": [
"A class or division of people or things regarded as having particular shared characteristics.",
"Each of a possibly exhaustive set of classes among which all things might be distributed.",
"Each of the a priori conceptions applied by the mind to sense impressions."
],
"parts-of-speech": "Noun"
} | 115 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMECAST_GRAPHICS_CAST_TOUCH_ACTIVITY_OBSERVER_H_
#define CHROMECAST_GRAPHICS_CAST_TOUCH_ACTIVITY_OBSERVER_H_
namespace chromecast {
// Observer interface for consumers that need to react when the Cast window
// manager toggles touch input, or when touch activity arrives while input
// is disabled.
class CastTouchActivityObserver {
 public:
  virtual ~CastTouchActivityObserver() = default;

  // Invoked when the window manager toggles touch input; |disabled| is true
  // when touch input has been turned off.
  virtual void OnTouchEventsDisabled(bool disabled) = 0;

  // Invoked when input is disabled and an input event is received.
  // Can be used by the observer to turn touch input back on.
  virtual void OnTouchActivity() = 0;
};
} // namespace chromecast
#endif // CHROMECAST_GRAPHICS_CAST_TOUCH_ACTIVITY_OBSERVER_H_
| 262 |
3,787 | {
"federatedml.transfer_variable.transfer_class.sample_transfer_variable.SampleTransferVariable": {
"sample_ids": {
"src": [
"guest"
],
"dst": [
"host"
]
}
}
} | 106 |
669 | <gh_stars>100-1000
#include "mlasi.h"
#include <altivec.h>
//
// Quantizes a buffer of floats to 8-bit integers using Power VSX/AltiVec
// intrinsics:
//
//     Output[i] = clamp(round(Input[i] / Scale) + ZeroPoint,
//                       numeric_limits<OutputType>::min(),
//                       numeric_limits<OutputType>::max())
//
// Elements are processed 16 at a time, then 4 at a time, then a final
// partial vector of up to 3 elements.
//
template <typename OutputType>
void
MLASCALL
MlasQuantizeLinearVSX(
    const float* Input,
    OutputType* Output,
    size_t N,
    float Scale,
    OutputType ZeroPoint
    )
{
    // Workaround for bad GCC warning that Scale is set but not used.
    MLAS_UNREFERENCED_PARAMETER(Scale);

    constexpr int32_t MinimumValue = std::numeric_limits<OutputType>::min();
    constexpr int32_t MaximumValue = std::numeric_limits<OutputType>::max();

    // Broadcast scalar parameters across all vector lanes once, up front.
    auto ScaleVector = vec_splats(Scale);
    auto MinimumValueVector = vec_splats(float(MinimumValue));
    auto MaximumValueVector = vec_splats(float(MaximumValue));
    auto ZeroPointVector = vec_splats(float(ZeroPoint));

    // Main loop: 16 floats per iteration (4 vectors of 4), packed down
    // through int32 -> int16 -> int8 into one 16-byte store.
    while (N >= 16) {
        auto FloatVector0 = vec_xl(0, Input);
        auto FloatVector1 = vec_xl(0, Input + 4);
        auto FloatVector2 = vec_xl(0, Input + 8);
        auto FloatVector3 = vec_xl(0, Input + 12);

        FloatVector0 = vec_div(FloatVector0, ScaleVector);
        FloatVector1 = vec_div(FloatVector1, ScaleVector);
        FloatVector2 = vec_div(FloatVector2, ScaleVector);
        FloatVector3 = vec_div(FloatVector3, ScaleVector);

        FloatVector0 = vec_round(FloatVector0);
        FloatVector1 = vec_round(FloatVector1);
        FloatVector2 = vec_round(FloatVector2);
        FloatVector3 = vec_round(FloatVector3);

        FloatVector0 = vec_add(FloatVector0, ZeroPointVector);
        FloatVector1 = vec_add(FloatVector1, ZeroPointVector);
        FloatVector2 = vec_add(FloatVector2, ZeroPointVector);
        FloatVector3 = vec_add(FloatVector3, ZeroPointVector);

        FloatVector0 = vec_max(FloatVector0, MinimumValueVector);
        FloatVector1 = vec_max(FloatVector1, MinimumValueVector);
        FloatVector2 = vec_max(FloatVector2, MinimumValueVector);
        FloatVector3 = vec_max(FloatVector3, MinimumValueVector);

        FloatVector0 = vec_min(FloatVector0, MaximumValueVector);
        FloatVector1 = vec_min(FloatVector1, MaximumValueVector);
        FloatVector2 = vec_min(FloatVector2, MaximumValueVector);
        FloatVector3 = vec_min(FloatVector3, MaximumValueVector);

        auto IntegerVector0 = vec_signed(FloatVector0);
        auto IntegerVector1 = vec_signed(FloatVector1);
        auto IntegerVector2 = vec_signed(FloatVector2);
        auto IntegerVector3 = vec_signed(FloatVector3);

        auto ShortVector0 = vec_pack(IntegerVector0, IntegerVector1);
        auto ShortVector1 = vec_pack(IntegerVector2, IntegerVector3);
        auto CharVector = vec_pack(ShortVector0, ShortVector1);
        vec_xst(CharVector, 0, (int8_t *) Output);

        Output += 16;
        Input += 16;
        N -= 16;
    }

    // 4..15 remaining elements: quantize 4 at a time.
    // NOTE(review): vec_xst_len is given N (not 4), so it writes up to
    // N <= 15 bytes of which only the first 4 are meaningful; the trailing
    // bytes are overwritten by subsequent iterations and by the tail below,
    // and the writes never extend past Output[N-1] of the original buffer.
    // Appears intentional (avoids clamping the length) -- confirm against
    // the scalar reference implementation.
    while (N >= 4) {
        auto FloatVector = vec_xl(0, Input);
        FloatVector = vec_div(FloatVector, ScaleVector);
        FloatVector = vec_round(FloatVector);
        FloatVector = vec_add(FloatVector, ZeroPointVector);
        FloatVector = vec_max(FloatVector, MinimumValueVector);
        FloatVector = vec_min(FloatVector, MaximumValueVector);

        auto IntegerVector = vec_signed(FloatVector);
        auto ShortVector = vec_pack(IntegerVector, vec_splats((int32_t) 0));
        auto CharVector = vec_pack(ShortVector, vec_splats((int16_t) 0));
        vec_xst_len(CharVector, (int8_t *) Output, N);

        Output += 4;
        Input += 4;
        N -= 4;
    }

    // Tail: 1..3 remaining elements via length-limited load and store.
    if (N > 0) {
        auto FloatVector = vec_xl_len( const_cast<float*>(Input), 4*N);
        FloatVector = vec_div(FloatVector, ScaleVector);
        FloatVector = vec_round(FloatVector);
        FloatVector = vec_add(FloatVector, ZeroPointVector);
        FloatVector = vec_max(FloatVector, MinimumValueVector);
        FloatVector = vec_min(FloatVector, MaximumValueVector);

        auto IntegerVector = vec_signed(FloatVector);
        auto ShortVector = vec_pack(IntegerVector, vec_splats((int32_t) 0));
        auto CharVector = vec_pack(ShortVector, vec_splats((int16_t) 0));
        vec_xst_len(CharVector, (int8_t *) Output, N);
    }
}
//
// Unsigned 8-bit entry point: forwards to the shared VSX quantization
// template instantiated for uint8_t.
//
void
MLASCALL
MlasQuantizeLinearU8KernelVSX(
    const float* Input,
    uint8_t* Output,
    size_t N,
    float Scale,
    uint8_t ZeroPoint
    )
{
    MlasQuantizeLinearVSX<uint8_t>(Input, Output, N, Scale, ZeroPoint);
}
//
// Signed 8-bit entry point: forwards to the shared VSX quantization
// template instantiated for int8_t.
//
void
MLASCALL
MlasQuantizeLinearS8KernelVSX(
    const float* Input,
    int8_t* Output,
    size_t N,
    float Scale,
    int8_t ZeroPoint
    )
{
    MlasQuantizeLinearVSX<int8_t>(Input, Output, N, Scale, ZeroPoint);
}
| 1,817 |
522 | <reponame>konny0311/algorithms-nutshell-2ed<gh_stars>100-1000
package algs.chapter2.table2;
import java.text.DecimalFormat;
import java.text.NumberFormat;
/**
* Java Example of Bisection method for root-finding.
*
*
* Java implementation of Bisection method that also shows the binary digits
* of the resulting floating point value to provide a sense of the
* convergence of the approach.
*
* @author <NAME>
* @date 8/26/15
*/
public class BisectionMethod {

    /** Known root of f; note that f(root) = 0 (to floating-point accuracy). */
    static final double root = -0.1893027580583891;
    /** String form of the root; kept for compatibility although unused here. */
    static final String rootS = "" + root;

    /** Shared formatter for the tabular output (7 fraction digits). */
    static NumberFormat df = DecimalFormat.getInstance();

    /** The function whose roots are to be computed: f(x) = x*sin(x) - 5x - cos(x). */
    static double f(double x) {
        return x*Math.sin(x)-5*x-Math.cos(x);
    }

    /**
     * Launch Bisection method for twenty steps over the bracket [-1, +1],
     * printing one table row (n, a, b, midpoint c, f(c)) per iteration.
     *
     * <p>Fix: the original comment claimed "ten steps" while the loop runs
     * twenty iterations; the documentation now matches the code.
     */
    public static void main (String []args) {
        df.setMaximumFractionDigits(7);

        double a = -1;
        double b = +1;
        System.out.println("n\ta\tb\tc\tf(c)");

        // Compute table; keep the half-interval whose endpoints bracket the
        // sign change (f(a) and f(c) of opposite sign => root in [a, c]).
        for (int i = 1; i <= 20; i++) {
            double c = (a+b)/2;
            System.out.println(i + "\t" + df.format(a) + "\t" + df.format(b) + "\t" + df.format(c) + "\t" + df.format(f(c)));
            int signfa = (int) Math.signum(f(a));
            int signfc = (int) Math.signum(f(c));
            if (signfa == signfc) {
                a = c;
            } else {
                b = c;
            }
        }
    }
}
| 535 |
866 | <filename>src/main/java/brickhouse/udf/collect/LastIndexUDF.java<gh_stars>100-1000
package brickhouse.udf.collect;
/**
* Copyright 2012 Klout, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
/**
* Workaround for the Hive bug
* https://issues.apache.org/jira/browse/HIVE-1955
* <p/>
* FAILED: Error in semantic analysis: Line 4:3 Non-constant expressions for array indexes not supported key
* <p/>
* <p/>
* Use instead of [ ] syntax,
*/
@Description(name = "last_index",
value = "_FUNC_(x) - Last value in an array "
)
public class LastIndexUDF extends GenericUDF {
    // Inspector for the array argument; bound once in initialize().
    private ListObjectInspector listInspector;

    /**
     * Returns the last element of the array argument, or null for an
     * empty array.
     * NOTE(review): a SQL NULL array reaches getListLength() unchecked --
     * confirm the inspector tolerates null input.
     */
    @Override
    public Object evaluate(DeferredObject[] arg0) throws HiveException {
        Object list = arg0[0].get();
        int lastIdx = listInspector.getListLength(list) - 1;
        if (lastIdx >= 0) {
            Object unInsp = listInspector.getListElement(list, lastIdx);
            return unInsp;
        } else {
            return null;
        }
    }

    /** Rendering of this call in EXPLAIN output, e.g. "last_index( arr )". */
    @Override
    public String getDisplayString(String[] arg0) {
        return "last_index( " + arg0[0] + " )";
    }

    /**
     * Validates that exactly one LIST-typed argument was supplied and
     * returns the element inspector as this UDF's result type.
     */
    @Override
    public ObjectInspector initialize(ObjectInspector[] arg0)
            throws UDFArgumentException {
        if (arg0.length != 1) {
            throw new UDFArgumentException("last_index takes an array as an argument.");
        }
        if (arg0[0].getCategory() != Category.LIST) {
            throw new UDFArgumentException("last_index takes an array as an argument.");
        }
        listInspector = (ListObjectInspector) arg0[0];
        return listInspector.getListElementObjectInspector();
    }
}
| 966 |
3,670 | # Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Lint as: python3
"""Annotations of special functions."""
import builtins
import functools
import math
import operator
import sys
import tensorflow.compat.v2 as tf
# According to:
#
# https://docs.python.org/3/reference/datamodel.html#special-lookup
#
# "implicit invocations of special methods are only guaranteed to work
# correctly if defined on an object's type, not in the object's instance
# dictionary".
#
# Additionally:
#
# "In addition to bypassing any instance attributes in the interest of
# correctness, implicit special method lookup generally also bypasses the
# __getattribute__() method even of the object's metaclass"
#
# We therefore use:
# https://docs.python.org/3/reference/datamodel.html#special-method-names
# to compile sets of names which we will explicitly handle if defined in the
# static class.
PY2_OR_OLDER = sys.version_info[0] < 3
def _reverse(fn):
@functools.wraps(fn)
def _wrapped(a, b, *args, **kwargs):
return fn(b, a, *args, **kwargs)
return _wrapped
def _defer(fn, name=None, reverse=False):
"""Wraps `fn` by instead calling `self.__action__`."""
if name is None:
name = fn.__name__
if not name.startswith('__'):
name = '__' + name
if not name.endswith('__'):
name = name + '__'
if reverse:
fn = _reverse(fn)
if name.startswith('__'):
name = '__r' + name[2:]
else:
name = 'r' + name
@functools.wraps(fn)
def _wrapped_fn(self, *args, **kwargs):
return self.__action__(fn, *args, _action_name=name, **kwargs)
return _wrapped_fn
def _enter(self):
  """Module-level trampoline for the context-manager `__enter__` protocol."""
  return self.__enter__()


def _exit(self, exc_type, exc_value, traceback):
  """Module-level trampoline for the context-manager `__exit__` protocol."""
  return self.__exit__(exc_type, exc_value, traceback)


def _call(self, *args, **kwargs):
  """Module-level trampoline for invoking `self` as a callable."""
  # Note: it is essential to use `self(...)` rather than `self.__call__(...)`
  # since the latter fails to correctly forward to `self.__init__(...)`.
  return self(*args, **kwargs)
class SpecialMethods(object):
  """Special methods to intercept.

  Every special method assigned below is routed through `__action__`, so a
  subclass only needs to override `__action__` to intercept all operator,
  conversion, container, and context-manager protocols.
  """

  __slots__ = ('_name',)

  def __action__(self, fn, *args, **kwargs):
    # `fn` is the plain callable implementing the operation (e.g.
    # operator.add); `_action_name` carries the dunder name it was bound as.
    action_name = kwargs.pop('_action_name', None)
    name = try_get_name(fn) if action_name is None else action_name
    raise NotImplementedError(
        'Subclass must implement `__action__` ({}).'.format(name))

  # --- String conversion / formatting. ---
  __repr__ = _defer(builtins.repr)
  __str__ = _defer(builtins.str)
  __bytes__ = _defer(builtins.bytes)
  __format__ = _defer(builtins.format)

  # --- Rich comparisons and hashing. ---
  __lt__ = _defer(operator.lt)
  __le__ = _defer(operator.le)
  __eq__ = _defer(operator.eq)
  __ne__ = _defer(operator.ne)
  __gt__ = _defer(operator.gt)
  __ge__ = _defer(operator.ge)
  __hash__ = _defer(builtins.hash)
  __bool__ = _defer(builtins.bool)

  # --- Container protocol. ---
  __len__ = _defer(builtins.len)
  __getitem__ = _defer(operator.getitem)
  __setitem__ = _defer(operator.setitem)
  __delitem__ = _defer(operator.delitem)
  __iter__ = _defer(builtins.iter)
  __next__ = _defer(builtins.next)
  __reversed__ = _defer(builtins.reversed)
  __contains__ = _defer(operator.contains)

  # --- Unary operators and numeric conversions. ---
  __neg__ = _defer(operator.neg)
  __pos__ = _defer(operator.pos)
  __abs__ = _defer(builtins.abs)
  __invert__ = _defer(operator.invert)
  __complex__ = _defer(builtins.complex)
  __int__ = _defer(builtins.int)
  __float__ = _defer(builtins.float)
  __index__ = _defer(operator.index)
  __round__ = _defer(builtins.round)
  __trunc__ = _defer(math.trunc)
  __floor__ = _defer(math.floor)
  __ceil__ = _defer(math.ceil)

  # --- Context manager and call protocols (via module trampolines). ---
  __enter__ = _defer(_enter, '__enter__')
  __exit__ = _defer(_exit, '__exit__')
  __call__ = _defer(_call, '__call__')

  if PY2_OR_OLDER:
    def next(self, *default):
      # We don't call __action__ since __next__ will do it for us.
      return self.__next__(*default)
    # '__coerce__',
    __inv__ = _defer(operator.inv)
    __nonzero__ = _defer(builtins.bool, '__nonzero__')
    __long__ = _defer(builtins.long)
    __hex__ = _defer(builtins.hex)
    __oct__ = _defer(builtins.oct)
    # Old PY2:
    # __getslice__ = _defer(builtins, '__getslice__')
    # __setslice__ = _defer(builtins, '__setslice__')
    # __delslice__ = _defer(builtins, '__delslice__')
  else:
    __length_hint__ = _defer(operator.length_hint)

  # --- Binary arithmetic, with reflected (__r*__) and in-place (__i*__)
  # --- variants generated from the same underlying operator.
  __add__ = _defer(operator.add)
  __sub__ = _defer(operator.sub)
  __mul__ = _defer(operator.mul)
  __truediv__ = _defer(operator.truediv)
  __floordiv__ = _defer(operator.floordiv)
  __mod__ = _defer(operator.mod)
  __divmod__ = _defer(builtins.divmod)
  __pow__ = _defer(builtins.pow)
  __lshift__ = _defer(operator.lshift)
  __rshift__ = _defer(operator.rshift)
  __and__ = _defer(operator.and_, '__and__')
  __xor__ = _defer(operator.xor)
  __or__ = _defer(operator.or_, '__or__')

  __radd__ = _defer(operator.add, reverse=True)
  __rsub__ = _defer(operator.sub, reverse=True)
  __rmul__ = _defer(operator.mul, reverse=True)
  __rtruediv__ = _defer(operator.truediv, reverse=True)
  __rfloordiv__ = _defer(operator.floordiv, reverse=True)
  __rmod__ = _defer(operator.mod, reverse=True)
  __rdivmod__ = _defer(builtins.divmod, reverse=True)
  __rpow__ = _defer(builtins.pow, reverse=True)
  __rlshift__ = _defer(operator.lshift, reverse=True)
  __rrshift__ = _defer(operator.rshift, reverse=True)
  __rand__ = _defer(operator.and_, '__and__', reverse=True)
  __rxor__ = _defer(operator.xor, reverse=True)
  __ror__ = _defer(operator.or_, '__or__', reverse=True)

  __iadd__ = _defer(operator.iadd)
  __isub__ = _defer(operator.isub)
  __imul__ = _defer(operator.imul)
  __itruediv__ = _defer(operator.itruediv)
  __ifloordiv__ = _defer(operator.ifloordiv)
  __imod__ = _defer(operator.imod)
  __ipow__ = _defer(operator.ipow)
  __ilshift__ = _defer(operator.ilshift)
  __irshift__ = _defer(operator.irshift)
  __iand__ = _defer(operator.iand)
  __ixor__ = _defer(operator.ixor)
  __ior__ = _defer(operator.ior)

  if PY2_OR_OLDER:
    __cmp__ = _defer(builtins.cmp)
    __rcmp__ = _defer(builtins.cmp, reverse=True)
    __div__ = _defer(operator.div)
    __rdiv__ = _defer(operator.div, reverse=True)
    __idiv__ = _defer(operator.idiv)
  else:
    __matmul__ = _defer(operator.matmul)
    __rmatmul__ = _defer(operator.matmul, reverse=True)
    __imatmul__ = _defer(operator.imatmul)

  def __getattr__(self, attr):
    """Implements `__getattr__`."""
    # By implementing __getattr__, attributes will first be accessed from self,
    # otherwise will be accessed from the deferred object.
    if (attr in _GETATTRIBUTE_PASSTHROUGH_OVERRIDE or
        # For some reason we can't use generators here because they behave
        # differently in Ipython REPL execution regime.
        any(tuple(fn(attr)
                  for fn in _GETATTRIBUTE_PASSTHROUGH_OVERRIDE_CALLABLES))):
      raise AttributeError()
    return self.__action__(getattr, attr, _action_name=attr)
# If the following attributes are not found in the DeferredBase subclass then
# they will raise AttributeError on access.
# Note: Most of these functions will always be defined in DeferredBase. For
# those which are in DeferredBase, inclusion here has no extra overhead.
# These names are NOT deferred by SpecialMethods.__getattr__; accessing them
# raises AttributeError unless the subclass actually defines them.
# pylint: disable=line-too-long
_GETATTRIBUTE_PASSTHROUGH_OVERRIDE = {
    # https://docs.python.org/3/reference/datamodel.html
    '__annotations__',  # inspect: method: mapping of parameters names to annotations; "return" key is reserved for return annotations.
    '__code__',  # inspect: method: code object containing compiled function bytecode
    '__defaults__',  # inspect: method: tuple of any default values for positional or keyword parameters
    '__doc__',  # inspect: class/method/module: documentation string
    '__file__',  # inspect: module: filename (missing for built-in modules)
    '__func__',  # inspect: method: function object containing implementation of method
    '__globals__',  # inspect: method: global namespace in which this function was defined
    '__kwdefaults__',  # inspect: method: mapping of any default values for keyword-only parameters
    '__module__',  # inspect: class/method: name of module in which this class was defined
    '__name__',  # inspect: class/method: name with which this class was defined
    '__qualname__',  # inspect: class/method: qualified name
    '__self__',  # inspect: method: instance to which this method is bound, or None
    '__closure__',
    '__signature__',
    '__text_signature__',
    '__dict__',
    '__slots__',
    '__weakref__',
    '__class__',
    '__hash__',
    '__eq__',
    '__ne__',
    '__ge__',
    '__gt__',
    '__le__',
    '__lt__',
    '__copy__',  # serialization
    '__deepcopy__',  # serialization
    '__getnewargs__',  # serialization: pickle
    '__reduce__',  # serialization: pickle
    '__reduce_ex__',  # serialization: pickle
    '__setstate__',  # serialization: pickle
    '__delattr__',
    '__getattr__',
    '__getattribute__',
    '__setattr__',
    '_ipython_canary_method_should_not_exist_',
    '_ipython_display_',  # print: Queried by Jupyter Notebook.
    '__format__',  # print
    '__dir__',  # print
    '__repr__',  # print
    '__str__',  # print
    '__new__',
    '__init__',
    '__prepare__',
    '__classcell__',
    '__class_getitem__',
    '__delete__',
    '__init_subclass__',
    '__instancecheck__',
    '__mro__',
    '__mro_entries__',
    '__set_name__',
    '__sizeof__',
    '__subclasscheck__',
    '__subclasshook__',
    '__traceback__',
    '__del__',  # descriptors
    '__get__',  # descriptors
    '__set__',  # descriptors
    # '_partialmethod',  # Might not be needed if we exclude __signature__.
}
# Predicates applied to attribute names; a True result also blocks deferral
# (covers the family of Jupyter "_repr_*_" display hooks).
# pylint: disable=g-long-lambda
_GETATTRIBUTE_PASSTHROUGH_OVERRIDE_CALLABLES = [
    lambda x: (len(x) > 2
               and x.startswith('_repr_')
               and x[-2] != '_'
               and x[-1] == '_'),  # Queried by Jupyter Notebook, eg,
                                   # "_repr_latex_".
]
# pylint: enable=g-long-lambda
# pylint: enable=line-too-long
# --- The following is for reference purposes. -------------
# These two sets are documentation-only catalogs; nothing in this module
# reads them.

SPECIAL_PROPERTIES = {
    '__module__',
    '__doc__',
    '__dict__',
    '__weakref__',
    '__name__',
    '__class__',
    '__closure__',
    '__code__',
    '__defaults__',
    '__globals__',
    '__qualname__',
}

IGNORED_SPECIAL_METHODS = {
    '__new__',
    '__init__',
    '__slots__',
    '__call__',
    '__get__',
    '__set__',
    '__del__',
    '__getattr__',
    '__getattribute__',
    '__setattr__',
    '__delattr__',
    '__dir__',
    '__delete__',
    '__set_name__',
    '__init_subclass__',
    '__class_getitem__',
    '__instancecheck__',
    '__subclasscheck__',
    '__subclasshook__',
    '__missing__',
    '__sizeof__',
    # Class serialization:
    # (Pretty sure only only `__copy__` is magic.)
    '__copy__',
    '__deepcopy__',
    '__reduce__',
    '__reduce_ex__',
    '__getnewargs__',
    '__setstate__',
}
class ObjectProxy(SpecialMethods):
  """Like `wrapt.ObjectProxy` except using our way."""

  # NOTE(review): this is probably intended to be `__slots__`; as written it
  # is an ordinary class attribute with no slots effect -- confirm before
  # renaming, since a real `__slots__` would forbid ad-hoc instance
  # attributes on proxies.
  slots = ('__wrapped__', '__unpack__')

  def __init__(self, wrapped, unpack=True):
    # wrapped: the proxied object; unpack: when True, nested ObjectProxy
    # arguments are replaced by their wrapped objects in __action__.
    self.__wrapped__ = wrapped
    self.__unpack__ = unpack

  def __action__(self, fn, *args, **kwargs):
    # Drop the dispatch bookkeeping added by `_defer`; the proxy forwards
    # the raw operation to the wrapped value(s).
    kwargs.pop('_action_name', None)
    # Replace every unpackable ObjectProxy in (self, args, kwargs) with the
    # object it wraps, then apply the operation to the plain values.
    self, args, kwargs = tf.nest.map_structure(
        lambda x: (  # pylint: disable=g-long-lambda
            x.__wrapped__ if isinstance(x, ObjectProxy) and x.__unpack__
            else x),
        [self, args, kwargs])
    return fn(self, *args, **kwargs)
def try_get_name(fn, name_fallback='unknown'):
  """Best-effort readable name: `fn.name`, `fn.__name__`, type name, fallback."""
  name = getattr(fn, 'name', None)
  if not name:
    name = getattr(fn, '__name__', None)
  if not name:
    name = getattr(type(fn), '__name__', name_fallback)
  return str(name)
| 5,051 |
1,144 | package de.metas.vertical.healthcare_ch.forum_datenaustausch_ch.invoice_xversion.request.model.payload.body.service;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
import java.math.BigDecimal;
import de.metas.vertical.healthcare_ch.forum_datenaustausch_ch.invoice_xversion.request.model.payload.body.XmlService;
import de.metas.vertical.healthcare_ch.forum_datenaustausch_ch.invoice_xversion.request.model.payload.body.XmlService.ServiceModWithSelector.ServiceMod;
import javax.xml.datatype.XMLGregorianCalendar;
/*
* #%L
* vertical-healthcare_ch.invoice_gateway.forum_datenaustausch_ch.invoice_commons
* %%
* Copyright (C) 2018 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
@Value
@Builder(toBuilder = true)
public class XmlRecordParamed implements XmlService
{
@NonNull
XmlRecordService recordService;
@NonNull
String tariffType;
@Override
public String getName()
{
return recordService.getName();
}
@Override
public XMLGregorianCalendar getDateBegin()
{
return recordService.getDateBegin();
}
@Override
public Integer getRecordId()
{
return recordService.getRecordId();
}
@Override
public BigDecimal getAmount()
{
return recordService.getAmount();
}
@Override
public BigDecimal getExternalFactor()
{
return recordService.getExternalFactor();
}
@Override
public XmlService withModNonNull(@NonNull final ServiceMod serviceMod)
{
return toBuilder()
.recordService(recordService.withModNonNull(serviceMod))
.build();
}
}
| 697 |
435 | package datawave.webservice.query.dashboard;
import java.util.Date;
import java.util.Objects;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
/**
 * Data object used for the metrics dashboard.
 *
 * <p>
 * One instance accumulates metrics for a single time bucket ({@code dateTime}): a latency histogram, a result-count histogram, a selector (term) count
 * histogram, and running totals of queries, results, selectors and errors.
 */
@XmlAccessorType(XmlAccessType.FIELD)
public final class DashboardSummary implements Comparable<DashboardSummary> {
    private final Date dateTime;
    // Latency histogram (milliseconds); failed queries are excluded.
    private int upTo3Sec;
    private int upTo10Sec;
    private int upTo60Sec;
    private int moreThan60Sec;
    private int errorCount;
    // Result-count histogram; failed queries are excluded.
    private int zeroResults;
    private int upTo10KResults;
    private int upTo1MResults;
    private int upToINFResults;
    // Selector (term) count histogram; failed queries are included.
    private int oneTerm;
    private int upTo16Terms;
    private int upTo100Terms;
    private int upTo1000Terms;
    private int upToInfTerms;
    // Running totals across every query added to this summary.
    private int resultCount;
    private int queryCount;
    private int selectorCount;

    public DashboardSummary(Date dateTime) {
        this.dateTime = dateTime;
    }

    /**
     * Records a single query in this summary.
     *
     * @param latency
     *            query latency in milliseconds
     * @param error
     *            whether the query failed; failed queries increment {@code errorCount} and the term histogram only
     * @param resultCount
     *            number of results the query returned
     * @param selectorCount
     *            number of selectors (terms) in the query
     */
    public void addQuery(long latency, boolean error, int resultCount, int selectorCount) {
        queryCount++;
        this.resultCount += resultCount;
        this.selectorCount += selectorCount;
        if (error) {
            errorCount++;
        } else {
            // Truncating cast preserves the previous "new Long(latency).intValue()"
            // behavior without the Long(long) constructor deprecated since Java 9.
            int elapsed = (int) latency;
            if (elapsed < 3_000) {
                upTo3Sec++;
            } else if (elapsed < 10_000) {
                upTo10Sec++;
            } else if (elapsed < 60_000) {
                upTo60Sec++;
            } else {
                moreThan60Sec++;
            }
            if (resultCount == 0) {
                zeroResults++;
            } else if (resultCount < 10_000) {
                upTo10KResults++;
            } else if (resultCount < 1_000_000) {
                upTo1MResults++;
            } else {
                upToINFResults++;
            }
        }
        // there shouldn't be values of 0, but just in-case
        if (selectorCount <= 1) {
            oneTerm++;
        } else if (selectorCount < 16) {
            upTo16Terms++;
        } else if (selectorCount < 100) {
            upTo100Terms++;
        } else if (selectorCount < 1_000) {
            upTo1000Terms++;
        } else {
            upToInfTerms++;
        }
    }

    // --- simple accessors ---

    public Date getDateTime() {
        return dateTime;
    }

    public int getErrorCount() {
        return errorCount;
    }

    public int getZeroResults() {
        return zeroResults;
    }

    public int getUpTo10KResults() {
        return upTo10KResults;
    }

    public int getUpTo1MResults() {
        return upTo1MResults;
    }

    public int getUpToINFResults() {
        return upToINFResults;
    }

    public int getResultCount() {
        return resultCount;
    }

    public int getQueryCount() {
        return queryCount;
    }

    public int getUpTo3Sec() {
        return upTo3Sec;
    }

    public int getUpTo10Sec() {
        return upTo10Sec;
    }

    public int getUpTo60Sec() {
        return upTo60Sec;
    }

    public int getMoreThan60Sec() {
        return moreThan60Sec;
    }

    public int getSelectorCount() {
        return selectorCount;
    }

    public int getOneTerm() {
        return oneTerm;
    }

    public int getUpTo16Terms() {
        return upTo16Terms;
    }

    public int getUpTo100Terms() {
        return upTo100Terms;
    }

    public int getUpTo1000Terms() {
        return upTo1000Terms;
    }

    public int getUpToInfTerms() {
        return upToInfTerms;
    }

    @Override
    public int hashCode() {
        int hash = 7;
        hash = 73 * hash + Objects.hashCode(this.dateTime);
        hash = 73 * hash + this.upTo3Sec;
        hash = 73 * hash + this.upTo10Sec;
        hash = 73 * hash + this.upTo60Sec;
        hash = 73 * hash + this.moreThan60Sec;
        hash = 73 * hash + this.errorCount;
        hash = 73 * hash + this.zeroResults;
        hash = 73 * hash + this.upTo10KResults;
        hash = 73 * hash + this.upTo1MResults;
        hash = 73 * hash + this.upToINFResults;
        hash = 73 * hash + this.oneTerm;
        hash = 73 * hash + this.upTo16Terms;
        hash = 73 * hash + this.upTo100Terms;
        hash = 73 * hash + this.upTo1000Terms;
        hash = 73 * hash + this.upToInfTerms;
        hash = 73 * hash + this.resultCount;
        hash = 73 * hash + this.queryCount;
        hash = 73 * hash + this.selectorCount;
        return hash;
    }

    /**
     * Field-by-field equality over the date and every counter.
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final DashboardSummary other = (DashboardSummary) obj;
        return Objects.equals(this.dateTime, other.dateTime) && this.upTo3Sec == other.upTo3Sec && this.upTo10Sec == other.upTo10Sec
                        && this.upTo60Sec == other.upTo60Sec && this.moreThan60Sec == other.moreThan60Sec && this.errorCount == other.errorCount
                        && this.zeroResults == other.zeroResults && this.upTo10KResults == other.upTo10KResults && this.upTo1MResults == other.upTo1MResults
                        && this.upToINFResults == other.upToINFResults && this.oneTerm == other.oneTerm && this.upTo16Terms == other.upTo16Terms
                        && this.upTo100Terms == other.upTo100Terms && this.upTo1000Terms == other.upTo1000Terms && this.upToInfTerms == other.upToInfTerms
                        && this.resultCount == other.resultCount && this.queryCount == other.queryCount && this.selectorCount == other.selectorCount;
    }

    /**
     * Orders summaries by their time bucket only. Note: this ordering is not consistent with {@link #equals(Object)}, which also compares the counters.
     */
    @Override
    public int compareTo(DashboardSummary o) {
        return dateTime.compareTo(o.dateTime);
    }
}
| 3,268 |
6,098 | package water;
/**
 * Unit of work to be used by LocalMR: {@link #map(int)} does the per-task
 * work and {@link #reduce} folds another instance's state into this one.
 *
 * <p>Created by tomas on 11/5/16.</p>
 */
public abstract class MrFun<T extends MrFun<T>> extends Iced<T> {
  // Per-task work; presumably invoked once per task id by LocalMR -- confirm at call site.
  protected abstract void map(int id);
  // Folds the state of another instance into this one; the default is a no-op.
  protected void reduce(T t) {}
  // Fresh copy for independent mutation; relies on the clone() inherited via Iced.
  protected MrFun<T> makeCopy() {
    return clone();
  }
}
| 99 |
825 | <gh_stars>100-1000
//
// HMCanvasViewInterface.h
// Hummer
//
// Created by litianhao on 2021/7/28.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
/// Drawing API exposed by a canvas view: stroke/fill/text state plus
/// primitive drawing operations for paths, rects, circles, arcs, ellipses,
/// images and text. (Uses UIKit/CoreGraphics types: UIColor, UIImage,
/// UIBezierPath, CGRect, CGPoint.)
@protocol HMCanvasViewInterface <NSObject>
/// Width used when stroking lines and shapes.
@property (nonatomic , assign) CGFloat lineWidth ;
/// Color used when stroking.
@property (nonatomic , strong) UIColor *lineColor ;
/// Color used when filling.
@property (nonatomic , strong) UIColor *fillColor ;
/// Cap style applied to open path ends.
@property (nonatomic , assign) CGLineCap lineCap ;
/// Join style applied where path segments meet.
@property (nonatomic , assign) CGLineJoin lineJoin ;
/// Font size for text drawing.
@property (nonatomic , assign) CGFloat fontSize ;
/// Color for text drawing.
@property (nonatomic , strong) UIColor *textColor ;
/// Moves the current drawing point to `point` (starts a new path segment).
- (void)moveToPoint:(CGPoint)point;
/// Adds a straight line from the current point to `p`.
- (void)addLineTo:(CGPoint)p;
/// Draws `image` in `rect`.
- (void)drawImage:(UIImage *)image atRect:(CGRect)rect;
/// Fills `rect` with the fill color.
- (void)fillRect:(CGRect)rect;
/// Strokes the outline of `rect`.
- (void)strokeRect:(CGRect)rect;
/// Fills a circle centered at `point` with the given radius.
- (void)fillCircleAtPoint:(CGPoint)point radius:(CGFloat)radius;
/// Strokes a circle centered at `point` with the given radius.
- (void)strokeCircleAtPoint:(CGPoint)point radius:(CGFloat)radius;
/// Draws an arc centered at `point` between the two angles (radians or
/// degrees -- TODO confirm unit convention with the implementation).
- (void)drawArcAtPoint:(CGPoint)point radius:(CGFloat)radius startAngle:(CGFloat)startAngle endAngle:(CGFloat)endAngle clockwise:(BOOL)clockwise;
/// Strokes the ellipse inscribed in `rect`.
- (void)drawEllipseAtRect:(CGRect)rect;
/// Fills the ellipse inscribed in `rect`.
- (void)fillEllipseAtRect:(CGRect)rect;
/// Draws an arbitrary bezier path.
- (void)drawPath:(UIBezierPath *)path;
/// Draws `text` at `point`, wrapping/limiting to `maxWidth`.
- (void)drawText:(NSString *)text atPoint:(CGPoint)point maxWidth:(CGFloat)maxWidth;
@end
/// Internal canvas contract: adds the hosting container-view wiring and a
/// relayout hook on top of the public drawing interface.
@protocol HMCanvasInternalInterface <HMCanvasViewInterface>
/// View hosting the canvas content (weak reference).
@property (nonatomic , weak ) UIView *containerView;
/// Creates a canvas bound to `containerView`.
- (instancetype)initWithContainerView:(UIView *)containerView;
/// Re-applies layout; exact trigger is implementation-defined -- TODO confirm.
- (void)refreshLayout;
@end
NS_ASSUME_NONNULL_END
| 563 |
372 | <reponame>mjhopkins/google-api-java-client-services<gh_stars>100-1000
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.classroom.model;
/**
* Google Forms item.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Classroom API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Form extends com.google.api.client.json.GenericJson {
/**
* URL of the form.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String formUrl;
/**
* URL of the form responses document. Only set if respsonses have been recorded and only when the
* requesting user is an editor of the form.
*
* Read-only.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String responseUrl;
/**
* URL of a thumbnail image of the Form.
*
* Read-only.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String thumbnailUrl;
/**
* Title of the Form.
*
* Read-only.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String title;
/**
* URL of the form.
* @return value or {@code null} for none
*/
public java.lang.String getFormUrl() {
return formUrl;
}
/**
* URL of the form.
* @param formUrl formUrl or {@code null} for none
*/
public Form setFormUrl(java.lang.String formUrl) {
this.formUrl = formUrl;
return this;
}
/**
* URL of the form responses document. Only set if respsonses have been recorded and only when the
* requesting user is an editor of the form.
*
* Read-only.
* @return value or {@code null} for none
*/
public java.lang.String getResponseUrl() {
return responseUrl;
}
/**
* URL of the form responses document. Only set if respsonses have been recorded and only when the
* requesting user is an editor of the form.
*
* Read-only.
* @param responseUrl responseUrl or {@code null} for none
*/
public Form setResponseUrl(java.lang.String responseUrl) {
this.responseUrl = responseUrl;
return this;
}
/**
* URL of a thumbnail image of the Form.
*
* Read-only.
* @return value or {@code null} for none
*/
public java.lang.String getThumbnailUrl() {
return thumbnailUrl;
}
/**
* URL of a thumbnail image of the Form.
*
* Read-only.
* @param thumbnailUrl thumbnailUrl or {@code null} for none
*/
public Form setThumbnailUrl(java.lang.String thumbnailUrl) {
this.thumbnailUrl = thumbnailUrl;
return this;
}
/**
* Title of the Form.
*
* Read-only.
* @return value or {@code null} for none
*/
public java.lang.String getTitle() {
return title;
}
/**
* Title of the Form.
*
* Read-only.
* @param title title or {@code null} for none
*/
public Form setTitle(java.lang.String title) {
this.title = title;
return this;
}
@Override
public Form set(String fieldName, Object value) {
return (Form) super.set(fieldName, value);
}
@Override
public Form clone() {
return (Form) super.clone();
}
}
| 1,379 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.machinelearningservices.models;
import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;
/**
 * Defines values for VariantType. Backed by {@link ExpandableStringEnum}, so
 * {@link #fromString(String)} will create-or-find entries beyond the two
 * predefined constants.
 */
public final class VariantType extends ExpandableStringEnum<VariantType> {
    /** Static value Control for VariantType. */
    public static final VariantType CONTROL = fromString("Control");
    /** Static value Treatment for VariantType. */
    public static final VariantType TREATMENT = fromString("Treatment");
    /**
     * Creates or finds a VariantType from its string representation.
     *
     * @param name a name to look for.
     * @return the corresponding VariantType.
     */
    @JsonCreator
    public static VariantType fromString(String name) {
        return fromString(name, VariantType.class);
    }
    /** @return known VariantType values. */
    public static Collection<VariantType> values() {
        return values(VariantType.class);
    }
}
| 353 |
332 | <filename>native/CefDownloadItemCallback_N.cpp
// Copyright (c) 2014 The Chromium Embedded Framework Authors. All rights
// reserved. Use of this source code is governed by a BSD-style license that
// can be found in the LICENSE file.
#include "CefDownloadItemCallback_N.h"
#include "include/cef_download_handler.h"
#include "jni_scoped_helpers.h"
namespace {
// Converts the jlong handle (a raw CefDownloadItemCallback pointer stored on
// the Java side) back into a ref-counted pointer; may be null.
CefRefPtr<CefDownloadItemCallback> GetSelf(jlong self) {
  return reinterpret_cast<CefDownloadItemCallback*>(self);
}
// Drops the native reference stored on the Java object |obj|.
void ClearSelf(JNIEnv* env, jobject obj) {
  // Clear the reference added in DownloadHandler::OnDownloadUpdated.
  SetCefForJNIObject<CefDownloadItemCallback>(env, obj, nullptr,
                                              "CefDownloadItemCallback");
}
}  // namespace
JNIEXPORT void JNICALL
Java_org_cef_callback_CefDownloadItemCallback_1N_N_1Cancel(JNIEnv* env,
                                                           jobject obj,
                                                           jlong self) {
  // Cancel the download, then drop the native reference held by |obj| --
  // after cancellation the callback is no longer needed.
  if (CefRefPtr<CefDownloadItemCallback> callback = GetSelf(self)) {
    callback->Cancel();
    ClearSelf(env, obj);
  }
}
JNIEXPORT void JNICALL
Java_org_cef_callback_CefDownloadItemCallback_1N_N_1Pause(JNIEnv* env,
                                                          jobject obj,
                                                          jlong self) {
  // Pause keeps the native reference alive (unlike Cancel, which clears it),
  // since the callback is still needed to resume or cancel later.
  if (CefRefPtr<CefDownloadItemCallback> callback = GetSelf(self))
    callback->Pause();
}
JNIEXPORT void JNICALL
Java_org_cef_callback_CefDownloadItemCallback_1N_N_1Resume(JNIEnv* env,
                                                           jobject obj,
                                                           jlong self) {
  // Resume also keeps the native reference alive for further control calls.
  if (CefRefPtr<CefDownloadItemCallback> callback = GetSelf(self))
    callback->Resume();
}
JNIEXPORT void JNICALL
Java_org_cef_callback_CefDownloadItemCallback_1N_N_1Dispose(JNIEnv* env,
                                                            jobject obj,
                                                            jlong self) {
  // Intentionally not executing any callback methods here; just release the
  // native reference held by the Java object.
  if (GetSelf(self))
    ClearSelf(env, obj);
}
| 1,135 |
4,140 | <filename>ql/src/java/org/apache/hadoop/hive/ql/exec/ListSinkOperator.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ListSinkDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.DefaultFetchFormatter;
import org.apache.hadoop.hive.serde2.FetchFormatter;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.util.ReflectionUtils;
/**
 * Terminal operator of a fetch-task operator tree: each row read from
 * FetchOperator that survives the tree is converted by a {@link FetchFormatter}
 * and appended to the client-supplied result list.
 */
public class ListSinkOperator extends Operator<ListSinkDesc> {
  // Destination list supplied by the caller via reset(); converted rows are appended here.
  private transient List res;
  // Converts each incoming row (using the first input's object inspector) before storage.
  private transient FetchFormatter fetcher;
  // Number of rows emitted since the last reset().
  private transient int numRows;
  /** Kryo ctor. */
  protected ListSinkOperator() {
    super();
  }
  public ListSinkOperator(CompilationOpContext ctx) {
    super(ctx);
  }
  @Override
  protected void initializeOp(Configuration hconf) throws HiveException {
    super.initializeOp(hconf);
    try {
      this.fetcher = initializeFetcher(hconf);
    } catch (Exception e) {
      throw new HiveException(e);
    }
  }
  /**
   * Builds the row formatter: either the class named by
   * {@code SerDeUtils.LIST_SINK_OUTPUT_FORMATTER} in the configuration, or
   * {@link DefaultFetchFormatter} when none is configured.
   */
  private FetchFormatter initializeFetcher(Configuration conf) throws Exception {
    final String formatterClassName = conf.get(SerDeUtils.LIST_SINK_OUTPUT_FORMATTER);
    final FetchFormatter formatter;
    if (formatterClassName == null || formatterClassName.isEmpty()) {
      formatter = new DefaultFetchFormatter();
    } else {
      // Load through the session classloader so session-added jars are visible.
      Class<? extends FetchFormatter> formatterClass = Class
          .forName(formatterClassName, true, Utilities.getSessionSpecifiedClassLoader())
          .asSubclass(FetchFormatter.class);
      formatter = ReflectionUtils.newInstance(formatterClass, null);
    }
    // selectively used by fetch formatter
    Properties props = new Properties();
    props.put(serdeConstants.SERIALIZATION_FORMAT, String.valueOf(Utilities.tabCode));
    props.put(serdeConstants.SERIALIZATION_NULL_FORMAT, getConf().getSerializationNullFormat());
    formatter.initialize(conf, props);
    return formatter;
  }
  /** Points this sink at a new result list and resets the row counter. */
  public void reset(List res) {
    this.res = res;
    numRows = 0;
  }
  public int getNumRows() {
    return numRows;
  }
  @Override
  @SuppressWarnings("unchecked")
  public void process(Object row, int tag) throws HiveException {
    try {
      // Convert via the formatter and collect; count both locally and globally.
      res.add(fetcher.convert(row, inputObjInspectors[0]));
      numRows++;
      runTimeNumRows++;
    } catch (Exception e) {
      throw new HiveException(e);
    }
  }
  @Override
  public OperatorType getType() {
    return OperatorType.FORWARD;
  }
  @Override
  public String getName() {
    return ListSinkOperator.getOperatorName();
  }
  public static String getOperatorName() {
    return "LIST_SINK";
  }
  @Override
  public boolean logicalEquals(Operator other) {
    // Two list sinks are logically equal iff they are the same operator class.
    return getClass().getName().equals(other.getClass().getName());
  }
}
| 1,292 |
511 | /*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#ifndef GRPC_COMMON_CPP_ROUTE_GUIDE_HELPER_H_
#define GRPC_COMMON_CPP_ROUTE_GUIDE_HELPER_H_
#include <string>
#include <vector>
// NUL-terminated PEM text ("-----BEGIN CERTIFICATE-----" ... "-----END
// CERTIFICATE-----\n") stored byte-by-byte; used as the root CA certificate
// for the TLS example/tests.
const char test_root_cert[] = {
    0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x42, 0x45, 0x47, 0x49, 0x4e, 0x20, 0x43, 0x45, 0x52, 0x54, 0x49, 0x46, 0x49, 0x43,
    0x41, 0x54, 0x45, 0x2d, 0x2d, 0x2d, 0x2d, 0x2d, 0x0a, 0x4d, 0x49, 0x49, 0x44, 0x49, 0x44, 0x43, 0x43, 0x41, 0x67,
    0x67, 0x43, 0x41, 0x51, 0x45, 0x77, 0x44, 0x51, 0x59, 0x4a, 0x4b, 0x6f, 0x5a, 0x49, 0x68, 0x76, 0x63, 0x4e, 0x41,
    0x51, 0x45, 0x4c, 0x42, 0x51, 0x41, 0x77, 0x56, 0x6a, 0x45, 0x4c, 0x4d, 0x41, 0x6b, 0x47, 0x41, 0x31, 0x55, 0x45,
    0x42, 0x68, 0x4d, 0x43, 0x51, 0x56, 0x55, 0x78, 0x45, 0x7a, 0x41, 0x52, 0x42, 0x67, 0x4e, 0x56, 0x0a, 0x42, 0x41,
    0x67, 0x4d, 0x43, 0x6c, 0x4e, 0x76, 0x62, 0x57, 0x55, 0x74, 0x55, 0x33, 0x52, 0x68, 0x64, 0x47, 0x55, 0x78, 0x49,
    0x54, 0x41, 0x66, 0x42, 0x67, 0x4e, 0x56, 0x42, 0x41, 0x6f, 0x4d, 0x47, 0x45, 0x6c, 0x75, 0x64, 0x47, 0x56, 0x79,
    0x62, 0x6d, 0x56, 0x30, 0x49, 0x46, 0x64, 0x70, 0x5a, 0x47, 0x64, 0x70, 0x64, 0x48, 0x4d, 0x67, 0x55, 0x48, 0x52,
    0x35, 0x49, 0x45, 0x78, 0x30, 0x0a, 0x5a, 0x44, 0x45, 0x50, 0x4d, 0x41, 0x30, 0x47, 0x41, 0x31, 0x55, 0x45, 0x41,
    0x77, 0x77, 0x47, 0x64, 0x47, 0x56, 0x7a, 0x64, 0x47, 0x4e, 0x68, 0x4d, 0x42, 0x34, 0x58, 0x44, 0x54, 0x45, 0x34,
    0x4d, 0x44, 0x45, 0x7a, 0x4d, 0x54, 0x41, 0x35, 0x4d, 0x54, 0x63, 0x31, 0x4d, 0x56, 0x6f, 0x58, 0x44, 0x54, 0x49,
    0x34, 0x4d, 0x44, 0x45, 0x79, 0x4f, 0x54, 0x41, 0x35, 0x4d, 0x54, 0x63, 0x31, 0x0a, 0x4d, 0x56, 0x6f, 0x77, 0x56,
    0x6a, 0x45, 0x4c, 0x4d, 0x41, 0x6b, 0x47, 0x41, 0x31, 0x55, 0x45, 0x42, 0x68, 0x4d, 0x43, 0x51, 0x56, 0x55, 0x78,
    0x45, 0x7a, 0x41, 0x52, 0x42, 0x67, 0x4e, 0x56, 0x42, 0x41, 0x67, 0x4d, 0x43, 0x6c, 0x4e, 0x76, 0x62, 0x57, 0x55,
    0x74, 0x55, 0x33, 0x52, 0x68, 0x64, 0x47, 0x55, 0x78, 0x49, 0x54, 0x41, 0x66, 0x42, 0x67, 0x4e, 0x56, 0x42, 0x41,
    0x6f, 0x4d, 0x0a, 0x47, 0x45, 0x6c, 0x75, 0x64, 0x47, 0x56, 0x79, 0x62, 0x6d, 0x56, 0x30, 0x49, 0x46, 0x64, 0x70,
    0x5a, 0x47, 0x64, 0x70, 0x64, 0x48, 0x4d, 0x67, 0x55, 0x48, 0x52, 0x35, 0x49, 0x45, 0x78, 0x30, 0x5a, 0x44, 0x45,
    0x50, 0x4d, 0x41, 0x30, 0x47, 0x41, 0x31, 0x55, 0x45, 0x41, 0x77, 0x77, 0x47, 0x64, 0x47, 0x56, 0x7a, 0x64, 0x47,
    0x4e, 0x68, 0x4d, 0x49, 0x49, 0x42, 0x49, 0x6a, 0x41, 0x4e, 0x0a, 0x42, 0x67, 0x6b, 0x71, 0x68, 0x6b, 0x69, 0x47,
    0x39, 0x77, 0x30, 0x42, 0x41, 0x51, 0x45, 0x46, 0x41, 0x41, 0x4f, 0x43, 0x41, 0x51, 0x38, 0x41, 0x4d, 0x49, 0x49,
    0x42, 0x43, 0x67, 0x4b, 0x43, 0x41, 0x51, 0x45, 0x41, 0x32, 0x59, 0x33, 0x4a, 0x53, 0x31, 0x41, 0x42, 0x43, 0x65,
    0x4b, 0x46, 0x71, 0x79, 0x78, 0x6c, 0x48, 0x51, 0x6c, 0x44, 0x55, 0x75, 0x35, 0x74, 0x67, 0x63, 0x33, 0x2f, 0x0a,
    0x38, 0x77, 0x61, 0x75, 0x66, 0x7a, 0x4f, 0x77, 0x75, 0x54, 0x41, 0x6c, 0x79, 0x35, 0x61, 0x70, 0x75, 0x37, 0x4b,
    0x57, 0x59, 0x49, 0x6a, 0x59, 0x41, 0x64, 0x58, 0x5a, 0x6e, 0x54, 0x4b, 0x73, 0x52, 0x6e, 0x2f, 0x7a, 0x43, 0x42,
    0x4c, 0x59, 0x73, 0x76, 0x43, 0x78, 0x78, 0x67, 0x77, 0x50, 0x49, 0x42, 0x61, 0x63, 0x46, 0x31, 0x42, 0x44, 0x34,
    0x6e, 0x70, 0x41, 0x33, 0x63, 0x54, 0x41, 0x0a, 0x72, 0x70, 0x6c, 0x5a, 0x78, 0x69, 0x38, 0x4d, 0x57, 0x4d, 0x6f,
    0x79, 0x73, 0x2b, 0x32, 0x51, 0x49, 0x4a, 0x46, 0x6e, 0x54, 0x53, 0x31, 0x5a, 0x2b, 0x47, 0x62, 0x42, 0x57, 0x6f,
    0x71, 0x65, 0x6e, 0x38, 0x5a, 0x61, 0x36, 0x51, 0x71, 0x4a, 0x63, 0x73, 0x5a, 0x42, 0x71, 0x74, 0x79, 0x6b, 0x6b,
    0x44, 0x38, 0x42, 0x47, 0x33, 0x2f, 0x6c, 0x6f, 0x45, 0x4b, 0x43, 0x58, 0x76, 0x44, 0x45, 0x0a, 0x42, 0x4b, 0x36,
    0x2f, 0x33, 0x71, 0x38, 0x52, 0x56, 0x7a, 0x77, 0x33, 0x58, 0x42, 0x46, 0x43, 0x67, 0x4c, 0x68, 0x6d, 0x47, 0x59,
    0x57, 0x4c, 0x66, 0x42, 0x6f, 0x2b, 0x4b, 0x36, 0x6e, 0x52, 0x79, 0x38, 0x59, 0x5a, 0x34, 0x59, 0x34, 0x6d, 0x79,
    0x52, 0x73, 0x55, 0x38, 0x77, 0x59, 0x38, 0x6f, 0x54, 0x55, 0x77, 0x31, 0x39, 0x4c, 0x50, 0x4e, 0x79, 0x36, 0x50,
    0x2f, 0x46, 0x44, 0x74, 0x0a, 0x4c, 0x68, 0x57, 0x54, 0x61, 0x2b, 0x43, 0x57, 0x36, 0x4e, 0x33, 0x6f, 0x58, 0x6f,
    0x73, 0x64, 0x32, 0x76, 0x38, 0x79, 0x30, 0x72, 0x72, 0x63, 0x64, 0x58, 0x74, 0x52, 0x63, 0x61, 0x30, 0x58, 0x64,
    0x57, 0x50, 0x4c, 0x50, 0x72, 0x6a, 0x79, 0x44, 0x76, 0x52, 0x6a, 0x53, 0x39, 0x69, 0x57, 0x4f, 0x4c, 0x70, 0x64,
    0x63, 0x79, 0x53, 0x41, 0x50, 0x52, 0x51, 0x6f, 0x53, 0x33, 0x48, 0x32, 0x0a, 0x4d, 0x72, 0x30, 0x4b, 0x4d, 0x32,
    0x67, 0x76, 0x48, 0x71, 0x33, 0x36, 0x45, 0x55, 0x50, 0x71, 0x67, 0x43, 0x45, 0x7a, 0x68, 0x64, 0x74, 0x71, 0x30,
    0x4d, 0x33, 0x68, 0x5a, 0x7a, 0x4e, 0x30, 0x37, 0x4c, 0x49, 0x46, 0x4c, 0x73, 0x43, 0x6a, 0x52, 0x57, 0x4d, 0x6c,
    0x35, 0x59, 0x56, 0x68, 0x6f, 0x74, 0x54, 0x69, 0x65, 0x49, 0x50, 0x7a, 0x46, 0x51, 0x49, 0x44, 0x41, 0x51, 0x41,
    0x42, 0x0a, 0x4d, 0x41, 0x30, 0x47, 0x43, 0x53, 0x71, 0x47, 0x53, 0x49, 0x62, 0x33, 0x44, 0x51, 0x45, 0x42, 0x43,
    0x77, 0x55, 0x41, 0x41, 0x34, 0x49, 0x42, 0x41, 0x51, 0x42, 0x2b, 0x4a, 0x68, 0x64, 0x4c, 0x41, 0x54, 0x53, 0x69,
    0x2f, 0x47, 0x42, 0x79, 0x32, 0x32, 0x34, 0x67, 0x76, 0x6d, 0x45, 0x6e, 0x77, 0x50, 0x65, 0x59, 0x4a, 0x50, 0x66,
    0x6d, 0x31, 0x5a, 0x4b, 0x34, 0x61, 0x64, 0x37, 0x6c, 0x0a, 0x59, 0x38, 0x37, 0x62, 0x78, 0x72, 0x59, 0x57, 0x55,
    0x36, 0x73, 0x4d, 0x50, 0x52, 0x6d, 0x6b, 0x72, 0x4e, 0x36, 0x38, 0x66, 0x42, 0x62, 0x47, 0x54, 0x38, 0x30, 0x68,
    0x74, 0x57, 0x37, 0x4c, 0x46, 0x6e, 0x72, 0x71, 0x62, 0x6e, 0x78, 0x7a, 0x41, 0x6d, 0x6c, 0x6d, 0x58, 0x4c, 0x45,
    0x2b, 0x31, 0x6c, 0x43, 0x79, 0x74, 0x37, 0x59, 0x7a, 0x2f, 0x53, 0x70, 0x50, 0x51, 0x75, 0x46, 0x48, 0x0a, 0x73,
    0x4e, 0x49, 0x6c, 0x62, 0x32, 0x59, 0x74, 0x47, 0x70, 0x69, 0x64, 0x56, 0x55, 0x7a, 0x35, 0x50, 0x67, 0x78, 0x35,
    0x6f, 0x62, 0x46, 0x65, 0x7a, 0x53, 0x39, 0x6e, 0x56, 0x79, 0x6f, 0x4c, 0x38, 0x32, 0x62, 0x78, 0x42, 0x65, 0x47,
    0x6e, 0x66, 0x72, 0x71, 0x4b, 0x4d, 0x4c, 0x2f, 0x34, 0x4e, 0x4e, 0x79, 0x78, 0x64, 0x64, 0x71, 0x57, 0x44, 0x61,
    0x34, 0x58, 0x32, 0x6b, 0x44, 0x63, 0x0a, 0x42, 0x70, 0x79, 0x30, 0x7a, 0x31, 0x47, 0x6a, 0x43, 0x34, 0x64, 0x77,
    0x38, 0x73, 0x65, 0x64, 0x44, 0x37, 0x35, 0x62, 0x33, 0x33, 0x57, 0x54, 0x68, 0x4e, 0x44, 0x67, 0x74, 0x4b, 0x6d,
    0x30, 0x52, 0x62, 0x55, 0x38, 0x62, 0x66, 0x68, 0x45, 0x68, 0x42, 0x4a, 0x73, 0x68, 0x46, 0x2f, 0x36, 0x53, 0x76,
    0x58, 0x45, 0x32, 0x45, 0x76, 0x4c, 0x71, 0x69, 0x67, 0x58, 0x53, 0x2f, 0x68, 0x56, 0x0a, 0x52, 0x78, 0x6f, 0x6d,
    0x6e, 0x46, 0x54, 0x75, 0x2b, 0x63, 0x73, 0x70, 0x43, 0x57, 0x37, 0x53, 0x42, 0x46, 0x31, 0x6d, 0x64, 0x50, 0x70,
    0x33, 0x51, 0x65, 0x62, 0x4c, 0x6a, 0x35, 0x6f, 0x61, 0x41, 0x36, 0x71, 0x59, 0x53, 0x6a, 0x5a, 0x47, 0x49, 0x63,
    0x32, 0x42, 0x41, 0x61, 0x59, 0x58, 0x6c, 0x6b, 0x4e, 0x34, 0x6d, 0x65, 0x6d, 0x69, 0x50, 0x76, 0x33, 0x6f, 0x4b,
    0x6f, 0x52, 0x6b, 0x0a, 0x54, 0x7a, 0x65, 0x56, 0x6a, 0x50, 0x41, 0x71, 0x4f, 0x74, 0x30, 0x6e, 0x63, 0x43, 0x58,
    0x2f, 0x6a, 0x4f, 0x71, 0x43, 0x64, 0x6e, 0x73, 0x56, 0x44, 0x6a, 0x69, 0x48, 0x4a, 0x47, 0x54, 0x4c, 0x73, 0x30,
    0x74, 0x7a, 0x62, 0x35, 0x54, 0x6f, 0x33, 0x69, 0x44, 0x6a, 0x42, 0x7a, 0x51, 0x36, 0x0a, 0x2d, 0x2d, 0x2d, 0x2d,
    0x2d, 0x45, 0x4e, 0x44, 0x20, 0x43, 0x45, 0x52, 0x54, 0x49, 0x46, 0x49, 0x43, 0x41, 0x54, 0x45, 0x2d, 0x2d, 0x2d,
    0x2d, 0x2d, 0x0a, 0x00};
#define JSON_LOCATION_DATA \
"[{ \
\"location\": { \
\"latitude\": 407838351, \
\"longitude\": -746143763 \
}, \
\"name\": \"<NAME>, Mendham, NJ 07945, USA\" \
}, { \
\"location\": { \
\"latitude\": 408122808, \
\"longitude\": -743999179 \
}, \
\"name\": \"101 New Jersey 10, Whippany, NJ 07981, USA\" \
}, { \
\"location\": { \
\"latitude\": 413628156, \
\"longitude\": -749015468 \
}, \
\"name\": \"U.S. 6, Shohola, PA 18458, USA\" \
}, { \
\"location\": { \
\"latitude\": 419999544, \
\"longitude\": -740371136 \
}, \
\"name\": \"5 Conners Road, Kingston, NY 12401, USA\" \
}, { \
\"location\": { \
\"latitude\": 414008389, \
\"longitude\": -743951297 \
}, \
\"name\": \"Mid Hudson Psychiatric Center, New Hampton, NY 10958, USA\" \
}, { \
\"location\": { \
\"latitude\": 419611318, \
\"longitude\": -746524769 \
}, \
\"name\": \"287 Flugertown Road, Livingston Manor, NY 12758, USA\" \
}, { \
\"location\": { \
\"latitude\": 406109563, \
\"longitude\": -742186778 \
}, \
\"name\": \"4001 Tremley Point Road, Linden, NJ 07036, USA\" \
}, { \
\"location\": { \
\"latitude\": 416802456, \
\"longitude\": -742370183 \
}, \
\"name\": \"352 South Mountain Road, Wallkill, NY 12589, USA\" \
}, { \
\"location\": { \
\"latitude\": 412950425, \
\"longitude\": -741077389 \
}, \
\"name\": \"<NAME> Road, Harriman, NY 10926, USA\" \
}, { \
\"location\": { \
\"latitude\": 412144655, \
\"longitude\": -743949739 \
}, \
\"name\": \"193-199 Wawayanda Road, Hewitt, NJ 07421, USA\" \
}, { \
\"location\": { \
\"latitude\": 415736605, \
\"longitude\": -742847522 \
}, \
\"name\": \"406-496 W<NAME>venue, Pine Bush, NY 12566, USA\" \
}, { \
\"location\": { \
\"latitude\": 413843930, \
\"longitude\": -740501726 \
}, \
\"name\": \"162 Merrill Road, Highland Mills, NY 10930, USA\" \
}, { \
\"location\": { \
\"latitude\": 410873075, \
\"longitude\": -744459023 \
}, \
\"name\": \"<NAME>, West Milford, NJ 07480, USA\" \
}, { \
\"location\": { \
\"latitude\": 412346009, \
\"longitude\": -744026814 \
}, \
\"name\": \"16 Old Brook Lane, Warwick, NY 10990, USA\" \
}, { \
\"location\": { \
\"latitude\": 402948455, \
\"longitude\": -747903913 \
}, \
\"name\": \"3 Drake Lane, Pennington, NJ 08534, USA\" \
}, { \
\"location\": { \
\"latitude\": 406337092, \
\"longitude\": -740122226 \
}, \
\"name\": \"6324 8th Avenue, Brooklyn, NY 11220, USA\" \
}, { \
\"location\": { \
\"latitude\": 406421967, \
\"longitude\": -747727624 \
}, \
\"name\": \"1 Merck Access Road, Whitehouse Station, NJ 08889, USA\" \
}, { \
\"location\": { \
\"latitude\": 416318082, \
\"longitude\": -749677716 \
}, \
\"name\": \"78-98 Schalck Road, Narrowsburg, NY 12764, USA\" \
}, { \
\"location\": { \
\"latitude\": 415301720, \
\"longitude\": -748416257 \
}, \
\"name\": \"282 Lakeview Drive Road, Highland Lake, NY 12743, USA\" \
}, { \
\"location\": { \
\"latitude\": 402647019, \
\"longitude\": -747071791 \
}, \
\"name\": \"330 Evelyn Avenue, Hamilton Township, NJ 08619, USA\" \
}, { \
\"location\": { \
\"latitude\": 412567807, \
\"longitude\": -741058078 \
}, \
\"name\": \"New York State Reference Route 987E, Southfields, NY 10975, USA\" \
}, { \
\"location\": { \
\"latitude\": 416855156, \
\"longitude\": -744420597 \
}, \
\"name\": \"103-271 Tempaloni Road, Ellenville, NY 12428, USA\" \
}, { \
\"location\": { \
\"latitude\": 404663628, \
\"longitude\": -744820157 \
}, \
\"name\": \"1300 Airport Road, North Brunswick Township, NJ 08902, USA\" \
}, { \
\"location\": { \
\"latitude\": 407113723, \
\"longitude\": -749746483 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 402133926, \
\"longitude\": -743613249 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 400273442, \
\"longitude\": -741220915 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 411236786, \
\"longitude\": -744070769 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 411633782, \
\"longitude\": -746784970 \
}, \
\"name\": \"211-225 Plains Road, Augusta, NJ 07822, USA\" \
}, { \
\"location\": { \
\"latitude\": 415830701, \
\"longitude\": -742952812 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 413447164, \
\"longitude\": -748712898 \
}, \
\"name\": \"165 Ped<NAME> Road, Milford, PA 18337, USA\" \
}, { \
\"location\": { \
\"latitude\": 405047245, \
\"longitude\": -749800722 \
}, \
\"name\": \"100-122 Locktown Road, Frenchtown, NJ 08825, USA\" \
}, { \
\"location\": { \
\"latitude\": 418858923, \
\"longitude\": -746156790 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 417951888, \
\"longitude\": -748484944 \
}, \
\"name\": \"650-652 Willi Hill Road, Swan Lake, NY 12783, USA\" \
}, { \
\"location\": { \
\"latitude\": 407033786, \
\"longitude\": -743977337 \
}, \
\"name\": \"26 East 3rd Street, New Providence, NJ 07974, USA\" \
}, { \
\"location\": { \
\"latitude\": 417548014, \
\"longitude\": -740075041 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 410395868, \
\"longitude\": -744972325 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 404615353, \
\"longitude\": -745129803 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 406589790, \
\"longitude\": -743560121 \
}, \
\"name\": \"611 Lawrence Avenue, Westfield, NJ 07090, USA\" \
}, { \
\"location\": { \
\"latitude\": 414653148, \
\"longitude\": -740477477 \
}, \
\"name\": \"18 Lannis Avenue, New Windsor, NY 12553, USA\" \
}, { \
\"location\": { \
\"latitude\": 405957808, \
\"longitude\": -743255336 \
}, \
\"name\": \"82-104 Amherst Avenue, Colonia, NJ 07067, USA\" \
}, { \
\"location\": { \
\"latitude\": 411733589, \
\"longitude\": -741648093 \
}, \
\"name\": \"170 Seven Lakes Drive, Sloatsburg, NY 10974, USA\" \
}, { \
\"location\": { \
\"latitude\": 412676291, \
\"longitude\": -742606606 \
}, \
\"name\": \"1270 Lakes Road, Monroe, NY 10950, USA\" \
}, { \
\"location\": { \
\"latitude\": 409224445, \
\"longitude\": -748286738 \
}, \
\"name\": \"509-535 Alphano Road, Great Meadows, NJ 07838, USA\" \
}, { \
\"location\": { \
\"latitude\": 406523420, \
\"longitude\": -742135517 \
}, \
\"name\": \"652 Garden Street, Elizabeth, NJ 07202, USA\" \
}, { \
\"location\": { \
\"latitude\": 401827388, \
\"longitude\": -740294537 \
}, \
\"name\": \"349 Sea Spray Court, Neptune City, NJ 07753, USA\" \
}, { \
\"location\": { \
\"latitude\": 410564152, \
\"longitude\": -743685054 \
}, \
\"name\": \"13-17 Stanley Street, West Milford, NJ 07480, USA\" \
}, { \
\"location\": { \
\"latitude\": 408472324, \
\"longitude\": -740726046 \
}, \
\"name\": \"47 Industrial Avenue, Teterboro, NJ 07608, USA\" \
}, { \
\"location\": { \
\"latitude\": 412452168, \
\"longitude\": -740214052 \
}, \
\"name\": \"5 White Oak Lane, Stony Point, NY 10980, USA\" \
}, { \
\"location\": { \
\"latitude\": 409146138, \
\"longitude\": -746188906 \
}, \
\"name\": \"Berkshire Valley Management Area Trail, Jefferson, NJ, USA\" \
}, { \
\"location\": { \
\"latitude\": 404701380, \
\"longitude\": -744781745 \
}, \
\"name\": \"1007 Jersey Avenue, New Brunswick, NJ 08901, USA\" \
}, { \
\"location\": { \
\"latitude\": 409642566, \
\"longitude\": -746017679 \
}, \
\"name\": \"6 East Emerald Isle Drive, Lake Hopatcong, NJ 07849, USA\" \
}, { \
\"location\": { \
\"latitude\": 408031728, \
\"longitude\": -748645385 \
}, \
\"name\": \"1358-1474 New Jersey 57, Port Murray, NJ 07865, USA\" \
}, { \
\"location\": { \
\"latitude\": 413700272, \
\"longitude\": -742135189 \
}, \
\"name\": \"367 Prospect Road, Chester, NY 10918, USA\" \
}, { \
\"location\": { \
\"latitude\": 404310607, \
\"longitude\": -740282632 \
}, \
\"name\": \"10 Simon Lake Drive, Atlantic Highlands, NJ 07716, USA\" \
}, { \
\"location\": { \
\"latitude\": 409319800, \
\"longitude\": -746201391 \
}, \
\"name\": \"11 Ward Street, Mount Arlington, NJ 07856, USA\" \
}, { \
\"location\": { \
\"latitude\": 406685311, \
\"longitude\": -742108603 \
}, \
\"name\": \"300-398 Jefferson Avenue, Elizabeth, NJ 07201, USA\" \
}, { \
\"location\": { \
\"latitude\": 419018117, \
\"longitude\": -749142781 \
}, \
\"name\": \"43 Dreher Road, Roscoe, NY 12776, USA\" \
}, { \
\"location\": { \
\"latitude\": 412856162, \
\"longitude\": -745148837 \
}, \
\"name\": \"Swan Street, Pine Island, NY 10969, USA\" \
}, { \
\"location\": { \
\"latitude\": 416560744, \
\"longitude\": -746721964 \
}, \
\"name\": \"66 Pleasantview Avenue, Monticello, NY 12701, USA\" \
}, { \
\"location\": { \
\"latitude\": 405314270, \
\"longitude\": -749836354 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 414219548, \
\"longitude\": -743327440 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 415534177, \
\"longitude\": -742900616 \
}, \
\"name\": \"565 Winding Hills Road, Montgomery, NY 12549, USA\" \
}, { \
\"location\": { \
\"latitude\": 406898530, \
\"longitude\": -749127080 \
}, \
\"name\": \"231 Rocky Run Road, <NAME>, NJ 08826, USA\" \
}, { \
\"location\": { \
\"latitude\": 407586880, \
\"longitude\": -741670168 \
}, \
\"name\": \"100 Mount Pleasant Avenue, Newark, NJ 07104, USA\" \
}, { \
\"location\": { \
\"latitude\": 400106455, \
\"longitude\": -742870190 \
}, \
\"name\": \"517-521 Huntington Drive, Manchester Township, NJ 08759, USA\" \
}, { \
\"location\": { \
\"latitude\": 400066188, \
\"longitude\": -746793294 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 418803880, \
\"longitude\": -744102673 \
}, \
\"name\": \"40 Mountain Road, Napanoch, NY 12458, USA\" \
}, { \
\"location\": { \
\"latitude\": 414204288, \
\"longitude\": -747895140 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 414777405, \
\"longitude\": -740615601 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 415464475, \
\"longitude\": -747175374 \
}, \
\"name\": \"48 North Road, Forestburgh, NY 12777, USA\" \
}, { \
\"location\": { \
\"latitude\": 404062378, \
\"longitude\": -746376177 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 405688272, \
\"longitude\": -749285130 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 400342070, \
\"longitude\": -748788996 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 401809022, \
\"longitude\": -744157964 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 404226644, \
\"longitude\": -740517141 \
}, \
\"name\": \"<NAME>, Leonardo, NJ 07737, USA\" \
}, { \
\"location\": { \
\"latitude\": 410322033, \
\"longitude\": -747871659 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 407100674, \
\"longitude\": -747742727 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 418811433, \
\"longitude\": -741718005 \
}, \
\"name\": \"213 Bush Road, Stone Ridge, NY 12484, USA\" \
}, { \
\"location\": { \
\"latitude\": 415034302, \
\"longitude\": -743850945 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 411349992, \
\"longitude\": -743694161 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 404839914, \
\"longitude\": -744759616 \
}, \
\"name\": \"1-17 <NAME>, New Brunswick, NJ 08901, USA\" \
}, { \
\"location\": { \
\"latitude\": 414638017, \
\"longitude\": -745957854 \
}, \
\"name\": \"35 Oakland Valley Road, Cuddebackville, NY 12729, USA\" \
}, { \
\"location\": { \
\"latitude\": 412127800, \
\"longitude\": -740173578 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 401263460, \
\"longitude\": -747964303 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 412843391, \
\"longitude\": -749086026 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 418512773, \
\"longitude\": -743067823 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 404318328, \
\"longitude\": -740835638 \
}, \
\"name\": \"42-102 Main Street, Belford, NJ 07718, USA\" \
}, { \
\"location\": { \
\"latitude\": 419020746, \
\"longitude\": -741172328 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 404080723, \
\"longitude\": -746119569 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 401012643, \
\"longitude\": -744035134 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 404306372, \
\"longitude\": -741079661 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 403966326, \
\"longitude\": -748519297 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 405002031, \
\"longitude\": -748407866 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 409532885, \
\"longitude\": -742200683 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 416851321, \
\"longitude\": -742674555 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 406411633, \
\"longitude\": -741722051 \
}, \
\"name\": \"3387 Richmond Terrace, Staten Island, NY 10303, USA\" \
}, { \
\"location\": { \
\"latitude\": 413069058, \
\"longitude\": -744597778 \
}, \
\"name\": \"261 Van Sickle Road, Goshen, NY 10924, USA\" \
}, { \
\"location\": { \
\"latitude\": 418465462, \
\"longitude\": -746859398 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 411733222, \
\"longitude\": -744228360 \
}, \
\"name\": \"\" \
}, { \
\"location\": { \
\"latitude\": 410248224, \
\"longitude\": -747127767 \
}, \
\"name\": \"3 Hasta Way, Newton, NJ 07860, USA\" \
}]"
// Public interface of the route_guide helper; implementations live in the
// accompanying helper.cc.
namespace routeguide {
class Feature;
// Returns the JSON feature database as a string. Presumably loads from a
// path given on the command line, falling back to the embedded kFeatures
// literal above -- confirm against helper.cc.
std::string GetDbFileContent(int argc, char** argv);
// Parses the JSON database string `db` into `feature_list`.
void ParseDb(const std::string& db, std::vector<Feature>* feature_list);
}  // namespace routeguide
#endif // GRPC_COMMON_CPP_ROUTE_GUIDE_HELPER_H_
| 11,851 |
348 | <gh_stars>100-1000
{"nom":"Liévans","circ":"2ème circonscription","dpt":"Haute-Saône","inscrits":108,"abs":47,"votants":61,"blancs":7,"nuls":3,"exp":51,"res":[{"nuance":"REM","nom":"<NAME>","voix":32},{"nuance":"FN","nom":"M. <NAME>","voix":19}]} | 104 |
890 | <reponame>andreasunterhuber/owt-server<filename>source/agent/sip/sipIn/AudioFrameConstructorWrapper.cc
// Copyright (C) <2019> Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
#ifndef BUILDING_NODE_EXTENSION
#define BUILDING_NODE_EXTENSION
#endif
#include "AudioFrameConstructorWrapper.h"
#include "../../addons/common/MediaFramePipelineWrapper.h"
#include "SipCallConnection.h"
using namespace v8;
// Persistent handle to the JS constructor function; populated in Init().
Persistent<Function> AudioFrameConstructor::constructor;
// Trivial ctor/dtor: the wrapped native object is created in New() and
// destroyed explicitly from JS via close().
AudioFrameConstructor::AudioFrameConstructor() {};
AudioFrameConstructor::~AudioFrameConstructor() {};
// Registers the AudioFrameConstructor class with the V8 module `exports`:
// builds the function template, attaches the prototype methods, and caches
// the constructor in the persistent handle.
void AudioFrameConstructor::Init(v8::Local<v8::Object> exports) {
  Isolate* isolate = Isolate::GetCurrent();
  // Prepare constructor template
  Local<FunctionTemplate> tpl = FunctionTemplate::New(isolate, New);
  tpl->SetClassName(String::NewFromUtf8(isolate, "AudioFrameConstructor"));
  // One internal field to store the ObjectWrap pointer.
  tpl->InstanceTemplate()->SetInternalFieldCount(1);
  // Prototype
  NODE_SET_PROTOTYPE_METHOD(tpl, "close", close);
  NODE_SET_PROTOTYPE_METHOD(tpl, "bindTransport", bindTransport);
  NODE_SET_PROTOTYPE_METHOD(tpl, "unbindTransport", unbindTransport);
  NODE_SET_PROTOTYPE_METHOD(tpl, "addDestination", addDestination);
  NODE_SET_PROTOTYPE_METHOD(tpl, "removeDestination", removeDestination);
  constructor.Reset(isolate, tpl->GetFunction());
  exports->Set(String::NewFromUtf8(isolate, "AudioFrameConstructor"), tpl->GetFunction());
}
// JS constructor callback: allocates the wrapper plus the native
// owt_base::AudioFrameConstructor and wraps it into the new JS object.
// `src` and `msink` alias the same native object, exposing it through its
// source and sink interfaces.
void AudioFrameConstructor::New(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = Isolate::GetCurrent();
  HandleScope scope(isolate);
  AudioFrameConstructor* obj = new AudioFrameConstructor();
  obj->me = new owt_base::AudioFrameConstructor();
  obj->src = obj->me;
  obj->msink = obj->me;
  obj->Wrap(args.This());
  args.GetReturnValue().Set(args.This());
}
// Destroys the native frame constructor owned by this wrapper.
// The JS object may outlive the native one, so the cached raw pointers are
// reset after deletion.
void AudioFrameConstructor::close(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = Isolate::GetCurrent();
  HandleScope scope(isolate);
  AudioFrameConstructor* obj = ObjectWrap::Unwrap<AudioFrameConstructor>(args.Holder());
  owt_base::AudioFrameConstructor* me = obj->me;
  delete me;
  // Fix: clear the now-dangling aliases; other prototype methods
  // (bindTransport, addDestination, ...) dereference obj->me and would
  // otherwise touch freed memory if JS calls them after close().
  obj->me = nullptr;
  obj->src = nullptr;
  obj->msink = nullptr;
}
// Binds a SipCallConnection (args[0]) to the native constructor. The same
// connection object is passed twice because it serves as both the media
// source and the feedback sink for the native bindTransport() API.
void AudioFrameConstructor::bindTransport(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = Isolate::GetCurrent();
  HandleScope scope(isolate);
  AudioFrameConstructor* obj = ObjectWrap::Unwrap<AudioFrameConstructor>(args.Holder());
  owt_base::AudioFrameConstructor* me = obj->me;
  SipCallConnection* param = ObjectWrap::Unwrap<SipCallConnection>(args[0]->ToObject());
  sip_gateway::SipCallConnection* transport = param->me;
  me->bindTransport(transport, transport);
}
void AudioFrameConstructor::unbindTransport(const FunctionCallbackInfo<Value>& args) {
Isolate* isolate = Isolate::GetCurrent();
HandleScope scope(isolate);
AudioFrameConstructor* obj = ObjectWrap::Unwrap<AudioFrameConstructor>(args.Holder());
owt_base::AudioFrameConstructor* me = obj->me;
me->unbindTransport();
}
void AudioFrameConstructor::addDestination(const FunctionCallbackInfo<Value>& args) {
Isolate* isolate = Isolate::GetCurrent();
HandleScope scope(isolate);
AudioFrameConstructor* obj = ObjectWrap::Unwrap<AudioFrameConstructor>(args.Holder());
owt_base::AudioFrameConstructor* me = obj->me;
FrameDestination* param = ObjectWrap::Unwrap<FrameDestination>(args[0]->ToObject());
owt_base::FrameDestination* dest = param->dest;
me->addAudioDestination(dest);
}
void AudioFrameConstructor::removeDestination(const FunctionCallbackInfo<Value>& args) {
Isolate* isolate = Isolate::GetCurrent();
HandleScope scope(isolate);
AudioFrameConstructor* obj = ObjectWrap::Unwrap<AudioFrameConstructor>(args.Holder());
owt_base::AudioFrameConstructor* me = obj->me;
FrameDestination* param = ObjectWrap::Unwrap<FrameDestination>(args[0]->ToObject());
owt_base::FrameDestination* dest = param->dest;
me->removeAudioDestination(dest);
}
| 1,243 |
4,538 | /*
* Copyright (C) 2015-2017 Alibaba Group Holding Limited
*
*
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "aos/kernel.h"
#include "ulog/ulog.h"
#include "sensor_drv_api.h"
#include "sensor_hal.h"
#define NSA_REG_SPI_I2C 0x00
#define NSA_REG_WHO_AM_I 0x01
#define NSA_REG_ACC_X_LSB 0x02
#define NSA_REG_ACC_X_MSB 0x03
#define NSA_REG_ACC_Y_LSB 0x04
#define NSA_REG_ACC_Y_MSB 0x05
#define NSA_REG_ACC_Z_LSB 0x06
#define NSA_REG_ACC_Z_MSB 0x07
#define NSA_REG_MOTION_FLAG 0x09
#define NSA_REG_NEWDATA_FLAG 0x0A
#define NSA_REG_STEPS_MSB 0x0D
#define NSA_REG_STEPS_LSB 0x0E
#define NSA_REG_G_RANGE 0x0f
#define NSA_REG_ODR_AXIS_DISABLE 0x10
#define NSA_REG_POWERMODE_BW 0x11
#define NSA_REG_SWAP_POLARITY 0x12
#define NSA_REG_FIFO_CTRL 0x14
#define NSA_REG_INTERRUPT_SETTINGS0 0x15
#define NSA_REG_INTERRUPT_SETTINGS1 0x16
#define NSA_REG_INTERRUPT_SETTINGS2 0x17
#define NSA_REG_INTERRUPT_MAPPING1 0x19
#define NSA_REG_INTERRUPT_MAPPING2 0x1a
#define NSA_REG_INTERRUPT_MAPPING3 0x1b
#define NSA_REG_INT_PIN_CONFIG 0x20
#define NSA_REG_INT_LATCH 0x21
#define NSA_REG_ACTIVE_DURATION 0x27
#define NSA_REG_ACTIVE_THRESHOLD 0x28
#define NSA_REG_TAP_DURATION 0x2A
#define NSA_REG_TAP_THRESHOLD 0x2B
#define NSA_REG_RESET_STEP 0x2E
#define NSA_REG_STEP_CONGIF1 0x2F
#define NSA_REG_STEP_CONGIF2 0x30
#define NSA_REG_STEP_CONGIF3 0x31
#define NSA_REG_STEP_CONGIF4 0x32
#define NSA_REG_STEP_FILTER 0x33
#define NSA_REG_CUSTOM_OFFSET_X 0x38
#define NSA_REG_CUSTOM_OFFSET_Y 0x39
#define NSA_REG_CUSTOM_OFFSET_Z 0x3a
#define NSA_REG_ENGINEERING_MODE 0x7f
#define NSA_REG_SENSITIVITY_TRIM_X 0x80
#define NSA_REG_SENSITIVITY_TRIM_Y 0x81
#define NSA_REG_SENSITIVITY_TRIM_Z 0x82
#define NSA_REG_COARSE_OFFSET_TRIM_X 0x83
#define NSA_REG_COARSE_OFFSET_TRIM_Y 0x84
#define NSA_REG_COARSE_OFFSET_TRIM_Z 0x85
#define NSA_REG_FINE_OFFSET_TRIM_X 0x86
#define NSA_REG_FINE_OFFSET_TRIM_Y 0x87
#define NSA_REG_FINE_OFFSET_TRIM_Z 0x88
#define NSA_REG_SENS_COMP 0x8c
#define NSA_REG_SENS_COARSE_TRIM 0xd1
#define DA217_NORMAL_MODE 0x00
#define DA217_SUSPEND_MODE 0x01
#define DA217_I2C_SLAVE_ADDR_LOW (0x26)
#define DA217_I2C_SLAVE_ADDR_HIGN (0x27)
#define DA217_ACC_DATA_SIZE 6
#define DA217_CHIP_ID_VAL 0x13
#define DA217_ADDR_TRANS(n) ((n) << 1)
#define DA217_GET_BITSLICE(regvar, bitname) \
((regvar & bitname##__MSK) >> bitname##__POS)
#define DA217_SET_BITSLICE(regvar, bitname, val) \
((regvar & ~bitname##__MSK) | ((val << bitname##__POS) & bitname##__MSK))
/* I2C descriptor for the DA217 accelerometer: bus port 1, 8-bit register
 * addressing, 100 kHz clock, slave address 0x27 shifted left one bit for
 * the HAL's 8-bit address convention (see DA217_ADDR_TRANS). */
i2c_dev_t da217_ctx = { .port = 1,
                        .config.address_width = 8,
                        .config.freq = 100000,
                        .config.dev_addr =
                            DA217_ADDR_TRANS(DA217_I2C_SLAVE_ADDR_HIGN) };
/* Reads the WHO_AM_I register and compares it with the expected chip id.
 * Returns 0 on match, -1 on a NULL handle or id mismatch, or the raw I2C
 * error code if the register read itself fails. */
static int drv_acc_mir3_da217_validate_id(i2c_dev_t *drv, uint8_t id_value)
{
    uint8_t chip_id = 0;
    int ret;

    if (NULL == drv) {
        return -1;
    }

    ret = sensor_i2c_read(drv, NSA_REG_WHO_AM_I, &chip_id, I2C_DATA_LEN,
                          I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }

    return (chip_id == id_value) ? 0 : -1;
}
/* Enables the on-chip step counter by programming the step-configuration
 * registers with the vendor-recommended values (same registers, values and
 * order as the datasheet sequence). Returns 0 on success or the error code
 * of the first failing I2C write. */
UNUSED static int drv_acc_mir3_da217_open_step_counter(i2c_dev_t *drv)
{
    static const struct {
        uint8_t reg;
        uint8_t val;
    } step_cfg[] = {
        { NSA_REG_STEP_CONGIF1, 0x01 },
        { NSA_REG_STEP_CONGIF2, 0x62 },
        { NSA_REG_STEP_CONGIF3, 0x46 },
        { NSA_REG_STEP_CONGIF4, 0x32 },
        { NSA_REG_STEP_FILTER,  0xa2 }, /* filter enabled: counting active */
    };
    uint8_t value;
    size_t  i;
    int     ret;

    for (i = 0; i < sizeof(step_cfg) / sizeof(step_cfg[0]); i++) {
        value = step_cfg[i].val;
        ret = sensor_i2c_write(drv, step_cfg[i].reg, &value, I2C_DATA_LEN,
                               I2C_OP_RETRIES);
        if (unlikely(ret)) {
            return ret;
        }
    }

    return 0;
}
/* Disables the on-chip step counter by writing the "filter off" value to
 * the step-filter register. Returns 0 on success or the I2C error code. */
static int drv_acc_mir3_da217_close_step_counter(i2c_dev_t *drv)
{
    uint8_t filter_off = 0x22;
    int ret = sensor_i2c_write(drv, NSA_REG_STEP_FILTER, &filter_off,
                               I2C_DATA_LEN, I2C_OP_RETRIES);

    return unlikely(ret) ? ret : 0;
}
/* Maps the generic power mode onto the DA217 POWERMODE_BW register.
 * Returns 0 on success, -1 for an unsupported mode, or the I2C error code.
 * 0x80 presumably sets the suspend bit; 0x34 the normal mode + bandwidth
 * selection -- confirm against the DA217 datasheet. */
static int drv_acc_mir3_da217_set_power_mode(i2c_dev_t * drv,
                                             dev_power_mode_e mode)
{
    int ret = 0;
    uint8_t dev_mode;
    switch (mode) {
        /* Intentional fallthrough: POWER_OFF and SLEEP both suspend. */
        case DEV_POWER_OFF:
        case DEV_SLEEP: {
            dev_mode = (uint8_t)0x80;
            break;
        }
        case DEV_POWER_ON: {
            dev_mode = (uint8_t)0x34;
            break;
        }
        default:
            return -1;
    }
    ret = sensor_i2c_write(drv, NSA_REG_POWERMODE_BW, &dev_mode, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    return 0;
}
/* One-time default configuration of the DA217. The register sequence and
 * ordering follow the vendor's init procedure; do not reorder the writes.
 * Returns 0 on success or the error code of the first failing access. */
static int drv_acc_mir3_da217_set_default_config(i2c_dev_t *drv)
{
    int ret = 0;
    uint8_t value = 0;
    /* 0x83 / 0x69 / 0xbd: vendor unlock sequence for engineering mode --
     * magic values from the reference driver, confirm with the datasheet. */
    value = 0x83;
    ret = sensor_i2c_write(drv, NSA_REG_ENGINEERING_MODE, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    value = 0x69;
    ret = sensor_i2c_write(drv, NSA_REG_ENGINEERING_MODE, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    value = 0xbd;
    ret = sensor_i2c_write(drv, NSA_REG_ENGINEERING_MODE, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    /* Patch register 0x8e only when it is still zero (uninitialized trim). */
    ret = sensor_i2c_read(drv, 0x8e, &value, I2C_DATA_LEN, I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    if (value == 0) {
        value = 0x50;
        ret = sensor_i2c_write(drv, 0x8e, &value, I2C_DATA_LEN, I2C_OP_RETRIES);
        if (unlikely(ret)) {
            return ret;
        }
    }
    /* Measurement range selection. */
    value = 0x40;
    ret = sensor_i2c_write(drv, NSA_REG_G_RANGE, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    /* Interrupt pin defaults. */
    value = 0x00;
    ret = sensor_i2c_write(drv, NSA_REG_INT_PIN_CONFIG, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    /* Leave the chip suspended; open() powers it on when needed. */
    ret = drv_acc_mir3_da217_set_power_mode(drv, DEV_SLEEP);
    if (unlikely(ret)) {
        return ret;
    }
    /* Enable all three axes / set ODR. */
    value = 0x07;
    ret = sensor_i2c_write(drv, NSA_REG_ODR_AXIS_DISABLE, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    /* Step counter off by default; reset the step count. */
    ret = drv_acc_mir3_da217_close_step_counter(drv);
    if (unlikely(ret)) {
        return ret;
    }
    value = 0x80;
    ret = sensor_i2c_write(drv, NSA_REG_RESET_STEP, &value, I2C_DATA_LEN,
                           I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    /* Route and enable the step interrupt. */
    value = 0x04;
    ret = sensor_i2c_write(drv, NSA_REG_INTERRUPT_MAPPING2, &value,
                           I2C_DATA_LEN, I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    value = 0x04;
    ret = sensor_i2c_write(drv, NSA_REG_INTERRUPT_SETTINGS0, &value,
                           I2C_DATA_LEN, I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return ret;
    }
    return 0;
}
/* IRQ hook required by the sensor framework; interrupts are not used by
 * this driver yet, so it is a deliberate no-op. */
static void drv_acc_mir3_da217_irq_handle(void)
{
    /* no handle so far */
}
/* Framework open() hook: powers the chip on and, when step counting is
 * compiled in, enables the pedometer. Returns 0 on success, -1 otherwise. */
static int drv_acc_mir3_da217_open(void)
{
    int ret = 0;
    ret = drv_acc_mir3_da217_set_power_mode(&da217_ctx, DEV_POWER_ON);
    if (unlikely(ret)) {
        return -1;
    }
#ifdef AOS_SENSOR_ACC_SUPPORT_STEP
    ret = drv_acc_mir3_da217_open_step_counter(&da217_ctx);
    if (unlikely(ret)) {
        return -1;
    }
#endif
    LOG("%s %s successfully \n", SENSOR_STR, __func__);
    return 0;
}
/* Framework close() hook: disables the pedometer (when compiled in) and
 * powers the chip down. Returns 0 on success, -1 otherwise. */
static int drv_acc_mir3_da217_close(void)
{
    int ret = 0;
#ifdef AOS_SENSOR_ACC_SUPPORT_STEP
    ret = drv_acc_mir3_da217_close_step_counter(&da217_ctx);
    if (unlikely(ret)) {
        return -1;
    }
#endif
    ret = drv_acc_mir3_da217_set_power_mode(&da217_ctx, DEV_POWER_OFF);
    if (unlikely(ret)) {
        return -1;
    }
    LOG("%s %s successfully \n", SENSOR_STR, __func__);
    return 0;
}
/* Framework read() hook: fills an accel_data_t with the latest sample.
 * `buf` must point to at least sizeof(accel_data_t) bytes. Returns the
 * number of bytes written on success, -1 on any failure. */
static int drv_acc_mir3_da217_read(void *buf, size_t len)
{
    int ret = 0;
    size_t size;
    uint8_t acc_raw[DA217_ACC_DATA_SIZE] = { 0 };
    accel_data_t *pdata = (accel_data_t *)buf;
#ifdef AOS_SENSOR_ACC_SUPPORT_STEP
    uint8_t step_raw[2] = { 0 };
#endif
    if (buf == NULL) {
        return -1;
    }
    size = sizeof(accel_data_t);
    if (len < size) {
        return -1;
    }
    /* Burst-read X/Y/Z starting at the X LSB register (6 bytes). */
    ret = sensor_i2c_read(&da217_ctx, NSA_REG_ACC_X_LSB, acc_raw,
                          DA217_ACC_DATA_SIZE, I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return -1;
    }
    /* Axis data is little-endian, left-justified 12-bit: combine
     * MSB/LSB into a signed 16-bit value, then arithmetic-shift right 4
     * to recover the sign-extended 12-bit sample. */
    pdata->data[0] = (int32_t)((int16_t)(acc_raw[1] << 8 | acc_raw[0]) >> 4);
    pdata->data[1] = (int32_t)((int16_t)(acc_raw[3] << 8 | acc_raw[2]) >> 4);
    pdata->data[2] = (int32_t)((int16_t)(acc_raw[5] << 8 | acc_raw[4]) >> 4);
#ifdef AOS_SENSOR_ACC_SUPPORT_STEP
    /* Step count is big-endian (MSB register first). */
    ret = sensor_i2c_read(&da217_ctx, NSA_REG_STEPS_MSB, step_raw, 2,
                          I2C_OP_RETRIES);
    if (unlikely(ret)) {
        return -1;
    }
    /* Divided by 2 -- presumably the register counts half-steps; confirm
     * against the DA217 datasheet. */
    pdata->step = ((uint16_t)((step_raw[0] << 8 | step_raw[1]))) / 2;
#endif
    pdata->timestamp = aos_now_ms();
    return (int)size;
}
/* Framework write() hook. The accelerometer data path is read-only, so
 * writes are accepted and deliberately ignored. Always returns 0. */
static int drv_acc_mir3_da217_write(const void *buf, size_t len)
{
    /* Intentional no-op: the sensor object requires a write callback. */
    (void)len;
    (void)buf;
    return 0;
}
/* Framework ioctl() hook.
 * SENSOR_IOCTL_SET_POWER: `arg` carries a dev_power_mode_e value.
 * SENSOR_IOCTL_GET_INFO:  `arg` points to a dev_sensor_info_t to fill.
 * Returns 0 on success, -1 on failure or unknown command. */
static int drv_acc_mir3_da217_ioctl(int cmd, unsigned long arg)
{
    int ret = 0;
    switch (cmd) {
        case SENSOR_IOCTL_SET_POWER: {
            ret = drv_acc_mir3_da217_set_power_mode(&da217_ctx, arg);
            if (unlikely(ret)) {
                return -1;
            }
        } break;
        case SENSOR_IOCTL_GET_INFO: {
            /* fill the dev info here */
            dev_sensor_info_t *info = (dev_sensor_info_t *)arg;
            info->model = "DA217";
            info->unit = mg;
        } break;
        default:
            return -1;
    }
    LOG("%s %s successfully \n", SENSOR_STR, __func__);
    return 0;
}
/* Driver entry point (registered via SENSOR_DRV_ADD below): registers the
 * sensor object with the framework, verifies the chip id over I2C, then
 * applies the default register configuration.
 * Returns 0 on success, -1 on any failure. */
int drv_acc_mir3_da217_init(void)
{
    int ret = 0;
    sensor_obj_t sensor;
    memset(&sensor, 0, sizeof(sensor));
    /* fill the sensor obj parameters here */
    sensor.tag = TAG_DEV_ACC;
    sensor.path = dev_acc_path;
    sensor.io_port = I2C_PORT;
    sensor.open = drv_acc_mir3_da217_open;
    sensor.close = drv_acc_mir3_da217_close;
    sensor.read = drv_acc_mir3_da217_read;
    sensor.write = drv_acc_mir3_da217_write;
    sensor.ioctl = drv_acc_mir3_da217_ioctl;
    sensor.irq_handle = drv_acc_mir3_da217_irq_handle;
    ret = sensor_create_obj(&sensor);
    if (unlikely(ret)) {
        return -1;
    }
    ret = drv_acc_mir3_da217_validate_id(&da217_ctx, DA217_CHIP_ID_VAL);
    if (unlikely(ret)) {
        return -1;
    }
    ret = drv_acc_mir3_da217_set_default_config(&da217_ctx);
    if (unlikely(ret)) {
        return -1;
    }
    LOG("%s %s successfully \n", SENSOR_STR, __func__);
    return 0;
}
SENSOR_DRV_ADD(drv_acc_mir3_da217_init);
| 6,403 |
7,113 | /*
* Copyright (C) 2010-2101 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.otter.node.etl.load.loader.db;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.SystemUtils;
import com.alibaba.otter.node.etl.load.loader.db.context.FileLoadContext;
import com.alibaba.otter.shared.etl.model.FileData;
import com.alibaba.otter.shared.etl.model.Identity;
/**
* dumper 记录
*
* @author jianghang 2011-12-28 上午11:19:10
* @version 4.0.0
*/
/**
 * Formats human-readable dump reports for file-load processing: an overall
 * context summary, per-file "miss" records, and filter statistics.
 * (Original javadoc "dumper 记录" = "dump records".)
 *
 * @author jianghang 2011-12-28 上午11:19:10
 * @version 4.0.0
 */
public class FileloadDumper {

    private static final String SEP              = SystemUtils.LINE_SEPARATOR;
    private static final String TIMESTAMP_FORMAT = "yyyy-MM-dd HH:mm:ss:SSS";
    private static String       context_format   = null;
    private static String       miss_format      = null;
    private static String       filter_format    = null;

    static {
        context_format = SEP + "****************************************************" + SEP;
        // Bug fix: this line previously used "=" instead of "+=", which
        // silently discarded the opening banner line assigned just above.
        context_format += "* status : {0} , time : {1} *" + SEP;
        context_format += "* Identity : {2} *" + SEP;
        context_format += "* total Data : [{3}] , success Data : [{4}] , miss Data : [{5}] , Interrupt : [{6}]" + SEP;
        context_format += "****************************************************" + SEP;
        context_format += "* process file *" + SEP;
        context_format += "{7}" + SEP;
        context_format += "* miss file *" + SEP;
        context_format += "{8}" + SEP;
        context_format += "****************************************************" + SEP;

        miss_format = SEP + "****************************************************" + SEP;
        miss_format += "* Identity : {0} *" + SEP;
        miss_format += "* miss : " + SEP;
        miss_format += "* {1}" + SEP;
        miss_format += "****************************************************";

        filter_format = SEP + "****************************************************" + SEP;
        filter_format += "* Identity : {0} *" + SEP;
        filter_format += "* input [{1}] , output [{2}] , filter [{3}] *" + SEP;
        filter_format += "* filters : " + SEP;
        filter_format += "* {4}" + SEP;
        filter_format += "****************************************************" + SEP;
    }

    /**
     * Renders the overall load summary for one context.
     *
     * @param status  textual status of the load (e.g. success/failure)
     * @param context the load context holding prepared/processed/failed data
     * @return the formatted multi-line report
     */
    public static String dumpContext(String status, FileLoadContext context) {
        int successed = context.getProcessedDatas().size();
        int failed = context.getFailedDatas().size();
        int all = context.getPrepareDatas().size();
        // Interrupted when some prepared items were neither processed nor failed.
        boolean isInterrupt = (all != failed + successed);
        Date now = new Date();
        SimpleDateFormat format = new SimpleDateFormat(TIMESTAMP_FORMAT);
        return MessageFormat.format(context_format, status, format.format(now), context.getIdentity().toString(), all,
                                    successed, failed, isInterrupt, dumpFileDatas(context.getProcessedDatas()),
                                    dumpFileDatas(context.getFailedDatas()));
    }

    /**
     * Renders one line per FileData. Synchronizes on the list because callers
     * may mutate it concurrently while the report is being built.
     */
    public static String dumpFileDatas(List<FileData> fileDatas) {
        StringBuilder builder = new StringBuilder();
        synchronized (fileDatas) {
            for (FileData data : fileDatas) {
                builder.append("\t").append(data.toString()).append(SEP);
            }
        }
        return builder.toString();
    }

    /** Renders a single missed-file record for the given identity. */
    public static String dumpMissFileDatas(Identity identity, FileData fileData) {
        return MessageFormat.format(miss_format, identity.toString(), fileData.toString());
    }

    /** Renders filter statistics plus the list of filtered-out files. */
    public static String dumpFilterFileDatas(Identity identity, int input, int output, List<FileData> fileDatas) {
        StringBuilder builder = new StringBuilder();
        synchronized (fileDatas) {
            for (FileData data : fileDatas) {
                builder.append("\t").append(data.toString()).append(SEP);
            }
        }
        return MessageFormat.format(filter_format, identity.toString(), input, output, fileDatas.size(),
                                    builder.toString());
    }
}
| 1,752 |
421 | //<snippet00>
using namespace System;
using namespace System::IO;
// C++/CLI documentation sample: lists directories and files under C:\ whose
// names start with "c", using DirectoryInfo::GetDirectories/GetFiles with
// SearchOption::TopDirectoryOnly (non-recursive).
ref class App
{
public:
    static void Main()
    {
        // Specify the directory you want to manipulate.
        String^ path = "c:\\";
        String^ searchPattern = "c*";
        DirectoryInfo^ di = gcnew DirectoryInfo(path);
        // Wildcard pattern matching is case-insensitive on Windows.
        array<DirectoryInfo^>^ directories =
            di->GetDirectories(searchPattern, SearchOption::TopDirectoryOnly);
        array<FileInfo^>^ files =
            di->GetFiles(searchPattern, SearchOption::TopDirectoryOnly);
        Console::WriteLine(
            "Directories that begin with the letter \"c\" in {0}", path);
        for each (DirectoryInfo^ dir in directories)
        {
            // Left-align the name, right-align the timestamp.
            Console::WriteLine(
                "{0,-25} {1,25}", dir->FullName, dir->LastWriteTime);
        }
        Console::WriteLine();
        Console::WriteLine(
            "Files that begin with the letter \"c\" in {0}", path);
        for each (FileInfo^ file in files)
        {
            Console::WriteLine(
                "{0,-25} {1,25}", file->Name, file->LastWriteTime);
        }
    } // Main()
}; // App()
// Native entry point: delegates to the managed sample's App::Main.
int main()
{
    App::Main();
}
//</snippet00> | 568 |
942 | <reponame>evilwizardington/hackazon<gh_stars>100-1000
package com.ntobjectives.hackazon.activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.util.Log;
import android.view.Menu;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import com.ntobjectives.hackazon.R;
import com.ntobjectives.hackazon.model.Auth;
import com.ntobjectives.hackazon.network.AuthRequest;
import com.octo.android.robospice.persistence.exception.SpiceException;
import com.octo.android.robospice.request.listener.RequestListener;
/**
* Created with IntelliJ IDEA by <NAME>.
* User: <NAME>
* Date: 20.10.2014
* Time: 17:37
*/
/**
 * Login screen: collects host/username/password, performs an AuthRequest
 * via RoboSpice, and on success persists the credentials and token in
 * SharedPreferences before moving on to MainActivity.
 */
public class LoginActivity extends AbstractRootActivity {
    public static final String TAG = "LoginActivity";
    // Current form values, restored from saved state or preferences.
    protected String host;
    protected String username;
    protected String password;
    // Auth token received from the server on successful login.
    protected String token;
    protected EditText hostField;
    protected EditText usernameField;
    protected EditText passwordField;
    protected Button button;
    // True when the caller requests a forced re-authentication.
    protected boolean refresh = false;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // "EXIT" extra is used to unwind the activity stack.
        if (getIntent().getBooleanExtra("EXIT", false)) {
            finish();
        }
        setContentView(R.layout.activity_login);
        SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(this);
        refresh = getIntent().getBooleanExtra("refresh", false);
        // Skip the form when we already hold a token and no refresh is forced.
        // NOTE(review): startActivity() here is not followed by finish(), so
        // this login screen stays on the back stack -- confirm intended.
        if (!pref.getBoolean("first_time", true) && !pref.getString("token", "").equals("") && !refresh) {
            startActivity(new Intent(this, MainActivity.class));
        }
        // Prefer instance state (rotation) over persisted preferences.
        if (savedInstanceState != null) {
            host = savedInstanceState.getString("host", pref.getString("host", ""));
            username = savedInstanceState.getString("username", "");
            password = savedInstanceState.getString("password", "");
        } else {
            host = pref.getString("host", "");
            username = pref.getString("username", "");
            password = pref.getString("password", "");
        }
        hostField = (EditText) findViewById(R.id.host);
        usernameField = (EditText) findViewById(R.id.username);
        passwordField = (EditText) findViewById(R.id.password);
        button = (Button) findViewById(R.id.loginButton);
        if (!host.equals("")) {
            hostField.setText(host);
        }
        usernameField.setText(username);
        passwordField.setText(password);
        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Disable the button to prevent duplicate requests while the
                // async auth call is in flight; re-enabled in the listener.
                button.setEnabled(false);
                host = hostField.getText().toString();
                username = usernameField.getText().toString();
                password = passwordField.getText().toString();
                Log.d(TAG, "Clicked with: username = " + username + ", password = " + password + ", host = " + host);
                AuthRequest req = new AuthRequest(LoginActivity.this, host, username, password);
                req.setRefresh(refresh);
                getSpiceManager().execute(req, new AuthRequestListener(host, username, password));
            }
        });
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.auth, menu);
        return true;
    }
    @Override
    protected void onSaveInstanceState(@NonNull Bundle outState) {
        // Preserve form contents across configuration changes.
        outState.putString("host", host);
        outState.putString("username", username);
        outState.putString("password", password);
        super.onSaveInstanceState(outState);
    }
    @Override
    protected void onRestoreInstanceState(@NonNull Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
    }
    /**
     * Handles the async auth response: on success parses the JSON body,
     * stores credentials + token in SharedPreferences and proceeds to
     * MainActivity; on failure shows a toast and re-enables the button.
     */
    public final class AuthRequestListener implements RequestListener<String> {
        protected String host;
        protected String username;
        protected String password;
        public AuthRequestListener(String host, String username, String password) {
            this.host = host;
            this.username = username;
            this.password = password;
        }
        @Override
        public void onRequestFailure(SpiceException spiceException) {
            Toast.makeText(LoginActivity.this, "Incorrect request.", Toast.LENGTH_SHORT).show();
            button.setEnabled(true);
        }
        @Override
        public void onRequestSuccess(String response) {
            Log.d(TAG, response);
            button.setEnabled(true);
            Gson gson = new Gson();
            try {
                Auth auth = gson.fromJson(response, Auth.class);
                token = auth.token;
                // NOTE(review): password and token are stored in plain text
                // in default SharedPreferences -- acceptable here only
                // because Hackazon is an intentionally vulnerable app.
                SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(LoginActivity.this.getApplicationContext());
                prefs
                        .edit()
                        .putString("host", host)
                        .putString("username", username)
                        .putString("password", password)
                        .putString("token", token)
                        .putBoolean("first_time", false)
                        .apply();
                Log.d(TAG, "Token: " + token);
                Log.d(TAG, "Host: " + host);
                startActivity(new Intent(LoginActivity.this, MainActivity.class));
                finish();
            } catch (JsonSyntaxException ex) {
                Toast.makeText(LoginActivity.this, "Service response is invalid.", Toast.LENGTH_SHORT).show();
            }
        }
    }
}
| 2,646 |
474 | package org.javacord.core.entity.message.embed;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.logging.log4j.Logger;
import org.javacord.api.entity.message.embed.Embed;
import org.javacord.api.entity.message.embed.EmbedAuthor;
import org.javacord.api.entity.message.embed.EmbedField;
import org.javacord.api.entity.message.embed.EmbedFooter;
import org.javacord.api.entity.message.embed.EmbedImage;
import org.javacord.api.entity.message.embed.EmbedProvider;
import org.javacord.api.entity.message.embed.EmbedThumbnail;
import org.javacord.api.entity.message.embed.EmbedVideo;
import org.javacord.core.util.logging.LoggerUtil;
import java.awt.Color;
import java.net.MalformedURLException;
import java.net.URL;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* The implementation of {@link Embed}.
*/
public class EmbedImpl implements Embed {
/**
* The logger of this class.
*/
private static final Logger logger = LoggerUtil.getLogger(EmbedImpl.class);
private final String title;
private final String type;
private final String description;
private final String url;
private final Instant timestamp;
private final Color color;
private final EmbedFooter footer;
private final EmbedImage image;
private final EmbedThumbnail thumbnail;
private final EmbedVideo video;
private final EmbedProvider provider;
private final EmbedAuthor author;
private final List<EmbedField> fields = new ArrayList<>();
/**
* Creates a new embed.
*
* @param data The json data of the embed.
*/
public EmbedImpl(JsonNode data) {
title = data.has("title") ? data.get("title").asText() : null;
type = data.has("type") ? data.get("type").asText() : null;
description = data.has("description") ? data.get("description").asText() : null;
url = data.has("url") ? data.get("url").asText() : null;
timestamp = data.has("timestamp") ? OffsetDateTime.parse(data.get("timestamp").asText()).toInstant() : null;
color = data.has("color") ? new Color(data.get("color").asInt()) : null;
footer = data.has("footer") ? new EmbedFooterImpl(data.get("footer")) : null;
image = data.has("image") ? new EmbedImageImpl(data.get("image")) : null;
thumbnail = data.has("thumbnail") ? new EmbedThumbnailImpl(data.get("thumbnail")) : null;
video = data.has("video") ? new EmbedVideoImpl(data.get("video")) : null;
provider = data.has("provider") ? new EmbedProviderImpl(data.get("provider")) : null;
author = data.has("author") ? new EmbedAuthorImpl(data.get("author")) : null;
if (data.has("fields")) {
for (JsonNode jsonField : data.get("fields")) {
this.fields.add(new EmbedFieldImpl(jsonField));
}
}
}
@Override
public Optional<String> getTitle() {
return Optional.ofNullable(title);
}
@Override
public String getType() {
return type;
}
@Override
public Optional<String> getDescription() {
return Optional.ofNullable(description);
}
@Override
public Optional<URL> getUrl() {
if (url == null) {
return Optional.empty();
}
try {
return Optional.of(new URL(url));
} catch (MalformedURLException e) {
logger.warn("Seems like the url of the embed is malformed! Please contact the developer!", e);
return Optional.empty();
}
}
@Override
public Optional<Instant> getTimestamp() {
return Optional.ofNullable(timestamp);
}
@Override
public Optional<Color> getColor() {
return Optional.ofNullable(color);
}
@Override
public Optional<EmbedFooter> getFooter() {
return Optional.ofNullable(footer);
}
@Override
public Optional<EmbedImage> getImage() {
return Optional.ofNullable(image);
}
@Override
public Optional<EmbedThumbnail> getThumbnail() {
return Optional.ofNullable(thumbnail);
}
@Override
public Optional<EmbedVideo> getVideo() {
return Optional.ofNullable(video);
}
@Override
public Optional<EmbedProvider> getProvider() {
return Optional.ofNullable(provider);
}
    /** Returns the embed author, or empty if absent. */
    @Override
    public Optional<EmbedAuthor> getAuthor() {
        return Optional.ofNullable(author);
    }
    /**
     * Returns the embed fields.
     * NOTE(review): this exposes the internal list directly, so callers can
     * mutate it — confirm whether an unmodifiable view is intended.
     */
    @Override
    public List<EmbedField> getFields() {
        return fields;
    }
}
| 1,764 |
384 | <reponame>mengmengliu1998/qd-3dt<filename>qd3dt/models/detectrackers/tracker/motion_lstm.py<gh_stars>100-1000
import argparse
import time
import multiprocessing
import os.path as osp
import pickle
from os import mkdir
from pyquaternion import Quaternion
from tqdm import tqdm
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader
import scripts.tracking_utils as tu
import scripts.network_utils as nu
import scripts.kitti_utils as ku
from scripts.object_ap_eval.coco_format import read_file
from scripts.plot_utils import plot_3D
from motion_model import get_lstm_model, LSTM_MODEL_ZOO
from tracker_model import get_tracker, TRACKER_MODEL_ZOO
'''
CUDA_VISIBLE_DEVICES=1 python motion_lstm.py nuscenes train \
--session batch128_min10_seq10_dim7_VeloLSTM \
--min_seq_len 10 --seq_len 10 \
--lstm_model_name VeloLSTM --tracker_model_name KalmanBox3DTracker \
--input_gt_path data/nuscenes/anns/tracking_train.json \
--input_pd_path data/nuscenes/anns/tracking_output_train.json \
--cache_name work_dirs/LSTM/nuscenes_train_pure_det_min10.pkl \
--loc_dim 7 -b 128 --is_plot --show_freq 500
'''
'''
CUDA_VISIBLE_DEVICES=0 python motion_lstm.py nuscenes test \
--session batch128_min10_seq10_dim7_VeloLSTM \
--min_seq_len 10 --seq_len 10 \
--lstm_model_name VeloLSTM --tracker_model_name LSTM3DTracker \
--input_gt_path data/nuscenes/anns/tracking_val.json \
--input_pd_path data/nuscenes/anns/tracking_output_val.json \
--cache_name work_dirs/LSTM/nuscenes_val_pure_det_min10.pkl \
--num_epochs 100 --loc_dim 7 -b 1 --is_plot
'''
# Fix RNG seeds so trajectory sampling and weight initialization are
# reproducible across runs.
np.random.seed(777)
torch.manual_seed(100)
def verbose(sentence: str, is_verbose: bool = False):
    """Print ``sentence`` prefixed with a wall-clock timestamp via tqdm.

    A no-op unless ``is_verbose`` is truthy, so call sites can pass their
    verbosity flag straight through.
    """
    if not is_verbose:
        return
    tqdm.write(f"{time.strftime('%Y-%m-%d %H:%M:%S')} {sentence}")
def fix_alpha(angle: float) -> float:
    """Wrap an angle in radians into the half-open interval [-pi, pi)."""
    two_pi = 2 * np.pi
    shifted = np.mod(angle + np.pi, two_pi)
    return shifted - np.pi
# Object categories kept per dataset; used to filter annotations when the
# tracking JSON files are read in SeqDataset.
cat_mapping = {
    'kitti': ['Car', 'Pedestrian', 'Cyclist'],
    'gta': ['Car'],
    'nuscenes': [
        'Bicycle', 'Motorcycle', 'Pedestrian', 'Bus', 'Car', 'Trailer',
        'Truck', 'Construction_vehicle', 'Traffic_cone', 'Barrier'
    ],
    'waymo': ['Car', 'Pedestrian', 'Cyclist'],
}
class SeqDataset(Dataset):
    """Per-trajectory dataset pairing ground-truth tracks with detections.

    Each item is one object trajectory cropped/padded to ``seq_len`` frames
    and converted to world-frame boxes (x, y, z, yaw, dims) plus per-frame
    camera pose and confidence tensors.
    """
    def __init__(self,
                 dataset,
                 input_gt_path,
                 input_pd_path,
                 is_train,
                 seq_len,
                 min_seq_len,
                 max_depth,
                 depth_scale,
                 cache_name,
                 r_var=0.1):
        # dataset: one of the keys of ``cat_mapping`` (kitti/gta/nuscenes/waymo).
        # r_var: std-dev of synthetic noise used when is_pred is disabled.
        self.dataset = dataset
        self.seq_len = seq_len
        self.min_seq_len = min_seq_len
        self.max_depth = max_depth
        self.depth_scale = depth_scale
        self.is_train = is_train
        self.is_pred = True
        self.r_var = r_var
        if not is_train:
            # Validation re-uses the train paths with 'train' -> 'val' swapped.
            input_gt_path = [
                path.replace('train', 'val') for path in input_gt_path
            ]
            input_pd_path = [
                path.replace('_train', '_val') for path in input_pd_path
            ]
            cache_name = cache_name.replace('_train', '_val')
        if cache_name and osp.isfile(cache_name):
            # Fast path: load pre-built trajectories from the pickle cache.
            verbose("Loading {} ...".format(cache_name), True)
            data = pickle.load(open(cache_name, 'rb'))
            self.tracking_seqs, self.data_key = data
        else:
            self.tracking_seqs = []
            self.data_key = []
            for pidx, (path_gt, path_pd) in enumerate(
                    zip(input_gt_path, input_pd_path)):
                assert osp.isfile(path_gt), path_gt
                assert osp.isfile(path_pd), path_pd
                verbose(f"{pidx}, {path_gt}, {path_pd}", True)
                cats = cat_mapping.get(self.dataset)
                sequences_gt = read_file(path_gt, category=cats)
                if self.is_pred:
                    sequences_pd = read_file(path_pd, category=cats)
                else:
                    sequences_pd = sequences_gt
                for iter_idx, ((seq_idx, sequence_gt),
                               (seq_idx_pd, sequence_pd)) in enumerate(
                                   zip(sequences_gt.items(),
                                       sequences_pd.items())):
                    verbose(
                        f"{iter_idx} | {seq_idx} - {sequence_gt['seq_name']} | {seq_idx_pd} - {sequence_pd['seq_name']}",
                        True)
                    self.tracking_seqs.append(
                        self.convert_tracking_pred(sequence_gt, sequence_pd))
                    self.data_key.append(
                        self.sample(self.tracking_seqs[iter_idx]))
        # Trajectory counts per sequence and their cumulative offsets, used to
        # map a flat dataset index back to (sequence, trajectory key).
        self.seqs_len = [len(keys) for keys in self.data_key]
        self.accum_len = np.cumsum(self.seqs_len) - 1
        self.data_len = sum(self.seqs_len)
        if cache_name and not osp.isfile(cache_name):
            if not osp.isdir(osp.dirname(cache_name)):
                mkdir(osp.dirname(cache_name))
            with open(cache_name, 'wb') as f:
                pickle.dump([self.tracking_seqs, self.data_key], f)
    def __getitem__(self, index):
        # Locate the sequence the flat index falls into, then the trajectory
        # key inside that sequence.
        seq = np.sum(self.accum_len < index)
        # NOTE(review): the offset uses accum_len[seq] directly — verify the
        # boundary arithmetic for the first trajectory of each sequence.
        fr = index - (self.accum_len[seq] if seq > 0 else 0)
        key = self.data_key[seq][fr]
        return self.get_traj_from_data_seq(seq, key)
    def __len__(self):
        return self.data_len
    def get_traj_from_data_seq(self, seq, key):
        """Build one padded training sample for trajectory ``key`` of sequence ``seq``."""
        # Dataloading
        trajectory_seq = self.tracking_seqs[seq][key]
        traj_len = max(len(trajectory_seq) - self.seq_len, 1)
        # Random sample data
        # During training a random window of length seq_len is drawn; during
        # evaluation the window always starts at frame 0.
        idx = 0
        if self.is_train:
            idx = np.random.randint(traj_len)
        upper_idx = self.seq_len + idx
        data_seq = trajectory_seq[idx:upper_idx]
        # Get data
        depth_gt = np.array([fr['depth_gt'] for fr in data_seq])
        alpha_gt = np.array([fr['alpha_gt'] for fr in data_seq])
        yaw_gt = np.array([fr['yaw_gt'] for fr in data_seq])
        dim_gt = np.array([fr['dim_gt'] for fr in data_seq])
        cen_gt = np.array([fr['center_gt'] for fr in data_seq])
        cam_calib = np.array(
            [np.array(fr['cam_calib']).reshape(3, 4) for fr in data_seq])
        cam_loc = np.array([fr['cam_loc'] for fr in data_seq])
        cam_rot = np.array([fr['cam_rot'] for fr in data_seq])
        pose = [
            ku.Pose(np.array(fr['cam_loc']), np.array(fr['cam_rot']))
            for fr in data_seq
        ]
        if self.is_pred:
            # Observations come from matched detections.
            confidence = np.array([fr['confidence_pd'] for fr in data_seq])
            depth_random = np.array([fr['depth_pd'] for fr in data_seq])
            alpha_random = np.array([fr['alpha_pd'] for fr in data_seq])
            yaw_random = np.array([fr['yaw_pd'] for fr in data_seq])
            dim_random = np.array([fr['dim_pd'] for fr in data_seq])
            cen_random = np.array([fr['center_pd'] for fr in data_seq])
        else:
            # Observations are GT perturbed with depth-dependent noise.
            if self.depth_scale > 0.0:
                randomness = np.random.normal(
                    0.0, self.r_var, size=depth_gt.shape)
                randomness *= (
                    np.random.rand(*depth_gt.shape) > np.exp(
                        -depth_gt / (self.depth_scale**2)))
            else:
                randomness = np.zeros(depth_gt.shape)
            confidence = np.exp(-np.abs(randomness))
            depth_random = depth_gt * (1.0 + randomness)
            yaw_random = yaw_gt * (1.0 + randomness)
            cen_random = cen_gt * (1.0 + randomness[..., None])
            dim_random = dim_gt * (1.0 + randomness[..., None]**2)
            alpha_random = alpha_gt.copy()
            rand_thrs = abs(randomness) > 2 * self.r_var
            alpha_random[rand_thrs] += np.pi
            alpha_random[np.bitwise_not(rand_thrs)] *= (
                1.0 + randomness[np.bitwise_not(rand_thrs)]**2)
            alpha_random = (alpha_random + np.pi) % (2 * np.pi) - np.pi
        # objects center in the world coordinates
        # X to the east, Y to the north, Z to the sky
        def get_box_obj(depth, alpha, dim, cen, cam_calib, pose) -> np.ndarray:
            # Lift image-plane observations to world-frame 7-DoF boxes;
            # also returns per-frame (roll, pitch) of the yaw axis.
            objs_list = []
            roll_pitch_list = []
            for i in range(len(depth)):
                loc_cam = tu.imagetocamera(cen[i:i + 1], depth[i:i + 1],
                                           cam_calib[i])
                yaw = tu.alpha2rot_y(alpha[i:i + 1], loc_cam[:, 0:1],
                                     loc_cam[:, 2:3])
                quat_yaw = Quaternion(axis=[0, 1, 0], radians=yaw)
                quat_cam_rot = Quaternion(matrix=pose[i].rotation)
                quat_yaw_world = quat_cam_rot * quat_yaw
                # Canonicalize the quaternion sign so equivalent rotations
                # map to the same representation.
                if quat_yaw_world.z < 0:
                    quat_yaw_world *= -1
                roll_world, pitch_world, yaw_world = tu.quaternion_to_euler(
                    quat_yaw_world.w, quat_yaw_world.x, quat_yaw_world.y,
                    quat_yaw_world.z)
                loc_glb = tu.cameratoworld(loc_cam, pose[i])
                roll_pitch_list.append([roll_world, pitch_world])
                objs_list.append(
                    np.hstack([loc_glb,
                               np.array([[yaw_world]]),
                               dim[i:i + 1]]).flatten())
            return np.array(objs_list), np.array(roll_pitch_list)
        objs_gt, yaw_axis_gt = get_box_obj(depth_gt, alpha_gt, dim_gt, cen_gt,
                                           cam_calib, pose)
        objs_obs, yaw_axis_pd = get_box_obj(depth_random, alpha_random,
                                            dim_random, cen_gt, cam_calib,
                                            pose)
        # Padding
        # Trajectories shorter than seq_len are zero-padded; valid_mask marks
        # real frames so losses can ignore the padding.
        valid_mask = np.hstack(
            [np.ones(len(objs_gt)),
             np.zeros([self.seq_len])])[:self.seq_len]
        objs_gt = np.vstack([objs_gt, np.zeros([self.seq_len,
                                                7])])[:self.seq_len]
        objs_obs = np.vstack([objs_obs, np.zeros([self.seq_len,
                                                  7])])[:self.seq_len]
        confidence = np.hstack([confidence,
                                np.zeros([self.seq_len])])[:self.seq_len]
        cam_loc = np.vstack([cam_loc, np.zeros([self.seq_len,
                                                3])])[:self.seq_len]
        cam_rot = np.vstack([cam_rot, np.zeros([self.seq_len,
                                                3])])[:self.seq_len]
        cen_gt = np.vstack([cen_gt, np.zeros([self.seq_len,
                                              2])])[:self.seq_len]
        dim_gt = np.vstack([dim_gt, np.zeros([self.seq_len,
                                              3])])[:self.seq_len]
        depth_gt = np.hstack([depth_gt,
                              np.zeros([self.seq_len])])[:self.seq_len]
        alpha_gt = np.hstack([alpha_gt,
                              np.zeros([self.seq_len])])[:self.seq_len]
        yaw_gt = np.hstack([yaw_gt, np.zeros([self.seq_len])])[:self.seq_len]
        yaw_axis_gt = np.vstack([yaw_axis_gt,
                                 np.zeros([self.seq_len, 2])])[:self.seq_len]
        cen_pd = np.vstack([cen_random,
                            np.zeros([self.seq_len, 2])])[:self.seq_len]
        dim_pd = np.vstack([dim_random,
                            np.zeros([self.seq_len, 3])])[:self.seq_len]
        depth_pd = np.hstack([depth_random,
                              np.zeros([self.seq_len])])[:self.seq_len]
        alpha_pd = np.hstack([alpha_random,
                              np.zeros([self.seq_len])])[:self.seq_len]
        yaw_pd = np.hstack([yaw_random,
                            np.zeros([self.seq_len])])[:self.seq_len]
        yaw_axis_pd = np.vstack([yaw_axis_pd,
                                 np.zeros([self.seq_len, 2])])[:self.seq_len]
        # Torch tensors
        traj_out = {
            'obj_gt': torch.from_numpy(objs_gt).float(),
            'obj_obs': torch.from_numpy(objs_obs).float(),
            'depth_gt': torch.from_numpy(depth_gt).float(),
            'alpha_gt': torch.from_numpy(alpha_gt).float(),
            'yaw_gt': torch.from_numpy(yaw_gt).float(),
            'yaw_axis_gt': torch.from_numpy(yaw_axis_gt).float(),
            'dim_gt': torch.from_numpy(dim_gt).float(),
            'cen_gt': torch.from_numpy(cen_gt).float(),
            'depth_pd': torch.from_numpy(depth_pd).float(),
            'alpha_pd': torch.from_numpy(alpha_pd).float(),
            'yaw_pd': torch.from_numpy(yaw_pd).float(),
            'yaw_axis_pd': torch.from_numpy(yaw_axis_pd).float(),
            'dim_pd': torch.from_numpy(dim_pd).float(),
            'cen_pd': torch.from_numpy(cen_pd).float(),
            'cam_rot': torch.from_numpy(cam_rot).float(),
            'cam_loc': torch.from_numpy(cam_loc).float(),
            'confidence': torch.from_numpy(confidence).float(),
            'valid_mask': torch.from_numpy(valid_mask).float()
        }
        return traj_out
    def convert_tracking_gt(self, sequence_data):
        """Group GT annotations of one sequence into per-track-id lists."""
        tracking_dict = {}
        for fr_idx, frame in sequence_data['frames'].items():
            for obj_gt in frame['annotations']:
                tid = obj_gt['track_id']
                # If not ignore
                # Get rois, feature, depth, depth_gt, cam_rot, cam_trans
                tid_data = {
                    'depth_gt': obj_gt['location'][2],
                    'alpha_gt': obj_gt['alpha'],
                    'yaw_gt': obj_gt['yaw'],
                    'dim_gt': obj_gt['dimension'],
                    'center_gt': obj_gt['box_center'],
                    'loc_gt': obj_gt['location'],
                    'cam_calib': frame['cam_calib'],
                    'cam_rot': frame['cam_rot'],
                    'cam_loc': frame['cam_loc'],
                    'fr_idx': fr_idx
                }
                if tid not in tracking_dict:
                    tracking_dict[tid] = [tid_data.copy()]
                else:
                    tracking_dict[tid].append(tid_data.copy())
        return tracking_dict
    def convert_tracking_pred(self, sequence_data, sequence_result):
        """Group GT tracks and attach the best-matching detection per frame.

        A detection is matched by normalized-box IoU (threshold 0.85); when
        no detection matches, the GT entry doubles as the "prediction".
        """
        tracking_dict = {}
        width = sequence_data['width']
        height = sequence_data['height']
        # NOTE(review): debug print left in — consider routing through verbose().
        print(width, height)
        for fr_idx, frame_gt in sequence_data['frames'].items():
            frame_pd = sequence_result['frames'][fr_idx]
            obj_boxes = np.array(
                [obj_pd['box'] for obj_pd in frame_pd['annotations']])
            if len(obj_boxes):
                obj_boxes /= np.array([[width, height, width, height]])
            for obj_gt in frame_gt['annotations']:
                tid = obj_gt['track_id']
                # If not ignore
                # Get rois, feature, depth, depth_gt, cam_rot, cam_trans
                if len(obj_boxes):
                    _, box_idx, valid = tu.matching(
                        np.array(obj_gt['box']) /
                        np.array([[width, height, width, height]]),
                        obj_boxes,
                        thres=0.85)
                    if np.any(valid):
                        obj_pd = frame_pd['annotations'][box_idx.item()]
                    else:
                        obj_pd = obj_gt
                else:
                    obj_pd = obj_gt
                tid_data = {
                    'depth_gt': obj_gt['location'][2],
                    'alpha_gt': obj_gt['alpha'],
                    'yaw_gt': obj_gt['yaw'],
                    'dim_gt': obj_gt['dimension'],
                    'center_gt': obj_gt['box_center'],
                    'loc_gt': obj_gt['location'],
                    'depth_pd': obj_pd['location'][2],
                    'alpha_pd': obj_pd['alpha'],
                    'yaw_pd': obj_pd['yaw'],
                    'dim_pd': obj_pd['dimension'],
                    'center_pd': obj_pd['box_center'],
                    'loc_pd': obj_pd['location'],
                    'confidence_pd': obj_pd['confidence'],
                    'cam_calib': frame_gt['cam_calib'],
                    'cam_rot': frame_gt['cam_rot'],
                    'cam_loc': frame_gt['cam_loc'],
                    'fr_idx': fr_idx
                }
                if tid not in tracking_dict:
                    tracking_dict[tid] = [tid_data.copy()]
                else:
                    tracking_dict[tid].append(tid_data.copy())
        return tracking_dict
    def sample(self, data):
        """Return the track ids whose trajectories exceed ``min_seq_len`` frames."""
        datakey = []
        for key in list(data):
            if len(data[key]) > self.min_seq_len:
                datakey.append(key)
        return datakey
class MotionTrainer():
    """Drives training / evaluation of the motion LSTM.

    Owns the dataset loaders, the LSTM model, a baseline tracker (e.g.
    Kalman filter) evaluated side-by-side, the optimizer, and the
    epoch / iteration loops.
    """
    def __init__(self, args):
        # All hyper-parameters come from the argparse namespace; they are
        # copied onto the instance so the loops below are self-contained.
        self.input_gt_path = [args.input_gt_path]
        self.input_pd_path = [args.input_pd_path]
        self.num_input_data = len(self.input_gt_path)
        self.ckpt_path = args.ckpt_path.format(args.session, args.set,
                                               args.num_epochs)
        self.cache_name = args.cache_name
        self.set = args.set
        self.phase = args.phase
        self.lstm_model = get_lstm_model(args.lstm_model_name)
        self.tracker_model = get_tracker(args.tracker_model_name)
        self.lstm_model_name = args.lstm_model_name
        self.tracker_model_name = args.tracker_model_name
        self.session = args.session
        self.start_epoch = args.start_epoch
        self.num_epochs = args.num_epochs
        self.device = args.device
        self.num_workers = args.num_workers
        self.resume = args.resume
        self.show_freq = args.show_freq
        self.is_verbose = args.is_verbose
        self.is_plot = args.is_plot
        self.is_train = args.phase == 'train'
        self.model = None
        self.num_seq = args.num_seq
        self.batch_size = args.batch_size
        self.feature_dim = args.feature_dim
        self.hidden_size = args.hidden_size
        self.num_layers = args.num_layers
        self.loc_dim = args.loc_dim
        self.data_loader = None
        self.train_loader = None
        self.val_loader = None
        self.seq_len = args.seq_len
        self.min_seq_len = args.min_seq_len
        self.max_depth = args.max_depth
        self.depth_scale = args.depth_scale
        self.optimizer = None
        self.init_lr = args.init_lr
        self.lr = args.init_lr
        self.step_ratio = args.step_ratio
        self.lr_adjust = args.lr_adjust
        self.lr_step = args.lr_step
        self.depth_weight = args.depth_weight
        self.weight_decay = args.weight_decay
        # Dropout only while training; deterministic at eval time.
        self.dropout = 0.1 if self.is_train else 0.0
    def loop_epoch(self):
        """Run the train (and periodic eval) loop over the configured epochs."""
        self._init_model()
        # Start epoch iterations
        for epoch in range(self.start_epoch, self.num_epochs + 1):
            if self.is_train:
                self.model.train()
                self.lr = nu.adjust_learning_rate(self.optimizer, epoch,
                                                  self.init_lr,
                                                  self.step_ratio,
                                                  self.lr_step, self.lr_adjust)
                self._init_dataset(is_train=True)
                self._loop_sequence(epoch, is_train=True)
                # Save
                if epoch % min(10, self.num_epochs) == 0:
                    torch.save(
                        {
                            'epoch': epoch,
                            'state_dict': self.model.state_dict(),
                            'session': self.session,
                            'optimizer': self.optimizer.state_dict(),
                        }, self.ckpt_path)
            # Always run a no-grad validation pass per epoch.
            self.model.eval()
            with torch.no_grad():
                self._init_dataset(is_train=False)
                self._loop_sequence(epoch, is_train=False)
    def _loop_sequence(self, epoch: int, is_train: bool = True):
        """Iterate one epoch over the current data loader, tracking LSTM and
        baseline-tracker losses separately."""
        losses = {
            'total_losses': tu.AverageMeter(),
            'pred_losses': tu.AverageMeter(),
            'refine_losses': tu.AverageMeter(),
            'linear_losses': tu.AverageMeter()
        }
        losses_kf = {
            'total_losses': tu.AverageMeter(),
            'pred_losses': tu.AverageMeter(),
            'refine_losses': tu.AverageMeter(),
            'linear_losses': tu.AverageMeter()
        }
        for iters, traj_out in enumerate(
                tqdm(self.data_loader, total=len(self.data_loader))):
            (obj_obs, obj_gt, loc_preds, loc_refines, loc_preds_kf,
             loc_refines_kf, confidence, valid_mask,
             cam_loc) = self._run_engine(traj_out)
            verbose("=" * 20, self.is_verbose)
            # LSTM loss (backpropagated) ...
            total_loss = self._loss_term(obj_obs, obj_gt, loc_preds,
                                         loc_refines, confidence, losses,
                                         valid_mask, epoch, iters,
                                         len(self.data_loader), cam_loc,
                                         self.lstm_model_name, is_train)
            # ... and the baseline tracker loss (monitoring only).
            _ = self._loss_term(obj_obs, obj_gt, loc_preds_kf, loc_refines_kf,
                                confidence, losses_kf, valid_mask, epoch,
                                iters, len(self.data_loader), cam_loc,
                                self.tracker_model_name, is_train)
            def closure():
                # Clear the states of model parameters each time
                self.optimizer.zero_grad()
                # BP loss
                total_loss.backward()
                # Clip if the gradients explode
                torch.nn.utils.clip_grad_norm_(self.model.parameters(), 3.0)
                return total_loss
            if is_train:
                self.optimizer.step(closure)
    def _run_engine(self, trajs: torch.Tensor):
        """Roll the LSTM and baseline tracker over one batch of trajectories.

        Returns per-step predicted and refined locations for both models,
        with object coordinates expressed relative to the first camera
        location of each trajectory.
        """
        # Initial
        cam_loc = trajs['cam_loc'].to(self.device)
        obj_gt = trajs['obj_gt'].to(self.device)
        obj_obs = trajs['obj_obs'].to(self.device)
        confidence = trajs['confidence'].to(self.device)
        valid_mask = trajs['valid_mask'].to(self.device)
        # batch x len x loc_dim
        obj_gt[..., :3] -= cam_loc[:, 0:1]
        obj_obs[..., :3] -= cam_loc[:, 0:1]
        loc_preds = []
        loc_refines = []
        loc_preds_kf = []
        loc_refines_kf = []
        # Also, we need to clear out the hidden state of the LSTM,
        # detaching it from its history on the last instance.
        hidden_predict = self.model.init_hidden(self.device)  # None
        hidden_refine = self.model.init_hidden(self.device)  # None
        # Generate a history of location
        vel_history = obj_obs.new_zeros(self.num_seq, obj_obs.shape[0],
                                        self.loc_dim)
        # Starting condition
        prev_refine = obj_obs[:, 0, :self.loc_dim]
        loc_pred = obj_obs[:, 1, :self.loc_dim]
        with torch.no_grad():
            trks = [
                self.tracker_model(self.device, self.model, _box, _conf)
                if self.tracker_model_name == 'LSTM3DTracker' else
                self.tracker_model(_box, _conf)
                for _box, _conf in zip(obj_obs[:, 0].cpu().numpy(),
                                       confidence.cpu().numpy())
            ]
        for i in range(1, valid_mask.shape[1]):
            # LSTM
            loc_pred[:, 3:4] = fix_alpha(loc_pred[:, 3:4])
            # Align predicted yaw with the observed yaw per batch element so
            # the refine step never sees an obtuse angular difference.
            for batch_id in range(obj_obs.shape[0]):
                curr_yaw = fix_alpha(obj_obs[batch_id, i, 3:4])
                if np.pi / 2.0 < abs(
                        curr_yaw - loc_pred[batch_id, 3:4]) < np.pi * 3 / 2.0:
                    loc_pred[batch_id, 3:4] += np.pi
                    if loc_pred[batch_id, 3:4] > np.pi:
                        loc_pred[batch_id, 3:4] -= np.pi * 2
                    if loc_pred[batch_id, 3:4] < -np.pi:
                        loc_pred[batch_id, 3:4] += np.pi * 2
                # now the angle is acute: < 90 or > 270,
                # convert the case of > 270 to < 90
                if abs(curr_yaw - loc_pred[batch_id, 3:4]) >= np.pi * 3 / 2.0:
                    if curr_yaw > 0:
                        loc_pred[batch_id, 3:4] += np.pi * 2
                    else:
                        loc_pred[batch_id, 3:4] -= np.pi * 2
            loc_refine, hidden_refine = self.model.refine(
                loc_pred.detach().clone(), obj_obs[:, i, :self.loc_dim],
                prev_refine.detach().clone(), confidence[:, i, None],
                hidden_refine)
            loc_refine[:, 3:4] = fix_alpha(loc_refine[:, 3:4])
            # Maintain a rolling window of refinement velocities for predict().
            if i == 1:
                vel_history = torch.cat(
                    [(loc_refine - prev_refine).unsqueeze(0)] * self.num_seq)
            else:
                vel_history = torch.cat(
                    [vel_history[1:], (loc_refine - prev_refine).unsqueeze(0)],
                    dim=0)
            prev_refine = loc_refine
            loc_pred, hidden_predict = self.model.predict(
                vel_history,
                loc_refine.detach().clone(), hidden_predict)
            loc_pred[:, 3:4] = fix_alpha(loc_pred[:, 3:4])
            # KF3D
            with torch.no_grad():
                for trk_idx, trk in enumerate(trks):
                    if i == 1:
                        trk.predict(update_state=False)
                    trk.update(obj_obs[trk_idx, i].cpu().numpy(),
                               confidence[trk_idx, i].cpu().numpy())
                loc_refine_kf = loc_refine.new(
                    np.vstack([trk.get_state()[:self.loc_dim]
                               for trk in trks]))
                loc_pred_kf = loc_pred.new(
                    np.vstack([
                        trk.predict().squeeze()[:self.loc_dim] for trk in trks
                    ]))
            # Predict residual of depth
            loc_preds.append(loc_pred)
            loc_refines.append(loc_refine)
            loc_preds_kf.append(loc_pred_kf)
            loc_refines_kf.append(loc_refine_kf)
        return (obj_obs, obj_gt, loc_preds, loc_refines, loc_preds_kf,
                loc_refines_kf, confidence, valid_mask, cam_loc)
    def _loss_term(self, loc_obs, loc_gt, loc_preds, loc_refines, confidence,
                   losses, valid_mask, epoch, iters, num_iters, cam_loc,
                   method: str, is_train: bool) -> torch.Tensor:
        """Compute masked smooth-L1 + linear-motion losses and update meters.

        Returns the weighted total loss (a tensor suitable for backward()).
        """
        loc_refines = torch.cat(
            loc_refines, dim=1).view(valid_mask.shape[0], -1, self.loc_dim)
        loc_preds = torch.cat(
            loc_preds, dim=1).view(valid_mask.shape[0], -1, self.loc_dim)
        if self.loc_dim > 3:
            loc_refines[
                ..., 3] = (loc_refines[..., 3] + np.pi) % (2 * np.pi) - np.pi
            loc_preds[...,
                      3] = (loc_preds[..., 3] + np.pi) % (2 * np.pi) - np.pi
        loc_refine_mask = loc_refines[:, :] * valid_mask[:, 1:, None]
        loc_pred_mask = loc_preds[:, :-1] * valid_mask[:, 2:, None]
        loc_gt_mask = loc_gt[:, :, :self.loc_dim] * valid_mask[:, :, None]
        loc_obs_mask = loc_obs[:, :, :self.loc_dim] * valid_mask[:, :, None]
        # Normalize yaw angle
        # NOTE(review): boolean-mask indexing returns a copy in torch, so the
        # in-place "+=" below does not write back into loc_refine_mask /
        # loc_pred_mask — these two lines look like no-ops; verify intent.
        loc_refine_mask[loc_refine_mask[:, :, 3] < 0][:, 3] += 2 * np.pi
        loc_pred_mask[loc_pred_mask[:, :, 3] < 0][:, 3] += 2 * np.pi
        # Cost functions
        refine_loss = F.smooth_l1_loss(
            loc_refine_mask, loc_gt_mask[:, 1:], reduction='sum') / torch.sum(
                valid_mask[:, 1:])
        pred_loss = F.smooth_l1_loss(
            loc_pred_mask, loc_gt_mask[:, 2:], reduction='sum') / torch.sum(
                valid_mask[:, 2:])
        linear_loss = nu.linear_motion_loss(loc_refine_mask, valid_mask[:, 1:])
        linear_loss += nu.linear_motion_loss(loc_pred_mask, valid_mask[:, 2:])
        verbose(method, self.is_verbose)
        verbose(
            f"Ref: {torch.mean(loc_refine_mask - loc_gt_mask[:, 1:], dim=0).detach().cpu().numpy()}",
            self.is_verbose)
        verbose(
            f"Prd: {torch.mean(loc_pred_mask - loc_gt_mask[:, 2:], dim=0).detach().cpu().numpy()}",
            self.is_verbose)
        total_loss: torch.Tensor = (
            self.depth_weight * (refine_loss + pred_loss) +
            (1.0 - self.depth_weight) * linear_loss)
        # Updates
        losses['total_losses'].update(total_loss.data.cpu().numpy().item(),
                                      int(torch.sum(valid_mask)))
        losses['pred_losses'].update(pred_loss.data.cpu().numpy().item(),
                                     int(torch.sum(valid_mask)))
        losses['refine_losses'].update(refine_loss.data.cpu().numpy().item(),
                                       int(torch.sum(valid_mask)))
        losses['linear_losses'].update(linear_loss.data.cpu().numpy().item(),
                                       int(torch.sum(valid_mask)))
        # Verbose
        if iters % min(num_iters - 1, self.show_freq) == 0 and iters != 0:
            phase = 'Train' if is_train else 'Val'
            status_msg = (f'[{self.set.upper()} - {self.session} | '
                          f'{phase} - {method}]\t'
                          f'[Epoch: {epoch}/{self.num_epochs} | '
                          f'Iters: {iters}/{len(self.data_loader)}')
            if is_train:
                status_msg += f' | LR: {self.lr:.6f}]\t'
            else:
                status_msg += ']\t'
            verbose(
                f'{status_msg}'
                '[Total Loss {loss.val:2.2f} ({loss.avg:2.2f}) | '
                'P-Loss {pred.val:2.2f} ({pred.avg:2.2f}) | '
                'R-Loss {refine.val:2.2f} ({refine.avg:2.2f}) | '
                'S-Loss {smooth.val:2.2f} ({smooth.avg:2.2f})]'.format(
                    loss=losses['total_losses'],
                    pred=losses['pred_losses'],
                    refine=losses['refine_losses'],
                    smooth=losses['linear_losses']), True)
            verbose(
                f"PD: {loc_pred_mask[0].cpu().data.numpy()}\n"
                f"OB: {loc_obs_mask[0].cpu().data.numpy()}\n"
                f"RF: {loc_refine_mask[0].cpu().data.numpy()}\n"
                f"GT: {loc_gt_mask[0].cpu().data.numpy()}\n"
                f"Conf: {confidence[0].cpu().data.numpy()}", self.is_verbose)
            if self.is_plot:
                plot_3D(
                    osp.dirname(self.cache_name),
                    f"{epoch}_{iters}_{phase}_{method}",
                    self.session,
                    cam_loc[0].cpu().data.numpy(),
                    loc_gt[0].cpu().data.numpy(),
                    predictions={
                        'Obs': loc_obs[0].cpu().data.numpy(),
                        'Prd': loc_preds[0].cpu().data.numpy(),
                        'Ref': loc_refines[0].cpu().data.numpy()
                    },
                    show_cam_loc=False)
        return total_loss
    def _init_model(self):
        """Instantiate the LSTM and, depending on phase, the optimizer /
        checkpoint restore."""
        self.model = self.lstm_model(
            self.batch_size,
            self.feature_dim,
            self.hidden_size,
            self.num_layers,
            self.loc_dim,
            dropout=self.dropout).to(self.device)
        if self.is_train:
            self.model.train()
            self.optimizer = optim.Adam(
                filter(lambda p: p.requires_grad, self.model.parameters()),
                lr=self.lr,
                weight_decay=self.weight_decay,
                amsgrad=True)
            if self.resume:
                nu.load_checkpoint(
                    self.model,
                    self.ckpt_path,
                    optimizer=self.optimizer,
                    is_test=not self.is_train)
        else:
            # Evaluation always restores from the checkpoint.
            self.model.eval()
            nu.load_checkpoint(
                self.model,
                self.ckpt_path,
                optimizer=self.optimizer,
                is_test=not self.is_train)
    def _init_dataset(self, is_train: bool = True):
        """Build (or re-use a cached) DataLoader for the requested split."""
        if is_train and self.train_loader is not None:
            verbose(f"TRAIN set with {len(self.train_loader)} trajectories",
                    True)
            self.data_loader = self.train_loader
        elif not is_train and self.val_loader is not None:
            verbose(f"VAL set with {len(self.val_loader)} trajectories", True)
            self.data_loader = self.val_loader
        else:
            # Data loading code
            dataset = SeqDataset(self.set, self.input_gt_path,
                                 self.input_pd_path, is_train, self.seq_len,
                                 self.min_seq_len, self.max_depth,
                                 self.depth_scale, self.cache_name)
            verbose(f"Generate dataset with {dataset.__len__()} trajectories ",
                    True)
            data_loader = DataLoader(
                dataset,
                batch_size=self.batch_size,
                shuffle=is_train,
                num_workers=self.num_workers,
                pin_memory=True,
                drop_last=True)
            if is_train:
                self.data_loader = data_loader
                self.train_loader = data_loader
            else:
                self.data_loader = data_loader
                self.val_loader = data_loader
def parse_args():
    """Parse command-line arguments for motion-LSTM training / testing.

    Returns the argparse namespace with an extra ``device`` attribute
    (cuda when available, else cpu).

    Fixes vs. previous revision: the ``--num_workers`` help text claimed a
    default of 16 while the actual default is 8, and ``--tracker_model_name``
    reused the LSTM help string. Note the ArgumentDefaultsHelpFormatter
    already appends the real default to every help line, so hardcoded
    defaults in help text must stay in sync.
    """
    parser = argparse.ArgumentParser(
        description='RNN depth motion estimation',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('set', choices=['gta', 'kitti', 'nuscenes', 'waymo'])
    parser.add_argument(
        'phase',
        choices=['train', 'test'],
        help='Which data split to use in testing')
    parser.add_argument(
        '--split',
        choices=['train', 'val', 'test', 'mini'],
        default='train',
        help='Which data split to use in testing')
    parser.add_argument(
        '--input_pd_path',
        dest='input_pd_path',
        help='path of input pred info for tracking',
        default='./data/KITTI/anns/tracking_train_output.json',
        type=str)
    parser.add_argument(
        '--input_gt_path',
        dest='input_gt_path',
        help='path of input gt info for tracking',
        default='./data/KITTI/anns/tracking_train.json',
        type=str)
    parser.add_argument(
        '--cache_name',
        dest='cache_name',
        help='path of cache file',
        default='./work_dirs/LSTM/kitti_train_full_traj.pkl',
        type=str)
    parser.add_argument(
        '--session',
        dest='session',
        help='session of tracking',
        default='batch10',
        type=str)
    parser.add_argument(
        '--ckpt_path',
        dest='ckpt_path',
        help='path of checkpoint file',
        default='./checkpoints/{}_{}_{:03d}_linear.pth',
        type=str)
    parser.add_argument(
        '--lstm_model_name',
        dest='lstm_model_name',
        help='Name of the LSTM model',
        default='LocLSTM',
        choices=LSTM_MODEL_ZOO.keys(),
        type=str)
    parser.add_argument(
        '--tracker_model_name',
        dest='tracker_model_name',
        help='Name of the tracker model',
        default='LSTM3DTracker',
        choices=TRACKER_MODEL_ZOO.keys(),
        type=str)
    parser.add_argument(
        '--start_epoch',
        default=0,
        type=int,
        help='manual epoch number (useful on restarts)')
    parser.add_argument(
        '--seq_len',
        dest='seq_len',
        help='sequence length feed to model',
        default=10,
        type=int)
    parser.add_argument(
        '--min_seq_len',
        dest='min_seq_len',
        help='minimum available sequence length',
        default=10,
        type=int)
    parser.add_argument(
        '--depth_scale',
        dest='depth_scale',
        help='depth uncertainty in training (no uncertainty when value <= 0)',
        default=10,
        type=int)
    parser.add_argument(
        '--max_depth',
        dest='max_depth',
        help='maximum depth in training',
        default=100,
        type=int)
    parser.add_argument(
        '--min_depth',
        dest='min_depth',
        help='minimum depth in training',
        default=0,
        type=int)
    parser.add_argument(
        '--show_freq',
        dest='show_freq',
        help='verbose frequence',
        default=100,
        type=int)
    parser.add_argument(
        '--feature_dim',
        dest='feature_dim',
        help='feature dimension feed into model',
        default=64,
        type=int)
    parser.add_argument(
        '--loc_dim',
        dest='loc_dim',
        help='output dimension, we model depth here',
        default=3,
        type=int)
    parser.add_argument(
        '--hidden_size',
        dest='hidden_size',
        help='hidden size of LSTM',
        default=128,
        type=int)
    parser.add_argument(
        '--num_layers',
        dest='num_layers',
        help='number of layers of LSTM',
        default=2,
        type=int)
    parser.add_argument(
        '--num_epochs',
        dest='num_epochs',
        help='number of epochs',
        default=100,
        type=int)
    parser.add_argument(
        '--num_seq',
        dest='num_seq',
        help='number of seq used in predicting next step',
        default=5,
        type=int)
    parser.add_argument(
        '--init_lr',
        default=5e-3,
        type=float,
        metavar='LR',
        help='initial learning rate')
    parser.add_argument(
        '--lr-adjust',
        help='learning rate adjust strategy',
        choices=['step'],
        default='step',
        type=str)
    parser.add_argument(
        '--lr-step', help='number of steps to decay lr', default=20, type=int)
    parser.add_argument(
        '--step-ratio', dest='step_ratio', default=0.5, type=float)
    parser.add_argument(
        '--depth_weight',
        dest='depth_weight',
        help='weight of depth and smooth loss',
        default=0.9,
        type=float)
    parser.add_argument(
        '--weight-decay',
        '--wd',
        default=1e-4,
        type=float,
        metavar='W',
        help='weight decay (default: 1e-4)')
    parser.add_argument(
        '-j',
        '--num_workers',
        default=8,
        type=int,
        metavar='N',
        help='number of data loading workers (default: 8)')
    parser.add_argument(
        '-b',
        '--batch_size',
        default=10,
        type=int,
        help='the batch size on each gpu')
    parser.add_argument(
        '--is_plot',
        dest='is_plot',
        help='show prediction result',
        default=False,
        action='store_true')
    parser.add_argument(
        '--resume',
        dest='resume',
        help='resume model checkpoint',
        default=False,
        action='store_true')
    parser.add_argument(
        '--verbose',
        dest='is_verbose',
        help='verbose',
        default=False,
        action='store_true')
    args = parser.parse_args()
    # Pick the compute device once so downstream code never re-checks CUDA.
    args.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print(args)
    return args
def main():
    """Train an object motion LSTM from input location sequences
    """
    args = parse_args()
    if args.phase == 'test':
        # Inference walks trajectories one at a time and resumes from the
        # final training checkpoint (start_epoch == num_epochs).
        assert args.batch_size == 1, "Inference with batch size 1 only"
        args.start_epoch = args.num_epochs
    torch.set_num_threads(multiprocessing.cpu_count())
    cudnn.benchmark = True
    np.set_printoptions(formatter={'float': '{: 0.3f}'.format})
    motion_trainer = MotionTrainer(args)
    motion_trainer.loop_epoch()
# Script entry point.
if __name__ == '__main__':
    print(f"Torch version: {torch.__version__}")
    main()
| 21,924 |
337 | <reponame>qussarah/declare<filename>j2k/testData/fileOrElement/strings/escapedDollar.java
public class A {
    // Test fixture: strings containing literal dollar signs (per the file
    // path, this is Java-to-Kotlin converter test data exercising '$'
    // escaping in string templates).
    private String dollar1 = "$a";
    private String dollar2 = "$A";
    private String dollar3 = "${s}";
    private String dollar4 = "$$";
}
664 | package com.badlogic.ashley.utils;
import org.junit.Assert;
import org.junit.Test;
public class BagTest {

	/**
	 * Verifies that {@code Bag.set(int, T)} replaces only the targeted slot:
	 * the size stays constant and the neighboring elements are untouched.
	 * (Previous revision only asserted the replaced index.)
	 */
	@Test
	public void testSet(){
		final Bag<String> bag = new Bag<String>();
		bag.add("a");
		bag.add("b");
		bag.add("c");
		Assert.assertEquals(3, bag.size());

		bag.set(1, "d");

		Assert.assertEquals(3, bag.size());
		Assert.assertEquals("a", bag.get(0));
		Assert.assertEquals("d", bag.get(1));
		Assert.assertEquals("c", bag.get(2));
	}
}
| 166 |
772 | <reponame>Hsy-Intel/fedlearner<gh_stars>100-1000
# Copyright 2021 The FedLearner Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
import enum
from sqlalchemy.sql.schema import Index, UniqueConstraint
from fedlearner_webconsole.utils.mixins import to_dict_mixin
from fedlearner_webconsole.db import db, default_table_args
from fedlearner_webconsole.proto import workflow_definition_pb2
class WorkflowTemplateKind(enum.Enum):
    """Template flavor stored (as its int value) in ``WorkflowTemplate.kind``."""
    DEFAULT = 0
    PRESET_DATAJOIN = 1
@to_dict_mixin(
    extras={
        'config': (lambda wt: wt.get_config()),
        'editor_info': (lambda wt: wt.get_editor_info())
    })
class WorkflowTemplate(db.Model):
    """SQLAlchemy model for a stored workflow template.

    ``config`` and ``editor_info`` hold protobuf messages serialized to
    bytes; use the ``get_*`` / ``set_*`` helpers rather than the raw
    columns. ``to_dict_mixin`` exposes the deserialized protos in
    ``to_dict()`` output.
    """
    __tablename__ = 'template_v2'
    __table_args__ = (UniqueConstraint('name', name='uniq_name'),
                      Index('idx_group_alias', 'group_alias'),
                      default_table_args('workflow template'))
    id = db.Column(db.Integer, primary_key=True, comment='id')
    name = db.Column(db.String(255), comment='name')
    # Stored in column 'cmt' but accessed as attribute 'comment'.
    comment = db.Column('cmt',
                        db.String(255),
                        key='comment',
                        comment='comment')
    group_alias = db.Column(db.String(255),
                            nullable=False,
                            comment='group_alias')
    # max store 16777215 bytes (16 MB)
    config = db.Column(db.LargeBinary(16777215),
                       nullable=False,
                       comment='config')
    is_left = db.Column(db.Boolean, comment='is_left')
    editor_info = db.Column(db.LargeBinary(16777215),
                            comment='editor_info',
                            default=b'')
    kind = db.Column(db.Integer,
                     comment='template kind')  # WorkflowTemplateKind enum
    def set_config(self, proto):
        # Serialize a WorkflowDefinition proto into the config blob.
        self.config = proto.SerializeToString()
    def set_editor_info(self, proto):
        # Serialize a WorkflowTemplateEditorInfo proto into the editor_info blob.
        self.editor_info = proto.SerializeToString()
    def get_config(self):
        """Deserialize and return the stored WorkflowDefinition proto."""
        proto = workflow_definition_pb2.WorkflowDefinition()
        proto.ParseFromString(self.config)
        return proto
    def get_editor_info(self):
        """Deserialize and return the stored editor info proto.

        Returns an empty message when the column is NULL (legacy rows).
        """
        proto = workflow_definition_pb2.WorkflowTemplateEditorInfo()
        if self.editor_info is not None:
            proto.ParseFromString(self.editor_info)
        return proto
| 1,187 |
1,338 | /*
* Copyright 2015, <NAME>. All rights reserved.
* Distributed under the terms of the MIT License.
*/
#ifndef _MEDIA_CLIENT_NODE_H
#define _MEDIA_CLIENT_NODE_H
#include <BufferConsumer.h>
#include <BufferProducer.h>
#include <Controllable.h>
#include <MediaDefs.h>
#include <MediaEventLooper.h>
namespace BPrivate { namespace media {
class BMediaClient;
class BMediaConnection;
class BMediaOutput;
// Internal media node that backs a BMediaClient.  It is simultaneously a
// buffer consumer, a buffer producer and an event looper; media-kit
// callbacks land here and are presumably routed to the owning BMediaClient
// and its connections (implementation not visible in this header -- confirm).
class BMediaClientNode : public BBufferConsumer, public BBufferProducer,
	public BMediaEventLooper {
public:
									BMediaClientNode(const char* name,
										BMediaClient* owner,
										media_type type
											= B_MEDIA_UNKNOWN_TYPE);

	// Various useful stuff
			status_t				SendBuffer(BBuffer* buffer, BMediaConnection* conn);

protected:
	virtual	BMediaAddOn*			AddOn(int32* id) const;

	virtual void					NodeRegistered();

	virtual void					SetRunMode(run_mode mode);

	virtual void					Start(bigtime_t performanceTime);

	virtual void					Stop(bigtime_t performanceTime,
										bool immediate);

	virtual void					Seek(bigtime_t mediaTime,
										bigtime_t performanceTime);

	virtual void					TimeWarp(bigtime_t realTime,
										bigtime_t performanceTime);

	virtual status_t				HandleMessage(int32 message,
										const void* data,
										size_t size);

	// BBufferConsumer
	virtual status_t				AcceptFormat(const media_destination& dest,
										media_format* format);

	virtual status_t				GetNextInput(int32* cookie,
										media_input* input);

	virtual void					DisposeInputCookie(int32 cookie);

	virtual void					BufferReceived(BBuffer* buffer);

	virtual status_t				GetLatencyFor(const media_destination& dest,
										bigtime_t* latency,
										media_node_id* timesource);

	virtual status_t				Connected(const media_source& source,
										const media_destination& dest,
										const media_format& format,
										media_input* outInput);

	virtual void					Disconnected(const media_source& source,
										const media_destination& dest);

	virtual status_t				FormatChanged(const media_source& source,
										const media_destination& consumer,
										int32 tag,
										const media_format& format);

	// BBufferProducer
	virtual status_t				FormatSuggestionRequested(media_type type,
										int32 quality, media_format* format);

	virtual status_t				FormatProposal(const media_source& source,
										media_format *format);

	virtual status_t				FormatChangeRequested(const media_source& source,
										const media_destination& dest,
										media_format *format,
										int32* _deprecated_);

	virtual void					LateNoticeReceived(const media_source& source,
										bigtime_t late, bigtime_t when);

	virtual status_t				GetNextOutput(int32 *cookie, media_output *output);

	virtual status_t				DisposeOutputCookie(int32 cookie);

	virtual status_t				SetBufferGroup(const media_source& source,
										BBufferGroup *group);

	virtual status_t				PrepareToConnect(const media_source& source,
										const media_destination& dest,
										media_format *format,
										media_source *out_source,
										char *name);

	virtual void					Connect(status_t status,
										const media_source& source,
										const media_destination& dest,
										const media_format &format,
										char* name);

	virtual void					Disconnect(const media_source& source,
										const media_destination& dest);

	virtual void					EnableOutput(const media_source& source,
										bool enabled, int32* _deprecated_);

	virtual status_t				GetLatency(bigtime_t *outLatency);

	virtual void					LatencyChanged(const media_source& source,
										const media_destination& dest,
										bigtime_t latency, uint32 flags);

			void					ProducerDataStatus(const media_destination& dest,
										int32 status, bigtime_t when);

protected:
	virtual void					HandleEvent(const media_timed_event *event,
										bigtime_t late,
										bool realTimeEvent=false);

	virtual							~BMediaClientNode();

private:
			void					_ScheduleConnections(bigtime_t eventTime);

			void					_HandleBuffer(BBuffer* buffer);

			void					_ProduceNewBuffer(const media_timed_event* event,
										bigtime_t late);

			BBuffer*				_GetNextBuffer(BMediaOutput* output,
										bigtime_t eventTime);

			// Client passed at construction; ownership not transferred here
			// as far as this header shows -- confirm in the implementation.
			BMediaClient*			fOwner;
			bigtime_t				fStartTime;
};
}
}
using namespace BPrivate::media;
#endif
| 1,696 |
348 | <gh_stars>100-1000
{"nom":"Liederschiedt","dpt":"Moselle","inscrits":78,"abs":23,"votants":55,"blancs":4,"nuls":3,"exp":48,"res":[{"panneau":"1","voix":42},{"panneau":"2","voix":6}]} | 81 |
1,826 | package com.vladsch.flexmark.util.ast;
/**
 * Marker interface for nodes that flag their text as not available for
 * attachment by attributes. It declares no methods; implementing the
 * interface is itself the signal.
 */
public interface DoNotAttributeDecorate {
}
| 47 |
12,887 | <gh_stars>1000+
package com.zhisheng.common.schemas;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import static org.apache.flink.api.java.typeutils.TypeExtractor.getForClass;
/**
* Desc: 实现 KafkaDeserializationSchema 接口的反序列化类,可以获取数据的元数据,
* 注意这种和 JSONKeyValueDeserializationSchema 有个区别就是,本类是不需要数据源的数据是 JSON
* Created by zhisheng on 2019-09-23
* blog:http://www.54tianzhisheng.cn/
* 微信公众号:zhisheng
*/
public class KafkaMetricSchema implements KafkaDeserializationSchema<ObjectNode> {
private static final long serialVersionUID = 1509391548173891955L;
private final boolean includeMetadata;
private ObjectMapper mapper;
public KafkaMetricSchema(boolean includeMetadata) {
this.includeMetadata = includeMetadata;
}
@Override
public boolean isEndOfStream(ObjectNode metricEvent) {
return false;
}
@Override
public ObjectNode deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
if (mapper == null) {
mapper = new ObjectMapper();
}
ObjectNode node = mapper.createObjectNode();
if (consumerRecord.key() != null) {
node.put("key", new String(consumerRecord.key()));
}
if (consumerRecord.value() != null) {
node.put("value", new String(consumerRecord.value()));
}
if (includeMetadata) {
node.putObject("metadata")
.put("offset", consumerRecord.offset())
.put("topic", consumerRecord.topic())
.put("partition", consumerRecord.partition());
}
return node;
}
@Override
public TypeInformation<ObjectNode> getProducedType() {
return getForClass(ObjectNode.class);
}
}
| 913 |
643 | <reponame>abhijitsri007/LoginRegister<filename>java-examples/java-core/src/main/java/com/hellokoding/java/collections/StringSortDescendingByStringBuilder.java<gh_stars>100-1000
package com.hellokoding.java.collections;
import java.util.Arrays;
public class StringSortDescendingByStringBuilder {

    /** Returns the characters of {@code str} sorted in ascending (natural char) order. */
    static String sortAsc(String str) {
        char[] chars = str.toCharArray();
        Arrays.sort(chars);
        return String.valueOf(chars);
    }

    /** Returns the characters of {@code str} in descending order by reversing the ascending sort. */
    static String sortDesc(String str) {
        StringBuilder ascending = new StringBuilder(sortAsc(str));
        return ascending.reverse().toString();
    }

    public static void main(String[] args) {
        System.out.println(sortDesc("bac"));
    }
}
| 306 |
580 | // DynaMix
// Copyright (c) 2013-2016 <NAME>, <NAME>
//
// Distributed under the MIT Software License
// See accompanying file LICENSE.txt or copy at
// https://opensource.org/licenses/MIT
//
#include "executable_pch.hpp"
#include "exe_mixin.hpp"
#include "../dynlib/dynlib_messages.hpp"
using namespace std;
// Appends this mixin's marker line to the object's trace output.
void exe_mixin::trace(std::ostream& o) const
{
    o << "\twith exe_mixin" << endl;
}

// Handler for a message declared in the dynlib but implemented by this
// executable-local mixin (demonstrates cross-module message dispatch).
void exe_mixin::dl_exported()
{
    cout << "calling a message defined in a dynlib over local exe_mixin" << endl;
}

// This executable's contribution to the dynlib-declared multicast message.
void exe_mixin::dl_multicast()
{
    cout << "dynlib multicast in exe mixin" << endl;
}

// Registers the mixin with DynaMix together with the messages it implements.
DYNAMIX_DEFINE_MIXIN(exe_mixin, trace_msg & dl_exported_msg & dl_multicast_msg);
480 | /*
* Copyright [2013-2021], Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.polardbx.executor.operator;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
/**
 * Basic interface for producer which can finish driver by itself.
 */
public interface ProducerExecutor {
    /** Shared pre-completed future used to report "not blocked". */
    ListenableFuture<?> NOT_BLOCKED = Futures.immediateFuture(null);
    /**
     * this operator is finished or not
     */
    boolean produceIsFinished();
    /**
     * this operator is blocked or not if this operator isn't finished.
     */
    ListenableFuture<?> produceIsBlocked();
    /** Optional forceful-shutdown hook; the default implementation is a no-op. */
    default void forceClose() {
    }
}
| 354 |
880 | #include <stdio.h>
#include <stdlib.h>
#include <getopt.h>
#include <string.h>
#include "i2c.h"
#include "IO2.h"
// I/O expander board on I2C address 0x21 (assumed board default -- confirm wiring).
IO2 io2(0x21);

// Poll loop: mirrors analog input 0 onto relay 1 and digital input 1 onto
// relay 2, forever.
int main ()
{
  long int i;  // unused -- kept as in the original source
  // Configure GPIO0/GPIO1 as inputs with internal pull resistors disabled.
  io2.pinMode(io2.GPIO0 | io2.GPIO1, io2.IN);
  io2.setPullpin(io2.GPIO0 | io2.GPIO1, io2.OFF);
  while (1)
  {
    // read analog input 0
    // Threshold 0x1FF appears to be mid-scale of a 10-bit ADC -- confirm.
    if (io2.analogRead (io2.AN0) < 0x1FF)
      io2.setRelay(io2.RELAY1, io2.ON);
    else
      io2.setRelay(io2.RELAY1, io2.OFF);
    // read digital input 1
    if (io2.digitalRead(io2.GPIO1))
      io2.setRelay (io2.RELAY2, io2.ON);
    else
      io2.setRelay (io2.RELAY2, io2.OFF);
  }
}
| 494 |
2,372 | //
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Copyright (c) 2008-2021 NVIDIA Corporation. All rights reserved.
// Copyright (c) 2004-2008 AGEIA Technologies, Inc. All rights reserved.
// Copyright (c) 2001-2004 NovodeX AG. All rights reserved.
#ifndef PX_PHYSICS_SCP_TRIGGER_PAIRS
#define PX_PHYSICS_SCP_TRIGGER_PAIRS
#include "PsArray.h"
#include "CmPhysXCommon.h"
#include "PxFiltering.h"
#include "PxClient.h"
#include "PxSimulationEventCallback.h"
namespace physx
{
class PxShape;
namespace Sc
{
// Internal extension of PxTriggerPairFlag, using the first free bit.
struct TriggerPairFlag
{
	enum Enum
	{
		eTEST_FOR_REMOVED_SHAPES = PxTriggerPairFlag::eNEXT_FREE	// for cases where the pair got deleted because one of the shape volumes got removed from broadphase.
																	// This covers scenarios like volume re-insertion into broadphase as well since the shape might get removed
																	// after such an operation. The scenarios to consider are:
																	//
																	// - shape gets removed (this includes raising PxActorFlag::eDISABLE_SIMULATION)
																	// - shape switches to eSCENE_QUERY_SHAPE only
																	// - shape switches to eSIMULATION_SHAPE
																	// - resetFiltering()
																	// - actor gets removed from an aggregate
	};
};

// The public flag storage type must be wide enough to hold the extended bit above.
PX_COMPILE_TIME_ASSERT((1 << (8*sizeof(PxTriggerPairFlags::InternalType))) > TriggerPairFlag::eTEST_FOR_REMOVED_SHAPES);

// Bookkeeping kept alongside each buffered trigger pair.
// The default constructor fills 0xffffffff / 0xff, which appear to act as
// "invalid" sentinels -- confirm against the consuming code.
struct TriggerPairExtraData
{
	PX_INLINE TriggerPairExtraData() :
		shape0ID(0xffffffff),
		shape1ID(0xffffffff),
		client0ID(0xff),
		client1ID(0xff)
	{
	}

	PX_INLINE TriggerPairExtraData(PxU32 s0ID, PxU32 s1ID,
					PxClientID cl0ID, PxClientID cl1ID) :
		shape0ID(s0ID),
		shape1ID(s1ID),
		client0ID(cl0ID),
		client1ID(cl1ID)
	{
	}

	PxU32		shape0ID;
	PxU32		shape1ID;
	PxClientID	client0ID;
	PxClientID	client1ID;
};
typedef Ps::Array<TriggerPairExtraData> TriggerBufferExtraData;
typedef Ps::Array<PxTriggerPair> TriggerBufferAPI;
} // namespace Sc
}
#endif
| 1,290 |
692 | /**
* Kinesis Video TLS
*/
#define LOG_CLASS "TLS_openssl"
#include "../Include_i.h"
/**
 * Allocates a TlsSession in state TLS_SESSION_STATE_NEW.
 *
 * @param pCallbacks   required; outboundPacketFn must be set (it is used to
 *                     ship encrypted bytes to the transport)
 * @param ppTlsSession receives the new session, or NULL on failure
 */
STATUS createTlsSession(PTlsSessionCallbacks pCallbacks, PTlsSession* ppTlsSession)
{
    ENTERS();
    STATUS retStatus = STATUS_SUCCESS;
    PTlsSession pTlsSession = NULL;
    CHK(ppTlsSession != NULL && pCallbacks != NULL, STATUS_NULL_ARG);
    CHK(pCallbacks->outboundPacketFn != NULL, STATUS_INVALID_ARG);
    pTlsSession = MEMCALLOC(1, SIZEOF(TlsSession));
    CHK(pTlsSession != NULL, STATUS_NOT_ENOUGH_MEMORY);
    // Copy the callback table by value; the session keeps its own copy.
    pTlsSession->callbacks = *pCallbacks;
    pTlsSession->state = TLS_SESSION_STATE_NEW;
CleanUp:
    // On failure, free the partially built session; the pointer is then
    // NULLed by freeTlsSession before being handed back to the caller.
    if (STATUS_FAILED(retStatus) && pTlsSession != NULL) {
        freeTlsSession(&pTlsSession);
    }
    if (ppTlsSession != NULL) {
        *ppTlsSession = pTlsSession;
    }
    LEAVES();
    return retStatus;
}
/**
 * Frees a TlsSession and its OpenSSL objects and NULLs the caller's pointer.
 * Safe to call with a NULL pointer or an already-freed session.
 */
STATUS freeTlsSession(PTlsSession* ppTlsSession)
{
    ENTERS();
    STATUS retStatus = STATUS_SUCCESS;
    PTlsSession pTlsSession = NULL;
    CHK(ppTlsSession != NULL, STATUS_NULL_ARG);
    pTlsSession = *ppTlsSession;
    CHK(pTlsSession != NULL, retStatus);
    if (pTlsSession->pSslCtx != NULL) {
        SSL_CTX_free(pTlsSession->pSslCtx);
    }
    if (pTlsSession->pSsl != NULL) {
        SSL_free(pTlsSession->pSsl);
    }
    // Move the session to CLOSED before releasing its memory.
    retStatus = tlsSessionShutdown(pTlsSession);
    SAFE_MEMFREE(*ppTlsSession);
CleanUp:
    return retStatus;
}
// https://www.openssl.org/docs/man1.0.2/man3/SSL_CTX_set_verify.html
// NOTE(review): unconditionally returns 1, i.e. this ACCEPTS ANY peer
// certificate even though SSL_VERIFY_PEER is set in tlsSessionStart. This is
// only safe if the peer is authenticated by other means (e.g. certificate
// fingerprint comparison as done in DTLS/WebRTC flows) -- confirm that all
// callers perform such an out-of-band check.
INT32 tlsSessionCertificateVerifyCallback(INT32 preverify_ok, X509_STORE_CTX* ctx)
{
    UNUSED_PARAM(preverify_ok);
    UNUSED_PARAM(ctx);
    return 1;
}
/**
 * Creates the SSL context/connection for the session, wires up in-memory
 * BIOs (the session has no socket of its own) and initiates the handshake.
 *
 * @param pTlsSession session created by createTlsSession (must be in state NEW)
 * @param isServer    TRUE for the accept (server) role, FALSE for connect (client)
 */
STATUS tlsSessionStart(PTlsSession pTlsSession, BOOL isServer)
{
    ENTERS();
    STATUS retStatus = STATUS_SUCCESS;
    BIO *pReadBio = NULL, *pWriteBio = NULL;
    BOOL freeBios = TRUE;
    CHK(pTlsSession != NULL, STATUS_NULL_ARG);
    CHK(pTlsSession->state == TLS_SESSION_STATE_NEW, retStatus);
    // SSLv23_method negotiates the highest protocol version both sides support.
    pTlsSession->pSslCtx = SSL_CTX_new(SSLv23_method());
    CHK(pTlsSession->pSslCtx != NULL, STATUS_SSL_CTX_CREATION_FAILED);
    SSL_CTX_set_read_ahead(pTlsSession->pSslCtx, 1);
    // Peer verification is routed through the always-accept callback above.
    SSL_CTX_set_verify(pTlsSession->pSslCtx, SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT, tlsSessionCertificateVerifyCallback);
    CHK(SSL_CTX_set_cipher_list(pTlsSession->pSslCtx, "HIGH:!aNULL:!MD5:!RC4"), STATUS_SSL_CTX_CREATION_FAILED);
    pTlsSession->pSsl = SSL_new(pTlsSession->pSslCtx);
    CHK(pTlsSession->pSsl != NULL, STATUS_CREATE_SSL_FAILED);
    if (isServer) {
        SSL_set_accept_state(pTlsSession->pSsl);
    } else {
        SSL_set_connect_state(pTlsSession->pSsl);
    }
    SSL_set_mode(pTlsSession->pSsl, SSL_MODE_AUTO_RETRY);
    CHK((pReadBio = BIO_new(BIO_s_mem())) != NULL, STATUS_SSL_CTX_CREATION_FAILED);
    CHK((pWriteBio = BIO_new(BIO_s_mem())) != NULL, STATUS_SSL_CTX_CREATION_FAILED);
    BIO_set_mem_eof_return(pReadBio, -1);
    BIO_set_mem_eof_return(pWriteBio, -1);
    // SSL_set_bio transfers ownership of both BIOs to the SSL object,
    // hence freeBios is cleared right after.
    SSL_set_bio(pTlsSession->pSsl, pReadBio, pWriteBio);
    freeBios = FALSE;
    /* init handshake */
    tlsSessionChangeState(pTlsSession, TLS_SESSION_STATE_CONNECTING);
    SSL_do_handshake(pTlsSession->pSsl);
    /* send handshake */
    // NULL/0 flushes the pending handshake bytes through the outbound callback.
    CHK_STATUS(tlsSessionPutApplicationData(pTlsSession, NULL, 0));
CleanUp:
    CHK_LOG_ERR(retStatus);
    if (STATUS_FAILED(retStatus)) {
        if (freeBios) {
            if (pReadBio != NULL) {
                BIO_free(pReadBio);
            }
            if (pWriteBio != NULL) {
                BIO_free(pWriteBio);
            }
        }
        ERR_print_errors_fp(stderr);
    }
    LEAVES();
    return retStatus;
}
/**
 * Feeds an inbound encrypted packet into OpenSSL and decrypts it in place.
 *
 * On input pData/*pDataLen hold the ciphertext (bufferLen is the total
 * capacity of pData); on return *pDataLen is the number of decrypted bytes
 * written back into pData.
 */
STATUS tlsSessionProcessPacket(PTlsSession pTlsSession, PBYTE pData, UINT32 bufferLen, PUINT32 pDataLen)
{
    STATUS retStatus = STATUS_SUCCESS;
    BOOL continueRead = TRUE;
    INT32 sslReadRet = 0;
    UINT32 writtenBytes = 0;
    UINT64 sslErrorRet;
    CHK(pTlsSession != NULL && pData != NULL && pDataLen != NULL, STATUS_NULL_ARG);
    CHK(pTlsSession->state != TLS_SESSION_STATE_NEW, STATUS_SOCKET_CONNECTION_NOT_READY_TO_SEND);
    CHK(pTlsSession->state != TLS_SESSION_STATE_CLOSED, STATUS_SOCKET_CONNECTION_CLOSED_ALREADY);
    // return early if there's no data
    CHK(*pDataLen != 0, retStatus);
    // Push the ciphertext into the read BIO so SSL_read can consume it.
    CHK(BIO_write(SSL_get_rbio(pTlsSession->pSsl), pData, *pDataLen) > 0, STATUS_SECURE_SOCKET_READ_FAILED);
    // read as much as possible
    while (continueRead && writtenBytes < bufferLen) {
        sslReadRet = SSL_read(pTlsSession->pSsl, pData + writtenBytes, bufferLen - writtenBytes);
        if (sslReadRet <= 0) {
            sslReadRet = SSL_get_error(pTlsSession->pSsl, sslReadRet);
            switch (sslReadRet) {
                case SSL_ERROR_WANT_WRITE:
                    continueRead = FALSE;
                    break;
                case SSL_ERROR_WANT_READ:
                case SSL_ERROR_ZERO_RETURN:
                    break;
                default:
                    sslErrorRet = ERR_get_error();
                    DLOGW("SSL_read failed with %s", ERR_error_string(sslErrorRet, NULL));
                    break;
            }
            // All non-positive outcomes end the loop; only the logging differs.
            continueRead = FALSE;
        } else {
            writtenBytes += sslReadRet;
        }
    }
    *pDataLen = writtenBytes;
CleanUp:
    // CHK_LOG_ERR might be too verbose
    if (STATUS_FAILED(retStatus)) {
        DLOGD("Warning: reading socket data failed with 0x%08x", retStatus);
    }
    return retStatus;
}
/**
 * Encrypts application data and ships the resulting ciphertext through the
 * session's outbound packet callback. Called with pData == NULL and
 * dataLen == 0 (see tlsSessionStart) it only flushes pending handshake
 * bytes from the write BIO without writing application data.
 */
STATUS tlsSessionPutApplicationData(PTlsSession pTlsSession, PBYTE pData, UINT32 dataLen)
{
    STATUS retStatus = STATUS_SUCCESS;
    INT32 sslRet = 0, sslErr = 0;
    SIZE_T wBioDataLen = 0;
    PCHAR wBioBuffer = NULL;
    CHK(pTlsSession != NULL, STATUS_NULL_ARG);
    // SSL_write is only legal once the handshake has completed.
    if (SSL_is_init_finished(pTlsSession->pSsl)) {
        tlsSessionChangeState(pTlsSession, TLS_SESSION_STATE_CONNECTED);
        sslRet = SSL_write(pTlsSession->pSsl, pData, dataLen);
        if (sslRet < 0) {
            sslErr = SSL_get_error(pTlsSession->pSsl, sslRet);
            switch (sslErr) {
                case SSL_ERROR_WANT_READ:
                    /* explicit fall-through */
                case SSL_ERROR_WANT_WRITE:
                    break;
                default:
                    DLOGD("Warning: SSL_write failed with %s", ERR_error_string(sslErr, NULL));
                    tlsSessionChangeState(pTlsSession, TLS_SESSION_STATE_CLOSED);
                    break;
            }
            CHK(FALSE, STATUS_SEND_DATA_FAILED);
        }
    }
    // Drain whatever ciphertext OpenSSL queued into the write BIO.
    wBioDataLen = (SIZE_T) BIO_get_mem_data(SSL_get_wbio(pTlsSession->pSsl), &wBioBuffer);
    CHK_ERR(wBioDataLen >= 0, STATUS_SEND_DATA_FAILED, "BIO_get_mem_data failed");
    if (wBioDataLen > 0) {
        retStatus =
            pTlsSession->callbacks.outboundPacketFn(pTlsSession->callbacks.outBoundPacketFnCustomData, (PBYTE) wBioBuffer, (UINT32) wBioDataLen);
        /* reset bio to clear its content since it's already sent if possible */
        BIO_reset(SSL_get_wbio(pTlsSession->pSsl));
    }
CleanUp:
    return retStatus;
}
/**
 * Transitions the session to the CLOSED state; no-op if already closed.
 * Does not free OpenSSL resources -- see freeTlsSession for that.
 */
STATUS tlsSessionShutdown(PTlsSession pTlsSession)
{
    STATUS retStatus = STATUS_SUCCESS;
    CHK(pTlsSession != NULL, STATUS_NULL_ARG);
    CHK(pTlsSession->state != TLS_SESSION_STATE_CLOSED, retStatus);
    CHK_STATUS(tlsSessionChangeState(pTlsSession, TLS_SESSION_STATE_CLOSED));
CleanUp:
    CHK_LOG_ERR(retStatus);
    return retStatus;
}
| 3,526 |
337 | #ifndef INCLUDE_PARSEOBJECTS_H_
#define INCLUDE_PARSEOBJECTS_H_
#include "common.h"
// Bit flags recording which weather fields were successfully parsed.
typedef enum
{
    eDatetimeParsed = 0x01,
    eTemperatureParsed = 0x02,
    eDescriptionParsed = 0x04,
    eIconParsed = 0x08,
    eRainsnowParsed = 0x10,
    // Composite masks: the fields required for a complete current-weather
    // record and for a complete forecast entry, respectively.
    eWeatherAllParsed = eDatetimeParsed |
                        eTemperatureParsed |
                        eDescriptionParsed |
                        eIconParsed,
    eForecastParsed = eDatetimeParsed |
                      eTemperatureParsed |
                      eIconParsed
}ParsedFields;

// Parses current-weather JSON (jsonLen gives the buffer length). Fills
// curWeather, copies the city id into cityId (at most cityIdSize bytes) and
// the status code into *statusCode; returns the set of fields parsed.
ParsedFields parseWeather(const char *json, int jsonLen, CurWeather *curWeather, char *cityId, int cityIdSize, int *statusCode);
// Parses up to forecastSize forecast entries from JSON into forecast and
// writes the status code to *statusCode. The meaning of the int result is
// not visible in this header -- confirm against the implementation.
int parseForecast(const char *json, int jsonLen, Forecast *forecast, int forecastSize, int *statusCode);
| 446 |
317 | package com.googlecode.totallylazy.transducers;
import java.util.Iterator;
import static com.googlecode.totallylazy.transducers.State.Stop;
public class IteratorSender<A> implements Sender<A> {
    /** Source of the elements pushed to receivers. */
    private final Iterator<? extends A> iterator;

    public IteratorSender(Iterator<? extends A> iterator) {
        this.iterator = iterator;
    }

    /**
     * Pushes every remaining element of the iterator to the receiver,
     * honouring Stop responses from start() and next(), then signals
     * finish(). Always returns the shared empty closeable.
     */
    @Override
    public AutoCloseable send(Receiver<A> receiver) {
        if (receiver.start().equals(Stop)) {
            return EMPTY_CLOSEABLE;
        }
        while (iterator.hasNext()) {
            if (receiver.next(iterator.next()).equals(Stop)) {
                break;
            }
        }
        receiver.finish();
        return EMPTY_CLOSEABLE;
    }
}
| 259 |
538 | #include "stdafx.h"
#include "resource.h"
#include "HexView.h"
#include "VGMFileTreeView.h"
#include "ItemTreeView.h"
//#include "MainFrm.h"
#include "WinVGMRoot.h"
// Global view instance referenced across the UI (assumed singleton -- confirm).
CVGMFileTreeView VGMFilesView;

CVGMFileTreeView::CVGMFileTreeView()
{
}
// Pre-dispatch hook for messages aimed at this view or its children.
// The accelerator handling is commented out, so this currently always
// returns FALSE and consumes nothing.
BOOL CVGMFileTreeView::PreTranslateMessage(MSG* pMsg)
{
	if(pMsg)
	{
		if((pMsg->hwnd == m_hWnd) || ::IsChild(m_hWnd, pMsg->hwnd))
		{
			// We'll have the Accelerator send the WM_COMMAND to our view
			//if(m_hAccel != NULL && ::TranslateAccelerator(m_hWnd, m_hAccel, pMsg))
			//{
			//	return TRUE;
			//}
		}
	}
	return FALSE;
}
// WM_DESTROY handler: releases the icon image list; leaves bHandled FALSE so
// default processing still runs.
LRESULT CVGMFileTreeView::OnDestroy(UINT /*uMsg*/, WPARAM /*wParam*/, LPARAM /*lParam*/, BOOL& bHandled)
{
	BOOL bDestroy = m_ImageList.Destroy();
	bDestroy; //avoid level 4 warning
	bHandled = FALSE;
	return 0;
}
// WM_CREATE handler: runs default creation, then builds the 16x16 image list
// used for file and folder icons and initializes it via Init().
LRESULT CVGMFileTreeView::OnCreate(UINT uMsg, WPARAM wParam, LPARAM lParam, BOOL& bHandled)
{
	// "base::OnCreate()"
	LRESULT lRet = DefWindowProc(uMsg, wParam, lParam);
	bHandled = TRUE;
	// "OnInitialUpdate"
	int cx = 16, cy = 16;
	BOOL bCreate = FALSE;
	bCreate = m_ImageList.Create(cx, cy, ILC_COLOR32 | ILC_MASK, 4, 4);
	m_ImageList.SetBkColor( RGB(255,255,255) );
	if(bCreate)
	{
		Init(cx, cy);
	}
	// Demo tree population kept for reference:
	/*CTreeItem tiRoot = this->InsertItem(_T("Root"), nIconFolderIndexNormal, nIconFolderIndexSelected, TVI_ROOT, NULL);
	CTreeItem tiFolder1 = this->InsertItem(_T("Folder"), nIconFolderIndexNormal, nIconFolderIndexSelected, tiRoot, NULL);
	this->InsertItem(_T("Item"), nIconIndexNormal, nIconIndexSelected, tiFolder1, NULL);
	CTreeItem tiFolder2 = this->InsertItem(_T("Folder"), nIconFolderIndexNormal, nIconFolderIndexSelected, tiRoot, NULL);
	this->InsertItem(_T("Item"), nIconIndexNormal, nIconIndexSelected, tiFolder2, NULL);
	this->InsertItem(_T("Item"), nIconIndexNormal, nIconIndexSelected, tiFolder2, NULL);
	tiRoot.Expand();
	tiFolder1.Expand();
	tiFolder2.Expand();*/
	bHandled = TRUE;
	return lRet;
}
// Double-click handler: opens the VGM file frame for the item under the
// cursor. (-1,-1) indicates a keyboard (Shift-F10 style) invocation, in
// which case the current message position is used instead.
void CVGMFileTreeView::OnLButtonDblClk(UINT nFlags, CPoint point)
{
	// TODO: Add your message handler code here and/or call default
	// if Shift-F10
	if (point.x == -1 && point.y == -1)
		point = (CPoint) GetMessagePos();
	//ScreenToClient(&ptMousePos);
	UINT uFlags;
	CTreeItem item = HitTest( point, &uFlags );
	if (item == NULL)
		return;
	// The item data holds the VGMFile* stored by AddFile().
	pMainFrame->ShowVGMFileFrame((VGMFile*)item.GetData());
	//pMainFrame->ShowFileHexView(true);
	//if( htItem == NULL )
	//	return;
	//VGMItem* myVGMItem;
	//VGMItemMap.Lookup(htItem, (void *&)myVGMItem);	//look up the VGMItem from the HTREEITEM key
	//myVGMItem->OnDblClick();
	//bHandled = true;
}
// TVN_SELCHANGED handler: propagates the newly selected VGM file to the rest
// of the UI through winroot.
LRESULT CVGMFileTreeView::OnTvnSelchanged(int idCtrl, LPNMHDR pnmh, BOOL& bHandled)
{
	CTreeViewCtrlEx* treeview = reinterpret_cast<CTreeViewCtrlEx*>(pnmh);
	NM_TREEVIEW* pNMTreeView = (NM_TREEVIEW*)pnmh;
	// TVC_UNKNOWN appears to mark programmatic selection changes, which are
	// skipped here -- presumably to avoid feedback loops. TODO confirm.
	if (pNMTreeView->action != TVC_UNKNOWN)
	{
		CTreeItem treeitem = treeview->GetSelectedItem();
		VGMFile* vgmfile = (VGMFile*)treeitem.GetData();
		//fileView.SetCurFile(vgmfile->file);
		//fileView.SelectItem(vgmfile);
		//WriteItemUpdateStatusBarWithItem(vgmfile);
		//itemTreeView.ChangeCurVGMFile(vgmfile);
		winroot.SelectItem(vgmfile);
		//pMainFrame->OnPlay();
	}
	bHandled = false;
	return 0;
}
// Loads the cx-by-cy icons (sequence file, closed folder, open folder) into
// the image list and attaches the list to the tree control.
void CVGMFileTreeView::Init(int cx, int cy)
{
	nIconIndexNormal = -1;
	nIconIndexSelected = -1;
	HICON hIcon = NULL;
	// NOTE: Don't Load using the LR_LOADTRANSPARENT bit, because the icon
	// already properly deals with transparency (setting it causes problems).
	// We will load this as LR_SHARED so that we don't have to do a DeleteIcon.
	hIcon = (HICON)::LoadImage(
		_Module.GetResourceInstance(),
		MAKEINTRESOURCE(IDI_VGMFILE_SEQ),
		IMAGE_ICON, cx, cy, LR_SHARED);
	nIconIndexNormal = m_ImageList.AddIcon(hIcon);
	// Files use the same icon whether selected or not.
	nIconIndexSelected = nIconIndexNormal;
	nIconFolderIndexNormal = -1;
	nIconFolderIndexSelected = -1;
	hIcon = (HICON)::LoadImage(
		_Module.GetResourceInstance(),
		MAKEINTRESOURCE(IDI_FOLDER_CLOSED),
		IMAGE_ICON, cx, cy, LR_SHARED);
	nIconFolderIndexNormal = m_ImageList.AddIcon(hIcon);
	hIcon = (HICON)::LoadImage(
		_Module.GetResourceInstance(),
		MAKEINTRESOURCE(IDI_FOLDER_OPEN),
		IMAGE_ICON, cx, cy, LR_SHARED);
	nIconFolderIndexSelected = m_ImageList.AddIcon(hIcon);
	// Hook up the image list to the tree view
	SetImageList(m_ImageList, TVSIL_NORMAL);
}
// Selects the tree item associated with the given VGM file.
// Returns FALSE when the file is NULL or is not tracked by this view.
BOOL CVGMFileTreeView::SelectFile(VGMFile* vgmfile)
{
	// Check membership first: map operator[] would otherwise insert a
	// default-constructed CTreeItem for an unknown file.
	if (vgmfile == NULL || items.count(vgmfile) == 0)
		return FALSE;	// BOOL result: FALSE, not NULL
	return CTreeViewCtrlEx::SelectItem(items[vgmfile].m_hTreeItem);
}
// Inserts a root-level tree item for the file and records the mapping.
// The VGMFile* is stored as item data (read back in OnLButtonDblClk etc.).
void CVGMFileTreeView::AddFile(VGMFile* newFile)
{
	//LPCTSTR str = A2W(newFile->name.c_str());
	CTreeItem newItem = this->InsertItem(newFile->GetName()->c_str(), nIconIndexNormal, nIconIndexSelected, TVI_ROOT, NULL);
	newItem.SetData((DWORD_PTR)newFile);
	items[newFile] = newItem;
	//lItems.push_back(newItem);
}
// Removes the tree item for the given file from both the view and the map.
// Files that are not tracked are ignored.
void CVGMFileTreeView::RemoveFile(VGMFile* theFile)
{
	// Guard: the original erased items.find(theFile) unconditionally, which
	// is undefined behavior when the key is absent (erasing end()).
	if (theFile == NULL || items.count(theFile) == 0)
		return;
	items[theFile].Delete();	//remove CTreeItem from view
	items.erase(theFile);	//remove the CTreeItem from the item map
}
// Right-click notification: re-posts as WM_CONTEXTMENU at the cursor
// position so OnContextMenu handles both mouse and keyboard invocations.
LRESULT CVGMFileTreeView::OnNMRClick(int idCtrl, LPNMHDR pnmh, BOOL& bHandled)
{
	SendMessage(WM_CONTEXTMENU, (WPARAM)m_hWnd, GetMessagePos());
	return 0;
}
// WM_CONTEXTMENU handler: shows the per-file context menu for the item under
// the click point. (-1,-1) signals a keyboard (Shift-F10) invocation, in
// which case the current message position is used.
LRESULT CVGMFileTreeView::OnContextMenu(HWND hwndCtrl, CPoint ptClick )
{
	//bHandled = TRUE;
	//CTreeViewCtrlEx* treeview = (CTreeViewCtrlEx*)hwndCtrl;
	// if Shift-F10
	if (ptClick.x == -1 && ptClick.y == -1)
		ptClick = (CPoint) GetMessagePos();
	ScreenToClient(&ptClick);
	UINT uFlags;
	//HTREEITEM htItem = GetTreeCtrl().HitTest( ptMousePos, &uFlags );
	CTreeItem treeitem = HitTest( ptClick, &uFlags );
	if( treeitem == NULL )
		return 0;
	VGMFile* pvgmfile = (VGMFile*)treeitem.GetData();
	ClientToScreen(&ptClick);
	ItemContextMenu(hwndCtrl, ptClick, pvgmfile);
	// Load from resource
	//mnuContext.LoadMenu(IDR_RAWFILE);
	//CMenuHandle pPopup = mnuContext.GetSubMenu(0);
	//ClientToScreen(&ptClick);
	//pPopup.TrackPopupMenu( TPM_LEFTALIGN, ptClick.x, ptClick.y, hwndCtrl );
	// or build dynamically
	// (being sure to enable/disable menu items as appropriate,
	// and giving the appropriate IDs)
	/*if(mnuContext.CreatePopupMenu())
	{
		int cchWindowText = this->GetWindowTextLength();
		CString sWindowText;
		this->GetWindowText(sWindowText.GetBuffer(cchWindowText+1), cchWindowText+1);
		sWindowText.ReleaseBuffer();
		CString sSave(_T("&Save '"));
		sSave += sWindowText;
		sSave += _T("'");
		mnuContext.AppendMenu((MF_ENABLED | MF_STRING), ID_FILE_SAVE, sSave);
		mnuContext.AppendMenu((MF_ENABLED | MF_STRING), ID_FILE_CLOSE, _T("&Close\tCtrl+F4"));
		mnuContext.AppendMenu(MF_SEPARATOR);
		//mnuContext.AppendMenu((MF_ENABLED | MF_STRING), ID_VIEW_SOURCE, _T("&View Source"));
		//if(m_pCmdBar != NULL)
		//{
			// NOTE: The CommandBarCtrl in our case is the mainframe's, so the commands
			// would actually go to the main frame if we don't specify TPM_RETURNCMD.
			// In the main frame's message map, if we don't specify
			// CHAIN_MDI_CHILD_COMMANDS, we are not going to see those command
			// messages. We have 2 choices here - either specify TPM_RETURNCMD,
			// then send/post the message to our window, or don't specify
			// TPM_RETURNCMD, and be sure to have CHAIN_MDI_CHILD_COMMANDS
			// in the main frame's message map.
			//m_pCmdBar->TrackPopupMenu(mnuContext, TPM_LEFTALIGN | TPM_RIGHTBUTTON | TPM_TOPALIGN | TPM_VERTICAL,
			//	ptPopup.x, ptPopup.y);
		ClientToScreen(&ptClick);
		DWORD nSelection = mnuContext.TrackPopupMenu(TPM_LEFTALIGN | TPM_RIGHTBUTTON | TPM_TOPALIGN | TPM_VERTICAL | TPM_RETURNCMD,
			ptClick.x, ptClick.y, hwndCtrl);
		if(nSelection != 0)
		{
			this->PostMessage(WM_COMMAND, MAKEWPARAM(nSelection, 0));
		}
		//}
	//	else
	//	{
	//		mnuContext.TrackPopupMenuEx(TPM_LEFTALIGN | TPM_RIGHTBUTTON | TPM_TOPALIGN | TPM_VERTICAL,
	//			ptPopup.x, ptPopup.y, m_hWnd, NULL);
	//	}
	}*/
	return 0;
}
// Selects the tree entry for the VGM file that owns the given item.
// Returns FALSE for a NULL item, an item without an owning file, or a file
// this view does not track.
BOOL CVGMFileTreeView::SelectItem(VGMItem* item)
{
	if (item == NULL)
		return FALSE;	// BOOL result: FALSE, not NULL
	VGMFile* itemsVGMFile = item->vgmfile;	//item->GetRawFile()->GetVGMFileFromOffset(item->dwOffset);
	// Membership check avoids map operator[] inserting a default entry
	// for an unknown file.
	if (itemsVGMFile == NULL || items.count(itemsVGMFile) == 0)
		return FALSE;
	return CTreeViewCtrlEx::SelectItem(items[itemsVGMFile].m_hTreeItem);
}
| 3,362 |
3,325 | <gh_stars>1000+
/*
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package autodispose2;
import io.reactivex.rxjava3.annotations.NonNull;
import io.reactivex.rxjava3.core.Observer;
import io.reactivex.rxjava3.plugins.RxJavaPlugins;
import java.util.concurrent.atomic.AtomicInteger;
import org.reactivestreams.Subscriber;
/**
* Utility methods to perform half-serialization: a form of serialization
* where onNext is guaranteed to be called from a single thread but
* onError or onComplete may be called from any threads.
* <p>
* The onNext methods have been modified to return a boolean indicating whether or not the delegate
* observer was sent a terminal event.
*/
final class HalfSerializer {
/** Utility class. */
private HalfSerializer() {
throw new IllegalStateException("No instances!");
}
/**
* Emits the given value if possible and terminates if there was an onComplete or onError
* while emitting, drops the value otherwise.
*
* @param <T> the value type
* @param subscriber the target Subscriber to emit to
* @param value the value to emit
* @param wip the serialization work-in-progress counter/indicator
* @param error the holder of Throwables
* @return true if a terminal event was emitted to {@code observer}, false if not
*/
public static <@NonNull T> boolean onNext(Subscriber<? super T> subscriber,
T value,
AtomicInteger wip,
AtomicThrowable error) {
if (wip.get() == 0 && wip.compareAndSet(0, 1)) {
subscriber.onNext(value);
if (wip.decrementAndGet() != 0) {
Throwable ex = error.terminate();
if (ex != null) {
subscriber.onError(ex);
} else {
subscriber.onComplete();
}
return true;
}
}
return false;
}
/**
* Emits the given exception if possible or adds it to the given error container to
* be emitted by a concurrent onNext if one is running.
* Undeliverable exceptions are sent to the RxJavaPlugins.onError.
*
* @param subscriber the target Subscriber to emit to
* @param ex the Throwable to emit
* @param wip the serialization work-in-progress counter/indicator
* @param error the holder of Throwables
*/
public static void onError(Subscriber<?> subscriber, Throwable ex, AtomicInteger wip, AtomicThrowable error) {
if (error.addThrowable(ex)) {
if (wip.getAndIncrement() == 0) {
subscriber.onError(error.terminate());
}
} else {
RxJavaPlugins.onError(ex);
}
}
/**
* Emits an onComplete signal or an onError signal with the given error or indicates
* the concurrently running onNext should do that.
*
* @param subscriber the target Subscriber to emit to
* @param wip the serialization work-in-progress counter/indicator
* @param error the holder of Throwables
*/
public static void onComplete(Subscriber<?> subscriber, AtomicInteger wip, AtomicThrowable error) {
if (wip.getAndIncrement() == 0) {
Throwable ex = error.terminate();
if (ex != null) {
subscriber.onError(ex);
} else {
subscriber.onComplete();
}
}
}
/**
* Emits the given value if possible and terminates if there was an onComplete or onError
* while emitting, drops the value otherwise.
*
* @param <T> the value type
* @param observer the target Observer to emit to
* @param value the value to emit
* @param wip the serialization work-in-progress counter/indicator
* @param error the holder of Throwables
* @return true if a terminal event was emitted to {@code observer}, false if not
*/
public static <@NonNull T> boolean onNext(Observer<? super T> observer, T value, AtomicInteger wip, AtomicThrowable error) {
if (wip.get() == 0 && wip.compareAndSet(0, 1)) {
observer.onNext(value);
if (wip.decrementAndGet() != 0) {
Throwable ex = error.terminate();
if (ex != null) {
observer.onError(ex);
} else {
observer.onComplete();
}
return true;
}
}
return false;
}
/**
 * Emits the given exception if possible, or adds it to the given error
 * container to be emitted by a concurrently running onNext.
 * Undeliverable exceptions are sent to RxJavaPlugins.onError.
 *
 * @param observer the target Observer to emit to
 * @param ex the Throwable to emit
 * @param wip the serialization work-in-progress counter/indicator
 * @param error the holder of Throwables
 */
public static void onError(Observer<?> observer, Throwable ex, AtomicInteger wip, AtomicThrowable error) {
    // If the error container has already been terminated, the error can no
    // longer be delivered downstream; hand it to the global error handler.
    if (!error.addThrowable(ex)) {
        RxJavaPlugins.onError(ex);
        return;
    }
    // Only the first arriving terminal signal (wip 0 -> 1) may emit.
    if (wip.getAndIncrement() == 0) {
        observer.onError(error.terminate());
    }
}
/**
 * Emits an onComplete signal, or an onError signal with the accumulated error,
 * unless a concurrently running onNext is in progress, in which case that
 * onNext will deliver the terminal event instead.
 *
 * @param observer the target Observer to signal
 * @param wip the serialization work-in-progress counter/indicator
 * @param error the holder of Throwables
 */
public static void onComplete(Observer<?> observer, AtomicInteger wip, AtomicThrowable error) {
    // Only the first arriving terminal signal (wip 0 -> 1) may emit.
    if (wip.getAndIncrement() != 0) {
        return;
    }
    Throwable ex = error.terminate();
    if (ex == null) {
        observer.onComplete();
    } else {
        observer.onError(ex);
    }
}
}
| 2,019 |
575 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/tabs/existing_tab_group_sub_menu_model.h"
#include "base/metrics/user_metrics.h"
#include "base/metrics/user_metrics_action.h"
#include "chrome/app/vector_icons/vector_icons.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/themes/theme_service.h"
#include "chrome/browser/ui/tabs/tab_group.h"
#include "chrome/browser/ui/tabs/tab_group_model.h"
#include "chrome/browser/ui/tabs/tab_group_theme.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/grit/generated_resources.h"
#include "components/tab_groups/tab_group_color.h"
#include "components/tab_groups/tab_group_id.h"
#include "components/tab_groups/tab_group_visual_data.h"
#include "ui/base/models/image_model.h"
#include "ui/gfx/canvas.h"
#include "ui/gfx/image/canvas_image_source.h"
#include "ui/gfx/image/image_skia.h"
#include "ui/gfx/paint_vector_icon.h"
#include "ui/native_theme/native_theme.h"
// Builds the "add tab to existing group" submenu: one entry per tab group
// eligible to receive the tab at |context_index|, labelled with the group's
// title (or its content string when untitled) and a group-colored icon.
ExistingTabGroupSubMenuModel::ExistingTabGroupSubMenuModel(
    ui::SimpleMenuModel::Delegate* parent_delegate,
    TabStripModel* model,
    int context_index)
    : ExistingBaseSubMenuModel(parent_delegate,
                               model,
                               context_index,
                               kMinExistingTabGroupCommandId) {
  const auto& tp = ThemeService::GetThemeProviderForProfile(model->profile());
  // Size (in dip) of the group icon shown next to each entry.
  constexpr int kIconSize = 14;
  std::vector<MenuItemInfo> menu_item_infos;
  for (tab_groups::TabGroupId group : GetOrderedTabGroupsInSubMenu()) {
    const TabGroup* tab_group = model->group_model()->GetTabGroup(group);
    const std::u16string group_title = tab_group->visual_data()->title();
    // Untitled groups fall back to a description of their contents.
    const std::u16string displayed_title =
        group_title.empty() ? tab_group->GetContentString() : group_title;
    const int color_id =
        GetTabGroupContextMenuColorId(tab_group->visual_data()->color());
    // TODO (kylixrd): Investigate passing in color_id in order to color the
    // icon using the ColorProvider.
    ui::ImageModel image_model = ui::ImageModel::FromVectorIcon(
        kTabGroupIcon, tp.GetColor(color_id), kIconSize);
    menu_item_infos.emplace_back(MenuItemInfo{displayed_title, image_model});
    // Group titles are user-provided text; do not treat '&' as a mnemonic.
    menu_item_infos.back().may_have_mnemonics = false;
  }
  Build(IDS_TAB_CXMENU_SUBMENU_NEW_GROUP, menu_item_infos);
}
std::vector<tab_groups::TabGroupId>
ExistingTabGroupSubMenuModel::GetOrderedTabGroupsInSubMenu() {
std::vector<tab_groups::TabGroupId> ordered_groups;
base::Optional<tab_groups::TabGroupId> current_group = base::nullopt;
for (int i = 0; i < model()->count(); ++i) {
base::Optional<tab_groups::TabGroupId> new_group =
model()->GetTabGroupForTab(i);
if (new_group.has_value() && new_group != current_group &&
ShouldShowGroup(model(), GetContextIndex(), new_group.value())) {
ordered_groups.push_back(new_group.value());
}
current_group = new_group;
}
return ordered_groups;
}
// static
bool ExistingTabGroupSubMenuModel::ShouldShowSubmenu(TabStripModel* model,
                                                     int context_index) {
  // The submenu is only useful when at least one existing group could
  // receive the tab(s) at |context_index|.
  const auto& groups = model->group_model()->ListTabGroups();
  for (const tab_groups::TabGroupId& group : groups) {
    if (ShouldShowGroup(model, context_index, group))
      return true;
  }
  return false;
}
// Forwards the "add to new group" action to the parent menu delegate, which
// owns the actual command handling.
void ExistingTabGroupSubMenuModel::ExecuteNewCommand(int event_flags) {
  parent_delegate()->ExecuteCommand(TabStripModel::CommandAddToNewGroup,
                                    event_flags);
}
void ExistingTabGroupSubMenuModel::ExecuteExistingCommand(int command_index) {
base::RecordAction(base::UserMetricsAction("TabContextMenu_NewTabInGroup"));
if (size_t{command_index} >= model()->group_model()->ListTabGroups().size())
return;
if (!model()->ContainsIndex(GetContextIndex()))
return;
model()->ExecuteAddToExistingGroupCommand(
GetContextIndex(), GetOrderedTabGroupsInSubMenu()[command_index]);
}
// static
// Returns true if |group| should be offered as a destination for the tabs this
// context menu acts on.
bool ExistingTabGroupSubMenuModel::ShouldShowGroup(
    TabStripModel* model,
    int context_index,
    tab_groups::TabGroupId group) {
  if (!model->IsTabSelected(context_index)) {
    // Unselected context tab: the command acts on that tab alone, so offer
    // every group except the one it is already in.
    if (group != model->GetTabGroupForTab(context_index))
      return true;
  } else {
    // Selected context tab: the command acts on the whole selection, so offer
    // the group if at least one selected tab is not already in it.
    for (int index : model->selection_model().selected_indices()) {
      if (group != model->GetTabGroupForTab(index)) {
        return true;
      }
    }
  }
  return false;
}
| 1,764 |
474 | <filename>javacord-core/src/main/java/org/javacord/core/entity/server/invite/InviteBuilderDelegateImpl.java
package org.javacord.core.entity.server.invite;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import org.javacord.api.entity.channel.ServerChannel;
import org.javacord.api.entity.server.invite.Invite;
import org.javacord.api.entity.server.invite.internal.InviteBuilderDelegate;
import org.javacord.core.util.rest.RestEndpoint;
import org.javacord.core.util.rest.RestMethod;
import org.javacord.core.util.rest.RestRequest;
import java.util.concurrent.CompletableFuture;
/**
 * The implementation of {@link InviteBuilderDelegate}.
 */
public class InviteBuilderDelegateImpl implements InviteBuilderDelegate {

    /**
     * The channel the created invite will point to.
     */
    private final ServerChannel channel;

    /**
     * The audit log reason for the creation, or {@code null} for none.
     */
    private String auditLogReason = null;

    /**
     * How long the invite lasts, in seconds, before it expires (0 = never).
     * Defaults to one day.
     */
    private int maxAge = 86400;

    /**
     * How often the invite can be used before it becomes invalid (0 = unlimited).
     */
    private int maxUses = 0;

    /**
     * Whether the invite only grants temporary membership.
     */
    private boolean temporary = false;

    /**
     * Whether the invite should be unique.
     */
    private boolean unique = false;

    /**
     * Creates a new invite builder delegate.
     *
     * @param channel The channel for the invite.
     */
    public InviteBuilderDelegateImpl(ServerChannel channel) {
        this.channel = channel;
    }

    @Override
    public void setAuditLogReason(String reason) {
        this.auditLogReason = reason;
    }

    @Override
    public void setMaxAgeInSeconds(int maxAge) {
        this.maxAge = maxAge;
    }

    @Override
    public void setNeverExpire() {
        // A max age of zero means the invite never expires.
        setMaxAgeInSeconds(0);
    }

    @Override
    public void setMaxUses(int maxUses) {
        this.maxUses = maxUses;
    }

    @Override
    public void setTemporary(boolean temporary) {
        this.temporary = temporary;
    }

    @Override
    public void setUnique(boolean unique) {
        this.unique = unique;
    }

    @Override
    public CompletableFuture<Invite> create() {
        // POST the collected settings to the channel's invite endpoint and wrap
        // the returned JSON in an InviteImpl.
        return new RestRequest<Invite>(channel.getApi(), RestMethod.POST, RestEndpoint.CHANNEL_INVITE)
                .setUrlParameters(channel.getIdAsString())
                .setBody(JsonNodeFactory.instance.objectNode()
                        .put("max_age", maxAge)
                        .put("max_uses", maxUses)
                        .put("temporary", temporary)
                        .put("unique", unique))
                .setAuditLogReason(auditLogReason)
                .execute(result -> new InviteImpl(channel.getApi(), result.getJsonBody()));
    }
}
| 1,171 |
1,538 | <filename>src/exitfunc.c
/* ______ ___ ___
* /\ _ \ /\_ \ /\_ \
* \ \ \L\ \\//\ \ \//\ \ __ __ _ __ ___
* \ \ __ \ \ \ \ \ \ \ /'__`\ /'_ `\/\`'__\/ __`\
* \ \ \/\ \ \_\ \_ \_\ \_/\ __//\ \L\ \ \ \//\ \L\ \
* \ \_\ \_\/\____\/\____\ \____\ \____ \ \_\\ \____/
* \/_/\/_/\/____/\/____/\/____/\/___L\ \/_/ \/___/
* /\____/
* \_/__/
*
* List of functions to call at program cleanup.
*
* By <NAME>.
*
* See LICENSE.txt for copyright information.
*/
#include "allegro5/allegro.h"
#include "allegro5/internal/aintern_exitfunc.h"
/* dynamic registration system for cleanup code */
struct al_exit_func {
   void (*funcptr)(void);        /* the cleanup callback itself */
   const char *desc;             /* static debug description of the callback */
   struct al_exit_func *next;    /* singly-linked list, newest first */
};

/* Head of the registered-cleanups list; entries are pushed at the front, so
 * iteration order is reverse registration order. */
static struct al_exit_func *exit_func_list = NULL;
/* _al_add_exit_func:
 *  Registers `func' to be called at Allegro shutdown; duplicate
 *  registrations of the same function are ignored.  `desc' should point to
 *  a statically allocated string to help with debugging.
 */
void _al_add_exit_func(void (*func)(void), const char *desc)
{
   struct al_exit_func *node = exit_func_list;

   /* Ignore a function that is already registered. */
   while (node) {
      if (node->funcptr == func)
         return;
      node = node->next;
   }

   node = al_malloc(sizeof(struct al_exit_func));
   if (!node)
      return;   /* out of memory: silently drop the registration */

   node->funcptr = func;
   node->desc = desc;
   node->next = exit_func_list;
   exit_func_list = node;
}
/* _al_remove_exit_func:
 *  Removes `func' from the list of functions to be called on Allegro
 *  shutdown, if present.
 */
void _al_remove_exit_func(void (*func)(void))
{
   /* Walk via a pointer-to-link so head and interior removals are uniform. */
   struct al_exit_func **link = &exit_func_list;

   while (*link) {
      if ((*link)->funcptr == func) {
         struct al_exit_func *dead = *link;
         *link = dead->next;
         al_free(dead);
         return;
      }
      link = &(*link)->next;
   }
}
/* _al_run_exit_funcs:
 *  Runs all the functions registered with _al_add_exit_func, in reverse
 *  order of registration.  Each entry is unlinked before its callback runs,
 *  so a callback may itself register or remove exit functions safely.
 */
void _al_run_exit_funcs(void)
{
   while (exit_func_list != NULL) {
      void (*exit_fn)(void) = exit_func_list->funcptr;
      _al_remove_exit_func(exit_fn);
      exit_fn();
   }
}
/* vim: set sts=3 sw=3 et: */
| 1,095 |
14,668 | <reponame>zealoussnow/chromium<filename>chrome/installer/setup/setup_util_unittest.h<gh_stars>1000+
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_INSTALLER_SETUP_SETUP_UTIL_UNITTEST_H_
#define CHROME_INSTALLER_SETUP_SETUP_UTIL_UNITTEST_H_
// A command line switch that causes the test harness to exit with the result of
// DoProcessPriorityAdjustment rather than executing all tests.
extern const char kAdjustProcessPriority[];
// Process exit codes when the test harness is run with the
// kAdjustProcessPriority switch.
enum PriorityClassChangeResult {
  PCCR_UNKNOWN,    // Default/initial value; the adjustment result is not known.
  PCCR_UNCHANGED,  // AdjustProcessPriority() reported no change (false).
  PCCR_CHANGED,    // AdjustProcessPriority() reported a change (true).
};
// Calls AdjustProcessPriority() and returns PCCR_CHANGED or PCCR_UNCHANGED
// based on its true or false result (respectively).
PriorityClassChangeResult DoProcessPriorityAdjustment();
#endif // CHROME_INSTALLER_SETUP_SETUP_UTIL_UNITTEST_H_
| 318 |
488 | <reponame>maurizioabba/rose
// t0118.cc
// test getImplicitConversion
// copied from stdconv.h
enum StandardConversion {        // one conversion sequence, bit-packed as three group fields
  SC_IDENTITY        = 0x00,     // types are identical

  // conversion group 1 (comes first)
  SC_LVAL_TO_RVAL    = 0x01,     // 4.1: int& -> int
  SC_ARRAY_TO_PTR    = 0x02,     // 4.2: char[] -> char*
  SC_FUNC_TO_PTR     = 0x03,     // 4.3: int ()(int) -> int (*)(int)
  SC_GROUP_1_MASK    = 0x03,     // bits 0-1 select the group-1 conversion

  // conversion group 3 (comes last conceptually)
  SC_QUAL_CONV       = 0x04,     // 4.4: int* -> int const*
  SC_GROUP_3_MASK    = 0x04,     // bit 2 selects the group-3 conversion

  // conversion group 2 (goes in the middle)
  SC_INT_PROM        = 0x10,     // 4.5: int... -> int..., no info loss possible
  SC_FLOAT_PROM      = 0x20,     // 4.6: float -> double, no info loss possible
  SC_INT_CONV        = 0x30,     // 4.7: int... -> int..., info loss possible
  SC_FLOAT_CONV      = 0x40,     // 4.8: float... -> float..., info loss possible
  SC_FLOAT_INT_CONV  = 0x50,     // 4.9: int... <-> float..., info loss possible
  SC_PTR_CONV        = 0x60,     // 4.10: 0 -> Foo*, Child* -> Parent*
  SC_PTR_MEMB_CONV   = 0x70,     // 4.11: int Child::* -> int Parent::*
  SC_BOOL_CONV       = 0x80,     // 4.12: various types <-> bool
  SC_GROUP_2_MASK    = 0xF0,     // bits 4-7 select the group-2 conversion

  SC_ERROR           = 0xFF,     // cannot convert
};
// copied from implconv.h
enum Kind {              // result category of an implicit conversion (from implconv.h)
  IC_NONE,               // no conversion possible
  IC_STANDARD,           // 13.3.3.1.1: standard conversion sequence
  IC_USER_DEFINED,       // 13.3.3.1.2: user-defined conversion sequence
  IC_ELLIPSIS,           // 13.3.3.1.3: ellipsis conversion sequence
  IC_AMBIGUOUS,          // 13.3.3.1 para 10
  NUM_KINDS
} kind;                  // (also declares a dummy variable of this enum type)
struct A {                       // empty baseline class used as conversion source/target
};

struct B {                       // convertible FROM A via this converting ctor
  B(A const &);     // line 47
};

struct C : A { operator int();   // line 50
};                               // derives from A and converts to int

struct D {                       // two conversion operators: int and float
  operator int();     // line 55
  operator float();   // line 56
};

struct E {                       // char/float pair: promotions disambiguate
  operator char();    // line 60
  operator float();   // line 61
};

struct F {                       // one conversion operator per arithmetic type
  operator bool ();           // line 65
  operator char ();           // line 66
  operator signed char ();    // line 67
  operator unsigned char ();  // line 68
  operator short ();          // line 69
  operator unsigned short (); // line 70
  operator int ();            // line 71
  operator unsigned int ();   // line 72
  operator long ();           // line 73
  operator unsigned long ();  // line 74
  operator float ();          // line 75
  operator double ();         // line 76
  operator long double ();    // line 77
};

class G {                        // two pointer conversions, C* derived from A*
  operator A* ();     // line 81
  operator C* ();     // line 82
};

class H {                        // two unrelated pointer conversions
  operator A* ();     // line 86
  operator B* ();     // line 87
};

class I {                        // value and reference conversions to int
  operator int ();    // line 91
  operator int& ();   // line 92
};

class J {                        // two converting ctors from integer types
  J(int);     // line 96
  J(long);    // line 97
};

class L;
class K {                        // K(L) ctor competes with L::operator K below
  K(L const &);       // line 102
};

class L {
  operator K ();      // line 106
};

class L2;
class K2 {                       // as K/L, plus an extra ambiguous operator
  K2(L2 const &);     // line 111
};

class L2 {
  operator K2 ();     // line 115
  operator K2& ();    // line 116
};
// __getImplicitConversion(
//   <expression with source type>,
//   <expression with destination type>,
//   <implicit conversion kind>,
//   <standard conversion 1>,
//   <line number of user-defined conversion, or 0 if none>,
//   <standard conversion 2, or 0 if none>
// )
// Each call below asks the compiler's overload-resolution oracle to check the
// implicit conversion it computes against the expected answer.  The integer
// arguments name declarations above by their "// line NN" markers, so those
// line positions must not shift.
void f()
{
  // elementary conversions
  __getImplicitConversion((int)0, (int)0,
    IC_STANDARD, SC_IDENTITY, 0, 0);
  __getImplicitConversion((int)0, (int const &)0,
    IC_STANDARD, SC_IDENTITY,0,0);

  // constructor conversions
  A a;
  B b(a);
  C c;
  __getImplicitConversion((A)a, (B)b,
    IC_USER_DEFINED, SC_IDENTITY, 47, SC_IDENTITY);
  __getImplicitConversion((A&)a, (B)b,
    IC_USER_DEFINED, SC_IDENTITY, 47, SC_IDENTITY);
  __getImplicitConversion((C&)c, (B)b,
    IC_USER_DEFINED, SC_PTR_CONV, 47, SC_IDENTITY);
  __getImplicitConversion(c, (int)0,
    IC_USER_DEFINED, SC_IDENTITY, 50, SC_IDENTITY);

  // operator conversions
  D d;
  __getImplicitConversion(d, (int)0,
    IC_USER_DEFINED, SC_IDENTITY, 55, SC_IDENTITY);
  __getImplicitConversion(d, (float)0,
    IC_USER_DEFINED, SC_IDENTITY, 56, SC_IDENTITY);

  // int->char and float->char are both conversions, therefore it's ambiguous
  __getImplicitConversion(d, (char)0,
    IC_AMBIGUOUS, 0,0,0);

  E e;
  // char->int is a promotion, therefore preferred
  __getImplicitConversion(e, (int)0,
    IC_USER_DEFINED, SC_IDENTITY, 60, SC_INT_PROM);
  // float->double also a promotion
  __getImplicitConversion(e, (double)0,
    IC_USER_DEFINED, SC_IDENTITY, 61, SC_FLOAT_PROM);

  // F: with every arithmetic conversion operator available, each destination
  // type has an exact-match operator, so no ambiguity arises.
  F f;
  __getImplicitConversion(f, (bool)0,
    IC_USER_DEFINED, SC_IDENTITY, 65, SC_IDENTITY);
  __getImplicitConversion(f, (unsigned short)0,
    IC_USER_DEFINED, SC_IDENTITY, 70, SC_IDENTITY);
  __getImplicitConversion(f, (double)0,
    IC_USER_DEFINED, SC_IDENTITY, 76, SC_IDENTITY);

  // G: C* -> A* is a pointer conversion, so A* prefers the exact operator;
  // void* is reachable only through operator A* plus SC_PTR_CONV.
  G g;
  __getImplicitConversion(g, (A*)0,
    IC_USER_DEFINED, SC_IDENTITY, 81, SC_IDENTITY);
  __getImplicitConversion(g, (B*)0,
    IC_NONE, 0,0,0);
  __getImplicitConversion(g, (C*)0,
    IC_USER_DEFINED, SC_IDENTITY, 82, SC_IDENTITY);
  __getImplicitConversion(g, (void*)0,
    IC_USER_DEFINED, SC_IDENTITY, 81, SC_PTR_CONV);

  // H: both A* and B* can reach void*, hence ambiguous.
  H h;
  __getImplicitConversion(h, (A*)0,
    IC_USER_DEFINED, SC_IDENTITY, 86, SC_IDENTITY);
  __getImplicitConversion(h, (B*)0,
    IC_USER_DEFINED, SC_IDENTITY, 87, SC_IDENTITY);
  __getImplicitConversion(h, (void*)0,
    IC_AMBIGUOUS, 0,0,0);

  // binding to int& must use operator int& (line 92), not operator int.
  I i;
  int anInt;
  __getImplicitConversion(i, anInt /*int&*/,
    IC_USER_DEFINED, SC_IDENTITY, 92, SC_IDENTITY);

  J j(1);
  __getImplicitConversion((int)0, (J)j,
    IC_USER_DEFINED, SC_IDENTITY, 96, SC_IDENTITY);
  __getImplicitConversion((long)0, (J)j,
    IC_USER_DEFINED, SC_IDENTITY, 97, SC_IDENTITY);
  __getImplicitConversion((float)0, (J)j,
    IC_AMBIGUOUS, 0,0,0);

  // ambiguity between conversion operator and conversion constructor
  L ell;
  K k(ell);
  __getImplicitConversion(ell, (K)k,
    IC_AMBIGUOUS, 0,0,0);

  // ambiguity among 3: ambiguous conversion operator, and conversion constructor
  L2 ell2;
  K2 k2(ell2);
  __getImplicitConversion(ell2, (K2)k2,
    IC_AMBIGUOUS, 0,0,0);
}
| 3,479 |
1,475 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.operations;
import org.apache.geode.cache.operations.KeyOperationContext;
/**
 * Encapsulates a
 * {@link org.apache.geode.cache.operations.OperationContext.OperationCode#CONTAINS_KEY} region
 * operation, carrying the key object for the pre-operation case.
 *
 * @since GemFire 5.5
 * @deprecated since Geode1.0, use {@link org.apache.geode.security.ResourcePermission} instead
 */
public class ContainsKeyOperationContext extends KeyOperationContext {

  /**
   * Constructor for the operation.
   *
   * @param key the key this containsKey operation is performed on
   */
  public ContainsKeyOperationContext(Object key) {
    super(key);
  }

  /**
   * Return the operation associated with the <code>OperationContext</code> object.
   *
   * @return <code>OperationCode.CONTAINS_KEY</code>.
   */
  @Override
  public OperationCode getOperationCode() {
    return OperationCode.CONTAINS_KEY;
  }
}
| 467 |
17,703 | <reponame>dcillera/envoy
#include "source/extensions/filters/http/compressor/config.h"
#include "test/extensions/filters/http/compressor/mock_compressor_library.pb.h"
#include "test/mocks/server/factory_context.h"
#include "gtest/gtest.h"
namespace Envoy {
namespace Extensions {
namespace HttpFilters {
namespace Compressor {
namespace {
using testing::NiceMock;
// Forces the mock compressor-library proto to be linked into the test binary
// without registering a factory for it.
const ::test::mock_compressor_library::Unregistered _mock_compressor_library_dummy;

// A compressor filter config whose compressor_library names a proto type with
// no registered factory must be rejected with a descriptive exception.
TEST(CompressorFilterFactoryTests, UnregisteredCompressorLibraryConfig) {
  const std::string yaml_string = R"EOF(
compressor_library:
  name: fake_compressor
  typed_config:
    "@type": type.googleapis.com/test.mock_compressor_library.Unregistered
)EOF";
  envoy::extensions::filters::http::compressor::v3::Compressor proto_config;
  TestUtility::loadFromYaml(yaml_string, proto_config);
  CompressorFilterFactory factory;
  NiceMock<Server::Configuration::MockFactoryContext> context;
  EXPECT_THROW_WITH_MESSAGE(factory.createFilterFactoryFromProto(proto_config, "stats", context),
                            EnvoyException,
                            "Didn't find a registered implementation for type: "
                            "'test.mock_compressor_library.Unregistered'");
}
} // namespace
} // namespace Compressor
} // namespace HttpFilters
} // namespace Extensions
} // namespace Envoy
| 509 |
3,074 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import numpy as np
import argparse
import os
import re
import tensorflow as tf
import time
import glob
from azureml.core import Run
from utils import load_data
from tensorflow.keras import Model, layers
# Create TF Model.
class NeuralNet(Model):
    """Two-hidden-layer fully connected classifier.

    Layer widths come from the module-level globals ``n_h1``, ``n_h2`` and
    ``n_outputs`` that are parsed from the command line further below.
    """

    # Set layers.
    def __init__(self):
        super(NeuralNet, self).__init__()
        # First hidden layer.
        self.h1 = layers.Dense(n_h1, activation=tf.nn.relu)
        # Second hidden layer.
        self.h2 = layers.Dense(n_h2, activation=tf.nn.relu)
        # Output layer emits raw logits; softmax is applied in call().
        self.out = layers.Dense(n_outputs)

    # Set forward pass.
    def call(self, x, is_training=False):
        """Forward pass: logits while training, class probabilities otherwise."""
        x = self.h1(x)
        x = self.h2(x)
        x = self.out(x)
        if not is_training:
            # Apply softmax when not training.
            x = tf.nn.softmax(x)
        return x
def cross_entropy_loss(y, logits):
    """Mean sparse softmax cross-entropy between integer labels and logits."""
    # The sparse cross-entropy op expects int64 class indices.
    labels = tf.cast(y, tf.int64)
    per_example_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits)
    # Average the per-example losses across the batch.
    return tf.reduce_mean(per_example_loss)
# Accuracy metric.
def accuracy(y_pred, y_true):
    """Fraction of rows whose arg-max prediction matches the true label."""
    predicted_class = tf.argmax(y_pred, 1)
    matches = tf.equal(predicted_class, tf.cast(y_true, tf.int64))
    return tf.reduce_mean(tf.cast(matches, tf.float32), axis=-1)
# Optimization process.
def run_optimization(x, y):
    """Run one SGD step on the mini-batch (x, y).

    Relies on the module-level ``neural_net`` and ``optimizer`` globals defined
    below and mutates the model's trainable variables in place.
    """
    # Wrap computation inside a GradientTape for automatic differentiation.
    with tf.GradientTape() as g:
        # Forward pass.
        logits = neural_net(x, is_training=True)
        # Compute loss.
        loss = cross_entropy_loss(y, logits)
    # Variables to update, i.e. trainable variables.
    trainable_variables = neural_net.trainable_variables
    # Compute gradients.
    gradients = g.gradient(loss, trainable_variables)
    # Update W and b following gradients.
    optimizer.apply_gradients(zip(gradients, trainable_variables))
print("TensorFlow version:", tf.__version__)

# Command-line interface: data location, model size and training hyperparameters.
parser = argparse.ArgumentParser()
parser.add_argument('--data-folder', type=str, dest='data_folder', default='data', help='data folder mounting point')
parser.add_argument('--batch-size', type=int, dest='batch_size', default=128, help='mini batch size for training')
parser.add_argument('--first-layer-neurons', type=int, dest='n_hidden_1', default=128,
                    help='# of neurons in the first layer')
parser.add_argument('--second-layer-neurons', type=int, dest='n_hidden_2', default=128,
                    help='# of neurons in the second layer')
parser.add_argument('--learning-rate', type=float, dest='learning_rate', default=0.01, help='learning rate')
parser.add_argument('--resume-from', type=str, default=None,
                    help='location of the model or checkpoint files from where to resume the training')
args = parser.parse_args()

previous_model_location = args.resume_from
# You can also use environment variable to get the model/checkpoint files location
# previous_model_location = os.path.expandvars(os.getenv("AZUREML_DATAREFERENCE_MODEL_LOCATION", None))

data_folder = args.data_folder
print('Data folder:', data_folder)
# load train and test set into numpy arrays
# note we scale the pixel intensity values to 0-1 (by dividing it with 255.0) so the model can converge faster.
# The glob with '**' locates the MNIST gz files anywhere under data_folder
# (the exact subfolder depends on how the dataset was mounted).
X_train = load_data(glob.glob(os.path.join(data_folder, '**/train-images-idx3-ubyte.gz'),
                              recursive=True)[0], False) / np.float32(255.0)
X_test = load_data(glob.glob(os.path.join(data_folder, '**/t10k-images-idx3-ubyte.gz'),
                             recursive=True)[0], False) / np.float32(255.0)
y_train = load_data(glob.glob(os.path.join(data_folder, '**/train-labels-idx1-ubyte.gz'),
                              recursive=True)[0], True).reshape(-1)
y_test = load_data(glob.glob(os.path.join(data_folder, '**/t10k-labels-idx1-ubyte.gz'),
                             recursive=True)[0], True).reshape(-1)
print(X_train.shape, y_train.shape, X_test.shape, y_test.shape, sep='\n')
training_set_size = X_train.shape[0]
# Network dimensions: 28x28 MNIST images in, 10 digit classes out.
n_inputs = 28 * 28
n_h1 = args.n_hidden_1
n_h2 = args.n_hidden_2
n_outputs = 10
learning_rate = args.learning_rate
n_epochs = 20
batch_size = args.batch_size

# Build neural network model.
neural_net = NeuralNet()

# Stochastic gradient descent optimizer.
optimizer = tf.optimizers.SGD(learning_rate)

# start an Azure ML run
run = Run.get_context()
if previous_model_location:
    # Restore variables from latest checkpoint.
    checkpoint = tf.train.Checkpoint(model=neural_net, optimizer=optimizer)
    checkpoint_file_path = tf.train.latest_checkpoint(previous_model_location)
    checkpoint.restore(checkpoint_file_path)
    # Recover the epoch number embedded in the checkpoint filename.
    checkpoint_filename = os.path.basename(checkpoint_file_path)
    num_found = re.search(r'\d+', checkpoint_filename)
    if num_found:
        start_epoch = int(num_found.group(0))
        # NOTE(review): start_epoch is computed and printed here, but the
        # training loop below always iterates range(0, n_epochs) — resuming
        # re-trains every epoch. Confirm whether the loop should start at
        # start_epoch instead.
        print("Resuming from epoch {}".format(str(start_epoch)))
start_time = time.perf_counter()
for epoch in range(0, n_epochs):

    # randomly shuffle training set
    indices = np.random.permutation(training_set_size)
    X_train = X_train[indices]
    y_train = y_train[indices]

    # batch index
    b_start = 0
    b_end = b_start + batch_size
    for _ in range(training_set_size // batch_size):
        # get a batch
        X_batch, y_batch = X_train[b_start: b_end], y_train[b_start: b_end]

        # update batch index for the next batch
        b_start = b_start + batch_size
        b_end = min(b_start + batch_size, training_set_size)

        # train
        run_optimization(X_batch, y_batch)

    # evaluate training set (on the last mini-batch of the epoch)
    pred = neural_net(X_batch, is_training=False)
    acc_train = accuracy(pred, y_batch)

    # evaluate validation set
    pred = neural_net(X_test, is_training=False)
    acc_val = accuracy(pred, y_test)

    # log accuracies; use the builtin float() — np.float was a deprecated
    # alias for it and was removed in NumPy 1.24.
    run.log('training_acc', float(acc_train))
    run.log('validation_acc', float(acc_val))
    print(epoch, '-- Training accuracy:', acc_train, '\b Validation accuracy:', acc_val)

    # Save checkpoints in the "./outputs" folder so that they are automatically uploaded into run history.
    checkpoint_dir = './outputs/'
    checkpoint = tf.train.Checkpoint(model=neural_net, optimizer=optimizer)

    if epoch % 2 == 0:
        checkpoint.save(checkpoint_dir)
        time.sleep(3)
# Use the builtin float() — np.float was a deprecated alias for it and was
# removed in NumPy 1.24.
run.log('final_acc', float(acc_val))
os.makedirs('./outputs/model', exist_ok=True)

# files saved in the "./outputs" folder are automatically uploaded into run history
# this is workaround for https://github.com/tensorflow/tensorflow/issues/33913 and will be fixed once we move to >tf2.1
neural_net._set_inputs(X_train)
tf.saved_model.save(neural_net, './outputs/model/')

stop_time = time.perf_counter()
training_time = (stop_time - start_time) * 1000
print("Total time in milliseconds for training: {}".format(str(training_time)))
| 2,760 |
523 | package io.github.droidkaigi.confsched2017.util;
import android.app.Application;
/**
 * No-op implementation of the FPS measurement helper; both entry points do
 * nothing. (Presumably a measuring variant exists in another build flavor —
 * verify against the debug source set.)
 */
public class FpsMeasureUtil {

    private FpsMeasureUtil() {
        // Static utility class; never instantiated.
    }

    /**
     * Starts FPS measurement. Intentionally does nothing in this variant.
     *
     * @param application the host application
     */
    public static void play(Application application) {
        // no-op.
    }

    /**
     * Stops FPS measurement. Intentionally does nothing in this variant.
     */
    public static void finish() {
        // no-op.
    }
}
| 116 |
2,221 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.channel.file;
import org.apache.flume.Event;
/**
 * Helper methods for working with {@link TransactionEventRecord} instances.
 */
public class EventUtils {

  /**
   * Returns the Event encapsulated by a Put wrapper.
   *
   * @param transactionEventRecord TransactionEvent
   * @return Event if Put instance is present, null otherwise
   */
  public static Event getEventFromTransactionEvent(TransactionEventRecord transactionEventRecord) {
    if (!(transactionEventRecord instanceof Put)) {
      return null;
    }
    return ((Put) transactionEventRecord).getEvent();
  }
}
| 353 |
2,151 | <reponame>zipated/src
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/password_manager/core/browser/hsts_query.h"
#include "base/bind.h"
#include "base/location.h"
#include "base/task_runner_util.h"
#include "net/url_request/url_request_context.h"
#include "url/gurl.h"
namespace password_manager {
namespace {
bool IsHSTSActiveForHostAndRequestContext(
const GURL& origin,
const scoped_refptr<net::URLRequestContextGetter>& request_context) {
if (!origin.is_valid())
return false;
net::TransportSecurityState* security_state =
request_context->GetURLRequestContext()->transport_security_state();
if (!security_state)
return false;
return security_state->ShouldUpgradeToSSL(origin.host());
}
} // namespace
// Runs the HSTS lookup for |origin| on |request_context|'s network task runner
// (where the TransportSecurityState is accessed) and replies with the boolean
// result via |callback| on the posting sequence.
void PostHSTSQueryForHostAndRequestContext(
    const GURL& origin,
    const scoped_refptr<net::URLRequestContextGetter>& request_context,
    const HSTSCallback& callback) {
  base::PostTaskAndReplyWithResult(
      request_context->GetNetworkTaskRunner().get(), FROM_HERE,
      base::Bind(&IsHSTSActiveForHostAndRequestContext, origin,
                 request_context),
      callback);
}
} // namespace password_manager
| 449 |
466 | //---------------------------------------------------------------------------
// Greenplum Database
// Copyright (C) 2011 Greenplum, Inc.
//
// @filename:
// CIdGenerator.h
//
// @doc:
// Class providing methods for a ULONG counter
//
// @owner:
//
//
// @test:
//
//---------------------------------------------------------------------------
#ifndef GPDXL_CIdGenerator_H
#define GPDXL_CIdGenerator_H
#define GPDXL_INVALID_ID gpos::ulong_max
#include "gpos/base.h"
namespace gpdxl
{
using namespace gpos;
// Counter-backed ULONG id dispenser (implementation in CIdGenerator.cpp).
class CIdGenerator
{
private:
	// current counter value
	ULONG id;

public:
	// ctor; presumably seeds the counter with the given value — see .cpp
	explicit CIdGenerator(ULONG);

	// returns the next id, advancing the counter
	ULONG next_id();

	// returns the current id without advancing the counter
	ULONG current_id();
};
} // namespace gpdxl
#endif // GPDXL_CIdGenerator_H
// EOF
| 243 |
381 | <reponame>AriVuolas/helix
package org.apache.helix.model;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import org.apache.helix.ConfigAccessor;
import org.apache.helix.HelixException;
import org.apache.helix.PropertyKey.Builder;
import org.apache.helix.TestHelper;
import org.apache.helix.ZkUnitTestBase;
import org.apache.helix.manager.zk.ZKHelixDataAccessor;
import java.util.List;
import org.apache.helix.manager.zk.ZkBaseDataAccessor;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Tests reading and writing {@link CustomizedStateConfig} through ZooKeeper.
 */
public class TestCustomizedStateConfig extends ZkUnitTestBase {

  /**
   * Reading the config of a cluster that was never set up must throw.
   */
  @Test(expectedExceptions = HelixException.class)
  public void TestCustomizedStateConfigNonExistentCluster() {
    String className = getShortClassName();
    String clusterName = "CLUSTER_" + className;
    // Read CustomizedStateConfig from Zookeeper and get exception since cluster is not set up yet
    ConfigAccessor _configAccessor = new ConfigAccessor(ZK_ADDR);
    CustomizedStateConfig customizedStateConfig =
        _configAccessor.getCustomizedStateConfig(clusterName);
  }

  /**
   * An empty cluster has no CustomizedStateConfig, so the accessor returns null.
   */
  @Test(dependsOnMethods = "TestCustomizedStateConfigNonExistentCluster")
  public void testCustomizedStateConfigNull() {
    String className = getShortClassName();
    String clusterName = "CLUSTER_" + className;
    TestHelper.setupEmptyCluster(_gZkClient, clusterName);
    // Read CustomizedStateConfig from Zookeeper
    ConfigAccessor _configAccessor = new ConfigAccessor(ZK_ADDR);
    CustomizedStateConfig customizedStateConfigFromZk =
        _configAccessor.getCustomizedStateConfig(clusterName);
    Assert.assertNull(customizedStateConfigFromZk);
  }

  /**
   * Writes a CustomizedStateConfig to ZooKeeper and verifies the round-tripped content.
   */
  @Test(dependsOnMethods = "testCustomizedStateConfigNull")
  public void testCustomizedStateConfig() {
    String className = getShortClassName();
    String clusterName = "CLUSTER_" + className;
    TestHelper.setupEmptyCluster(_gZkClient, clusterName);

    // Create dummy CustomizedStateConfig object
    CustomizedStateConfig.Builder customizedStateConfigBuilder =
        new CustomizedStateConfig.Builder();
    List<String> aggregationEnabledTypes = new ArrayList<String>();
    aggregationEnabledTypes.add("mockType1");
    aggregationEnabledTypes.add("mockType2");
    customizedStateConfigBuilder.setAggregationEnabledTypes(aggregationEnabledTypes);
    CustomizedStateConfig customizedStateConfig =
        customizedStateConfigBuilder.build();

    // Write the CustomizedStateConfig to Zookeeper
    ZKHelixDataAccessor accessor =
        new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor(ZK_ADDR));
    Builder keyBuilder = accessor.keyBuilder();
    accessor.setProperty(keyBuilder.customizedStateConfig(),
        customizedStateConfig);

    // Read CustomizedStateConfig from Zookeeper and check the content.
    // Verify the ROUND-TRIPPED config: the previous version asserted against
    // the local input list, so a broken serialization path could never fail.
    ConfigAccessor _configAccessor = new ConfigAccessor(ZK_ADDR);
    CustomizedStateConfig customizedStateConfigFromZk =
        _configAccessor.getCustomizedStateConfig(clusterName);
    List<String> aggregationEnabledTypesFromZk =
        customizedStateConfigFromZk.getAggregationEnabledTypes();
    Assert.assertEquals(aggregationEnabledTypesFromZk.size(), 2);
    Assert.assertEquals(aggregationEnabledTypesFromZk.get(0), "mockType1");
    Assert.assertEquals(aggregationEnabledTypesFromZk.get(1), "mockType2");
  }

  /**
   * Exercises the builder's incremental add API and its getter, then verifies
   * the built config round-trips through ZooKeeper.
   */
  @Test(dependsOnMethods = "testCustomizedStateConfig")
  public void testCustomizedStateConfigBuilder() {
    String className = getShortClassName();
    String clusterName = "CLUSTER_" + className;
    TestHelper.setupEmptyCluster(_gZkClient, clusterName);

    CustomizedStateConfig.Builder builder =
        new CustomizedStateConfig.Builder();
    builder.addAggregationEnabledType("mockType1");
    builder.addAggregationEnabledType("mockType2");

    // Check builder getter methods
    List<String> aggregationEnabledTypes = builder.getAggregationEnabledTypes();
    Assert.assertEquals(aggregationEnabledTypes.size(), 2);
    Assert.assertEquals(aggregationEnabledTypes.get(0), "mockType1");
    Assert.assertEquals(aggregationEnabledTypes.get(1), "mockType2");

    CustomizedStateConfig customizedStateConfig = builder.build();

    ZKHelixDataAccessor accessor =
        new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor(ZK_ADDR));
    Builder keyBuilder = accessor.keyBuilder();
    accessor.setProperty(keyBuilder.customizedStateConfig(),
        customizedStateConfig);

    // Read CustomizedStateConfig from Zookeeper and check the content
    ConfigAccessor _configAccessor = new ConfigAccessor(ZK_ADDR);
    CustomizedStateConfig customizedStateConfigFromZk =
        _configAccessor.getCustomizedStateConfig(clusterName);
    List<String> aggregationEnabledTypesFromZk =
        customizedStateConfigFromZk.getAggregationEnabledTypes();
    Assert.assertEquals(aggregationEnabledTypesFromZk.get(0), "mockType1");
    Assert.assertEquals(aggregationEnabledTypesFromZk.get(1), "mockType2");
  }
}
| 1,746 |
535 | /**\mainpage
* Copyright (C) 2017 - 2019 Bosch Sensortec GmbH
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the copyright holder nor the names of the
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDER
* OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
* OR CONSEQUENTIAL DAMAGES(INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
*
* File bmp388_priv.h
* Date 10 May 2019
* Version 1.0.2
*
*/
#ifndef __BMP388_PRIV_H__
#define __BMP388_PRIV_H__

#ifdef __cplusplus
extern "C" {
#endif

/* Bit OR-ed into the register address to signal a read transaction on SPI. */
#define BMP388_SPI_READ_CMD_BIT 0x80

/* Address of the identification register (name suggests chip-id; confirm
 * against the BMP388 datasheet register map). */
#define BMP388_REG_WHO_AM_I 0x00

/* Write a single byte 'value' to register 'reg' over I2C.
 * Returns an int8_t status code. */
int8_t bmp388_i2c_write8(struct sensor_itf *itf, uint8_t reg, uint8_t value);

/* Read 'len' bytes starting at register 'reg' into 'buffer' over I2C.
 * Returns an int8_t status code. */
int8_t bmp388_i2c_readlen(struct sensor_itf *itf, uint8_t reg, uint8_t *buffer, uint16_t len);

/* Read 'len' bytes starting at register 'reg' into 'buffer' over SPI.
 * Returns an int8_t status code. */
int8_t bmp388_spi_readlen(struct sensor_itf *itf, uint8_t reg, uint8_t *buffer, uint16_t len);

/* Bus-agnostic read helper for the bmp388 device handle.
 * Returns an int8_t status code. */
int8_t bmp388_readlen(struct bmp388 *bmp388, uint8_t reg, uint8_t *buffer, uint16_t len);

#ifdef __cplusplus
}
#endif

#endif /* __BMP388_PRIV_H__ */
| 768 |
3,702 | // Copyright (c) YugaByte, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations
// under the License.
//
package org.yb.loadtester;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.yb.util.YBTestRunnerNonTsanOnly;
import org.yb.minicluster.MiniYBCluster;
import static junit.framework.TestCase.assertTrue;
import static junit.framework.TestCase.assertFalse;
/**
* This is an integration test that ensures we can fully move a YB cluster
* without any significant impact to a running load test.
*/
@RunWith(value=YBTestRunnerNonTsanOnly.class)
public class TestClusterIsLoadBalancerIdle extends TestClusterBase {

  // Cluster shape: replication factor 3, starting with 3 masters and 3 tservers.
  @Override
  protected int getReplicationFactor() {
    return 3;
  }

  @Override
  protected int getInitialNumMasters() {
    return 3;
  }

  @Override
  protected int getInitialNumTServers() {
    return 3;
  }

  /**
   * Verifies that the load balancer reaches a balanced, idle state for a
   * multi-table cluster, and stays balanced after a tserver is added.
   */
  @Test(timeout = TEST_TIMEOUT_SEC * 1000) // 20 minutes.
  public void testClusterIsLoadBalancerIdle() throws Exception {
    // Setup test tables.
    int num_tables = 7;
    for (int i = 0; i < num_tables; i++) {
      setupTable("test_table_" + i, 2);
    }

    // Wait for the partition metadata to refresh.
    Thread.sleep(num_tables * MiniYBCluster.CQL_NODE_LIST_REFRESH_SECS * 1000);

    // Wait for load to balance across the three tservers.
    assertTrue(client.waitForLoadBalance(LOADBALANCE_TIMEOUT_MS, NUM_TABLET_SERVERS));

    // Load should be balanced and load balancer should be idle.
    verifyClusterHealth(NUM_TABLET_SERVERS);
    assertTrue(client.waitForLoadBalance(LOADBALANCE_TIMEOUT_MS, NUM_TABLET_SERVERS));
    assertTrue(client.waitForLoadBalancerIdle(LOADBALANCE_TIMEOUT_MS));

    // Add a new tserver.
    addNewTServers(1);

    // Wait for the partition metadata to refresh.
    Thread.sleep(num_tables * MiniYBCluster.CQL_NODE_LIST_REFRESH_SECS * 1000);
    verifyClusterHealth(NUM_TABLET_SERVERS + 1);

    // Load should be balanced and load balancer should be idle.
    assertTrue(client.waitForLoadBalance(LOADBALANCE_TIMEOUT_MS, NUM_TABLET_SERVERS + 1));
    assertTrue(client.waitForLoadBalancerIdle(LOADBALANCE_TIMEOUT_MS));
  }
}
| 861 |
16,259 | <gh_stars>1000+
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from .block_pair_dataset import BlockPairDataset
from .masked_lm_dataset import MaskedLMDataset
from .masked_lm_dictionary import BertDictionary, MaskedLMDictionary
# Public re-exports of this package; kept alphabetically sorted.
__all__ = [
    "BertDictionary",
    "BlockPairDataset",
    "MaskedLMDataset",
    "MaskedLMDictionary",
]
| 166 |
4,996 | import base64
from typing import List, TYPE_CHECKING, Union, Optional
from binascii import hexlify
from itertools import chain
from lbry.error import ResolveCensoredError
from lbry.schema.types.v2.result_pb2 import Outputs as OutputsMessage
from lbry.schema.types.v2.result_pb2 import Error as ErrorMessage
if TYPE_CHECKING:
from lbry.wallet.server.leveldb import ResolveResult
# Human-readable names for the protobuf error codes, used when building
# error dictionaries from Error messages below.
INVALID = ErrorMessage.Code.Name(ErrorMessage.INVALID)
NOT_FOUND = ErrorMessage.Code.Name(ErrorMessage.NOT_FOUND)
BLOCKED = ErrorMessage.Code.Name(ErrorMessage.BLOCKED)
def set_reference(reference, claim_hash, rows):
    """Point ``reference`` (a protobuf reference message) at the txo in
    ``rows`` whose ``claim_hash`` matches, copying its tx_hash/position/height.

    No-op when ``claim_hash`` is falsy or no row matches.
    """
    if not claim_hash:
        return
    match = next((txo for txo in rows if txo.claim_hash == claim_hash), None)
    if match is not None:
        reference.tx_hash = match.tx_hash
        reference.nout = match.position
        reference.height = match.height
class Censor:
    """Filters result rows whose censor level meets this instance's threshold,
    recording which channel censored which transactions."""

    NOT_CENSORED = 0
    SEARCH = 1
    RESOLVE = 2

    __slots__ = 'censor_type', 'censored'

    def __init__(self, censor_type):
        # Threshold: rows with censor_type at or above this are censored.
        self.censor_type = censor_type
        # Maps censoring channel hash -> set of censored tx hashes.
        self.censored = {}

    def is_censored(self, row):
        level = row.get('censor_type') or self.NOT_CENSORED
        return level >= self.censor_type

    def apply(self, rows):
        # Keep only the rows that pass; censor() records the rejected ones.
        return [row for row in rows if not self.censor(row)]

    def censor(self, row) -> Optional[bytes]:
        """Record and return the censoring channel hash, or None if the row
        is not censored."""
        if not self.is_censored(row):
            return None
        channel_hash = bytes.fromhex(row['censoring_channel_id'])[::-1]
        self.censored.setdefault(channel_hash, set()).add(row['tx_hash'])
        return channel_hash

    def to_message(self, outputs: OutputsMessage, extra_txo_rows: dict):
        """Serialize the recorded censorship tallies into ``outputs.blocked``."""
        for channel_hash, tx_hashes in self.censored.items():
            blocked_entry = outputs.blocked.add()
            blocked_entry.count = len(tx_hashes)
            set_reference(blocked_entry.channel, channel_hash, extra_txo_rows)
            outputs.blocked_total += len(tx_hashes)
class Outputs:
    """Encodes/decodes resolve and search results to the Outputs protobuf
    wire format, and re-attaches ("inflates") fetched transactions onto the
    decoded messages."""

    __slots__ = 'txos', 'extra_txos', 'txs', 'offset', 'total', 'blocked', 'blocked_total'

    def __init__(self, txos: List, extra_txos: List, txs: set,
                 offset: int, total: int, blocked: List, blocked_total: int):
        # Primary result txo messages (may include error messages).
        self.txos = txos
        # Set of (tx hex id, height) pairs referenced by the messages.
        self.txs = txs
        # Referenced txos (channels, reposts) needed to resolve the primary ones.
        self.extra_txos = extra_txos
        # Pagination offset and total result count.
        self.offset = offset
        self.total = total
        # Per-channel censorship entries and overall censored count.
        self.blocked = blocked
        self.blocked_total = blocked_total

    def inflate(self, txs):
        """Attach fetched transactions to the decoded messages.

        ``txs`` are transaction objects for every hash in ``self.txs``.
        Returns (list of txos, blocked-info dict).
        """
        tx_map = {tx.hash: tx for tx in txs}
        # Materialize the referenced txos first so channel/repost references
        # can be attached when the primary txos are built below.
        for txo_message in self.extra_txos:
            self.message_to_txo(txo_message, tx_map)
        txos = [self.message_to_txo(txo_message, tx_map) for txo_message in self.txos]
        return txos, self.inflate_blocked(tx_map)

    def inflate_blocked(self, tx_map):
        # Summarize censorship info: total count plus per-channel breakdown.
        return {
            "total": self.blocked_total,
            "channels": [{
                'channel': self.message_to_txo(blocked.channel, tx_map),
                'blocked': blocked.count
            } for blocked in self.blocked]
        }

    def message_to_txo(self, txo_message, tx_map):
        """Convert one protobuf txo message into a txo object (or an error
        dict for error messages). Returns None if the tx is not in tx_map."""
        if txo_message.WhichOneof('meta') == 'error':
            error = {
                'error': {
                    'name': txo_message.error.Code.Name(txo_message.error.code),
                    'text': txo_message.error.text,
                }
            }
            # Blocked errors also carry a reference to the censoring channel.
            if error['error']['name'] == BLOCKED:
                error['error']['censor'] = self.message_to_txo(
                    txo_message.error.blocked.channel, tx_map
                )
            return error
        tx = tx_map.get(txo_message.tx_hash)
        if not tx:
            return
        txo = tx.outputs[txo_message.nout]
        if txo_message.WhichOneof('meta') == 'claim':
            claim = txo_message.claim
            txo.meta = {
                'short_url': f'lbry://{claim.short_url}',
                # Fall back to the short url when no canonical url is set.
                'canonical_url': f'lbry://{claim.canonical_url or claim.short_url}',
                'reposted': claim.reposted,
                'is_controlling': claim.is_controlling,
                'take_over_height': claim.take_over_height,
                'creation_height': claim.creation_height,
                'activation_height': claim.activation_height,
                'expiration_height': claim.expiration_height,
                'effective_amount': claim.effective_amount,
                'support_amount': claim.support_amount,
                # 'trending_group': claim.trending_group,
                # 'trending_mixed': claim.trending_mixed,
                # 'trending_local': claim.trending_local,
                # 'trending_global': claim.trending_global,
            }
            # Attach referenced channel/repost txos (already in tx_map via
            # extra_txos).
            if claim.HasField('channel'):
                txo.channel = tx_map[claim.channel.tx_hash].outputs[claim.channel.nout]
            if claim.HasField('repost'):
                txo.reposted_claim = tx_map[claim.repost.tx_hash].outputs[claim.repost.nout]
            try:
                if txo.claim.is_channel:
                    txo.meta['claims_in_channel'] = claim.claims_in_channel
            except:
                # NOTE(review): bare except silently swallows claim decode
                # failures here — presumably intentional best-effort; confirm.
                pass
        return txo

    @classmethod
    def from_base64(cls, data: str) -> 'Outputs':
        """Decode a base64-wrapped Outputs protobuf payload."""
        return cls.from_bytes(base64.b64decode(data))

    @classmethod
    def from_bytes(cls, data: bytes) -> 'Outputs':
        """Parse a serialized Outputs message, collecting the set of
        (tx hex id, height) pairs that must be fetched for inflate()."""
        outputs = OutputsMessage()
        outputs.ParseFromString(data)
        txs = set()
        for txo_message in chain(outputs.txos, outputs.extra_txos):
            if txo_message.WhichOneof('meta') == 'error':
                continue
            # tx_hash is stored little-endian; reverse before hex-encoding.
            txs.add((hexlify(txo_message.tx_hash[::-1]).decode(), txo_message.height))
        return cls(
            outputs.txos, outputs.extra_txos, txs,
            outputs.offset, outputs.total,
            outputs.blocked, outputs.blocked_total
        )

    @classmethod
    def from_grpc(cls, outputs: OutputsMessage) -> 'Outputs':
        """Same as from_bytes() but starting from an already-parsed message."""
        txs = set()
        for txo_message in chain(outputs.txos, outputs.extra_txos):
            if txo_message.WhichOneof('meta') == 'error':
                continue
            txs.add((hexlify(txo_message.tx_hash[::-1]).decode(), txo_message.height))
        return cls(
            outputs.txos, outputs.extra_txos, txs,
            outputs.offset, outputs.total,
            outputs.blocked, outputs.blocked_total
        )

    @classmethod
    def to_base64(cls, txo_rows, extra_txo_rows, offset=0, total=None, blocked=None) -> str:
        """Serialize rows to a base64 string (see to_bytes())."""
        return base64.b64encode(cls.to_bytes(txo_rows, extra_txo_rows, offset, total, blocked)).decode()

    @classmethod
    def to_bytes(cls, txo_rows, extra_txo_rows, offset=0, total=None, blocked: Censor = None) -> bytes:
        """Serialize resolve/search result rows (ResolveResult objects or
        Exceptions) into an Outputs protobuf byte string."""
        page = OutputsMessage()
        page.offset = offset
        if total is not None:
            page.total = total
        if blocked is not None:
            blocked.to_message(page, extra_txo_rows)
        for row in extra_txo_rows:
            txo_message: 'OutputsMessage' = page.extra_txos.add()
            if not isinstance(row, Exception):
                # Link channel/repost references to other rows in this page.
                if row.channel_hash:
                    set_reference(txo_message.claim.channel, row.channel_hash, extra_txo_rows)
                if row.reposted_claim_hash:
                    set_reference(txo_message.claim.repost, row.reposted_claim_hash, extra_txo_rows)
            cls.encode_txo(txo_message, row)
        for row in txo_rows:
            # cls.row_to_message(row, page.txos.add(), extra_txo_rows)
            txo_message: 'OutputsMessage' = page.txos.add()
            cls.encode_txo(txo_message, row)
            if not isinstance(row, Exception):
                if row.channel_hash:
                    set_reference(txo_message.claim.channel, row.channel_hash, extra_txo_rows)
                if row.reposted_claim_hash:
                    set_reference(txo_message.claim.repost, row.reposted_claim_hash, extra_txo_rows)
            elif isinstance(row, ResolveCensoredError):
                # Censored results point back at the censoring channel.
                set_reference(txo_message.error.blocked.channel, row.censor_id, extra_txo_rows)
        return page.SerializeToString()

    @classmethod
    def encode_txo(cls, txo_message, resolve_result: Union['ResolveResult', Exception]):
        """Fill one protobuf txo message from a ResolveResult, or encode an
        Exception as the appropriate error code."""
        if isinstance(resolve_result, Exception):
            txo_message.error.text = resolve_result.args[0]
            if isinstance(resolve_result, ValueError):
                txo_message.error.code = ErrorMessage.INVALID
            elif isinstance(resolve_result, LookupError):
                txo_message.error.code = ErrorMessage.NOT_FOUND
            elif isinstance(resolve_result, ResolveCensoredError):
                txo_message.error.code = ErrorMessage.BLOCKED
            return
        txo_message.tx_hash = resolve_result.tx_hash
        txo_message.nout = resolve_result.position
        txo_message.height = resolve_result.height
        txo_message.claim.short_url = resolve_result.short_url
        txo_message.claim.reposted = resolve_result.reposted
        txo_message.claim.is_controlling = resolve_result.is_controlling
        txo_message.claim.creation_height = resolve_result.creation_height
        txo_message.claim.activation_height = resolve_result.activation_height
        txo_message.claim.expiration_height = resolve_result.expiration_height
        txo_message.claim.effective_amount = resolve_result.effective_amount
        txo_message.claim.support_amount = resolve_result.support_amount
        # Optional fields: only set when present to keep proto defaults.
        if resolve_result.canonical_url is not None:
            txo_message.claim.canonical_url = resolve_result.canonical_url
        if resolve_result.last_takeover_height is not None:
            txo_message.claim.take_over_height = resolve_result.last_takeover_height
        if resolve_result.claims_in_channel is not None:
            txo_message.claim.claims_in_channel = resolve_result.claims_in_channel
| 4,666 |
14,668 | <reponame>zealoussnow/chromium
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "remoting/host/audio_silence_detector.h"
#include <stdint.h>
#include "base/cxx17_backports.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace remoting {
namespace {
const int kSamplingRate = 1000;
// Repeatedly feeds |samples| (3 seconds of audio in total at kSamplingRate)
// into |target| and verifies that silence is (or is not) detected, and that
// detection kicks in within a reasonable time window.
void TestSilenceDetector(AudioSilenceDetector* target,
                         const int16_t* samples,
                         int samples_count,
                         bool silence_expected) {
  target->Reset(kSamplingRate, 1);
  bool silence_started = false;
  int threshold_length = 0;
  // 3 * kSamplingRate / samples_count iterations == 3 seconds of audio.
  for (int i = 0; i < 3 * kSamplingRate / samples_count; ++i) {
    bool result = target->IsSilence(samples, samples_count);
    if (silence_started) {
      // Once silence is reported it must stay reported for constant input.
      ASSERT_TRUE(result);
    } else if (result) {
      silence_started = true;
      threshold_length = i * samples_count;
    }
  }

  // Check that the silence was detected if it was expected.
  EXPECT_EQ(silence_expected, silence_started);

  if (silence_expected) {
    // Check that silence threshold is between 0.5 and 2 seconds.
    EXPECT_GE(threshold_length, kSamplingRate / 2);
    EXPECT_LE(threshold_length, kSamplingRate * 2);
  }
}
} // namespace
// All-zero samples with a zero threshold must be reported as silence.
TEST(AudioSilenceDetectorTest, Silence) {
  const int16_t kSamples[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0};

  AudioSilenceDetector target(0);
  TestSilenceDetector(&target, kSamples, base::size(kSamples), true);
}

// Clearly non-zero samples must never be reported as silence.
TEST(AudioSilenceDetectorTest, Sound) {
  const int16_t kSamples[] = {65, 73, 83, 89, 92, -1, 5, 9, 123, 0};

  AudioSilenceDetector target(0);
  TestSilenceDetector(&target, kSamples, base::size(kSamples), false);
}

// Samples with amplitude 1 straddle the threshold: detected as sound with
// threshold 0, but as silence with threshold 1.
TEST(AudioSilenceDetectorTest, Threshold) {
  const int16_t kSamples[] = {0, 0, 0, 0, 1, 0, 0, -1, 0, 0};

  AudioSilenceDetector target1(0);
  TestSilenceDetector(&target1, kSamples, base::size(kSamples), false);

  AudioSilenceDetector target2(1);
  TestSilenceDetector(&target2, kSamples, base::size(kSamples), true);
}
} // namespace remoting
| 824 |
350 | package ar.rulosoft.mimanganu.componentes;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.UiThread;
import ar.rulosoft.mimanganu.R;
import ar.rulosoft.mimanganu.utils.ThemeColors;
/**
* Series information.
* Created by Raul on 16/05/2015.
*/
// used in MangaFragment
public class ControlInfoNoScroll extends LinearLayout implements Imaginable {

    // Accent-colored divider/background views tinted in setColor().
    private View blockSummaryView;
    private View lineAuthorView;
    private View lineStatusView;
    private View lineServerView;
    private View lineGenreView;
    private View lineLastUpdateView;
    // Value fields.
    private TextView author;
    private TextView status;
    private TextView server;
    private TextView synopsis;
    private TextView genre;
    // Label fields, tinted in setColor().
    private TextView authorTitle;
    private TextView statusTitle;
    private TextView serverTitle;
    private TextView genreTitle;
    private TextView lastUpdateTitle;
    private TextView lastUpdate;
    private ImageView image;

    public ControlInfoNoScroll(Context context) {
        super(context);
        initialize();
    }

    public ControlInfoNoScroll(Context context, AttributeSet attrs) {
        super(context, attrs);
        initialize();
    }

    public ControlInfoNoScroll(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        initialize();
    }

    /** Inflates the layout into this view and caches all child references. */
    private void initialize() {
        LayoutInflater li = (LayoutInflater) getContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        li.inflate(R.layout.control_info_no_scroll, this, true);
        blockSummaryView = findViewById(R.id.blockSummary);
        lineAuthorView = findViewById(R.id.lineAuthor);
        lineStatusView = findViewById(R.id.lineStatus);
        lineServerView = findViewById(R.id.lineServer);
        lineGenreView = findViewById(R.id.lineGenre);
        lineLastUpdateView = findViewById(R.id.lineLastUpdate);
        authorTitle = findViewById(R.id.titleAuthor);
        statusTitle = findViewById(R.id.titleStatus);
        serverTitle = findViewById(R.id.titleServer);
        genreTitle = findViewById(R.id.titleGenre);
        lastUpdateTitle = findViewById(R.id.titleLastUpdate);
        lastUpdate = findViewById(R.id.textLastUpdate);
        status = findViewById(R.id.textStatus);
        server = findViewById(R.id.textServer);
        synopsis = findViewById(R.id.sinopsis);
        author = findViewById(R.id.textAuthor);
        genre = findViewById(R.id.textGenre);
        image = findViewById(R.id.image);
    }

    /**
     * Applies the accent color to dividers and labels; label text is
     * brightened when a dark theme is active.
     */
    @SuppressWarnings("ResourceAsColor")//lint error
    public void setColor(boolean dark_theme, int color) {
        int mColor = dark_theme ? ThemeColors.brightenColor(color, 150) : color;
        blockSummaryView.setBackgroundColor(color);
        lineAuthorView.setBackgroundColor(color);
        lineStatusView.setBackgroundColor(color);
        lineServerView.setBackgroundColor(color);
        lineGenreView.setBackgroundColor(color);
        lineLastUpdateView.setBackgroundColor(color);
        authorTitle.setTextColor(mColor);
        statusTitle.setTextColor(mColor);
        serverTitle.setTextColor(mColor);
        genreTitle.setTextColor(mColor);
        lastUpdateTitle.setTextColor(mColor);
    }

    public void setLastUpdate(String lastUpdate) {
        this.lastUpdate.setText(lastUpdate);
    }

    public void setAuthor(String author) {
        this.author.setText(author);
    }

    public void setStatus(String status) {
        this.status.setText(status);
    }

    public void setServer(String server) {
        this.server.setText(server);
    }

    public void setSynopsis(String synopsis) {
        this.synopsis.setText(synopsis);
    }

    public void setGenre(String genre) {
        this.genre.setText(genre);
    }

    /**
     * Makes the cover image clickable: tapping it copies {@code name} (the
     * series title) to the clipboard and shows a confirmation toast.
     */
    @UiThread
    public void enableTitleCopy(final Context context, final String name) {
        this.image.setClickable(true);
        this.image.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                ClipboardManager clipboard = (ClipboardManager) context.getSystemService(Context.CLIPBOARD_SERVICE);
                ClipData clip = ClipData.newPlainText("label", name);
                clipboard.setPrimaryClip(clip);
                Toast.makeText(context, R.string.title_copied, Toast.LENGTH_SHORT).show();
            }
        });
    }

    @Override
    public void setImageBitmap(Bitmap b) {
        image.setImageBitmap(b);
    }

    @Override
    public void setImageResource(int id) {
        image.setImageResource(id);
    }
}
| 1,882 |
6,557 | <gh_stars>1000+
{
"buttonLabel": "Zobrazit doporučení",
"countAnnouncement": "K dispozici {optionCount, plural, one {je # možnost} other {jsou/je # možnosti/-í}}.",
"focusAnnouncement": "{isGroupChange, select, true {Zadaná skupina „{groupTitle}“ {groupCount, plural, one {s # možností} other {se # možnostmi}}. } other {}}{optionText}{isSelected, select, true { (vybráno)} other {}}",
"listboxLabel": "Návrhy",
"selectedAnnouncement": "{optionText}, vybráno"
}
| 192 |
602 | # encoding: UTF-8
"""
Basic data types, classes and models for trade.
"""
from .tradeapi import TradeApi
from .backtest import AlphaBacktestInstance, EventBacktestInstance
from .portfoliomanager import PortfolioManager
from .livetrade import EventLiveTradeInstance, AlphaLiveTradeInstance
from .strategy import Strategy, AlphaStrategy, EventDrivenStrategy
from .tradegateway import BaseTradeApi, RealTimeTradeApi, AlphaTradeApi, BacktestTradeApi
# Public API of the trade package; mirrors the imports above, grouped by module.
__all__ = ['TradeApi',
           'AlphaBacktestInstance', 'EventBacktestInstance',
           'PortfolioManager',
           'EventLiveTradeInstance', 'AlphaLiveTradeInstance',
           'Strategy', 'AlphaStrategy', 'EventDrivenStrategy',
           'BaseTradeApi', 'RealTimeTradeApi', 'AlphaTradeApi', 'BacktestTradeApi']
| 257 |
843 | <filename>src/olympia/blocklist/serializers.py
from rest_framework import serializers
from olympia.api.fields import OutgoingURLField, TranslationSerializerField
from .models import Block
class BlockSerializer(serializers.ModelSerializer):
    """Serializes Block rows for the API."""

    # Translated name pulled from the related add-on record.
    addon_name = TranslationSerializerField(source='addon.name')
    # Block reason URL, wrapped as an outgoing (redirected) URL.
    url = OutgoingURLField()

    class Meta:
        model = Block
        fields = (
            'id',
            'created',
            'modified',
            'addon_name',
            'guid',
            'min_version',
            'max_version',
            'reason',
            'url',
        )
| 276 |
5,169 | <gh_stars>1000+
{
"name": "VOKBenkode",
"version": "0.2.0",
"summary": "An Objective-C library for encoding/decoding objects using Bencoding.",
"homepage": "https://github.com/vokalinteractive/VOKBenkode",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/vokalinteractive/VOKBenkode.git",
"tag": "0.2.0"
},
"platforms": {
"ios": "6.0",
"osx": "10.8"
},
"requires_arc": true,
"source_files": "Pod/Classes"
}
| 243 |
1,880 | <gh_stars>1000+
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
package tiger;
import com.google.android.apps.docs.tools.dagger.componentfactory.MembersInjector;
import com.google.auto.service.AutoService;
import com.google.common.collect.Iterables;
import com.google.common.io.BaseEncoding;
import com.google.gson.Gson;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.TypeSpec;
import dagger.Module;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.inject.Inject;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import javax.tools.Diagnostic.Kind;
/**
* Annotation processor to collect dependency information. All the information
* are written into java files in package
* {@link SharedNames#DEPENDENCY_INFORMATION_PACKAGE_NAME} which is later used as
* input for {@link ComponentGeneratorProcessor}. Three piece of information are
* collected: {@link Module}s, {@link MembersInjector}s and ctor {@link Inject}
* ed scoped classes. A class is created as container. For each piece of
* information collected, either a class or a interface, a field of that type is
* added to the container.
*/
@AutoService(Processor.class)
public class DependencyInformationCollectorProcessor extends AbstractProcessor {
  // CLEANUP: removed dead private fields TAG, done, foundSomething and
  // uniqueSuffix — the latter two were always shadowed by locals in
  // process() and none of them was ever read.
  private final Gson gson = new Gson();
  private Filer filer;
  private Elements elements;
  private Messager messager;

  // Following could be used cross processing rounds.
  private final Set<String> moduleStrings = new HashSet<>();
  private final Set<String> membersInjectors = new HashSet<>();
  private final Set<String> scopeDependencies = new HashSet<>();
  private final Set<String> scopedComponentNames = new HashSet<>();
  private final Set<String> ctorInjectedClassStrings = new HashSet<>();
  private Utils utils;

  @Override
  public synchronized void init(ProcessingEnvironment env) {
    super.init(env);
    filer = env.getFiler();
    elements = env.getElementUtils();
    messager = env.getMessager();
  }

  /**
   * Collects modules, members injectors, scope metadata and ctor-injected
   * scoped classes from this round and, if anything was found, writes them
   * as JSON string fields of a generated container class.
   */
  @Override
  public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment env) {
    utils = new Utils(processingEnv, env);
    if (annotations.isEmpty()) {
      return false;
    }
    boolean foundSomething = false;

    // Derive a unique, deterministic-prefixed class name suffix from the
    // first annotated (enclosing) type plus a random UUID, so containers
    // generated in different rounds/compilations never collide.
    Element someElement =
        Iterables.getFirst(
            env.getElementsAnnotatedWith(Iterables.getFirst(annotations, null)), null);
    ElementKind elementKind = someElement.getKind();
    // Walk up to the enclosing class/interface (the annotation may be on a
    // constructor, method or field).
    while (!(elementKind.equals(ElementKind.CLASS) || elementKind.equals(ElementKind.INTERFACE))) {
      someElement = someElement.getEnclosingElement();
      elementKind = someElement.getKind();
    }
    String suffix = ((TypeElement) someElement).getQualifiedName().toString().replace(".", "_");
    String uniqueSuffix =
        suffix + BaseEncoding.base64().encode(UUID.randomUUID().toString().getBytes());
    TypeSpec.Builder dependencyInfoCollectedBuilder =
        TypeSpec.classBuilder("DependencyInfo_" + uniqueSuffix)
            .addModifiers(Modifier.PUBLIC, Modifier.FINAL);

    // Collect @Module classes.
    for (Element element : env.getElementsAnnotatedWith(Module.class)) {
      foundSomething = true;
      moduleStrings.add(element.toString());
    }
    String jsonString = gson.toJson(moduleStrings);
    dependencyInfoCollectedBuilder.addField(
        FieldSpec.builder(
                String.class,
                SharedNames.DEPENDENCY_INFORMATION_FIELD_NAME_MODULES,
                Modifier.PUBLIC,
                Modifier.FINAL)
            .initializer("$S", jsonString)
            .build());

    // Collect @MembersInjector interfaces.
    for (Element element : env.getElementsAnnotatedWith(MembersInjector.class)) {
      foundSomething = true;
      membersInjectors.add(element.toString());
    }
    jsonString = gson.toJson(membersInjectors);
    dependencyInfoCollectedBuilder.addField(
        FieldSpec.builder(
                String.class,
                SharedNames.DEPENDENCY_INFORMATION_FIELD_NAME_MEMBERS_INJECTORS,
                Modifier.PUBLIC,
                Modifier.FINAL)
            .initializer("$S", jsonString)
            .build());

    // Collect @ScopeDependency carriers; the field is only added when
    // at least one was ever seen.
    for (Element element : env.getElementsAnnotatedWith(ScopeDependency.class)) {
      foundSomething = true;
      scopeDependencies.add(element.toString());
    }
    if (!scopeDependencies.isEmpty()) {
      jsonString = gson.toJson(scopeDependencies);
      dependencyInfoCollectedBuilder.addField(
          FieldSpec.builder(
                  String.class,
                  SharedNames.DEPENDENCY_INFORMATION_FIELD_NAME_SCOPE_DEPENDENCIES,
                  Modifier.PUBLIC,
                  Modifier.FINAL)
              .initializer("$S", jsonString)
              .build());
    }

    // Collect @ScopedComponentNames carriers; field added only when present.
    for (Element element : env.getElementsAnnotatedWith(ScopedComponentNames.class)) {
      foundSomething = true;
      scopedComponentNames.add(element.toString());
    }
    if (!scopedComponentNames.isEmpty()) {
      jsonString = gson.toJson(scopedComponentNames);
      dependencyInfoCollectedBuilder.addField(
          FieldSpec.builder(
                  String.class,
                  SharedNames.DEPENDENCY_INFORMATION_FIELD_NAME_SCOPED_COMPONENT_NAMES,
                  Modifier.PUBLIC,
                  Modifier.FINAL)
              .initializer("$S", jsonString)
              .build());
    }

    // Record the target package for generated code.
    // NOTE(review): if more than one element carries @PackageForGenerated,
    // this adds the same field twice and JavaPoet will reject the TypeSpec —
    // presumably at most one such element is expected; verify.
    for (Element element : env.getElementsAnnotatedWith(PackageForGenerated.class)) {
      foundSomething = true;
      jsonString = getPackageForGenerated((TypeElement) element);
      dependencyInfoCollectedBuilder.addField(
          FieldSpec.builder(
                  String.class,
                  SharedNames.DEPENDENCY_INFORMATION_FIELD_NAME_PACKAGE_FOR_GENERATED,
                  Modifier.PUBLIC,
                  Modifier.FINAL)
              .initializer("$S", jsonString)
              .build());
    }

    // Collect classes with @Inject constructors that carry a scope.
    for (Element element : env.getElementsAnnotatedWith(Inject.class)) {
      if (element.getKind().equals(ElementKind.CONSTRUCTOR)) {
        Element classElement = element.getEnclosingElement();
        if (utils.getScopeType(classElement, null) != null) {
          foundSomething = true;
          ctorInjectedClassStrings.add(classElement.toString());
        }
      }
    }
    jsonString = gson.toJson(ctorInjectedClassStrings);
    dependencyInfoCollectedBuilder.addField(
        FieldSpec.builder(
                String.class,
                SharedNames.DEPENDENCY_INFORMATION_FIELD_NAME_CTOR_INJECTED_CLASSES,
                Modifier.PUBLIC,
                Modifier.FINAL)
            .initializer("$S", jsonString)
            .build());

    if (foundSomething) {
      writeJavaFile(
          SharedNames.DEPENDENCY_INFORMATION_PACKAGE_NAME, dependencyInfoCollectedBuilder.build());
    }
    return false;
  }

  /** Returns the value of the @PackageForGenerated annotation on the given type. */
  private String getPackageForGenerated(TypeElement typeElement) {
    AnnotationMirror amAnnotationMirror =
        utils.getAnnotationMirror(typeElement, PackageForGenerated.class);
    return (String) utils.getAnnotationValue(elements, amAnnotationMirror, "value").getValue();
  }

  /** Writes the generated container class; reports and rethrows on I/O failure. */
  private void writeJavaFile(String packageName, TypeSpec typeSpec) {
    JavaFile file = JavaFile.builder(packageName, typeSpec).build();
    try {
      file.writeTo(filer);
    } catch (IOException e) {
      messager.printMessage(
          Kind.ERROR,
          String.format(
              "Generating modules class failed. %s, %s, package %s TypeSpec: %s",
              e,
              file,
              packageName,
              typeSpec));
      throw new RuntimeException(e);
    }
  }

  @Override
  public Set<String> getSupportedAnnotationTypes() {
    Set<String> result = new HashSet<>();
    result.add(Module.class.getCanonicalName());
    result.add(MembersInjector.class.getCanonicalName());
    result.add(Inject.class.getCanonicalName());
    result.add(ScopeDependency.class.getCanonicalName());
    result.add(ScopedComponentNames.class.getCanonicalName());
    result.add(PackageForGenerated.class.getCanonicalName());
    return result;
  }

  @Override
  public SourceVersion getSupportedSourceVersion() {
    return SourceVersion.latestSupported();
  }
}
| 3,591 |
560 | // Copyright (c) 2020 by <NAME>. All rights reserved.
// Copyright (c) 2021 by Apex.AI Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// SPDX-License-Identifier: Apache-2.0
#include "iceoryx_dds/Mempool.hpp"
#include "iceoryx_dds/dds/cyclone_data_reader.hpp"
#include "iceoryx_posh/testing/mocks/chunk_mock.hpp"
#include "test.hpp"
#include <dds/dds.hpp>
namespace
{
using namespace ::testing;
using ::testing::_;
using namespace iox::dds;
// ======================================== Helpers ======================================== //
using TestDataReader = CycloneDataReader;
// Minimal POD used as the user-payload type for ChunkMock in the tests below.
struct DummyPayload
{
    uint64_t a;
    uint64_t b;
    uint64_t c;
};
// Minimal POD used as the custom user-header type for ChunkMock.
struct DummyUserHeader
{
    uint64_t a;
};
// ======================================== Fixture ======================================== //
class CycloneDataReaderTest : public Test
{
public:
void SetUp(){};
void TearDown(){};
};
// ======================================== Tests ======================================== //
// A reader that was never connected must reject takeNext() with NOT_CONNECTED.
TEST_F(CycloneDataReaderTest, DoesNotAttemptToReadWhenDisconnected)
{
    // ===== Setup: datagram header describing a default-constructed chunk
    ChunkMock<DummyPayload> chunkMock;
    iox::dds::IoxChunkDatagramHeader datagramHeader;
    datagramHeader.endianness = getEndianess();
    datagramHeader.userPayloadSize = chunkMock.chunkHeader()->userPayloadSize();
    datagramHeader.userPayloadAlignment = chunkMock.chunkHeader()->userPayloadAlignment();

    // ===== Test
    TestDataReader reader{"", "", ""};

    auto takeNextResult = reader.takeNext(datagramHeader,
                                          static_cast<uint8_t*>(chunkMock.chunkHeader()->userHeader()),
                                          static_cast<uint8_t*>(chunkMock.chunkHeader()->userPayload()));

    // ASSERT_TRUE is the idiomatic form of ASSERT_EQ(true, ...).
    ASSERT_TRUE(takeNextResult.has_error());
    EXPECT_EQ(iox::dds::DataReaderError::NOT_CONNECTED, takeNextResult.get_error());
}
// A connected reader must reject takeNext() when either destination buffer
// (user header or user payload) is null, with the matching error code.
TEST_F(CycloneDataReaderTest, ReturnsErrorWhenAttemptingToReadIntoANullBuffer)
{
    // ===== Setup: datagram header describing a chunk with a custom user header
    ChunkMock<DummyPayload, DummyUserHeader> chunkMock;
    iox::dds::IoxChunkDatagramHeader datagramHeader;
    datagramHeader.endianness = getEndianess();
    datagramHeader.userHeaderId = iox::mepoo::ChunkHeader::UNKNOWN_USER_HEADER;
    datagramHeader.userHeaderSize = chunkMock.chunkHeader()->userHeaderSize();
    datagramHeader.userPayloadSize = chunkMock.chunkHeader()->userPayloadSize();
    datagramHeader.userPayloadAlignment = chunkMock.chunkHeader()->userPayloadAlignment();

    // ===== Test
    TestDataReader reader{"", "", ""};
    reader.connect();

    // Null user-header buffer is rejected.
    auto takeNextResult1 =
        reader.takeNext(datagramHeader, nullptr, static_cast<uint8_t*>(chunkMock.chunkHeader()->userPayload()));
    ASSERT_TRUE(takeNextResult1.has_error());
    EXPECT_EQ(iox::dds::DataReaderError::INVALID_BUFFER_PARAMETER_FOR_USER_HEADER, takeNextResult1.get_error());

    // Null user-payload buffer is rejected as well.
    auto takeNextResult2 =
        reader.takeNext(datagramHeader, static_cast<uint8_t*>(chunkMock.chunkHeader()->userHeader()), nullptr);
    ASSERT_TRUE(takeNextResult2.has_error());
    EXPECT_EQ(iox::dds::DataReaderError::INVALID_BUFFER_PARAMETER_FOR_USER_PAYLOAD, takeNextResult2.get_error());
}
} // namespace
| 1,301 |
375 | package hudson.plugins.git;
import java.util.ArrayList;
import java.util.Arrays;
import static hudson.plugins.git.GitChangeSet.TRUNCATE_LIMIT;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
/**
 * Unit tests for {@link GitChangeSet}: author vs. committer attribution,
 * date/timestamp parsing (legacy, ISO-8601 and epoch formats, including
 * invalid input), non-ASCII author names, and commit-message truncation
 * behavior around {@code TRUNCATE_LIMIT} / {@code GitChangeSet.splitString}.
 */
public class GitChangeSetBasicTest {

    // Shorthand: generate a changeset that has a parent commit.
    private GitChangeSet genChangeSet(boolean authorOrCommitter, boolean useLegacyFormat) {
        return GitChangeSetUtil.genChangeSet(authorOrCommitter, useLegacyFormat, true);
    }

    private GitChangeSet genChangeSet(boolean authorOrCommitter, boolean useLegacyFormat, boolean hasParent) {
        return GitChangeSetUtil.genChangeSet(authorOrCommitter, useLegacyFormat, hasParent);
    }

    @Test
    public void testLegacyChangeSet() {
        GitChangeSet gitChangeSet = GitChangeSetUtil.genChangeSet(false, true, false, GitChangeSetUtil.COMMIT_TITLE ,false);
        GitChangeSetUtil.assertChangeSet( gitChangeSet );
    }

    @Test
    public void testChangeSet() {
        GitChangeSetUtil.assertChangeSet(genChangeSet(false, false));
    }

    @Test
    public void testChangeSetNoParent() {
        GitChangeSet changeSet = genChangeSet(false, false, false);
        GitChangeSetUtil.assertChangeSet(changeSet);
        assertNull(changeSet.getParentCommit());
    }

    // With authorOrCommitter == false, committer identity is reported as the author.
    @Test
    public void testCommitter() {
        assertEquals(GitChangeSetUtil.COMMITTER_NAME, genChangeSet(false, false).getAuthorName());
        assertEquals(GitChangeSetUtil.COMMITTER_EMAIL, genChangeSet(false, false).getAuthorEmail());
    }

    // With authorOrCommitter == true, the author identity is used.
    @Test
    public void testAuthor() {
        assertEquals(GitChangeSetUtil.AUTHOR_NAME, genChangeSet(true, false).getAuthorName());
        assertEquals(GitChangeSetUtil.AUTHOR_EMAIL, genChangeSet(true, false).getAuthorEmail());
    }

    @Test
    public void testGetDate() {
        assertEquals("1970-01-15T06:56:08-0600", genChangeSet(true, false).getDate());
    }

    @Test
    public void testGetTimestamp() {
        assertEquals(1256168000L, genChangeSet(true, false).getTimestamp());
    }

    // An unparseable date is kept verbatim and the timestamp falls back to -1.
    @Test
    public void testInvalidDate() {
        final String badDateString = "2015-03-03x09:22:42 -0700";
        GitChangeSet c = new GitChangeSet(Arrays.asList("author <NAME> <<EMAIL>> " + badDateString), true);
        assertEquals(badDateString, c.getDate());
        assertEquals(-1L, c.getTimestamp());
    }

    // All supported date encodings (ISO-8601 with/without colon in the offset,
    // Zulu time, bare epoch, epoch + offset) must resolve to the same instant.
    @Test
    public void testIsoDate() {
        GitChangeSet c = new GitChangeSet(Arrays.asList("author <NAME> <<EMAIL>> 2015-03-03T09:22:42-0700"), true);
        assertEquals("2015-03-03T09:22:42-0700", c.getDate());
        assertEquals(1425399762000L, c.getTimestamp());
        c = new GitChangeSet(Arrays.asList("author <NAME> <<EMAIL>> 2015-03-03T09:22:42-07:00"), true);
        assertEquals("2015-03-03T09:22:42-07:00", c.getDate());
        assertEquals(1425399762000L, c.getTimestamp());
        c = new GitChangeSet(Arrays.asList("author <NAME> <<EMAIL>> 2015-03-03T16:22:42Z"), true);
        assertEquals("2015-03-03T16:22:42Z", c.getDate());
        assertEquals(1425399762000L, c.getTimestamp());
        c = new GitChangeSet(Arrays.asList("author <NAME> <<EMAIL>> 1425399762"), true);
        assertEquals("2015-03-03T16:22:42Z", c.getDate());
        assertEquals(1425399762000L, c.getTimestamp());
        c = new GitChangeSet(Arrays.asList("author <NAME> <<EMAIL>> 1425374562 -0700"), true);
        assertEquals("2015-03-03T09:22:42-0700", c.getDate());
        assertEquals(1425399762000L, c.getTimestamp());
    }

    // Builds a raw commit whose author name contains a non-ASCII character (Å).
    private GitChangeSet genChangeSetForSwedCase(boolean authorOrCommitter) {
        ArrayList<String> lines = new ArrayList<>();
        lines.add("commit 1567861636cd854f4dd6fa40bf94c0c657681dd5");
        lines.add("tree 66236cf9a1ac0c589172b450ed01f019a5697c49");
        lines.add("parent e74a24e995305bd67a180f0ebc57927e2b8783ce");
        lines.add("author misterÅ <<EMAIL>> 1363879004 +0100");
        lines.add("committer <NAME> <<EMAIL>> 1364199539 -0400");
        lines.add("");
        lines.add("    [task] Updated version.");
        lines.add("    ");
        lines.add("    Including earlier updates.");
        lines.add("    ");
        lines.add("    Changes in this version:");
        lines.add("    - Changed to take the gerrit url from gerrit query command.");
        lines.add("    - Aligned reason information with our new commit hooks");
        lines.add("    ");
        lines.add("    Change-Id: Ife96d2abed5b066d9620034bec5f04cf74b8c66d");
        lines.add("    Reviewed-on: https://gerrit.e.se/12345");
        lines.add("    Tested-by: Jenkins <<EMAIL>>");
        lines.add("    Reviewed-by: <NAME> <<EMAIL>>");
        lines.add("");
        // The lines above deliberately mirror a specific troublesome commit
        // reported from an Ericsson repository.
        return new GitChangeSet(lines, authorOrCommitter);
    }

    @Test
    public void testSwedishCommitterName() {
        assertEquals("<NAME>", genChangeSetForSwedCase(false).getAuthorName());
    }

    @Test
    public void testSwedishAuthorName() {
        assertEquals("misterÅ", genChangeSetForSwedCase(true).getAuthorName());
    }

    @Test
    public void testSwedishDate() {
        assertEquals("2013-03-21T15:16:44+0100", genChangeSetForSwedCase(true).getDate());
    }

    @Test
    public void testSwedishTimestamp() {
        assertEquals(1363875404000L, genChangeSetForSwedCase(true).getTimestamp());
    }

    // ---- Message truncation behavior -------------------------------------

    @Test
    public void testChangeLogTruncationWithShortMessage(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "Lorem ipsum dolor sit amet.",
                false);
        String msg = changeSet.getMsg();
        assertThat("Title is correct ", msg, containsString("Lorem ipsum dolor sit amet.") );
        assertThat("Title length is correct ", msg.length(), lessThanOrEqualTo(TRUNCATE_LIMIT));
    }

    // A newline ends the title regardless of the truncation limit.
    @Test
    public void testChangeLogTruncationWithNewLine(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "Lorem ipsum dolor sit amet, "+System.lineSeparator()+"consectetur adipiscing elit.",
                false);
        String msg = changeSet.getMsg();
        assertThat(msg, is("Lorem ipsum dolor sit amet,"));
        assertThat("Title length is correct ", msg.length(), lessThanOrEqualTo(TRUNCATE_LIMIT));
    }

    // With "retain summary" enabled, a long single-line message is kept whole.
    @Test
    public void testChangeLogRetainSummaryWithoutNewLine(){
        String originalCommitMessage = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus pellentesque ipsum non aliquam interdum. Integer metus orci, vulputate id turpis in, pharetra pretium magna. Fusce sollicitudin vehicula lectus. Nam ut eros purus. Mauris aliquam mi et nunc porta, non consectetur mauris pretium. Fusce a venenatis dolor. Sed commodo, dui ac posuere dignissim, dolor tortor semper eros, varius consequat nulla purus a lacus. Vestibulum egestas, orci vitae pellentesque laoreet, dolor lorem molestie tellus, nec luctus lorem ex quis orci. Phasellus interdum elementum luctus. Nam commodo, turpis in sollicitudin auctor, ipsum lectus finibus erat, in iaculis sapien neque ultrices sapien. In congue diam semper tortor laoreet aliquet. Mauris lacinia quis nunc vel accumsan. Nullam sed nisl eget orci porttitor venenatis. Lorem ipsum dolor sit amet, consectetur adipiscing elit";
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                originalCommitMessage,
                true);
        assertThat(changeSet.getMsg(), is(originalCommitMessage));
    }

    // Without "retain summary", the same message is cut at a word boundary.
    @Test
    public void testChangeLogDoNotRetainSummaryWithoutNewLine(){
        String msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus pellentesque ipsum non aliquam interdum. Integer metus orci, vulputate id turpis in, pharetra pretium magna. Fusce sollicitudin vehicula lectus. Nam ut eros purus. Mauris aliquam mi et nunc porta, non consectetur mauris pretium. Fusce a venenatis dolor. Sed commodo, dui ac posuere dignissim, dolor tortor semper eros, varius consequat nulla purus a lacus. Vestibulum egestas, orci vitae pellentesque laoreet, dolor lorem molestie tellus, nec luctus lorem ex quis orci. Phasellus interdum elementum luctus. Nam commodo, turpis in sollicitudin auctor, ipsum lectus finibus erat, in iaculis sapien neque ultrices sapien. In congue diam semper tortor laoreet aliquet. Mauris lacinia quis nunc vel accumsan. Nullam sed nisl eget orci porttitor venenatis. Lorem ipsum dolor sit amet, consectetur adipiscing elit";
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                msg,
                false);
        assertThat(changeSet.getMsg(), is("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus"));
    }

    @Test
    public void testChangeLogNoTruncationWithNewLine(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "Lorem ipsum dolor sit amet, consectetur "+System.lineSeparator()+" adipiscing elit. Phasellus pellentesque ipsum non aliquam interdum. Integer metus orci, vulputate id turpis in, pharetra pretium magna. Fusce sollicitudin vehicula lectus. Nam ut eros purus. Mauris aliquam mi et nunc porta, non consectetur mauris pretium. Fusce a venenatis dolor. Sed commodo, dui ac posuere dignissim, dolor tortor semper eros, varius consequat nulla purus a lacus. Vestibulum egestas, orci vitae pellentesque laoreet, dolor lorem molestie tellus, nec luctus lorem ex quis orci. Phasellus interdum elementum luctus. Nam commodo, turpis in sollicitudin auctor, ipsum lectus finibus erat, in iaculis sapien neque ultrices sapien. In congue diam semper tortor laoreet aliquet. Mauris lacinia quis nunc vel accumsan. Nullam sed nisl eget orci porttitor venenatis. Lorem ipsum dolor sit amet, consectetur adipiscing elit",
                true);
        String msg = changeSet.getMsg();
        assertThat("Title is correct ", msg, is("Lorem ipsum dolor sit amet, consectetur") );
    }

    @Test
    public void testChangeLogEdgeCaseNotTruncating(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "[JENKINS-012345] 8901 34567 90 23456 8901 34567 9012 4567890 2345678 0 2 4 5",
                false);
        String msg = changeSet.getMsg();
        assertThat( msg.length(), lessThanOrEqualTo( TRUNCATE_LIMIT ));
        assertThat( msg, is("[JENKINS-012345] 8901 34567 90 23456 8901 34567 9012 4567890 2345678 0 2") );
    }

    @Test
    public void testChangeLogEdgeCaseTruncating(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "[JENKINS-012345] 8901 34567 90 23456 8901 34567 9012 4567890 2345678 0 2 4 5",
                true);
        String msg = changeSet.getMsg();
        assertThat( msg, is("[JENKINS-012345] 8901 34567 90 23456 8901 34567 9012 4567890 2345678 0 2 4 5") );
    }

    @Test
    public void testChangeLogEdgeCaseTruncatingAndNewLine(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "[JENKINS-012345] 8901 34567 " + System.lineSeparator() + "90 23456 8901 34567 9012 4567890 2345678 0 2 4 5",
                true);
        String msg = changeSet.getMsg();
        assertThat( msg, is("[JENKINS-012345] 8901 34567") );
    }

    // A single unbroken token longer than the limit is left untouched.
    @Test
    public void testLongString(){
        GitChangeSet changeSet = GitChangeSetUtil.genChangeSet(true, false, true,
                "12345678901234567890123456789012345678901234567890123456789012345678901234567890",
                false);
        String msg = changeSet.getMsg();
        assertThat( msg, is("12345678901234567890123456789012345678901234567890123456789012345678901234567890") );
    }

    // splitString cuts at the last word boundary at or before the limit.
    @Test
    public void stringSplitter(){
        String msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus pellentesque ipsum non aliquam interdum. Integer metus orci, vulputate id turpis in, pharetra pretium magna. Fusce sollicitudin vehicula lectus. Nam ut eros purus. Mauris aliquam mi et nunc porta, non consectetur mauris pretium. Fusce a venenatis dolor. Sed commodo, dui ac posuere dignissim, dolor tortor semper eros, varius consequat nulla purus a lacus. Vestibulum egestas, orci vitae pellentesque laoreet, dolor lorem molestie tellus, nec luctus lorem ex quis orci. Phasellus interdum elementum luctus. Nam commodo, turpis in sollicitudin auctor, ipsum lectus finibus erat, in iaculis sapien neque ultrices sapien. In congue diam semper tortor laoreet aliquet. Mauris lacinia quis nunc vel accumsan. Nullam sed nisl eget orci porttitor venenatis. Lorem ipsum dolor sit amet, consectetur adipiscing elit";
        assertThat(GitChangeSet.splitString(msg, 15), is("Lorem ipsum"));
        assertThat(GitChangeSet.splitString(msg, 16), is("Lorem ipsum"));
        assertThat(GitChangeSet.splitString(msg, 17), is("Lorem ipsum dolor"));
        assertThat(GitChangeSet.splitString(msg, 18), is("Lorem ipsum dolor"));
        assertThat(GitChangeSet.splitString(msg, 19), is("Lorem ipsum dolor"));
        assertThat(GitChangeSet.splitString(msg, 20), is("Lorem ipsum dolor"));
        assertThat(GitChangeSet.splitString(msg, 21), is("Lorem ipsum dolor sit"));
        assertThat(GitChangeSet.splitString(msg, 22), is("Lorem ipsum dolor sit"));
        msg = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus pellentesque ipsum non aliquam interdum.";
        assertThat(GitChangeSet.splitString(msg, TRUNCATE_LIMIT),
                is("Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus"));
    }

    @Test
    public void splitingWithBrackets(){
        assertThat(GitChangeSet.splitString("[task] Lorem ipsum dolor sit amet, consectetur adipiscing elit.", 25), is("[task] Lorem ipsum dolor"));
    }

    @Test
    public void splitingEmptyString(){
        assertThat(GitChangeSet.splitString("", 25), is(""));
    }

    // null input is treated as an empty message rather than throwing.
    @Test
    public void splitingNullString(){
        assertThat(GitChangeSet.splitString(null, 25), is(""));
    }
}
| 5,748 |
1,091 | //
// RMGalleryCollectionViewCell.h
// HeapInspectorExample
//
// Created by <NAME> on 12/11/14.
// Copyright (c) 2014 tapwork. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Collection-view cell exposing a read-only image view.
@interface RMGalleryCollectionViewCell : UICollectionViewCell

/// The cell's image view. Read-only to callers; presumably created and laid
/// out by the cell itself — confirm in the implementation file.
@property (nonatomic, readonly) UIImageView *imageView;

@end
| 108 |
1,760 | <gh_stars>1000+
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// PersistentSampleMap implements HistogramSamples interface. It is used
// by the SparseHistogram class to store samples in persistent memory which
// allows it to be shared between processes or live across restarts.
#ifndef BASE_METRICS_PERSISTENT_SAMPLE_MAP_H_
#define BASE_METRICS_PERSISTENT_SAMPLE_MAP_H_
#include <stdint.h>
#include <map>
#include <memory>
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/metrics/histogram_base.h"
#include "base/metrics/histogram_samples.h"
#include "base/metrics/persistent_memory_allocator.h"
namespace base {
class PersistentHistogramAllocator;
class PersistentSampleMapRecords;
class PersistentSparseHistogramDataManager;
// The logic here is similar to that of SampleMap but with different data
// structures. Changes here likely need to be duplicated there.
class BASE_EXPORT PersistentSampleMap : public HistogramSamples {
 public:
  // Constructs a persistent sample map using a PersistentHistogramAllocator
  // as the data source for persistent records.
  PersistentSampleMap(uint64_t id,
                      PersistentHistogramAllocator* allocator,
                      Metadata* meta);

  ~PersistentSampleMap() override;

  // HistogramSamples:
  void Accumulate(HistogramBase::Sample value,
                  HistogramBase::Count count) override;
  HistogramBase::Count GetCount(HistogramBase::Sample value) const override;
  HistogramBase::Count TotalCount() const override;
  std::unique_ptr<SampleCountIterator> Iterator() const override;

  // Uses a persistent-memory |iterator| to locate and return information about
  // the next record holding information for a PersistentSampleMap. The record
  // could be for any Map so return the |sample_map_id| as well.
  static PersistentMemoryAllocator::Reference GetNextPersistentRecord(
      PersistentMemoryAllocator::Iterator& iterator,
      uint64_t* sample_map_id);

  // Creates a new record in an |allocator| storing count information for a
  // specific sample |value| of a histogram with the given |sample_map_id|.
  static PersistentMemoryAllocator::Reference CreatePersistentRecord(
      PersistentMemoryAllocator* allocator,
      uint64_t sample_map_id,
      HistogramBase::Sample value);

 protected:
  // Performs arithmetic. |op| is ADD or SUBTRACT.
  bool AddSubtractImpl(SampleCountIterator* iter, Operator op) override;

  // Gets a pointer to a "count" corresponding to a given |value|. Returns NULL
  // if the sample does not exist.
  HistogramBase::Count* GetSampleCountStorage(HistogramBase::Sample value);

  // Gets a pointer to a "count" corresponding to a given |value|, creating
  // the sample (initialized to zero) if it does not already exist.
  HistogramBase::Count* GetOrCreateSampleCountStorage(
      HistogramBase::Sample value);

 private:
  // Gets the object that manages persistent records. This returns the
  // |records_| member after first initializing it if necessary.
  PersistentSampleMapRecords* GetRecords();

  // Imports samples from persistent memory by iterating over all sample
  // records found therein, adding them to the sample_counts_ map. If a
  // count for the sample |until_value| is found, stop the import and return
  // a pointer to that counter. If that value is not found, null will be
  // returned after all currently available samples have been loaded. Pass
  // true for |import_everything| to force the importing of all available
  // samples even if a match is found.
  HistogramBase::Count* ImportSamples(HistogramBase::Sample until_value,
                                      bool import_everything);

  // All created/loaded sample values and their associated counts. The storage
  // for the actual Count numbers is owned by the |records_| object and its
  // underlying allocator.
  std::map<HistogramBase::Sample, HistogramBase::Count*> sample_counts_;

  // The allocator that manages histograms inside persistent memory. This is
  // owned externally and is expected to live beyond the life of this object.
  PersistentHistogramAllocator* allocator_;

  // The object that manages sample records inside persistent memory. This is
  // owned by the |allocator_| object (above) and so, like it, is expected to
  // live beyond the life of this object. This value is lazily-initialized on
  // first use via the GetRecords() accessor method.
  PersistentSampleMapRecords* records_ = nullptr;

  DISALLOW_COPY_AND_ASSIGN(PersistentSampleMap);
};
} // namespace base
#endif // BASE_METRICS_PERSISTENT_SAMPLE_MAP_H_
| 1,391 |
353 | #ifndef CAFFE2_OPERATORS_TOP_K_H_
#define CAFFE2_OPERATORS_TOP_K_H_
#include "caffe2/core/logging.h"
#include "caffe2/core/operator.h"
#include "caffe2/utils/math.h"
namespace caffe2 {
// Top-K selection operator. Reads `k` and `axis` from the operator arguments
// (both default to -1 when absent); the actual selection is implemented in
// the per-device RunOnDevice() definitions, which are not in this header.
template <typename T, class Context>
class TopKOp : public Operator<Context> {
 public:
  USE_OPERATOR_CONTEXT_FUNCTIONS;

  template <class... Args>
  explicit TopKOp(Args&&... args)
      : Operator<Context>(std::forward<Args>(args)...),
        OP_SINGLE_ARG(int, "k", k_, -1),
        OP_SINGLE_ARG(int, "axis", axis_, -1) {
  }

  ~TopKOp() {}

  bool RunOnDevice() override;

 private:
  const int k_;   // requested K; -1 when the argument was not supplied
  int axis_;      // axis argument; -1 presumably means "resolve at run time" — confirm in .cc
};
// Gradient counterpart of TopKOp. Only `axis` is configurable here; the
// gradient computation lives in the per-device RunOnDevice() definitions.
template <typename T, class Context>
class TopKGradientOp : public Operator<Context> {
 public:
  USE_OPERATOR_CONTEXT_FUNCTIONS;

  template <class... Args>
  explicit TopKGradientOp(Args&&... args)
      : Operator<Context>(std::forward<Args>(args)...),
        OP_SINGLE_ARG(int, "axis", axis_, -1) {}

  ~TopKGradientOp() {}

  bool RunOnDevice() override;

 private:
  int axis_;  // axis argument; defaults to -1 when not supplied
};
} // namespace caffe2
#endif // CAFFE2_OPERATORS_TOP_K_H_
| 497 |
4,538 | /*
* Copyright (C) 2015-2020 Alibaba Group Holding Limited
*/
#ifndef WDT_HAL_H
#define WDT_HAL_H
#ifdef __cplusplus
extern "C" {
#endif
#include "plat_types.h"
/** Watchdog instance identifiers. */
enum HAL_WDT_ID_T {
    HAL_WDT_ID_0 = 0,
    HAL_WDT_ID_NUM,     /* number of watchdog instances */
};

/** Events reported through the watchdog IRQ callback. */
enum HAL_WDT_EVENT_T {
    HAL_WDT_EVENT_FIRE = 0,     /* watchdog fire event */
};

#define HAL_WDT_YES 1
#define HAL_WDT_NO 0

/**
 * IRQ callback invoked for watchdog events.
 * @param id     watchdog instance that raised the interrupt
 * @param status raw status value passed by the driver — confirm encoding
 *               against the implementation
 */
typedef void (*HAL_WDT_IRQ_CALLBACK)(enum HAL_WDT_ID_T id, uint32_t status);

/* hal api */
void hal_wdt_set_irq_callback(enum HAL_WDT_ID_T id, HAL_WDT_IRQ_CALLBACK handler);

/* mandatory operations */
int hal_wdt_start(enum HAL_WDT_ID_T id);
int hal_wdt_stop(enum HAL_WDT_ID_T id);

/* optional operations */
int hal_wdt_ping(enum HAL_WDT_ID_T id);
/* NOTE(review): the second parameter was unnamed in the prototype; named
 * `timeout` here for readability (no ABI/API change). Units are not visible
 * in this header — presumably seconds or milliseconds; confirm against the
 * implementation. */
int hal_wdt_set_timeout(enum HAL_WDT_ID_T id, unsigned int timeout);
unsigned int hal_wdt_get_timeleft(enum HAL_WDT_ID_T id);
#ifdef __cplusplus
}
#endif
#endif /* WDT_HAL_H */
| 393 |
7,482 | /**
******************************************************************************
* @file tae32f53xx_ll_dma.h
* @author MCD Application Team
* @brief Header file for DMA LL module.
*
******************************************************************************
* @attention
*
* <h2><center>© Copyright (c) 2020 Tai-Action.
* All rights reserved.</center></h2>
*
* This software is licensed by Tai-Action under BSD 3-Clause license,
* the "License"; You may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
* opensource.org/licenses/BSD-3-Clause
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef _TAE32F53XX_LL_DMA_H_
#define _TAE32F53XX_LL_DMA_H_
#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */
/* Includes ------------------------------------------------------------------*/
#include "tae32f53xx_ll_def.h"
/** @addtogroup TAE32F53xx_LL_Driver
* @{
*/
/** @addtogroup DMA_LL
* @{
*/
/* Exported constants --------------------------------------------------------*/
/** @defgroup DMA_LL_Exported_Constants DMA LL Exported Constants
* @brief DMA LL Exported Constants
* @{
*/
/**
* @brief DMA block size max
*/
#define LL_DMA_BLOCK_SIZE_MAX (0xfffU)
/**
 * @brief SRAMBC address start
 * @note NOTE(review): the macro name is spelled "SRMBC" while this doc says
 *       "SRAMBC" — one of the two is likely a typo; confirm the intended
 *       peripheral name before renaming anything.
 */
#define LL_DMA_SRMBC_ADDR_START (0x20004000UL)

/**
 * @brief SRAMBC address end (inclusive: last valid byte address)
 */
#define LL_DMA_SRMBC_ADDR_END (0x20006000UL - 1)
/**
* @}
*/
/* Exported types ------------------------------------------------------------*/
/** @defgroup DMA_LL_Exported_Types DMA LL Exported Types
* @brief DMA LL Exported Types
* @{
*/
/**
* @brief DMA Source Peripheral bus type definition
*/
typedef enum {
DMA_SRC_PERIPH_BUS_AHB_MST1 = DMA_CH_CR0_SMS_AHB_MST1, /*!< Source Peripheral bus AHB Master1 */
DMA_SRC_PERIPH_BUS_AHB_MST2 = DMA_CH_CR0_SMS_AHB_MST2, /*!< Source Peripheral bus AHB Master2 */
} DMA_SrcPeriphBusETypeDef;
/**
* @brief DMA Destination Peripheral bus type definition
*/
typedef enum {
DMA_DST_PERIPH_BUS_AHB_MST1 = DMA_CH_CR0_DMS_AHB_MST1, /*!< Destination Peripheral bus AHB Master1 */
DMA_DST_PERIPH_BUS_AHB_MST2 = DMA_CH_CR0_DMS_AHB_MST2, /*!< Destination Peripheral bus AHB Master2 */
} DMA_DstPeriphBusETypeDef;
/**
 * @brief DMA transfer type definition
 */
typedef enum {
    DMA_TRANS_TYPE_M2M = DMA_CH_CR0_TTC_M2M,                    /*!< Transfer type Memory-to-Memory */
    DMA_TRANS_TYPE_M2P = DMA_CH_CR0_TTC_M2P,                    /*!< Transfer type Memory-to-Peripheral */
    DMA_TRANS_TYPE_P2M = DMA_CH_CR0_TTC_P2M,                    /*!< Transfer type Peripheral-to-Memory */
    DMA_TRANS_TYPE_P2P = DMA_CH_CR0_TTC_P2P,                    /*!< Transfer type Peripheral-to-Peripheral */
} DMA_TransTypeETypeDef;
/**
* @brief DMA Source burst length type definition
*/
typedef enum {
DMA_SRC_BURST_LEN_1 = DMA_CH_CR0_SBTL_1, /*!< Source burst length 1 */
DMA_SRC_BURST_LEN_4 = DMA_CH_CR0_SBTL_4, /*!< Source burst length 4 */
DMA_SRC_BURST_LEN_8 = DMA_CH_CR0_SBTL_8, /*!< Source burst length 8 */
} DMA_SrcBurstLenETypeDef;
/**
* @brief DMA Destination burst length type definition
*/
typedef enum {
DMA_DST_BURST_LEN_1 = DMA_CH_CR0_DBTL_1, /*!< Destination burst length 1 */
DMA_DST_BURST_LEN_4 = DMA_CH_CR0_DBTL_4, /*!< Destination burst length 4 */
DMA_DST_BURST_LEN_8 = DMA_CH_CR0_DBTL_8, /*!< Destination burst length 8 */
} DMA_DstBurstLenETypeDef;
/**
* @brief DMA Source address mode type definition
*/
typedef enum {
DMA_SRC_ADDR_MODE_INC = DMA_CH_CR0_SINC_INC, /*!< Source address mode Increase */
DMA_SRC_ADDR_MODE_DEC = DMA_CH_CR0_SINC_DEC, /*!< Source address mode Decrease */
DMA_SRC_ADDR_MODE_FIX = DMA_CH_CR0_SINC_FIX, /*!< Source address mode Fixed */
} DMA_SrcAddrModeETypeDef;
/**
* @brief DMA Destination address mode type definition
*/
typedef enum {
DMA_DST_ADDR_MODE_INC = DMA_CH_CR0_DINC_INC, /*!< Destination address mode Increase */
DMA_DST_ADDR_MODE_DEC = DMA_CH_CR0_DINC_DEC, /*!< Destination address mode Decrease */
DMA_DST_ADDR_MODE_FIX = DMA_CH_CR0_DINC_FIX, /*!< Destination address mode Fixed */
} DMA_DstAddrModeETypeDef;
/**
* @brief DMA Source transfer width type definition
*/
typedef enum {
DMA_SRC_TRANS_WIDTH_8b = DMA_CH_CR0_STW_8b, /*!< Source transfer width 8bit */
DMA_SRC_TRANS_WIDTH_16b = DMA_CH_CR0_STW_16b, /*!< Source transfer width 16bit */
DMA_SRC_TRANS_WIDTH_32b = DMA_CH_CR0_STW_32b, /*!< Source transfer width 32bit */
} DMA_SrcTransWidthETypeDef;
/**
* @brief DMA Destination transfer width type definition
*/
typedef enum {
DMA_DST_TRANS_WIDTH_8b = DMA_CH_CR0_DTW_8b, /*!< Destination transfer width 8bit */
DMA_DST_TRANS_WIDTH_16b = DMA_CH_CR0_DTW_16b, /*!< Destination transfer width 16bit */
DMA_DST_TRANS_WIDTH_32b = DMA_CH_CR0_DTW_32b, /*!< Destination transfer width 32bit */
} DMA_DstTransWidthETypeDef;
/**
* @brief DMA Source handshaking interface type definition
*/
typedef enum {
DMA_SRC_HANDSHAKE_IFC_MEMORY = 0, /*!< Source handshaking interface MEMORY */
DMA_SRC_HANDSHAKE_IFC_I2C0_TX = DMA_CH_CR3_SHSIF_I2C0_TX, /*!< Source handshaking interface I2C0_TX */
DMA_SRC_HANDSHAKE_IFC_I2C0_RX = DMA_CH_CR3_SHSIF_I2C0_RX, /*!< Source handshaking interface I2C0_RX */
DMA_SRC_HANDSHAKE_IFC_I2C1_TX = DMA_CH_CR3_SHSIF_I2C1_TX, /*!< Source handshaking interface I2C1_TX */
DMA_SRC_HANDSHAKE_IFC_I2C1_RX = DMA_CH_CR3_SHSIF_I2C1_RX, /*!< Source handshaking interface I2C1_RX */
DMA_SRC_HANDSHAKE_IFC_UART0_TX = DMA_CH_CR3_SHSIF_UART0_TX, /*!< Source handshaking interface UART0_TX */
DMA_SRC_HANDSHAKE_IFC_UART0_RX = DMA_CH_CR3_SHSIF_UART0_RX, /*!< Source handshaking interface UART0_RX */
DMA_SRC_HANDSHAKE_IFC_UART1_TX = DMA_CH_CR3_SHSIF_UART1_TX, /*!< Source handshaking interface UART1_TX */
DMA_SRC_HANDSHAKE_IFC_UART1_RX = DMA_CH_CR3_SHSIF_UART1_RX, /*!< Source handshaking interface UART1_RX */
} DMA_SrcHandshakeIfcETypeDef;
/**
* @brief DMA Destination handshaking interface type definition
*/
typedef enum {
DMA_DST_HANDSHAKE_IFC_MEMORY = 0, /*!< Destination handshaking interface MEMORY */
DMA_DST_HANDSHAKE_IFC_I2C0_TX = DMA_CH_CR3_DHSIF_I2C0_TX, /*!< Destination handshaking interface I2C0_TX */
DMA_DST_HANDSHAKE_IFC_I2C0_RX = DMA_CH_CR3_DHSIF_I2C0_RX, /*!< Destination handshaking interface I2C0_RX */
DMA_DST_HANDSHAKE_IFC_I2C1_TX = DMA_CH_CR3_DHSIF_I2C1_TX, /*!< Destination handshaking interface I2C1_TX */
DMA_DST_HANDSHAKE_IFC_I2C1_RX = DMA_CH_CR3_DHSIF_I2C1_RX, /*!< Destination handshaking interface I2C1_RX */
DMA_DST_HANDSHAKE_IFC_UART0_TX = DMA_CH_CR3_DHSIF_UART0_TX, /*!< Destination handshaking interface UART0_TX */
DMA_DST_HANDSHAKE_IFC_UART0_RX = DMA_CH_CR3_DHSIF_UART0_RX, /*!< Destination handshaking interface UART0_RX */
DMA_DST_HANDSHAKE_IFC_UART1_TX = DMA_CH_CR3_DHSIF_UART1_TX, /*!< Destination handshaking interface UART1_TX */
DMA_DST_HANDSHAKE_IFC_UART1_RX = DMA_CH_CR3_DHSIF_UART1_RX, /*!< Destination handshaking interface UART1_RX */
} DMA_DstHandshakeIfcETypeDef;
/**
* @brief DMA channel type definition
*/
typedef enum {
DMA_CHANNEL_0 = 0U, /*!< DMA Channel 0 */
DMA_CHANNEL_1 = 1U, /*!< DMA Channel 1 */
DMA_CHANNEL_NUM = 2U, /*!< DMA Channel Number */
DMA_CHANNEL_INVALID = 0xFFU, /*!< DMA Channel Invalid */
} DMA_ChannelETypeDef;
/**
* @brief DMA State type definition
*/
typedef enum {
DMA_STATE_RESET = 0, /*!< DMA State Reset: not yet initialized or disabled */
DMA_STATE_READY, /*!< DMA State Ready: initialized and ready for use */
DMA_STATE_BUSY, /*!< DMA State Busy: process is ongoing */
} DMA_StateETypeDef;
/**
* @brief DMA IRQ callback function type definition
*/
typedef void (*DMA_IRQCallback)(void *arg);
/**
 * @brief DMA user config type definition
 */
typedef struct __DMA_UserCfgTypeDef {
    DMA_TransTypeETypeDef trans_type;                           /*!< transfer type */

    DMA_SrcAddrModeETypeDef src_addr_mode;                      /*!< source address mode */
    DMA_DstAddrModeETypeDef dst_addr_mode;                      /*!< destination address mode */

    DMA_SrcTransWidthETypeDef src_data_width;                   /*!< source data width */
    DMA_DstTransWidthETypeDef dst_data_width;                   /*!< destination data width */

    DMA_SrcHandshakeIfcETypeDef src_hs_ifc;                     /*!< source handshake interface */
    DMA_DstHandshakeIfcETypeDef dst_hs_ifc;                     /*!< destination handshake interface */

    void *end_arg;                                              /*!< argument of transfer complete callback function */
    DMA_IRQCallback end_callback;                               /*!< transfer complete callback function */

    void *err_arg;                                              /*!< argument of transfer error callback function */
    DMA_IRQCallback err_callback;                               /*!< transfer error callback function */
} DMA_UserCfgTypeDef;
/**
* @}
*/
/* Exported macro ------------------------------------------------------------*/
/** @defgroup DMA_LL_Exported_Macros DMA LL Exported Macros
* @brief DMA LL Exported Macros
* @{
*/
/**
* @brief Source address set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param addr Source address
* @return None
*/
#define __LL_DMA_SrcAddr_Set(__DMA__, ch, addr) WRITE_REG((__DMA__)->CH[(ch)].SAR, addr)
/**
* @brief Destination address set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param addr Destination address
* @return None
*/
#define __LL_DMA_DstAddr_Set(__DMA__, ch, addr) WRITE_REG((__DMA__)->CH[(ch)].DAR, addr)
/**
* @brief Source peripheral bus set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param bus Source peripheral bus
* @return None
*/
#define __LL_DMA_SrcPeriphBus_Set(__DMA__, ch, bus) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_SMS_Msk, bus)
/**
* @brief Destination peripheral bus set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param bus Destination peripheral bus
* @return None
*/
#define __LL_DMA_DstPeriphBus_Set(__DMA__, ch, bus) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_DMS_Msk, bus)
/**
* @brief Transfer type set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param type Transfer type
* @return None
*/
#define __LL_DMA_TransType_Set(__DMA__, ch, type) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_TTC_Msk, type)
/**
* @brief Source burst length set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param len Source burst length
* @return None
*/
#define __LL_DMA_SrcBurstLen_Set(__DMA__, ch, len) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_SBTL_Msk, len)
/**
* @brief Destination burst length set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param len Destination burst length
* @return None
*/
#define __LL_DMA_DstBurstLen_Set(__DMA__, ch, len) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_DBTL_Msk, len)
/**
* @brief Source address mode set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param mode Source address mode
* @return None
*/
#define __LL_DMA_SrcAddrMode_Set(__DMA__, ch, mode) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_SINC_Msk, mode)
/**
* @brief Destination address mode set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param mode Destination address mode
* @return None
*/
#define __LL_DMA_DstAddrMode_Set(__DMA__, ch, mode) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_DINC_Msk, mode)
/**
* @brief Source transfer width set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param width Source transfer width
* @return None
*/
#define __LL_DMA_SrcTransWidth_Set(__DMA__, ch, width) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_STW_Msk, width)
/**
* @brief Source transfer width get
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @retval 0 8 bits
* @retval 1 16 bits
* @retval 2 32 bits
*/
#define __LL_DMA_SrcTransWidth_Get(__DMA__, ch) (READ_BIT((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_STW_Msk) >> DMA_CH_CR0_STW_Pos)
/**
* @brief Destination transfer width set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param width Destination transfer width
* @return None
*/
#define __LL_DMA_DstTransWidth_Set(__DMA__, ch, width) MODIFY_REG((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_DTW_Msk, width)
/**
* @brief Channel interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_Channel_Int_En(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_CHIE_Msk)
/**
* @brief Channel interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_Channel_Int_Dis(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR0, DMA_CH_CR0_CHIE_Msk)
/**
* @brief Channel register CR0 write
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param val write value
* @return None
*/
#define __LL_DMA_ChannelRegCR0_Write(__DMA__, ch, val) WRITE_REG((__DMA__)->CH[(ch)].CR0, val)
/**
* @brief Judge is block transfer done or not
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @retval 0 isn't block transfer done
* @retval 1 is block transfer done
*/
#define __LL_DMA_IsBlockTransDone(__DMA__, ch) (READ_BIT((__DMA__)->CH[(ch)].CR1, DMA_CH_CR1_DONE_Msk) >> DMA_CH_CR1_DONE_Pos)
/**
* @brief Block transfer done clear
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_BlockTransDone_Clr(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR1, DMA_CH_CR1_DONE_Msk)
/**
* @brief Block transfer count set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param cnt Block transfer count
* @return None
*/
#define __LL_DMA_BlockTransCnt_Set(__DMA__, ch, cnt) \
MODIFY_REG((__DMA__)->CH[(ch)].CR1, DMA_CH_CR1_BTCNT_Msk, (((cnt) & 0xfffUL) << DMA_CH_CR1_BTCNT_Pos))
/**
 * @brief Channel register CR1 write
 * @param __DMA__ Specifies DMA peripheral
 * @param ch DMA channel
 * @param val write value
 * @return None
 */
#define __LL_DMA_ChannelRegCR1_Write(__DMA__, ch, val) WRITE_REG((__DMA__)->CH[(ch)].CR1, val)

/**
 * @brief Misspelled legacy alias ("DAM") of __LL_DMA_ChannelRegCR1_Write,
 *        kept so existing callers continue to compile. Prefer the correctly
 *        spelled macro in new code (matches __LL_DMA_ChannelRegCR0_Write).
 */
#define __LL_DAM_ChannelRegCR1_Write(__DMA__, ch, val) __LL_DMA_ChannelRegCR1_Write(__DMA__, ch, val)
/**
* @brief Burst length max set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param max Burst length max
* @return None
*/
#define __LL_DMA_BurstLenMax_Set(__DMA__, ch, max) \
MODIFY_REG((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_MBL_Msk, (((max) & 0x3ffUL) << DMA_CH_CR2_MBL_Pos))
/**
* @brief Source handshake mode set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_SrcHandshakeMode_Set(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_SHSM_Msk)
/**
* @brief Source handshake mode clear
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_SrcHandshakeMode_Clr(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_SHSM_Msk)
/**
* @brief Destination handshake mode set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_DstHandshakeMode_Set(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_DHSM_Msk)
/**
* @brief Destination handshake mode clear
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_DstHandshakeMode_Clr(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_DHSM_Msk)
/**
* @brief Judge is channel FIFO empty or not
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @retval 0 isn't channel FIFO empty
* @retval 1 is channel FIFO empty
*/
#define __LL_DMA_IsChannelFIFOEmpty(__DMA__, ch) \
(READ_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_FIFO_EF_Msk) >> DMA_CH_CR2_FIFO_EF_Pos)
/**
* @brief Channel suspend set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_ChannelSuspend_Set(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_SUSP_Msk)
/**
* @brief Channel suspend clear
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_ChannelSuspend_Clr(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_SUSP_Msk)
/**
* @brief Channel priority set high
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_ChannelPriHigh_Set(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_PRI_Msk)
/**
* @brief Channel priority set low
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_ChannelPriLow_Set(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR2, DMA_CH_CR2_PRI_Msk)
/**
 * @brief Channel register CR2 write
 * @param __DMA__ Specifies DMA peripheral
 * @param ch DMA channel
 * @param val write value
 * @return None
 */
#define __LL_DMA_ChannelRegCR2_Write(__DMA__, ch, val) WRITE_REG((__DMA__)->CH[(ch)].CR2, val)

/**
 * @brief Misspelled legacy alias ("DAM") of __LL_DMA_ChannelRegCR2_Write,
 *        kept so existing callers continue to compile. Prefer the correctly
 *        spelled macro in new code (matches __LL_DMA_ChannelRegCR0_Write).
 */
#define __LL_DAM_ChannelRegCR2_Write(__DMA__, ch, val) __LL_DMA_ChannelRegCR2_Write(__DMA__, ch, val)
/**
* @brief Destination handshake interface set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param ifc Destination handshake interface
* @return None
*/
#define __LL_DMA_DstHandshakeIfc_Set(__DMA__, ch, ifc) MODIFY_REG((__DMA__)->CH[(ch)].CR3, DMA_CH_CR3_DHSIF_Msk, ifc)
/**
* @brief Source handshake interface set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @param ifc Source handshake interface
* @return None
*/
#define __LL_DMA_SrcHandshakeIfc_Set(__DMA__, ch, ifc) MODIFY_REG((__DMA__)->CH[(ch)].CR3, DMA_CH_CR3_SHSIF_Msk, ifc)
/**
* @brief FIFO mode half set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_FIFOModeHalf_Set(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR3, DMA_CH_CR3_FMD_Msk)
/**
* @brief FIFO mode once set
* @param __DMA__ Specifies DMA peripheral
* @param ch DMA channel
* @return None
*/
#define __LL_DMA_FIFOModeOnce_Set(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR3, DMA_CH_CR3_FMD_Msk)
/**
 * @brief Channel flow control mode source request set
 * @param __DMA__ Specifies DMA peripheral
 * @param ch DMA channel
 * @return None
 */
#define __LL_DMA_ChFlowModeSrcReq_Set(__DMA__, ch) CLEAR_BIT((__DMA__)->CH[(ch)].CR3, DMA_CH_CR3_FCMD_Msk)
/**
 * @brief Channel flow control mode destination request set
 * @param __DMA__ Specifies DMA peripheral
 * @param ch DMA channel
 * @return None
 */
#define __LL_DMA_ChFlowModeDstReq_Set(__DMA__, ch) SET_BIT((__DMA__)->CH[(ch)].CR3, DMA_CH_CR3_FCMD_Msk)
/**
 * @brief Channel register CR3 write
 * @param __DMA__ Specifies DMA peripheral
 * @param ch DMA channel
 * @param val write value
 * @return None
 */
#define __LL_DMA_ChannelRegCR3_Write(__DMA__, ch, val) WRITE_REG((__DMA__)->CH[(ch)].CR3, val)

/**
 * @brief Misspelled legacy alias ("DAM") of __LL_DMA_ChannelRegCR3_Write,
 *        kept so existing callers continue to compile. Prefer the correctly
 *        spelled macro in new code (matches __LL_DMA_ChannelRegCR0_Write).
 */
#define __LL_DAM_ChannelRegCR3_Write(__DMA__, ch, val) __LL_DMA_ChannelRegCR3_Write(__DMA__, ch, val)
/**
* @brief Channel 1 transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 1 transfer hasn't completed
* @retval 1 Channel 1 transfer has completed
*/
#define __LL_DMA_Ch1TransComSta_Get(__DMA__) (READ_BIT((__DMA__)->TSR, DMA_TSR_TS_CH1_Msk) >> DMA_TSR_TS_CH1_Pos)
/**
* @brief Channel 0 transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 0 transfer hasn't completed
* @retval 1 Channel 0 transfer has completed
*/
#define __LL_DMA_Ch0TransComSta_Get(__DMA__) (READ_BIT((__DMA__)->TSR, DMA_TSR_TS_CH0_Msk) >> DMA_TSR_TS_CH0_Pos)
/**
* @brief Channel 1 block transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 1 block transfer hasn't completed
* @retval 1 Channel 1 block transfer has completed
*/
#define __LL_DMA_Ch1BlockTransComSta_Get(__DMA__) (READ_BIT((__DMA__)->BTSR, DMA_BTSR_BTS_CH1_Msk) >> DMA_BTSR_BTS_CH1_Pos)
/**
* @brief Channel 0 block transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 0 block transfer hasn't completed
* @retval 1 Channel 0 block transfer has completed
*/
#define __LL_DMA_Ch0BlockTransComSta_Get(__DMA__) (READ_BIT((__DMA__)->BTSR, DMA_BTSR_BTS_CH0_Msk) >> DMA_BTSR_BTS_CH0_Pos)
/**
* @brief Channel 1 source transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 1 source transfer hasn't completed
* @retval 1 Channel 1 source transfer has completed
*/
#define __LL_DMA_Ch1SrcTransComSta_Get(__DMA__) (READ_BIT((__DMA__)->STSR, DMA_STSR_STS_CH1_Msk) >> DMA_STSR_STS_CH1_Pos)
/**
* @brief Channel 0 source transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 0 source transfer hasn't completed
* @retval 1 Channel 0 source transfer has completed
*/
#define __LL_DMA_Ch0SrcTransComSta_Get(__DMA__) (READ_BIT((__DMA__)->STSR, DMA_STSR_STS_CH0_Msk) >> DMA_STSR_STS_CH0_Pos)
/**
* @brief Channel 1 destination transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 1 destination transfer hasn't completed
* @retval 1 Channel 1 destination transfer has completed
*/
#define __LL_DMA_Ch1DstTransComSta_Get(__DMA__) (READ_BIT((__DMA__)->DTSR, DMA_DTSR_DTS_CH1_Msk) >> DMA_DTSR_DTS_CH1_Pos)
/**
* @brief Channel 0 destination transfer complete status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 0 destination transfer hasn't completed
* @retval 1 Channel 0 destination transfer has completed
*/
#define __LL_DMA_Ch0DstTransComSta_Get(__DMA__) (READ_BIT((__DMA__)->DTSR, DMA_DTSR_DTS_CH0_Msk) >> DMA_DTSR_DTS_CH0_Pos)
/**
* @brief Channel 1 transfer error status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 1 transfer normal
* @retval 1 Channel 1 transfer error
*/
#define __LL_DMA_Ch1TransErrSta_Get(__DMA__) (READ_BIT((__DMA__)->TESR, DMA_TESR_TES_CH1_Msk) >> DMA_TESR_TES_CH1_Pos)
/**
* @brief Channel 0 transfer error status get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 Channel 0 transfer normal
* @retval 1 Channel 0 transfer error
*/
#define __LL_DMA_Ch0TransErrSta_Get(__DMA__) (READ_BIT((__DMA__)->TESR, DMA_TESR_TES_CH0_Msk) >> DMA_TESR_TES_CH0_Pos)
/**
* @brief Channel 1 transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch1TransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->TIPR, DMA_TIPR_TIP_CH1_Msk) >> DMA_TIPR_TIP_CH1_Pos)
/**
* @brief Channel 0 transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch0TransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->TIPR, DMA_TIPR_TIP_CH0_Msk) >> DMA_TIPR_TIP_CH0_Pos)
/**
* @brief Channel 1 block transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch1BlockTransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->BTIPR, DMA_BTIPR_BTIF_CH1_Msk) >> DMA_BTIPR_BTIF_CH1_Pos)
/**
* @brief Channel 0 block transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch0BlockTransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->BTIPR, DMA_BTIPR_BTIF_CH0_Msk) >> DMA_BTIPR_BTIF_CH0_Pos)
/**
* @brief Channel 1 source transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch1SrcTransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->STIPR, DMA_STIPR_STIF_CH1_Msk) >> DMA_STIPR_STIF_CH1_Pos)
/**
* @brief Channel 0 source transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch0SrcTransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->STIPR, DMA_STIPR_STIF_CH0_Msk) >> DMA_STIPR_STIF_CH0_Pos)
/**
* @brief Channel 1 destination transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch1DstTransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->DTIPR, DMA_DTIPR_DTIF_CH1_Msk) >> DMA_DTIPR_DTIF_CH1_Pos)
/**
* @brief Channel 0 destination transfer complete interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch0DstTransComIntSta_Get(__DMA__) (READ_BIT((__DMA__)->DTIPR, DMA_DTIPR_DTIF_CH0_Msk) >> DMA_DTIPR_DTIF_CH0_Pos)
/**
* @brief Channel 1 transfer error interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch1TransErrIntSta_Get(__DMA__) (READ_BIT((__DMA__)->TEIPR, DMA_TEIPR_TEIF_CH1_Msk) >> DMA_TEIPR_TEIF_CH1_Pos)
/**
* @brief Channel 0 transfer error interrupt pending get
* @param __DMA__ Specifies DMA peripheral
* @retval 0 no pending
* @retval 1 pending
*/
#define __LL_DMA_Ch0TransErrIntSta_Get(__DMA__) (READ_BIT((__DMA__)->TEIPR, DMA_TEIPR_TEIF_CH0_Msk) >> DMA_TEIPR_TEIF_CH0_Pos)
/**
* @brief Channel 1 transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1TransCom_Int_En(__DMA__) SET_BIT((__DMA__)->TIMR, DMA_TIMR_TIWE_CH1_Msk | DMA_TIMR_TIE_CH1_Msk)
/**
* @brief Channel 1 transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1TransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->TIMR, DMA_TIMR_TIWE_CH1_Msk | DMA_TIMR_TIE_CH1_Msk, DMA_TIMR_TIWE_CH1_Msk | (0x0 << DMA_TIMR_TIE_CH1_Pos))
/**
* @brief Channel 0 transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0TransCom_Int_En(__DMA__) SET_BIT((__DMA__)->TIMR, DMA_TIMR_TIWE_CH0_Msk | DMA_TIMR_TIE_CH0_Msk)
/**
* @brief Channel 0 transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0TransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->TIMR, DMA_TIMR_TIWE_CH0_Msk | DMA_TIMR_TIE_CH0_Msk, DMA_TIMR_TIWE_CH0_Msk | (0x0 << DMA_TIMR_TIE_CH0_Pos))
/**
* @brief Reg TIMR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegTIMR_Write(__DMA__, val) WRITE_REG((__DMA__)->TIMR, val)
/**
* @brief Channel 1 block transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1BlockTransCom_Int_En(__DMA__) SET_BIT((__DMA__)->BTIMR, DMA_BTIMR_BTIWE_CH1_Msk | DMA_BTIMR_BTIE_CH1_Msk)
/**
* @brief Channel 1 block transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1BlockTransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->BTIMR, DMA_BTIMR_BTIWE_CH1_Msk | DMA_BTIMR_BTIE_CH1_Msk, DMA_BTIMR_BTIWE_CH1_Msk | (0x0 << DMA_BTIMR_BTIE_CH1_Pos))
/**
* @brief Channel 0 block transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0BlockTransCom_Int_En(__DMA__) SET_BIT((__DMA__)->BTIMR, DMA_BTIMR_BTIWE_CH0_Msk | DMA_BTIMR_BTIE_CH0_Msk)
/**
* @brief Channel 0 block transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0BlockTransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->BTIMR, DMA_BTIMR_BTIWE_CH0_Msk | DMA_BTIMR_BTIE_CH0_Msk, DMA_BTIMR_BTIWE_CH0_Msk | (0x0 << DMA_BTIMR_BTIE_CH0_Pos))
/**
* @brief Reg BTIMR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegBTIMR_Write(__DMA__, val) WRITE_REG((__DMA__)->BTIMR, val)
/**
* @brief Channel 1 source transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1SrcTransCom_Int_En(__DMA__) SET_BIT((__DMA__)->STIMR, DMA_STIMR_STIWE_CH1_Msk | DMA_STIMR_STIE_CH1_Msk)
/**
* @brief Channel 1 source transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1SrcTransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->STIMR, DMA_STIMR_STIWE_CH1_Msk | DMA_STIMR_STIE_CH1_Msk, DMA_STIMR_STIWE_CH1_Msk | (0x0 << DMA_STIMR_STIE_CH1_Pos))
/**
* @brief Channel 0 source transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0SrcTransCom_Int_En(__DMA__) SET_BIT((__DMA__)->STIMR, DMA_STIMR_STIWE_CH0_Msk | DMA_STIMR_STIE_CH0_Msk)
/**
* @brief Channel 0 source transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0SrcTransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->STIMR, DMA_STIMR_STIWE_CH0_Msk | DMA_STIMR_STIE_CH0_Msk, DMA_STIMR_STIWE_CH0_Msk | (0x0 << DMA_STIMR_STIE_CH0_Pos))
/**
* @brief Reg STIMR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegSTIMR_Write(__DMA__, val) WRITE_REG((__DMA__)->STIMR, val)
/**
* @brief Channel 1 destination transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1DstTransCom_Int_En(__DMA__) SET_BIT((__DMA__)->DTIMR, DMA_DTIMR_DTIWE_CH1_Msk | DMA_DTIMR_DTIE_CH1_Msk)
/**
* @brief Channel 1 destination transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1DstTransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->DTIMR, DMA_DTIMR_DTIWE_CH1_Msk | DMA_DTIMR_DTIE_CH1_Msk, DMA_DTIMR_DTIWE_CH1_Msk | (0x0 << DMA_DTIMR_DTIE_CH1_Pos))
/**
* @brief Channel 0 destination transfer complete interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0DstTransCom_Int_En(__DMA__) SET_BIT((__DMA__)->DTIMR, DMA_DTIMR_DTIWE_CH0_Msk | DMA_DTIMR_DTIE_CH0_Msk)
/**
* @brief Channel 0 destination transfer complete interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0DstTransCom_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->DTIMR, DMA_DTIMR_DTIWE_CH0_Msk | DMA_DTIMR_DTIE_CH0_Msk, DMA_DTIMR_DTIWE_CH0_Msk | (0x0 << DMA_DTIMR_DTIE_CH0_Pos))
/**
* @brief Reg DTIMR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegDTIMR_Write(__DMA__, val) WRITE_REG((__DMA__)->DTIMR, val)
/**
* @brief Channel 1 transfer error interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1TransErr_Int_En(__DMA__) SET_BIT((__DMA__)->TEIMR, DMA_TEIMR_TEIWE_CH1_Msk | DMA_TEIMR_TEIE_CH1_Msk)
/**
* @brief Channel 1 transfer error interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1TransErr_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->TEIMR, DMA_TEIMR_TEIWE_CH1_Msk | DMA_TEIMR_TEIE_CH1_Msk, DMA_TEIMR_TEIWE_CH1_Msk | (0x0 << DMA_TEIMR_TEIE_CH1_Pos))
/**
* @brief Channel 0 transfer error interrupt enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0TransErr_Int_En(__DMA__) SET_BIT((__DMA__)->TEIMR, DMA_TEIMR_TEIWE_CH0_Msk | DMA_TEIMR_TEIE_CH0_Msk)
/**
* @brief Channel 0 transfer error interrupt disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0TransErr_Int_Dis(__DMA__) \
MODIFY_REG((__DMA__)->TEIMR, DMA_TEIMR_TEIWE_CH0_Msk | DMA_TEIMR_TEIE_CH0_Msk, DMA_TEIMR_TEIWE_CH0_Msk | (0x0 << DMA_TEIMR_TEIE_CH0_Pos))
/**
* @brief Reg TEIMR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegTEIMR_Write(__DMA__, val) WRITE_REG((__DMA__)->TEIMR, val)
/**
* @brief Channel 1 transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1TransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->TCR, DMA_TCR_TC_CH1_Msk)
/**
* @brief Channel 0 transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0TransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->TCR, DMA_TCR_TC_CH0_Msk)
/**
* @brief Reg TCR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegTCR_Write(__DMA__, val) WRITE_REG((__DMA__)->TCR, val)
/**
* @brief Channel 1 block transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1BlockTransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->BTCR, DMA_BTCR_BTC_CH1_Msk)
/**
* @brief Channel 0 block transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0BlockTransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->BTCR, DMA_BTCR_BTC_CH0_Msk)
/**
* @brief Reg BTCR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegBTCR_Write(__DMA__, val) WRITE_REG((__DMA__)->BTCR, val)
/**
* @brief Channel 1 source transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1SrcTransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->STCR, DMA_STCR_STC_CH1_Msk)
/**
* @brief Channel 0 source transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0SrcTransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->STCR, DMA_STCR_STC_CH0_Msk)
/**
* @brief Reg STCR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegSTCR_Write(__DMA__, val) WRITE_REG((__DMA__)->STCR, val)
/**
* @brief Channel 1 destination transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1DstTransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->DTCR, DMA_DTCR_DTC_CH1_Msk)
/**
* @brief Channel 0 destination transfer complete status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0DstTransComSta_Clr(__DMA__) WRITE_REG((__DMA__)->DTCR, DMA_DTCR_DTC_CH0_Msk)
/**
* @brief Reg DTCR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegDTCR_Write(__DMA__, val) WRITE_REG((__DMA__)->DTCR, val)
/**
* @brief Channel 1 transfer error status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1TransErrSta_Clr(__DMA__) WRITE_REG((__DMA__)->TECR, DMA_TECR_TEC_CH1_Msk)
/**
* @brief Channel 0 transfer error status clear
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0TransErrSta_Clr(__DMA__) WRITE_REG((__DMA__)->TECR, DMA_TECR_TEC_CH0_Msk)
/**
* @brief Reg TECR Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegTECR_Write(__DMA__, val) WRITE_REG((__DMA__)->TECR, val)
/**
* @brief Peripheral enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Periph_En(__DMA__) SET_BIT((__DMA__)->CR0, DMA_CR0_PEN_Msk)
/**
* @brief Peripheral disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Periph_Dis(__DMA__) CLEAR_BIT((__DMA__)->CR0, DMA_CR0_PEN_Msk)
/**
* @brief Reg CR0 Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegCR0_Write(__DMA__, val) WRITE_REG((__DMA__)->CR0, val)
/**
* @brief Channel 1 enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1_En(__DMA__) SET_BIT((__DMA__)->CR1, DMA_CR1_CHWE_CH1_Msk | DMA_CR1_CHEN_CH1_Msk)
/**
* @brief Channel 1 disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch1_Dis(__DMA__) \
MODIFY_REG((__DMA__)->CR1, DMA_CR1_CHWE_CH1_Msk | DMA_CR1_CHEN_CH1_Msk, DMA_CR1_CHWE_CH1_Msk | (0x0 << DMA_CR1_CHEN_CH1_Pos))
/**
* @brief Channel 0 enable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0_En(__DMA__) SET_BIT((__DMA__)->CR1, DMA_CR1_CHWE_CH0_Msk | DMA_CR1_CHEN_CH0_Msk)
/**
* @brief Channel 0 disable
* @param __DMA__ Specifies DMA peripheral
* @return None
*/
#define __LL_DMA_Ch0_Dis(__DMA__) \
MODIFY_REG((__DMA__)->CR1, DMA_CR1_CHWE_CH0_Msk | DMA_CR1_CHEN_CH0_Msk, DMA_CR1_CHWE_CH0_Msk | (0x0 << DMA_CR1_CHEN_CH0_Pos))
/**
* @brief Reg CR1 Write
* @param __DMA__ Specifies DMA peripheral
* @param val write value
* @return None
*/
#define __LL_DMA_RegCR1_Write(__DMA__, val) WRITE_REG((__DMA__)->CR1, val)
/**
* @}
*/
/* Exported functions --------------------------------------------------------*/
/** @addtogroup DMA_LL_Exported_Functions
* @{
*/
/** @addtogroup DMA_LL_Exported_Functions_Group1
* @{
*/
LL_StatusETypeDef LL_DMA_Init(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch, DMA_UserCfgTypeDef *user_cfg);
LL_StatusETypeDef LL_DMA_DeInit(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch);
/**
* @}
*/
/** @addtogroup DMA_LL_Exported_Functions_Group2
* @{
*/
DMA_ChannelETypeDef LL_DMA_ChannelRequest(void);
DMA_ChannelETypeDef LL_DMA_ChReqSpecific(DMA_ChannelETypeDef ch);
void LL_DMA_ChannelRelease(DMA_ChannelETypeDef ch);
/**
* @}
*/
/** @addtogroup DMA_LL_Exported_Functions_Group3
* @{
*/
LL_StatusETypeDef LL_DMA_Start_CPU(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch,
uint32_t src_addr, uint32_t dst_addr, uint32_t data_len);
LL_StatusETypeDef LL_DMA_Start_IT(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch,
uint32_t src_addr, uint32_t dst_addr, uint32_t data_len);
LL_StatusETypeDef LL_DMA_Stop_CPU(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch);
LL_StatusETypeDef LL_DMA_Stop_IT(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch);
LL_StatusETypeDef LL_DMA_WaitComplete_CPU(DMA_TypeDef *Instance, DMA_ChannelETypeDef ch, uint32_t timeout);
/**
* @}
*/
/** @addtogroup DMA_LL_Exported_Functions_Interrupt
* @{
*/
void LL_DMA_IRQHandler(DMA_TypeDef *Instance);
/**
* @}
*/
/**
* @}
*/
/* Private constants ---------------------------------------------------------*/
/* Private variables ---------------------------------------------------------*/
/* Private types -------------------------------------------------------------*/
/* Private macros ------------------------------------------------------------*/
/* Private functions ---------------------------------------------------------*/
/**
* @}
*/
/**
* @}
*/
#ifdef __cplusplus
}
#endif /* __cplusplus */
#endif /* _TAE32F53XX_LL_DMA_H_ */
/************************* (C) COPYRIGHT Tai-Action *****END OF FILE***********/
| 18,619 |
903 | <reponame>FranciscoShi/FAutoTest<filename>fastAutoTest/core/common/errormsgmanager.py
# -*- coding: utf-8 -*-
'''
Tencent is pleased to support the open source community by making FAutoTest available.
Copyright (C) 2018 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
https://opensource.org/licenses/BSD-3-Clause
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
'''
"""
设备连接类错误信息
"""
ERROR_CODE_DEVICE_NOT_CONNECT = 1000 # 设备没有连上
ERROR_CODE_MULTIPLE_DEVICE = 1001 # 有多个设备连接,但是使用时并没有指定具体设备
ERROR_CODE_NOT_CONFIG_ENV = 1002 # 没有配置环境变量
"""
获取debug url相关错误
"""
ERROR_CODE_MULTIPLE_URL = 2000 # 多个debug URL
ERROR_CODE_NOT_ENABLE_DEBUG_MODE = 2001 # 没有打开调试模式、
ERROR_CODE_NOT_ENTER_H5 = 2002 # 打开了调试模式, 但是当前并没有进入H5页面
ERROR_CODE_NOT_FOUND_WEIXIN_TOOLS_PROCESS = 2003 # 找不到微信Tools进程
ERROR_CODE_CONFIG_PROXY = 2004 # 无法获取debug url,检查是否配置了代理
ERROR_CODE_NOT_ENTER_XCX = 2005 # 未在小程序首屏进行初始化
ERROR_CODE_NOT_GET_XCX_PAGE_INFO = 2006 # 获取小程序页面特征失败
ERROR_CODE_GET_PID_WRONG = 2007 # 检测到小程序进程,获取PID失败
"""
协议相关操作错误
"""
ERROR_CODE_BAD_REQUEST = 3000
ERROR_CODE_REQUEST_EXCEPTION = 3001
ERROR_CODE_REQUEST_NOT_MATCH_RESPONSE = 3002
"""
websocket链接相关错误
"""
ERROR_CODE_CONNECT_CLOSED = 4000
"""
运行时错误
"""
ERROR_CODE_GETCOORDINATE = 5000 # 获取Element坐标失败,该Element不存在
ERROR_CODE_SETUP_FRAME_PAGE = 5001 # Body标签中的IFrame页面不存在或还未加载
"""
未知错误
"""
ERROR_CODE_UNKNOWN = -999999 # 未知错误
_ERROR_CODE_SET = [
ERROR_CODE_NOT_CONFIG_ENV,
ERROR_CODE_DEVICE_NOT_CONNECT,
ERROR_CODE_MULTIPLE_DEVICE,
ERROR_CODE_MULTIPLE_URL,
ERROR_CODE_NOT_ENABLE_DEBUG_MODE,
ERROR_CODE_NOT_ENTER_H5,
ERROR_CODE_NOT_FOUND_WEIXIN_TOOLS_PROCESS,
ERROR_CODE_CONFIG_PROXY,
ERROR_CODE_BAD_REQUEST,
ERROR_CODE_REQUEST_EXCEPTION,
ERROR_CODE_REQUEST_NOT_MATCH_RESPONSE,
ERROR_CODE_CONNECT_CLOSED,
ERROR_CODE_UNKNOWN,
ERROR_CODE_NOT_ENTER_XCX,
ERROR_CODE_NOT_GET_XCX_PAGE_INFO,
ERROR_CODE_GET_PID_WRONG,
ERROR_CODE_GETCOORDINATE,
ERROR_CODE_SETUP_FRAME_PAGE
]
_ERROR_MSG_MAPPING = {
ERROR_CODE_NOT_CONFIG_ENV: "执行adb命令失败,请检查是否配置系统环境变量",
ERROR_CODE_DEVICE_NOT_CONNECT: "没有设备连上,请用adb device确认是否有设备连接到PC",
ERROR_CODE_MULTIPLE_DEVICE: "当前有多个设备连接到PC,请创建H5Driver时指定要操作的设备",
ERROR_CODE_MULTIPLE_URL: "检测到多个debug url",
ERROR_CODE_NOT_ENABLE_DEBUG_MODE: "请在微信端打开H5调试模式,或者后台杀死微信进程后重试",
ERROR_CODE_NOT_ENTER_H5: "在执行脚本前,先进入H5页面",
ERROR_CODE_NOT_FOUND_WEIXIN_TOOLS_PROCESS: "找不到微信Tools进程",
ERROR_CODE_CONFIG_PROXY: "无法获取debug url,并检查是否配置了代理,是否已经建立了websocket连接未关闭",
ERROR_CODE_NOT_ENTER_XCX: "获取小程序pid失败,请检查是否在小程序首屏进行初始化",
ERROR_CODE_NOT_GET_XCX_PAGE_INFO: "获取小程序页面特征失败",
ERROR_CODE_BAD_REQUEST: "操作错误,请检查",
ERROR_CODE_REQUEST_EXCEPTION: "操作发生异常",
ERROR_CODE_REQUEST_NOT_MATCH_RESPONSE: "请求和响应不匹配",
ERROR_CODE_CONNECT_CLOSED: "websocket链接已关闭",
ERROR_CODE_GET_PID_WRONG: "检测到小程序进程,获取PID失败",
ERROR_CODE_UNKNOWN: "未知错误",
ERROR_CODE_GETCOORDINATE: "获取Element坐标失败,该Element不存在",
ERROR_CODE_SETUP_FRAME_PAGE: "Body标签中的IFrame页面不存在或还未加载"
}
class ErrorMsgManager(object):
    """Singleton that maps error codes to human-readable messages.

    Use ``ErrorMsgManager().errorCodeToString(code)`` to translate any of the
    module-level ``ERROR_CODE_*`` constants; unrecognized codes fall back to
    the generic "unknown error" message.
    """

    _instance = None  # cached singleton instance

    def __new__(cls, *args, **kwargs):
        # Classic singleton: create the instance once, then hand back the
        # cached object on every later construction.
        if cls._instance is None:
            # Bug fix: ``object.__new__`` must be called with the class only.
            # Forwarding *args/**kwargs (as the original code did) raises
            # TypeError on Python 3 whenever arguments are supplied, because
            # __init__ is not overridden on this class.
            cls._instance = super(ErrorMsgManager, cls).__new__(cls)
        return cls._instance

    def errorCodeToString(self, errorCode):
        """Return the message text for ``errorCode``.

        Unknown codes are coerced to ``ERROR_CODE_UNKNOWN`` first, so the
        mapping lookup can never raise ``KeyError``.
        """
        if errorCode not in _ERROR_CODE_SET:
            errorCode = ERROR_CODE_UNKNOWN
        return _ERROR_MSG_MAPPING[errorCode]
if __name__ == "__main__":
errorMgr = ErrorMsgManager()
print(errorMgr.errorCodeToString(0))
| 2,533 |
335 | <reponame>Safal08/Hacktoberfest-1<filename>M/Misadventure_noun.json
{
"word": "Misadventure",
"definitions": [
"Death caused by a person accidentally while performing a legal act without negligence or intent to harm.",
"An unfortunate incident; a mishap."
],
"parts-of-speech": "Noun"
} | 118 |
480 | /*
* Copyright [2013-2021], Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.polardbx.executor.operator;
import com.google.common.util.concurrent.ListenableFuture;
import com.alibaba.polardbx.common.exception.TddlRuntimeException;
import com.alibaba.polardbx.common.exception.code.ErrorCode;
import com.alibaba.polardbx.config.ConfigDataMode;
import com.alibaba.polardbx.executor.chunk.Block;
import com.alibaba.polardbx.executor.chunk.Chunk;
import com.alibaba.polardbx.executor.chunk.Chunk.ChunkRow;
import com.alibaba.polardbx.executor.chunk.ChunkConverter;
import com.alibaba.polardbx.executor.chunk.NullBlock;
import com.alibaba.polardbx.optimizer.context.ExecutionContext;
import com.alibaba.polardbx.optimizer.core.datatype.DataType;
import com.alibaba.polardbx.optimizer.core.expression.calc.IExpression;
import com.alibaba.polardbx.optimizer.core.join.EquiJoinKey;
import com.alibaba.polardbx.optimizer.core.row.JoinRow;
import com.alibaba.polardbx.optimizer.core.row.Row;
import org.apache.calcite.rel.core.JoinRelType;
import java.util.ArrayList;
import java.util.List;
/**
* Sort-Merge Join Executor
*
*/
public class SortMergeJoinExec extends AbstractJoinExec {

    // Wrappers over the two sorted inputs; "outer" is the probe/preserved side.
    private final OneJoinSide outerSide;
    private final OneJoinSide innerSide;
    // All-null row used to pad unmatched outer rows in outer joins.
    private final ChunkRow nullInnerRow;
    // True for outer joins and ANTI join: unmatched outer rows must still be emitted.
    private final boolean outerOrAntiJoin;
    // Per-key sort-direction multiplier: 1 for ascending, -1 for descending.
    private final int compareCoeffients[];

    // Join key of the current outer/inner subset being matched.
    private ChunkRow outerKey;
    private ChunkRow innerKey;
    // True while a subset collection was interrupted mid-key (input chunk
    // exhausted) and must be resumed on the next advance call.
    private boolean continueAdvanceCurrentInner;
    private boolean continueAdvanceCurrentOuter;
    // All consecutive rows sharing the current join key on each side.
    private List<ChunkRow> outerSubset;
    private List<ChunkRow> innerSubset;
    // True when the corresponding side must collect its next key subset.
    private boolean needAdvanceOuter;
    private boolean needAdvanceInner;
    // True once the corresponding input is fully drained.
    private boolean outerFinished;
    private boolean innerFinished;
    // Iterator over the joined (or null-padded) results of the current key match.
    private ResultsIterator resultsIter;
    // True if the inner input produced no rows at all.
    private boolean innerEmpty;
    // True once it is known that no further rows can be produced.
    private boolean passNothing;
    private boolean bStartConsumeInner; // whether the first inner row (or inner end-of-stream) has been observed
    private boolean bStartConsumeOuter; // whether the first outer row (or outer end-of-stream) has been observed
    // Future the operator is currently blocked on (NOT_BLOCKED when runnable).
    private ListenableFuture<?> blocked;

    public SortMergeJoinExec(Executor outerInput,
                             Executor innerInput,
                             JoinRelType joinType,
                             boolean maxOneRow,
                             List<EquiJoinKey> joinKeys,
                             List<Boolean> keyColumnIsAscending,
                             IExpression otherCondition,
                             List<IExpression> antiJoinOperands,
                             ExecutionContext context) {
        super(outerInput, innerInput, joinType, maxOneRow, joinKeys, otherCondition, antiJoinOperands, null, context);

        createBlockBuilders();

        this.outerSide = new OneJoinSide(outerInput, outerKeyChunkGetter);
        this.innerSide = new OneJoinSide(innerInput, innerKeyChunkGetter);
        this.nullInnerRow = buildNullRow(innerInput.getDataTypes());
        this.outerOrAntiJoin = outerJoin || joinType == JoinRelType.ANTI;
        this.blocked = NOT_BLOCKED;

        // Both inputs must be sorted on the join keys; record each key's
        // direction so compare() can honor descending orderings.
        this.compareCoeffients = new int[joinKeys.size()];
        for (int i = 0; i < joinKeys.size(); i++) {
            this.compareCoeffients[i] = keyColumnIsAscending.get(i) ? 1 : -1;
        }
    }

    @Override
    void doOpen() {
        outerInput.open();
        innerInput.open();

        // Reset inner states
        outerFinished = false;
        innerFinished = false;
        outerSubset = new ArrayList<>();
        innerSubset = new ArrayList<>();
        this.bStartConsumeInner = false;
        this.bStartConsumeOuter = false;
    }

    @Override
    Chunk doNextChunk() {
        // Phase 1: wait until each side has delivered its first row (or
        // reported end-of-stream). Until then we cannot start matching.
        if (!bStartConsumeInner || !bStartConsumeOuter) {
            if (!bStartConsumeInner) {
                boolean exist = innerSide.next();
                if (exist || innerSide.isDone()) {
                    innerEmpty = innerSide.isDone();
                    if (joinType == JoinRelType.ANTI) {
                        doSpecialCheckForAntiJoin();
                    }
                    consumeInner();
                    bStartConsumeInner = true;
                }
            }
            if (!bStartConsumeOuter) {
                boolean exist = outerSide.next();
                if (exist || outerSide.isDone()) {
                    consumeOuter();
                    bStartConsumeOuter = true;
                }
            }
        }

        // Still blocked waiting for the first row of either side.
        if (!bStartConsumeInner || !bStartConsumeOuter) {
            return null;
        }

        if (passNothing) {
            return null;
        }

        // Phase 2: produce joined rows until the output chunk is full or
        // no more rows are currently available.
        while (currentPosition() < chunkLimit) {
            Row row = nextRow();
            if (row == null) {
                break;
            }

            if (semiJoin) {
                // Semi/anti joins emit only outer-side columns.
                appendChunkRow((ChunkRow) row);
            } else {
                final JoinRow r = (JoinRow) row;
                appendJoinedChunkRows((ChunkRow) r.getLeftRowSet(), (ChunkRow) r.getRightRowSet());
            }
        }

        if (currentPosition() == 0) {
            return null;
        } else {
            return buildChunkAndReset();
        }
    }

    // Copy the outer-side columns of `row` into the output block builders.
    private void appendChunkRow(ChunkRow row) {
        for (int i = 0; i < outerInput.getDataTypes().size(); i++) {
            row.getChunk().getBlock(i).writePositionTo(row.getPosition(), blockBuilders[i]);
        }
    }

    // Copy left-side then right-side columns of a joined pair into the output.
    private void appendJoinedChunkRows(ChunkRow left, ChunkRow right) {
        int col = 0;
        for (int i = 0; i < left.getChunk().getBlockCount(); i++) {
            left.getChunk().getBlock(i).writePositionTo(left.getPosition(), blockBuilders[col++]);
        }

        // Single join only output the first row of right side
        final int rightColumns = singleJoin ? 1 : right.getChunk().getBlockCount();
        for (int i = 0; i < rightColumns; i++) {
            right.getChunk().getBlock(i).writePositionTo(right.getPosition(), blockBuilders[col++]);
        }

        assert col == blockBuilders.length;
    }

    @Override
    void doClose() {
        outerInput.close();
        innerInput.close();
    }

    /**
     * Core merge loop: returns the next joined row, or null when either the
     * output is exhausted for now (inputs blocked) or the join is finished.
     */
    private Row nextRow() {
        while (true) {
            // First of all, consume all the joined results in results iterator if exists
            if (resultsIter != null) {
                Row record = resultsIter.next();
                if (record != null) {
                    return record;
                } else {
                    resultsIter = null;
                }
            }

            if (outerFinished || innerFinished) {
                if (!outerOrAntiJoin) {
                    // for inner join, if any of inner/outer is finished, join is finished.
                    passNothing = true;
                }
                break;
            }

            if (needAdvanceOuter) {
                advanceOuter();
            }
            if (needAdvanceInner) {
                advanceInner();
            }
            if (!needAdvanceInner && !needAdvanceOuter) {
                // Both key subsets are fully collected; proceed to compare/join.
            } else {
                // One side could not finish collecting its subset (input chunk
                // exhausted); bail out and retry when more data arrives.
                break;
            }

            // Merge step: compare the current join keys of both sides.
            int compare = compare(outerKey, innerKey);
            if (compare == 0) {
                if (checkJoinKeysNotNull(outerKey)) {
                    // Equal, non-null keys: emit the cartesian product of both subsets.
                    resultsIter = new JoinResultsIterator(outerSubset, innerSubset);
                } else if (outerOrAntiJoin) {
                    // NULL keys never match; outer/anti joins still emit the outer rows.
                    resultsIter = new JoinNotMatchedResultsIterator(outerSubset);
                }
                consumeInner();
                consumeOuter();
            } else if (compare < 0) {
                // For outer-join or anti-join, we have to generate some null rows even if no inner rows are matched
                if (outerOrAntiJoin) {
                    resultsIter = new JoinNotMatchedResultsIterator(outerSubset);
                }
                consumeOuter();
            } else { // compare > 0
                consumeInner();
            }
        }

        if (outerOrAntiJoin) {
            // for outer-join or anti-join, we have to drain out the outer side
            while (true) {
                // consumes all the joined results in results iterator if exists
                if (resultsIter != null) {
                    Row record = resultsIter.next();
                    if (record != null) {
                        return record;
                    } else {
                        resultsIter = null;
                    }
                }

                if (outerFinished) {
                    passNothing = true;
                    break;
                }

                if (needAdvanceOuter) {
                    advanceOuter();
                }
                if (!needAdvanceOuter) {
                    // Outer subset fully collected; emit its unmatched rows.
                } else {
                    // Outer subset incomplete (input blocked); retry later.
                    break;
                }

                resultsIter = new JoinNotMatchedResultsIterator(outerSubset);
                consumeOuter();
            }
        }
        return null;
    }

    // Collect (or resume collecting) the next same-key subset on the inner side.
    private void advanceInner() {
        if (continueAdvanceCurrentInner) {
            // Resume a subset collection that was interrupted mid-key.
            boolean finishCurrentProbe = continueAdvanceOneSide(innerSide, innerSubset, innerKey);
            continueAdvanceCurrentInner = !finishCurrentProbe;
            needAdvanceInner = !finishCurrentProbe;
        } else {
            innerKey = advanceOneSide(innerSide, innerSubset);
            if (innerSide.currentChunk == null && !innerSide.isDone()) {
                // The input ran out of buffered chunks before the key changed:
                // the subset may have more rows; keep collecting next time.
                continueAdvanceCurrentInner = true;
                needAdvanceInner = true;
            } else {
                needAdvanceInner = false;
            }
        }
    }

    // Collect (or resume collecting) the next same-key subset on the outer side.
    private void advanceOuter() {
        if (continueAdvanceCurrentOuter) {
            boolean finishCurrentProbe = continueAdvanceOneSide(outerSide, outerSubset, outerKey);
            continueAdvanceCurrentOuter = !finishCurrentProbe;
            needAdvanceOuter = !finishCurrentProbe;
        } else {
            outerKey = advanceOneSide(outerSide, outerSubset);
            if (outerSide.currentChunk == null && !outerSide.isDone()) {
                continueAdvanceCurrentOuter = true;
                needAdvanceOuter = true;
            } else {
                needAdvanceOuter = false;
            }
        }
    }

    // Mark the inner subset consumed; either finish the side or request the next subset.
    private void consumeInner() {
        if (innerSide.isDone()) {
            innerFinished = true;
            innerSubset = null;
        } else {
            needAdvanceInner = true;
        }
    }

    // Mark the outer subset consumed; either finish the side or request the next subset.
    private void consumeOuter() {
        if (outerSide.isDone()) {
            outerFinished = true;
            outerSubset = null;
        } else {
            needAdvanceOuter = true;
        }
    }

    /**
     * Start a fresh subset: the current row plus all following rows with the
     * same join key. Returns that join key. May stop early if the input's
     * buffered chunk runs out (detected by the caller via currentChunk == null).
     */
    private ChunkRow advanceOneSide(OneJoinSide input, List<ChunkRow> subset) {
        assert !input.isDone() : "input is done";

        // add current row to product subset
        subset.clear();
        subset.add(input.currentRow());
        ChunkRow joinKey = input.currentJoinKey();

        // ... along with all successive rows with same join key
        while (input.next() && compare(joinKey, input.currentJoinKey()) == 0) {
            subset.add(input.currentRow());
        }
        return joinKey;
    }

    /**
     * Continue filling a subset started by advanceOneSide after the input
     * was temporarily exhausted. Returns true once the subset is complete
     * (key changed or input done).
     */
    private boolean continueAdvanceOneSide(OneJoinSide input, List<ChunkRow> subset, ChunkRow currentJoinKey) {
        assert !input.isDone() : "input is done";
        boolean finishCurrentProbe = false;
        while (input.next() && compare(currentJoinKey, input.currentJoinKey()) == 0) {
            subset.add(input.currentRow());
        }
        finishCurrentProbe = input.currentChunk != null || input.isDone();
        return finishCurrentProbe;
    }

    // Lexicographic key comparison honoring each key's sort direction.
    @SuppressWarnings("unchecked")
    private int compare(Row row1, Row row2) {
        for (int i = 0; i < joinKeys.size(); i++) {
            int result =
                joinKeys.get(i).getUnifiedType().compare(row1.getObject(i), row2.getObject(i)) * compareCoeffients[i];
            if (result != 0) {
                return result;
            }
        }
        return 0;
    }

    // Pull-style iterator over the result rows of one key-match (or non-match).
    private interface ResultsIterator {
        Row next();
    }

    /**
     * Iterates the cartesian product of matched outer/inner subsets,
     * applying the non-equi condition and semi/anti/single-join semantics.
     */
    private class JoinResultsIterator implements ResultsIterator {
        private final List<ChunkRow> outerRows;
        private final List<ChunkRow> innerRows;

        private int outerIndex = 0;
        private int innerIndex = 0;
        // Number of inner rows matched for the current outer row.
        private int matchedCount = 0;

        JoinResultsIterator(List<ChunkRow> outerRows, List<ChunkRow> innerRows) {
            this.outerRows = outerRows;
            this.innerRows = innerRows;
        }

        @Override
        public Row next() {
            while (outerIndex < outerRows.size() && innerIndex < innerRows.size()) {
                final ChunkRow innerRow = innerRows.get(innerIndex);
                final ChunkRow outerRow = outerRows.get(outerIndex);

                Row result = doNext(outerRow, innerRow);

                // Move to next pair of (outer row, inner row)
                if (++innerIndex == innerRows.size()) {
                    nextOuterRow();
                }

                if (result != null) {
                    return result;
                }
            }
            return null;
        }

        // Evaluate one (outer, inner) pair; may return a joined row, the bare
        // outer row (semi/anti), a null-padded row, or null to skip.
        private Row doNext(ChunkRow outerRow, ChunkRow innerRow) {
            final ChunkRow leftRow = joinType.leftSide(outerRow, innerRow);
            final ChunkRow rightRow = joinType.rightSide(outerRow, innerRow);
            Row joinRow = new JoinRow(leftRow.getColNum(), leftRow, rightRow, null);

            if (condition != null && !checkJoinCondition(joinRow)) {
                // Specially, for outer-join and anti-join we have to emit a row even if not matched
                if (outerOrAntiJoin && matchedCount == 0 && innerIndex == innerRows.size() - 1) {
                    if (outerJoin) {
                        return makeNullRow(outerRow);
                    } else if (checkAntiJoinOperands(outerRow) || innerEmpty) {
                        return outerRow;
                    }
                }

                // skip the row if condition check is failed
                return null;
            }

            // check max1row
            if ((!(ConfigDataMode.isFastMock())) && singleJoin && ++matchedCount > 1) {
                throw new TddlRuntimeException(ErrorCode.ERR_SCALAR_SUBQUERY_RETURN_MORE_THAN_ONE_ROW);
            }

            if (joinType == JoinRelType.SEMI) {
                // for semi-join, emit result and move forward once we found one match
                skipInnerRows();
                return outerRow;
            } else if (joinType == JoinRelType.ANTI) {
                // for anti-semi-join, move forward once we found a match
                skipInnerRows();
                return null;
            }
            return joinRow;
        }

        private void nextOuterRow() {
            innerIndex = 0;
            outerIndex++;
            matchedCount = 0;
        }

        private void skipInnerRows() {
            // just pretend to be the last inner row
            innerIndex = innerRows.size() - 1;
        }
    }

    /**
     * Iterates outer rows that matched no inner subset: null-pads them for
     * outer joins, or emits them for anti joins (subject to the anti-join
     * operand check).
     */
    private class JoinNotMatchedResultsIterator implements ResultsIterator {
        private final List<ChunkRow> outerRows;

        private int outerIndex = 0;

        JoinNotMatchedResultsIterator(List<ChunkRow> outerRows) {
            this.outerRows = outerRows;
        }

        @Override
        public Row next() {
            while (outerIndex < outerRows.size()) {
                final Row outerRow = outerRows.get(outerIndex++);
                if (outerJoin) {
                    return makeNullRow(outerRow);
                } else if (joinType == JoinRelType.ANTI) {
                    if (checkAntiJoinOperands(outerRow) || innerEmpty) {
                        return outerRow;
                    }
                } else {
                    throw new AssertionError("should be anti-join or outer-join");
                }
            }
            return null;
        }
    }

    // Build an outer row padded with an all-null inner side.
    private Row makeNullRow(Row outerRow) {
        Row leftRow = joinType.leftSide(outerRow, nullInnerRow);
        Row rightRow = joinType.rightSide(outerRow, nullInnerRow);
        return new JoinRow(leftRow.getColNum(), leftRow, rightRow, null);
    }

    /**
     * Cursor over one sorted input: tracks the current chunk/position, the
     * derived key chunk, and end-of-stream. next() returning false means
     * either "blocked, retry later" or "done" (check isDone()).
     */
    private class OneJoinSide {
        private final Executor executor;
        private final ChunkConverter keyGetter;

        private Chunk currentChunk;
        private Chunk currentKeyChunk;
        private int currentPosition;
        private boolean done = false;

        OneJoinSide(Executor executor, ChunkConverter keyGetter) {
            this.executor = executor;
            this.keyGetter = keyGetter;
        }

        boolean next() {
            currentPosition += 1;
            if (currentChunk == null || currentPosition == currentChunk.getPositionCount()) {
                currentChunk = executor.nextChunk();
                currentPosition = 0;
            }
            if (currentChunk == null) {
                // No chunk available: either the producer finished or we are blocked.
                if (executor.produceIsFinished()) {
                    done = true;
                }
                blocked = executor.produceIsBlocked();
                return false;
            } else {
                // Recompute the key columns for the freshly loaded chunk.
                currentKeyChunk = keyGetter.apply(currentChunk);
            }
            return true;
        }

        ChunkRow currentRow() {
            return currentChunk.rowAt(currentPosition);
        }

        ChunkRow currentJoinKey() {
            return currentKeyChunk.rowAt(currentPosition);
        }

        boolean isDone() {
            return done;
        }
    }

    // Build a single row whose every column is NULL, typed per `columns`.
    private ChunkRow buildNullRow(List<DataType> columns) {
        Block[] blocks = new Block[columns.size()];
        for (int i = 0; i < blocks.length; i++) {
            blocks[i] = new NullBlock(1);
        }
        Chunk chunk = new Chunk(blocks);
        return chunk.rowAt(0);
    }

    // True iff no key column of `joinKey` is NULL.
    private boolean checkJoinKeysNotNull(ChunkRow joinKey) {
        for (int i = 0; i < joinKeys.size(); i++) {
            if (joinKey.getChunk().getBlock(i).isNull(joinKey.getPosition())) {
                return false;
            }
        }
        return true;
    }

    /**
     * Anti-Joins such as 'x NOT IN (... NULL ...)' should always produce nothing
     */
    private void doSpecialCheckForAntiJoin() {
        passNothing = false;
        if (joinType == JoinRelType.ANTI && antiJoinOperands != null && !innerEmpty) {
            if (!checkJoinKeysNotNull(innerSide.currentJoinKey())) {
                // The first inner key contains NULL, so the NOT IN result is
                // empty. (Only the first row is inspected here — presumably
                // NULL keys sort first on the inner input; verify for
                // descending key orderings.)
                passNothing = true;
            }
        }
    }

    @Override
    public boolean produceIsFinished() {
        return passNothing || (outerSide.isDone() && innerSide.isDone());
    }

    @Override
    public ListenableFuture<?> produceIsBlocked() {
        return blocked;
    }
}
| 9,520 |
590 | <filename>src/include/vfs/fat/fat-control.h
#ifndef __FAT_CONTROL_H__
#define __FAT_CONTROL_H__
#ifdef __cplusplus
extern "C" {
#endif
#include <vfs/fat/fat.h>
#define FAT_TABLE_CACHE_SIZE (32)
/*
* Information about a "mounted" FAT filesystem
*/
struct fatfs_control_t {
	/* FAT boot sector */
	struct fat_bootsec_t bsec;

	/* Underlying block device */
	struct block_t * bdev;

	/* Frequently required boot sector info */
	u16_t bytes_per_sector;		/* sector size in bytes */
	u8_t sectors_per_cluster;	/* cluster size in sectors */
	u8_t number_of_fat;		/* number of FAT copies on disk */
	u32_t bytes_per_cluster;	/* cluster size in bytes (derived) */
	u32_t total_sectors;		/* total sectors of the volume */

	/* Derived FAT info */
	u32_t first_fat_sector;		/* first sector of the (first) FAT */
	u32_t sectors_per_fat;		/* sectors occupied by one FAT copy */
	u32_t fat_sectors;		/* sectors occupied by all FAT copies */
	u32_t first_root_sector;	/* first sector of the root directory */
	u32_t root_sectors;		/* sectors of the root directory (FAT12/16) */
	u32_t first_root_cluster;	/* root directory cluster (FAT32) */
	u32_t first_data_sector;	/* first sector of the data region */
	u32_t data_sectors;		/* sectors in the data region */
	u32_t data_clusters;		/* clusters in the data region */

	/* FAT type */
	enum fat_type_t type;

	/* FAT sector cache */
	struct mutex_t fat_cache_lock;			/* protects the cache state below */
	u32_t fat_cache_victim;				/* next cache slot considered for eviction */
	bool_t fat_cache_dirty[FAT_TABLE_CACHE_SIZE];	/* per-slot dirty flags */
	u32_t fat_cache_num[FAT_TABLE_CACHE_SIZE];	/* FAT sector number cached in each slot */
	u8_t * fat_cache_buf;				/* backing buffer for all cache slots */
};
/* Pack a broken-down date/time into a FAT directory-entry timestamp. */
u32_t fatfs_pack_timestamp(u32_t year, u32_t mon, u32_t day, u32_t hour, u32_t min, u32_t sec);

/* Get the current date/time as broken-down fields. */
void fatfs_current_timestamp(u32_t * year, u32_t * mon, u32_t * day, u32_t * hour, u32_t * min, u32_t * sec);

/* Convert a time_t into broken-down date/time fields. */
void fatfs_timestamp(time_t *t, u32_t * year, u32_t * mon, u32_t * day, u32_t * hour, u32_t * min, u32_t * sec);

/* Check whether clust is a valid data-region cluster number. */
bool_t fatfs_control_valid_cluster(struct fatfs_control_t * ctrl, u32_t clust);

/* Follow the FAT chain from clust by pos links; result in *next. */
int fatfs_control_nth_cluster(struct fatfs_control_t * ctrl, u32_t clust, u32_t pos, u32_t * next);

/* Mark clust as the last cluster of its chain. */
int fatfs_control_set_last_cluster(struct fatfs_control_t * ctrl, u32_t clust);

/* Allocate a free cluster to start a new chain; result in *newclust. */
int fatfs_control_alloc_first_cluster(struct fatfs_control_t * ctrl, u32_t * newclust);

/* Allocate a free cluster and link it after clust; result in *newclust. */
int fatfs_control_append_free_cluster(struct fatfs_control_t * ctrl, u32_t clust, u32_t * newclust);

/* Free all clusters of the chain starting at clust. */
int fatfs_control_truncate_clusters(struct fatfs_control_t * ctrl, u32_t clust);

/* Flush dirty FAT cache sectors to the block device. */
int fatfs_control_sync(struct fatfs_control_t * ctrl);

/* Initialize ctrl by reading the boot sector from bdev. */
int fatfs_control_init(struct fatfs_control_t * ctrl, struct block_t * bdev);

/* Release resources held by ctrl. */
int fatfs_control_exit(struct fatfs_control_t * ctrl);
#ifdef __cplusplus
}
#endif
#endif /* __FAT_CONTROL_H__ */
| 916 |
777 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_METRICS_LEAK_DETECTOR_LEAK_ANALYZER_H_
#define COMPONENTS_METRICS_LEAK_DETECTOR_LEAK_ANALYZER_H_
#include <stdint.h>
#include <map>
#include <vector>
#include "base/macros.h"
#include "components/metrics/leak_detector/custom_allocator.h"
#include "components/metrics/leak_detector/leak_detector_value_type.h"
#include "components/metrics/leak_detector/ranked_set.h"
#include "components/metrics/leak_detector/stl_allocator.h"
namespace metrics {
namespace leak_detector {
// This class looks for possible leak patterns in allocation data over time.
// Not thread-safe.
class LeakAnalyzer {
 public:
  using ValueType = LeakDetectorValueType;

  // This class uses CustomAllocator to avoid recursive malloc hook invocation
  // when analyzing allocs and frees.
  template <typename Type>
  using Allocator = STLAllocator<Type, CustomAllocator>;

  // |ranking_size| bounds how many top entries are examined per sample;
  // |num_suspicions_threshold| is the score at which an entry is reported.
  LeakAnalyzer(uint32_t ranking_size, uint32_t num_suspicions_threshold);
  ~LeakAnalyzer();

  // Take in a RankedSet of allocations, sorted by count. Removes the contents
  // of |ranked_entries| to be stored internally, which is why it is not passed
  // in as a const reference.
  void AddSample(RankedSet ranked_entries);

  // Used to report suspected leaks. Reported leaks are sorted by ValueType.
  // The returned reference stays valid until the next AddSample() call.
  const std::vector<ValueType, Allocator<ValueType>>& suspected_leaks() const {
    return suspected_leaks_;
  }

 private:
  // Analyze a list of allocation count deltas from the previous iteration. If
  // anything looks like a possible leak, update the suspicion scores.
  void AnalyzeDeltas(const RankedSet& ranked_deltas);

  // Returns the count for the given value from the previous analysis in
  // |count|. Returns true if the given value was present in the previous
  // analysis, or false if not.
  bool GetPreviousCountForValue(const ValueType& value, uint32_t* count) const;

  // Look for the top |ranking_size_| entries when analyzing leaks.
  const uint32_t ranking_size_;

  // Report suspected leaks when the suspicion score reaches this value.
  const uint32_t score_threshold_;

  // A mapping of allocation values to suspicion score. All allocations in this
  // container are suspected leaks. The score can increase or decrease over
  // time. Once the score reaches |score_threshold_|, the entry is reported as
  // a suspected leak in |suspected_leaks_|.
  std::map<ValueType,
           uint32_t,
           std::less<ValueType>,
           Allocator<std::pair<const ValueType, uint32_t>>>
      suspected_histogram_;

  // Array of allocated values that passed the suspicion threshold and are being
  // reported.
  std::vector<ValueType, Allocator<ValueType>> suspected_leaks_;

  // The most recent allocation entries, since the last call to AddSample().
  RankedSet ranked_entries_;
  // The previous allocation entries, from before the last call to AddSample().
  RankedSet prev_ranked_entries_;

  DISALLOW_COPY_AND_ASSIGN(LeakAnalyzer);
};
} // namespace leak_detector
} // namespace metrics
#endif // COMPONENTS_METRICS_LEAK_DETECTOR_LEAK_ANALYZER_H_
| 1,012 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.