max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
4,036
<reponame>vadi2/codeql // Generated from QL.g4 by ANTLR 4.4 @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) public class QLParser { }
65
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.editor.lib2.highlighting; /** * Visual mark encapsulates y-coordinate offset together with an object * that provides an offset (assumed that it's tracked as a SWing position). * <br> * The y-coordinate is tracked as a raw value that must first be preprocessed * by {@link VisualMarkVector}. * * @author <NAME> */ public abstract class VisualMark { private double rawY; private final VisualMarkVector<?> markVector; protected VisualMark(VisualMarkVector<?> markVector) { this.markVector = markVector; } /** * Get offset of this visual mark. * <br> * It's assumed that the offset is tracked as a Swing position. * * @return &gt;=0 offset of this mark. */ public abstract int getOffset(); /** * Get y-coordinate offset of this mark. * * @return y of this mark. */ public final double getY() { return markVector.raw2Y(rawY); } protected final VisualMarkVector<?> markVector() { return markVector; } double rawY() { return rawY; } void setRawY(double rawY) { this.rawY = rawY; } }
671
1,729
# coding:utf-8 import numpy as np from sklearn.datasets import make_s_curve import matplotlib.pyplot as plt from sklearn.manifold import LocallyLinearEmbedding from mpl_toolkits.mplot3d import Axes3D ''' author: heucoder email: <EMAIL> date: 2019.6.13 ''' def make_swiss_roll(n_samples=100, noise=0.0, random_state=None): #Generate a swiss roll dataset. t = 1.5 * np.pi * (1 + 2 * np.random.rand(1, n_samples)) x = t * np.cos(t) y = 83 * np.random.rand(1, n_samples) z = t * np.sin(t) X = np.concatenate((x, y, z)) X += noise * np.random.randn(3, n_samples) X = X.T t = np.squeeze(t) return X, t def cal_pairwise_dist(x): '''计算pairwise 距离, x是matrix (a-b)^2 = a^2 + b^2 - 2*a*b ''' sum_x = np.sum(np.square(x), 1) dist = np.add(np.add(-2 * np.dot(x, x.T), sum_x).T, sum_x) #返回任意两个点之间距离的平方 return dist def get_n_neighbors(data, n_neighbors = 10): ''' :param data: (n_samples, n_features) :param n_neighbors: n nearest neighbors :return: neighbors indexs ''' dist = cal_pairwise_dist(data) dist[dist < 0] = 0 dist = dist**0.5 n = dist.shape[0] N = np.zeros((n, n_neighbors)) for i in range(n): index_ = np.argsort(dist[i])[1:n_neighbors+1] N[i] = N[i] + index_ return N.astype(np.int32) def lle(data, n_dims = 2, n_neighbors = 10): ''' :param data:(n_samples, n_features) :param n_dims: target n_dims :param n_neighbors: n nearest neighbors :return: (n_samples, n_dims) ''' N = get_n_neighbors(data, n_neighbors) n, D = data.shape # prevent Si to small if n_neighbors > D: tol = 1e-3 else: tol = 0 # calculate W W = np.zeros((n_neighbors, n)) I = np.ones((n_neighbors, 1)) for i in range(n): Xi = np.tile(data[i], (n_neighbors, 1)).T Ni = data[N[i]].T Si = np.dot((Xi-Ni).T, (Xi-Ni)) # magic and why???? 
Si = Si+np.eye(n_neighbors)*tol*np.trace(Si) Si_inv = np.linalg.pinv(Si) wi = (np.dot(Si_inv, I))/(np.dot(np.dot(I.T, Si_inv), I)[0,0]) W[:, i] = wi[:,0] print("Xi.shape", Xi.shape) print("Ni.shape", Ni.shape) print("Si.shape", Si.shape) W_y = np.zeros((n, n)) for i in range(n): index = N[i] for j in range(n_neighbors): W_y[index[j],i] = W[j,i] I_y = np.eye(n) M = np.dot((I_y - W_y), (I_y - W_y).T) eig_val, eig_vector = np.linalg.eig(M) index_ = np.argsort(np.abs(eig_val))[1:n_dims+1] print("index_", index_) Y = eig_vector[:, index_] return Y if __name__ == '__main__': # X, Y = make_s_curve(n_samples = 500, # noise = 0.1, # random_state = 42) X, Y = make_swiss_roll(n_samples = 500, noise=0.1, random_state=42) data_1 =lle(X, n_neighbors = 30) print(data_1.shape) data_2 = LocallyLinearEmbedding(n_components=2, n_neighbors = 30).fit_transform(X) plt.figure(figsize=(8,4)) plt.subplot(121) plt.title("my_LLE") plt.scatter(data_1[:, 0], data_1[:, 1], c = Y) plt.subplot(122) plt.title("sklearn_LLE") plt.scatter(data_2[:, 0], data_2[:, 1], c = Y) plt.savefig("LLE.png") plt.show()
1,752
931
def stairCaseSearch(matrix , key): n , m = len(matrix) , len(matrix[0]) i , j = 0 , m - 1 isFound = False while(i <= n - 1 and j >= 0): if(matrix[i][j] == key): isFound = True print("Found at : " , i , j) break elif(matrix[i][j] > key): j -= 1 else: i += 1 if(not(isFound)): print("Key not found") n = int(input("Enter the no. of rows in the matrix : ")) m = int(input("ENter the no. of columns in the matrix : ")) matrix = [] print("Enter the elements of the Row and Column sorted matrix : ") for i in range(n): rows = [int(j) for j in input().split()] matrix.append(rows) key = int(input("Enter the key to search : ")) stairCaseSearch(matrix , key)
282
14,668
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "net/base/network_interfaces_getifaddrs.h" #include <string> #include "build/build_config.h" #include "net/base/ip_endpoint.h" #include "testing/gtest/include/gtest/gtest.h" #include <ifaddrs.h> #include <net/if.h> #include <netinet/in.h> namespace net { namespace { class IPAttributesGetterTest : public internal::IPAttributesGetter { public: IPAttributesGetterTest() {} // internal::IPAttributesGetter interface. bool IsInitialized() const override { return true; } bool GetAddressAttributes(const ifaddrs* if_addr, int* attributes) override { *attributes = attributes_; return true; } NetworkChangeNotifier::ConnectionType GetNetworkInterfaceType( const ifaddrs* if_addr) override { return NetworkChangeNotifier::CONNECTION_UNKNOWN; } void set_attributes(int attributes) { attributes_ = attributes; } private: int attributes_ = 0; }; // Helper function to create a single valid ifaddrs bool FillIfaddrs(ifaddrs* interfaces, const char* ifname, uint flags, const IPAddress& ip_address, const IPAddress& ip_netmask, sockaddr_storage sock_addrs[2]) { interfaces->ifa_next = nullptr; interfaces->ifa_name = const_cast<char*>(ifname); interfaces->ifa_flags = flags; socklen_t sock_len = sizeof(sockaddr_storage); // Convert to sockaddr for next check. 
if (!IPEndPoint(ip_address, 0) .ToSockAddr(reinterpret_cast<sockaddr*>(&sock_addrs[0]), &sock_len)) { return false; } interfaces->ifa_addr = reinterpret_cast<sockaddr*>(&sock_addrs[0]); sock_len = sizeof(sockaddr_storage); if (!IPEndPoint(ip_netmask, 0) .ToSockAddr(reinterpret_cast<sockaddr*>(&sock_addrs[1]), &sock_len)) { return false; } interfaces->ifa_netmask = reinterpret_cast<sockaddr*>(&sock_addrs[1]); return true; } static const char kIfnameEm1[] = "em1"; static const char kIfnameVmnet[] = "vmnet"; static const unsigned char kIPv6LocalAddr[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}; // The following 3 addresses need to be changed together. IPv6Addr is the IPv6 // address. IPv6Netmask is the mask address with as many leading bits set to 1 // as the prefix length. IPv6AddrPrefix needs to match IPv6Addr with the same // number of bits as the prefix length. static const unsigned char kIPv6Addr[] = {0x24, 0x01, 0xfa, 0x00, 0x00, 0x04, 0x10, 0x00, 0xbe, 0x30, 0x5b, 0xff, 0xfe, 0xe5, 0x00, 0xc3}; static const unsigned char kIPv6Netmask[] = {0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; TEST(NetworkInterfacesTest, IfaddrsToNetworkInterfaceList) { IPAddress ipv6_local_address(kIPv6LocalAddr); IPAddress ipv6_address(kIPv6Addr); IPAddress ipv6_netmask(kIPv6Netmask); NetworkInterfaceList results; IPAttributesGetterTest ip_attributes_getter; sockaddr_storage addresses[2]; ifaddrs interface; // Address of offline links should be ignored. ASSERT_TRUE(FillIfaddrs(&interface, kIfnameEm1, IFF_UP, ipv6_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( INCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 0ul); // Local address should be trimmed out. 
ASSERT_TRUE(FillIfaddrs(&interface, kIfnameEm1, IFF_RUNNING, ipv6_local_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( INCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 0ul); // vmware address should return by default. ASSERT_TRUE(FillIfaddrs(&interface, kIfnameVmnet, IFF_RUNNING, ipv6_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( INCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 1ul); EXPECT_EQ(results[0].name, kIfnameVmnet); EXPECT_EQ(results[0].prefix_length, 1ul); EXPECT_EQ(results[0].address, ipv6_address); results.clear(); // vmware address should be trimmed out if policy specified so. ASSERT_TRUE(FillIfaddrs(&interface, kIfnameVmnet, IFF_RUNNING, ipv6_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( EXCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 0ul); results.clear(); // Addresses with banned attributes should be ignored. ip_attributes_getter.set_attributes(IP_ADDRESS_ATTRIBUTE_ANYCAST); ASSERT_TRUE(FillIfaddrs(&interface, kIfnameEm1, IFF_RUNNING, ipv6_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( INCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 0ul); results.clear(); // Addresses with allowed attribute IFA_F_TEMPORARY should be returned and // attributes should be translated correctly. 
ip_attributes_getter.set_attributes(IP_ADDRESS_ATTRIBUTE_TEMPORARY); ASSERT_TRUE(FillIfaddrs(&interface, kIfnameEm1, IFF_RUNNING, ipv6_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( INCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 1ul); EXPECT_EQ(results[0].name, kIfnameEm1); EXPECT_EQ(results[0].prefix_length, 1ul); EXPECT_EQ(results[0].address, ipv6_address); EXPECT_EQ(results[0].ip_address_attributes, IP_ADDRESS_ATTRIBUTE_TEMPORARY); results.clear(); // Addresses with allowed attribute IFA_F_DEPRECATED should be returned and // attributes should be translated correctly. ip_attributes_getter.set_attributes(IP_ADDRESS_ATTRIBUTE_DEPRECATED); ASSERT_TRUE(FillIfaddrs(&interface, kIfnameEm1, IFF_RUNNING, ipv6_address, ipv6_netmask, addresses)); EXPECT_TRUE(internal::IfaddrsToNetworkInterfaceList( INCLUDE_HOST_SCOPE_VIRTUAL_INTERFACES, &interface, &ip_attributes_getter, &results)); EXPECT_EQ(results.size(), 1ul); EXPECT_EQ(results[0].name, kIfnameEm1); EXPECT_EQ(results[0].prefix_length, 1ul); EXPECT_EQ(results[0].address, ipv6_address); EXPECT_EQ(results[0].ip_address_attributes, IP_ADDRESS_ATTRIBUTE_DEPRECATED); results.clear(); } } // namespace } // namespace net
3,010
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_sd.hxx" #include "cache/SlsPageCacheManager.hxx" #include "SlsBitmapCache.hxx" #include "view/SlideSorterView.hxx" #include "model/SlideSorterModel.hxx" #include <deque> #include <map> #include <boost/weak_ptr.hpp> namespace { /** Collection of data that is stored for all active preview caches. */ class CacheDescriptor { public: ::sd::slidesorter::cache::PageCacheManager::DocumentKey mpDocument; Size maPreviewSize; CacheDescriptor( ::sd::slidesorter::cache::PageCacheManager::DocumentKey pDocument, const Size& rPreviewSize) :mpDocument(pDocument),maPreviewSize(rPreviewSize) {} /// Test for equality with respect to all members. class Equal {public: bool operator() ( const CacheDescriptor& rDescriptor1, const CacheDescriptor& rDescriptor2) const { return rDescriptor1.mpDocument==rDescriptor2.mpDocument && rDescriptor1.maPreviewSize==rDescriptor2.maPreviewSize; } }; /// Hash function that takes all members into account. 
class Hash {public: size_t operator() (const CacheDescriptor& rDescriptor) const { return (size_t)rDescriptor.mpDocument.get() + rDescriptor.maPreviewSize.Width(); } }; }; /** Collection of data that is stored for the inactive, recently used caches. */ class RecentlyUsedCacheDescriptor { public: ::sd::slidesorter::cache::PageCacheManager::DocumentKey mpDocument; Size maPreviewSize; ::boost::shared_ptr< ::sd::slidesorter::cache::PageCacheManager::Cache> mpCache; RecentlyUsedCacheDescriptor( ::sd::slidesorter::cache::PageCacheManager::DocumentKey pDocument, const Size& rPreviewSize, const ::boost::shared_ptr< ::sd::slidesorter::cache::PageCacheManager::Cache>& rpCache) :mpDocument(pDocument),maPreviewSize(rPreviewSize),mpCache(rpCache) {} }; /** The list of recently used caches is organized as queue. When elements are added the list is shortened to the maximally allowed number of elements by removing the least recently used elements. */ typedef ::std::deque<RecentlyUsedCacheDescriptor> RecentlyUsedQueue; /** Compare the caches by preview size. Those that match the given size come first, then, regardless of the given size, the largest ones before the smaller ones. */ class BestFittingCacheComparer { public: BestFittingCacheComparer (const Size& rPreferredSize) : maPreferredSize(rPreferredSize) {} bool operator()(const ::sd::slidesorter::cache::PageCacheManager::BestFittingPageCaches::value_type& rElement1, const ::sd::slidesorter::cache::PageCacheManager::BestFittingPageCaches::value_type& rElement2) { if (rElement1.first == maPreferredSize) return true; else if (rElement2.first == maPreferredSize) return false; else return (rElement1.first.Width()*rElement1.first.Height() > rElement2.first.Width()*rElement2.first.Height()); } private: Size maPreferredSize; }; } // end of anonymous namespace namespace sd { namespace slidesorter { namespace cache { /** Container for the active caches. 
*/ class PageCacheManager::PageCacheContainer : public ::std::hash_map<CacheDescriptor, ::boost::shared_ptr<PageCacheManager::Cache>, CacheDescriptor::Hash, CacheDescriptor::Equal> { public: PageCacheContainer (void) {} /** Compare entries in the cache container with respect to the cache address only. */ class CompareWithCache { public: CompareWithCache(const ::boost::shared_ptr<PageCacheManager::Cache>& rpCache) : mpCache(rpCache) {} bool operator () (const PageCacheContainer::value_type& rValue) { return rValue.second == mpCache; } private: ::boost::shared_ptr<PageCacheManager::Cache> mpCache; }; }; /** The recently used caches are stored in one queue for each document. */ class PageCacheManager::RecentlyUsedPageCaches : public ::std::map<DocumentKey,RecentlyUsedQueue> { public: RecentlyUsedPageCaches (void) {}; }; class PageCacheManager::Deleter { public: void operator() (PageCacheManager* pObject) { delete pObject; } }; //===== PageCacheManager ==================================================== ::boost::weak_ptr<PageCacheManager> PageCacheManager::mpInstance; ::boost::shared_ptr<PageCacheManager> PageCacheManager::Instance (void) { ::boost::shared_ptr<PageCacheManager> pInstance; ::osl::MutexGuard aGuard (::osl::Mutex::getGlobalMutex()); pInstance = mpInstance.lock(); if (pInstance.get() == NULL) { pInstance = ::boost::shared_ptr<PageCacheManager>( new PageCacheManager(), PageCacheManager::Deleter()); mpInstance = pInstance; } return pInstance; } PageCacheManager::PageCacheManager (void) : mpPageCaches(new PageCacheContainer()), mpRecentlyUsedPageCaches(new RecentlyUsedPageCaches()), mnMaximalRecentlyCacheCount(2) { } PageCacheManager::~PageCacheManager (void) { } ::boost::shared_ptr<PageCacheManager::Cache> PageCacheManager::GetCache ( DocumentKey pDocument, const Size& rPreviewSize) { ::boost::shared_ptr<Cache> pResult; // Look for the cache in the list of active caches. 
CacheDescriptor aKey (pDocument, rPreviewSize); PageCacheContainer::iterator iCache (mpPageCaches->find(aKey)); if (iCache != mpPageCaches->end()) pResult = iCache->second; // Look for the cache in the list of recently used caches. if (pResult.get() == NULL) pResult = GetRecentlyUsedCache(pDocument, rPreviewSize); // Create the cache when no suitable one does exist. if (pResult.get() == NULL) pResult.reset(new Cache()); // The cache may be newly created and thus empty or is old and may // contain previews that are not up-to-date. Recycle previews from // other caches to fill in the holes. Recycle(pResult, pDocument,rPreviewSize); // Put the new (or old) cache into the container. if (pResult.get() != NULL) mpPageCaches->insert(PageCacheContainer::value_type(aKey, pResult)); return pResult; } void PageCacheManager::Recycle ( const ::boost::shared_ptr<Cache>& rpCache, DocumentKey pDocument, const Size& rPreviewSize) { BestFittingPageCaches aCaches; // Add bitmap caches from active caches. PageCacheContainer::iterator iActiveCache; for (iActiveCache=mpPageCaches->begin(); iActiveCache!=mpPageCaches->end(); ++iActiveCache) { if (iActiveCache->first.mpDocument == pDocument) aCaches.push_back(BestFittingPageCaches::value_type( iActiveCache->first.maPreviewSize, iActiveCache->second)); } // Add bitmap caches from recently used caches. 
RecentlyUsedPageCaches::iterator iQueue (mpRecentlyUsedPageCaches->find(pDocument)); if (iQueue != mpRecentlyUsedPageCaches->end()) { RecentlyUsedQueue::const_iterator iRecentCache; for (iRecentCache=iQueue->second.begin();iRecentCache!=iQueue->second.end();++iRecentCache) aCaches.push_back(BestFittingPageCaches::value_type( iRecentCache->maPreviewSize, iRecentCache->mpCache)); } ::std::sort(aCaches.begin(), aCaches.end(), BestFittingCacheComparer(rPreviewSize)); BestFittingPageCaches::const_iterator iBestCache; for (iBestCache=aCaches.begin(); iBestCache!=aCaches.end(); ++iBestCache) { rpCache->Recycle(*iBestCache->second); } } void PageCacheManager::ReleaseCache (const ::boost::shared_ptr<Cache>& rpCache) { PageCacheContainer::iterator iCache (::std::find_if( mpPageCaches->begin(), mpPageCaches->end(), PageCacheContainer::CompareWithCache(rpCache))); if (iCache != mpPageCaches->end()) { OSL_ASSERT(iCache->second == rpCache); PutRecentlyUsedCache(iCache->first.mpDocument,iCache->first.maPreviewSize,rpCache); mpPageCaches->erase(iCache); } } ::boost::shared_ptr<PageCacheManager::Cache> PageCacheManager::ChangeSize ( const ::boost::shared_ptr<Cache>& rpCache, const Size& rOldPreviewSize, const Size& rNewPreviewSize) { (void)rOldPreviewSize; ::boost::shared_ptr<Cache> pResult; if (rpCache.get() != NULL) { // Look up the given cache in the list of active caches. PageCacheContainer::iterator iCacheToChange (::std::find_if( mpPageCaches->begin(), mpPageCaches->end(), PageCacheContainer::CompareWithCache(rpCache))); if (iCacheToChange != mpPageCaches->end()) { OSL_ASSERT(iCacheToChange->second == rpCache); // Now, we can change the preview size of the existing one by // removing the cache from the list and re-insert it with the // updated size. 
const ::sd::slidesorter::cache::PageCacheManager::DocumentKey aKey ( iCacheToChange->first.mpDocument); mpPageCaches->erase(iCacheToChange); mpPageCaches->insert(PageCacheContainer::value_type( CacheDescriptor(aKey,rNewPreviewSize), rpCache)); pResult = rpCache; } else { OSL_ASSERT(iCacheToChange != mpPageCaches->end()); } } return pResult; } bool PageCacheManager::InvalidatePreviewBitmap ( DocumentKey pDocument, const SdrPage* pKey) { bool bHasChanged (false); if (pDocument!=NULL) { // Iterate over all caches that are currently in use and invalidate // the previews in those that belong to the document. PageCacheContainer::iterator iCache; for (iCache=mpPageCaches->begin(); iCache!=mpPageCaches->end(); ++iCache) if (iCache->first.mpDocument == pDocument) bHasChanged |= iCache->second->InvalidateBitmap(pKey); // Invalidate the previews in the recently used caches belonging to // the given document. RecentlyUsedPageCaches::iterator iQueue (mpRecentlyUsedPageCaches->find(pDocument)); if (iQueue != mpRecentlyUsedPageCaches->end()) { RecentlyUsedQueue::const_iterator iCache2; for (iCache2=iQueue->second.begin(); iCache2!=iQueue->second.end(); ++iCache2) bHasChanged |= iCache2->mpCache->InvalidateBitmap(pKey); } } return bHasChanged; } void PageCacheManager::InvalidateAllPreviewBitmaps (DocumentKey pDocument) { if (pDocument == NULL) return; // Iterate over all caches that are currently in use and invalidate the // previews in those that belong to the document. PageCacheContainer::iterator iCache; for (iCache=mpPageCaches->begin(); iCache!=mpPageCaches->end(); ++iCache) if (iCache->first.mpDocument == pDocument) iCache->second->InvalidateCache(); // Invalidate the previews in the recently used caches belonging to the // given document. 
RecentlyUsedPageCaches::iterator iQueue (mpRecentlyUsedPageCaches->find(pDocument)); if (iQueue != mpRecentlyUsedPageCaches->end()) { RecentlyUsedQueue::const_iterator iCache2; for (iCache2=iQueue->second.begin(); iCache2!=iQueue->second.end(); ++iCache2) iCache2->mpCache->InvalidateCache(); } } void PageCacheManager::InvalidateAllCaches (void) { // Iterate over all caches that are currently in use and invalidate // them. PageCacheContainer::iterator iCache; for (iCache=mpPageCaches->begin(); iCache!=mpPageCaches->end(); ++iCache) iCache->second->InvalidateCache(); // Remove all recently used caches, there is not much sense in storing // invalidated and unused caches. mpRecentlyUsedPageCaches->clear(); } void PageCacheManager::ReleasePreviewBitmap (const SdrPage* pPage) { PageCacheContainer::iterator iCache; for (iCache=mpPageCaches->begin(); iCache!=mpPageCaches->end(); ++iCache) iCache->second->ReleaseBitmap(pPage); } ::boost::shared_ptr<PageCacheManager::Cache> PageCacheManager::GetRecentlyUsedCache ( DocumentKey pDocument, const Size& rPreviewSize) { ::boost::shared_ptr<Cache> pCache; // Look for the cache in the list of recently used caches. RecentlyUsedPageCaches::iterator iQueue (mpRecentlyUsedPageCaches->find(pDocument)); if (iQueue != mpRecentlyUsedPageCaches->end()) { RecentlyUsedQueue::iterator iCache; for (iCache=iQueue->second.begin(); iCache!= iQueue->second.end(); ++iCache) if (iCache->maPreviewSize == rPreviewSize) { pCache = iCache->mpCache; iQueue->second.erase(iCache); break; } } return pCache; } void PageCacheManager::PutRecentlyUsedCache( DocumentKey pDocument, const Size& rPreviewSize, const ::boost::shared_ptr<Cache>& rpCache) { // Look up the list of recently used caches for the given document. 
RecentlyUsedPageCaches::iterator iQueue (mpRecentlyUsedPageCaches->find(pDocument)); if (iQueue == mpRecentlyUsedPageCaches->end()) iQueue = mpRecentlyUsedPageCaches->insert( RecentlyUsedPageCaches::value_type(pDocument, RecentlyUsedQueue()) ).first; if (iQueue != mpRecentlyUsedPageCaches->end()) { iQueue->second.push_front(RecentlyUsedCacheDescriptor(pDocument,rPreviewSize,rpCache)); // Shorten the list of recently used caches to the allowed maximal length. while (iQueue->second.size() > mnMaximalRecentlyCacheCount) iQueue->second.pop_back(); } } } } } // end of namespace ::sd::slidesorter::cache
5,538
6,034
package cn.iocoder.mall.payservice.dal.mysql.dataobject.transaction;

import cn.iocoder.mall.mybatis.core.dataobject.DeletableDO;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;

import java.util.Date;

/**
 * Payment transaction table.
 */
@TableName("pay_transaction")
@Data
@EqualsAndHashCode(callSuper = true)
@Accessors(chain = true)
public class PayTransactionDO extends DeletableDO {

    /**
     * ID, auto-increment.
     */
    @TableId
    private Integer id;
    /**
     * User ID.
     */
    private Integer userId;
    /**
     * Application ID.
     */
    private String appId;
    /**
     * IP from which the transaction was initiated.
     */
    private String createIp;
    /**
     * Order ID in the business line.
     */
    private String orderId;
    /**
     * Product name of the order.
     */
    private String orderSubject;
    /**
     * Product description of the order.
     */
    private String orderDescription;
    /**
     * Order memo.
     */
    private String orderMemo;
    /**
     * Payment amount, unit: fen (1/100 yuan).
     */
    private Integer price;
    /**
     * Order status.
     */
    private Integer status;
    /**
     * Expiration time of the transaction.
     */
    private Date expireTime;
    /**
     * Time when the callback to the business line completed.
     */
    private Date finishTime;
    /**
     * Asynchronous notification URL.
     */
    private String notifyUrl;
    /**
     * Extension ID of the successfully paid transaction.
     */
    private Integer extensionId;
    /**
     * Payment channel through which payment succeeded.
     */
    private Integer payChannel;
    /**
     * Time when the third-party payment succeeded.
     */
    private Date paymentTime;
    /**
     * Time when the notification from the third-party system was received.
     */
    private Date notifyTime;
    /**
     * Serial number of the third party.
     */
    private String tradeNo;

    // ========== Refund related ==========

    /**
     * Total refund amount.
     */
    private Integer refundTotal;

}
1,026
748
<filename>internal/utf8scannot_lettermarkspecial.h<gh_stars>100-1000 // Copyright 2013 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // Created by utf8tablebuilder version 2.9 // // Rejects all codes from file: // lettermarkspecial_6.2.0.txt // Accepts all other UTF-8 codes 0000..10FFFF // Space optimized // // ** ASSUMES INPUT IS STRUCTURALLY VALID UTF-8 ** // // Table entries are absolute statetable subscripts #ifndef UTF8SCANNOT_LETTERMARKSPECIAL_H__ #define UTF8SCANNOT_LETTERMARKSPECIAL_H__ #include "integral_types.h" #include "utf8statetable.h" namespace CLD2 { #define X__ (kExitIllegalStructure) #define RJ_ (kExitReject) #define S1_ (kExitReplace1) #define S2_ (kExitReplace2) #define S3_ (kExitReplace3) #define S21 (kExitReplace21) #define S31 (kExitReplace31) #define S32 (kExitReplace32) #define T1_ (kExitReplaceOffset1) #define T2_ (kExitReplaceOffset2) #define S11 (kExitReplace1S0) #define SP_ (kExitSpecial) #define D__ (kExitDoAgain) #define RJA (kExitRejectAlt) // Entire table has 221 state blocks of 64 entries each static const unsigned int utf8scannot_lettermarkspecial_STATE0 = 0; // state[0] static const unsigned int utf8scannot_lettermarkspecial_STATE0_SIZE = 64; // =[1] static const unsigned int utf8scannot_lettermarkspecial_TOTAL_SIZE = 14144; static const unsigned int utf8scannot_lettermarkspecial_MAX_EXPAND_X4 = 0; static const unsigned int utf8scannot_lettermarkspecial_SHIFT = 6; static const unsigned int 
utf8scannot_lettermarkspecial_BYTES = 1; static const unsigned int utf8scannot_lettermarkspecial_LOSUB = 0x27272727; static const unsigned int utf8scannot_lettermarkspecial_HIADD = 0x44444444; static const uint8 utf8scannot_lettermarkspecial[] = { // state[0] 0x000000 Byte 1 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__, 6, 7, 8, 8, 8, 8, 8, 8, 8, 9, 8, 10, 11, 12, 8, 8, 13, 8, 14, 15, 16, 17, 18, 19, 8, 20, 21, 22, 23, 24, 25, 57, 95,110,117,118,118,118, 118,119,121,118,118,140, 2,143, 159, 4, 4,216, 5,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, // state[2 + 2] 0x00e000 Byte 2 of 3 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[3 + 2] 0x001ac0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[4 + 2] 0x040000 Byte 2 of 4 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // state[5 + 2] 0x100000 Byte 2 of 4 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, // state[6 + 2] 0x000080 Byte 2 of 2 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, // state[7 + 2] 0x0000c0 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[8 + 2] 0x000100 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[9 + 2] 0x0002c0 Byte 2 of 2 RJ_,RJ_, 0, 0, 0, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[10 + 2] 0x000340 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, // state[11 + 2] 0x000380 Byte 2 of 2 0, 0, 0, 0, 0, 0,RJ_, 0, RJ_,RJ_,RJ_, 0,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[12 + 2] 0x0003c0 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[13 + 2] 0x000480 Byte 2 of 2 RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[14 + 2] 0x000500 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[15 + 2] 0x000540 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[16 + 2] 0x000580 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, // state[17 + 2] 0x0005c0 Byte 2 of 2 0,RJ_,RJ_, 0,RJ_,RJ_, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[18 + 2] 0x000600 Byte 2 of 2 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[19 + 2] 0x000640 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[20 + 2] 0x0006c0 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, 0, 0,RJ_, // state[21 + 2] 0x000700 Byte 2 of 2 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[22 + 2] 0x000740 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[23 + 2] 0x000780 Byte 2 of 2 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[24 + 2] 0x0007c0 Byte 2 of 2 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, // state[25 + 2] 0x000000 Byte 2 of 3 X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, 26, 27, 28, 29, 8, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, // state[26 + 2] 0x000800 Byte 3 of 3 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[27 + 2] 0x000840 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[28 + 2] 0x000880 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[29 + 2] 0x0008c0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, // state[30 + 2] 0x000940 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[31 + 2] 0x000980 Byte 3 of 3 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_, 0, 0, 0,RJ_,RJ_, RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, // state[32 + 2] 0x0009c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0,RJ_,RJ_, 0,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[33 + 2] 0x000a00 Byte 3 of 3 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_, 0,RJ_,RJ_, 0, RJ_,RJ_, 0, 0,RJ_, 0,RJ_,RJ_, // state[34 + 2] 0x000a40 Byte 3 of 3 RJ_,RJ_,RJ_, 0, 0, 0, 0,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[35 + 2] 0x000a80 Byte 3 of 3 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, // state[36 + 2] 0x000ac0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_, 0, 0, RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[37 + 2] 0x000b00 Byte 3 of 3 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, // state[38 + 2] 0x000b40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0,RJ_,RJ_, 0,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[39 + 2] 0x000b80 Byte 3 of 3 0, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_, 0,RJ_, 0,RJ_,RJ_, 0, 0, 0,RJ_,RJ_, 0, 0, 0, RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0,RJ_,RJ_, // state[40 + 2] 0x000bc0 Byte 3 of 3 RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[41 + 2] 
0x000c00 Byte 3 of 3 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0,RJ_,RJ_,RJ_, // state[42 + 2] 0x000c40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, 0, RJ_,RJ_, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[43 + 2] 0x000c80 Byte 3 of 3 0, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, // state[44 + 2] 0x000cc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[45 + 2] 0x000d00 Byte 3 of 3 0, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_, // state[46 + 2] 0x000d40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[47 + 2] 0x000d80 Byte 3 of 3 0, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, 0, // state[48 + 2] 0x000dc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_, 0, 
0, 0, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[49 + 2] 0x000e00 Byte 3 of 3 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, // state[50 + 2] 0x000e40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[51 + 2] 0x000e80 Byte 3 of 3 0,RJ_,RJ_, 0,RJ_, 0, 0,RJ_, RJ_, 0,RJ_, 0, 0,RJ_, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, 0,RJ_, 0,RJ_, 0, 0,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_, 0, 0, // state[52 + 2] 0x000ec0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[53 + 2] 0x000f00 Byte 3 of 3 RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0,RJ_, 0,RJ_, 0, 0, 0, 0,RJ_,RJ_, // state[54 + 2] 0x000f40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[55 + 2] 0x000f80 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, // state[56 + 2] 0x000fc0 Byte 3 of 3 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[57 + 2] 0x001000 Byte 2 of 3 8, 21, 58, 59, 8, 8, 8, 8, 8, 60, 61, 62, 63, 64, 65, 66, 67, 8, 8, 8, 8, 8, 8, 8, 8, 68, 69, 70, 71, 72, 8, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 3, 8, 85, 86, 87, 75, 88, 3, 89, 8, 8, 8, 90, 8, 8, 8, 8, 91, 92, 93, 94, // state[58 + 2] 0x001080 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[59 + 2] 0x0010c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, 0, 0, 0, 0,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, // state[60 + 2] 0x001240 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[61 + 2] 0x001280 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, // state[62 + 2] 0x0012c0 Byte 3 of 3 RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[63 + 2] 0x001300 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[64 + 2] 0x001340 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[65 + 2] 0x001380 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[66 + 2] 0x0013c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[67 + 2] 0x001400 Byte 3 of 3 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[68 + 2] 0x001640 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[69 + 2] 0x001680 Byte 3 of 3 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[70 + 2] 0x0016c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[71 + 2] 0x001700 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[72 + 2] 0x001740 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[73 + 2] 0x0017c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[74 + 2] 0x001800 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[75 + 2] 0x001840 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, // state[76 + 2] 0x001880 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[77 + 2] 0x0018c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[78 + 2] 0x001900 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, // state[79 + 2] 0x001940 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[80 + 2] 0x001980 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[81 + 2] 0x0019c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[82 + 2] 0x001a00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[83 + 2] 0x001a40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_, // state[84 + 2] 0x001a80 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[85 + 2] 0x001b40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[86 + 2] 0x001b80 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[87 + 2] 0x001bc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[88 + 2] 0x001c40 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, // state[89 + 2] 0x001cc0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[90 + 2] 0x001dc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, // state[91 + 2] 0x001f00 Byte 3 of 3 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[92 + 2] 0x001f40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0,RJ_, 0,RJ_, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, // state[93 + 2] 0x001f80 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, // state[94 + 2] 0x001fc0 Byte 3 of 3 0, 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, // state[95 + 2] 0x002000 Byte 2 of 3 3, 96, 97, 98, 99,100,101, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 102,103, 8,104,105,106,107,108, 109, 3, 3, 3, 3, 3, 3, 3, // state[96 + 2] 0x002040 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, // state[97 + 2] 0x002080 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[98 + 2] 0x0020c0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[99 + 2] 0x002100 Byte 3 of 3 0, 0,RJ_, 0, 0, 0, 0,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0,RJ_, 0,RJ_, 0, RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, // state[100 + 2] 0x002140 Byte 3 of 3 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[101 + 2] 0x002180 Byte 3 of 3 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[102 + 2] 0x002c00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[103 + 2] 0x002c40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[104 + 2] 0x002cc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[105 + 2] 0x002d00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, 0, 0, 0, 
0,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[106 + 2] 0x002d40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, // state[107 + 2] 0x002d80 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, // state[108 + 2] 0x002dc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[109 + 2] 0x002e00 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[110 + 2] 0x003000 Byte 2 of 3 111, 67,112,113,114, 8,115,116, 3, 3, 3, 3, 3, 3, 3, 3, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // state[111 + 2] 0x003000 Byte 3 of 3 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, // state[112 + 2] 0x003080 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[113 + 2] 0x0030c0 Byte 3 of 
3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, // state[114 + 2] 0x003100 Byte 3 of 3 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[115 + 2] 0x003180 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, // state[116 + 2] 0x0031c0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[117 + 2] 0x004000 Byte 2 of 3 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 77, 3, 8, 8, 8, 8, 8, 8, 8, 8, // state[118 + 2] 0x005000 Byte 2 of 3 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // state[119 + 2] 0x009000 Byte 2 of 3 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,120, // state[120 + 2] 0x009fc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, // state[121 + 2] 0x00a000 Byte 2 of 3 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,120,122, 8, 8, 8, 8, 123,124,125,126,127, 8,128,129, 130, 87, 8,131,132,133, 8,134, 135,136, 8,137,138, 3, 3,139, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // state[122 + 2] 0x00a4c0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, // state[123 + 2] 0x00a600 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[124 + 2] 0x00a640 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, // state[125 + 2] 0x00a680 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[126 + 2] 0x00a6c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[127 + 2] 0x00a700 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[128 + 2] 
0x00a780 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[129 + 2] 0x00a7c0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[130 + 2] 0x00a800 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[131 + 2] 0x00a8c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_, 0, 0, 0, 0, // state[132 + 2] 0x00a900 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[133 + 2] 0x00a940 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, // state[134 + 2] 0x00a9c0 Byte 3 of 3 RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[135 + 2] 0x00aa00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[136 + 2] 0x00aa40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0,RJ_,RJ_, 0, 0, 0, 0, // state[137 + 2] 0x00aac0 Byte 3 of 3 RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[138 + 2] 0x00ab00 Byte 3 of 3 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[139 + 2] 0x00abc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[140 + 2] 0x00d000 Byte 2 of 3 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,141,142, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[141 + 2] 0x00d780 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[142 + 2] 0x00d7c0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, // state[143 + 2] 0x00f000 Byte 2 of 3 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 8, 8, 8, 8, 8,144, 8,145,146,147, 23,148, 8, 8, 8, 8,149, 21,150,151, 152,153, 8,154,155,156,157,158, // state[144 + 2] 0x00fa40 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[145 + 2] 0x00fac0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[146 + 2] 0x00fb00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, // state[147 + 2] 0x00fb40 Byte 3 of 3 RJ_,RJ_, 0,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[148 + 2] 0x00fbc0 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[149 + 2] 0x00fd00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 
RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, // state[150 + 2] 0x00fd80 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[151 + 2] 0x00fdc0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, // state[152 + 2] 0x00fe00 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[153 + 2] 0x00fe40 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[154 + 2] 0x00fec0 Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, // state[155 + 2] 0x00ff00 Byte 3 of 3 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, // state[156 + 2] 0x00ff40 Byte 3 of 3 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[157 + 2] 0x00ff80 
Byte 3 of 3 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, // state[158 + 2] 0x00ffc0 Byte 3 of 3 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[159 + 2] 0x000000 Byte 2 of 4 X__,X__,X__,X__,X__,X__,X__,X__, X__,X__,X__,X__,X__,X__,X__,X__, 160,180,184,186, 2, 2,187, 2, 2, 2, 2,191, 2,193,208, 2, 118,118,118,118,118,118,118,118, 118,118,212,214, 2, 2, 2,215, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // state[160 + 2] 0x010000 Byte 3 of 4 161,162, 8,163, 3, 3, 3,164, 3, 3,165,166,167,168,169,170, 8, 8,171, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 172,173, 3, 3,174, 3,175, 3, 176,177, 3, 3, 77,178, 3, 3, 8,179, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[161 + 2] 0x010000 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0,RJ_, // state[162 + 2] 0x010040 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[163 + 2] 0x0100c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, // state[164 + 2] 0x0101c0 Byte 4 of 4 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, 0, 0, // state[165 + 2] 0x010280 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[166 + 2] 0x0102c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[167 + 2] 0x010300 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[168 + 2] 0x010340 Byte 4 of 4 RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[169 + 2] 0x010380 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[170 + 2] 0x0103c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[171 + 2] 0x010480 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[172 + 2] 0x010800 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, RJ_, 0, 0, 0,RJ_, 0, 0,RJ_, // state[173 + 2] 0x010840 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[174 + 2] 0x010900 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0, 0, 0, 0, // state[175 + 2] 0x010980 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0,RJ_,RJ_, // state[176 + 2] 0x010a00 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, RJ_,RJ_,RJ_, 0, 0, 0, 0,RJ_, // state[177 + 2] 0x010a40 Byte 4 of 4 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, // state[178 + 2] 0x010b40 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 
state[179 + 2] 0x010c40 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[180 + 2] 0x011000 Byte 3 of 4 8,181,163,182, 66, 3, 8,183, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 75, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[181 + 2] 0x011040 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[182 + 2] 0x0110c0 Byte 4 of 4 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[183 + 2] 0x0111c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[184 + 2] 0x012000 Byte 3 of 4 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,185, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[185 + 2] 0x012340 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[186 + 2] 0x013000 Byte 3 of 4 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 185, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[187 + 2] 0x016000 Byte 3 of 4 
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 8, 8, 8, 8, 8, 8, 8, 8, 188, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 8,189,190, 3, // state[188 + 2] 0x016a00 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0, 0, 0, 0, 0, 0, 0, // state[189 + 2] 0x016f40 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, // state[190 + 2] 0x016f80 Byte 4 of 4 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[191 + 2] 0x01b000 Byte 3 of 4 192, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[192 + 2] 0x01b000 Byte 4 of 4 RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[193 + 2] 0x01d000 Byte 3 of 4 3, 3, 3, 3, 3,194,195, 3, 3,196, 3, 3, 3, 3, 3, 3, 8,197,198,199,200,201, 8, 8, 8, 8,202,203,204,205,206,207, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[194 + 2] 0x01d140 Byte 4 of 4 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_, // 
state[195 + 2] 0x01d180 Byte 4 of 4 RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[196 + 2] 0x01d240 Byte 4 of 4 0, 0,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[197 + 2] 0x01d440 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[198 + 2] 0x01d480 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0, 0,RJ_, 0, 0,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_, 0,RJ_,RJ_,RJ_, // state[199 + 2] 0x01d4c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[200 + 2] 0x01d500 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, RJ_,RJ_,RJ_, 0, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0, // state[201 + 2] 0x01d540 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, 0, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[202 + 2] 0x01d680 Byte 4 
of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[203 + 2] 0x01d6c0 Byte 4 of 4 RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, // state[204 + 2] 0x01d700 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[205 + 2] 0x01d740 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[206 + 2] 0x01d780 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, // state[207 + 2] 0x01d7c0 Byte 4 of 4 RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[208 + 2] 0x01e000 Byte 3 of 4 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 209,210,211, 3, 3, 3, 3, 3, // state[209 + 2] 0x01ee00 Byte 4 of 4 
RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0,RJ_, 0, 0,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0,RJ_, 0, 0, 0, 0, // state[210 + 2] 0x01ee40 Byte 4 of 4 0, 0,RJ_, 0, 0, 0, 0,RJ_, 0,RJ_, 0,RJ_, 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_, 0,RJ_, 0, 0,RJ_, 0,RJ_, 0,RJ_, 0,RJ_, 0,RJ_, 0,RJ_,RJ_, 0,RJ_, 0, 0,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_, 0,RJ_, 0, // state[211 + 2] 0x01ee80 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0,RJ_,RJ_,RJ_, 0,RJ_,RJ_,RJ_, RJ_,RJ_, 0,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, // state[212 + 2] 0x02a000 Byte 3 of 4 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,213, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, // state[213 + 2] 0x02a6c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // state[214 + 2] 0x02b000 Byte 3 of 4 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 66, 8, 8, 8, 171, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[215 + 2] 0x02f000 Byte 3 of 4 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 8, 8, 8, 8, 8, 8, 8, 8, 171, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[216 + 2] 0x0c0000 Byte 2 of 4 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 217, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
// state[217 + 2] 0x0e0000 Byte 3 of 4 3, 3, 3, 3, 8, 8, 8,218, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, // state[218 + 2] 0x0e01c0 Byte 4 of 4 RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_,RJ_, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, }; // Remap base[0] = (del, add, string_offset) static const RemapEntry utf8scannot_lettermarkspecial_remap_base[] = { {0,0,0} }; // Remap string[0] static const unsigned char utf8scannot_lettermarkspecial_remap_string[] = { 0 }; static const unsigned char utf8scannot_lettermarkspecial_fast[256] = { 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,0, 0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0, 0,0,0,0,1,0,1,0, 0,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,0,0,0,0,0, 0,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,0,0,0,0,0, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, }; static const UTF8ScanObj utf8scannot_lettermarkspecial_obj = { utf8scannot_lettermarkspecial_STATE0, utf8scannot_lettermarkspecial_STATE0_SIZE, utf8scannot_lettermarkspecial_TOTAL_SIZE, utf8scannot_lettermarkspecial_MAX_EXPAND_X4, utf8scannot_lettermarkspecial_SHIFT, utf8scannot_lettermarkspecial_BYTES, utf8scannot_lettermarkspecial_LOSUB, utf8scannot_lettermarkspecial_HIADD, utf8scannot_lettermarkspecial, utf8scannot_lettermarkspecial_remap_base, utf8scannot_lettermarkspecial_remap_string, utf8scannot_lettermarkspecial_fast }; #undef X__ #undef RJ_ #undef S1_ #undef S2_ #undef S3_ #undef S21 #undef S31 #undef S32 #undef T1_ 
#undef T2_ #undef S11 #undef SP_ #undef D__ #undef RJA // Table has 14400 bytes, Hash = 9E4D-F2F2 } // End namespace CLD2 #endif // UTF8SCANNOT_LETTERMARKSPECIAL_H__
56,441
5,203
/* * gcc -z relro -z now -fPIE -pie -fstack-protector-all -o lab8C lab8C.c */ #include<errno.h> #include<fcntl.h> #include<stdio.h> #include<stdlib.h> #include<string.h> #include<sys/types.h> #include<unistd.h> struct fileComp { char fileContents1[255]; char fileContents2[255]; int cmp; }; char* readfd(int fd) { // Find length of file int size = lseek(fd, 0, SEEK_END); if(size >= 255) { printf("Your file is too big.\n"); exit(EXIT_FAILURE); } // Reset fd to beginning of file lseek(fd, 0, SEEK_SET); // Allocate space for the file and a null byte char* fileContents = malloc((size+1) & 0xff); if(!fileContents) { printf("Could not allocate space for file contents\n"); exit(EXIT_FAILURE); } // Read the file contents into the buffer int numRead = read(fd, fileContents, size & 0xff); return fileContents; } int getfd(char* arg) { if(arg[0] != '-' || arg[1] != 'f' || arg[3] != '=') { printf("Invalid formatting in argument \"%s\"\n", arg); return -1; } int fd; if(arg[2] == 'n') { // O_NOFOLLOW means that it won't follow symlinks. Sorry. fd = open(arg+4, O_NOFOLLOW | O_RDONLY); if(fd == -1) { printf("File could not be opened\n"); return -1; } } else if(arg[2] == 'd') { errno = 0; fd = atoi(arg+4); } else { printf("Invalid formatting in argument \"%s\"\n", arg); return -1; } return fd; } struct fileComp* comparefds(int fd1, int fd2) { struct fileComp* fc = malloc(sizeof(struct fileComp)); if(!fc) { printf("Could not allocate space for file contents\n"); exit(EXIT_FAILURE); } strcpy(fc->fileContents1, readfd(fd1)); strcpy(fc->fileContents2, readfd(fd2)); fc->cmp = strcmp(fc->fileContents1, fc->fileContents2); return fc; } char* securityCheck(char* arg, char* s) { if(strstr(arg, ".pass")) return "<<<For security reasons, your filename has been blocked>>>"; return s; } int main(int argc, char** argv) { if(argc != 3) { printf("Hi. This program will do a lexicographical comparison of the \ contents of two files. 
It has the bonus functionality of being \ able to process either filenames or file descriptors.\n"); printf("Usage: %s {-fn=<filename>|-fd=<file_descriptor>} {-fn=<filename>|-fd=<file_descriptor>}\n", argv[0]); return EXIT_FAILURE; } int fd1 = getfd(argv[1]); int fd2 = getfd(argv[2]); if(fd1 == -1 || fd2 == -1) { printf("Usage: %s {-fn=<filename>|-fd=<file_descriptor>} {-fn=<filename>|-fd=<file_descriptor>}\n", argv[0]); return EXIT_FAILURE; } if(fd1 == 0 || fd2 == 0) { printf("Invalid fd argument.\n"); printf("(We're still fixing some bugs with using STDIN.)\n"); printf("Usage: %s {-fn=<filename>|-fd=<file_descriptor>} {-fn=<filename>|-fd=<file_descriptor>}\n", argv[0]); return EXIT_FAILURE; } struct fileComp* fc = comparefds(fd1, fd2); printf( "\"%s\" is lexicographically %s \"%s\"\n", securityCheck(argv[1], fc->fileContents1), fc->cmp > 0 ? "after" : (fc->cmp < 0 ? "before" : "equivalent to"), securityCheck(argv[2], fc->fileContents2)); return EXIT_SUCCESS; }
1,318
584
/*
 * Copyright 2012-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.data.auditing;

import java.util.Date;

import org.joda.time.DateTime;

import org.springframework.data.annotation.CreatedBy;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedBy;
import org.springframework.data.annotation.LastModifiedDate;

/**
 * Sample entity using annotation based auditing. Note the deliberate mix of
 * property types: plain {@link Object} for the auditor fields, and both
 * Joda-Time {@link DateTime} and legacy {@link Date} for the timestamps.
 *
 * @author <NAME>
 * @since 1.5
 */
class AnnotatedUser {

	// Auditor that created the entity, per @CreatedBy.
	@CreatedBy Object createdBy;

	// Creation timestamp as a Joda-Time DateTime, per @CreatedDate.
	@CreatedDate DateTime createdDate;

	// Auditor of the most recent modification, per @LastModifiedBy.
	@LastModifiedBy Object lastModifiedBy;

	// Timestamp of the most recent modification as a legacy java.util.Date,
	// per @LastModifiedDate.
	@LastModifiedDate Date lastModifiedDate;
}
351
360
<reponame>opengauss-mirror/openGauss-graph<gh_stars>100-1000
/*
 * psql - the PostgreSQL interactive terminal
 *
 * Copyright (c) 2000-2012, PostgreSQL Global Development Group
 *
 * src/bin/psql/input.h
 */
#ifndef INPUT_H
#define INPUT_H

/*
 * If some other file needs to have access to readline/history, include this
 * file and save yourself all this work.
 *
 * USE_READLINE is the definite pointers regarding existence or not.
 */
#define USE_READLINE 1

#ifdef HAVE_LIBREADLINE
#if defined(HAVE_READLINE_READLINE_H)
#include <readline/readline.h>
#include <readline/history.h>
#elif defined(HAVE_EDITLINE_READLINE_H)
#include <editline/readline.h>
#elif defined(HAVE_READLINE_H)
#include <readline.h>
#endif /* HAVE_READLINE_READLINE_H, etc */
#else
/* No readline at all: fall back to libedit's readline emulation. */
#include <editline/readline.h>
#endif /* HAVE_LIBREADLINE */

#include "libpq/pqexpbuffer.h"

/* Read one line of input from an interactive session, showing "prompt". */
char* gets_interactive(const char* prompt);
/* Read one line of input from a non-interactive source (e.g. a script file). */
char* gets_fromFile(FILE* source);
/* Append line "s" to the in-memory history buffer "history_buf". */
void pg_append_history(const char* s, PQExpBuffer history_buf);
/* Flush the accumulated history buffer into the readline history. */
void pg_send_history(PQExpBuffer history_buf);
/* Apply a history-size setting; resets to the default when setToDefault. */
void setHistSize(const char* targetName, const char* targetValue, bool setToDefault);
/* True when readline support is active for this session. */
extern bool useReadline;
/* Returns true if "target" looks sensitive (e.g. should be kept out of history).
 * NOTE(review): exact criteria live in the implementation file — confirm there. */
extern bool SensitiveStrCheck(const char* target);
/* One-time setup of the input subsystem; "flags" selects optional behavior. */
void initializeInput(int flags);

#endif /* INPUT_H */
437
377
/*******************************************************************************
 * * Copyright 2012 Impetus Infotech.
 * *
 * * Licensed under the Apache License, Version 2.0 (the "License");
 * * you may not use this file except in compliance with the License.
 * * You may obtain a copy of the License at
 * *
 * *      http://www.apache.org/licenses/LICENSE-2.0
 * *
 * * Unless required by applicable law or agreed to in writing, software
 * * distributed under the License is distributed on an "AS IS" BASIS,
 * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * * See the License for the specific language governing permissions and
 * * limitations under the License.
 ******************************************************************************/
package com.impetus.kundera.configure.schema;

import com.impetus.kundera.KunderaException;

/**
 * Exception thrown by the SchemaManager when schema generation fails. Carries
 * optional context identifying the data store, the schema (database) and the
 * table on which generation failed; each may be null when unknown.
 *
 * @author Kuldeep.Kumar
 */
public class SchemaGenerationException extends KunderaException
{

    /** The Constant serialVersionUID. */
    private static final long serialVersionUID = 3855497974944993364L;

    /** Name of the data store on which generation failed (may be null). */
    private String dataStoreName;

    /** Name of the schema/database involved (may be null). */
    private String schemaName;

    /** Name of the table involved (may be null). */
    private String tableName;

    /**
     * Instantiates a new schema generation exception.
     *
     * @param arg0
     *            the detail message
     */
    public SchemaGenerationException(String arg0)
    {
        super(arg0);
    }

    /**
     * Instantiates a new schema generation exception with data-store and
     * schema context.
     *
     * @param arg0
     *            the detail message
     * @param dataStore
     *            the data store
     * @param schema
     *            the schema
     */
    public SchemaGenerationException(String arg0, String dataStore, String schema)
    {
        // Delegate to the most specific constructor; table is unknown here.
        this(arg0, dataStore, schema, null);
    }

    /**
     * Instantiates a new schema generation exception with full context.
     *
     * @param arg0
     *            the detail message
     * @param dataStore
     *            the data store
     * @param schema
     *            the schema
     * @param table
     *            the table
     */
    public SchemaGenerationException(String arg0, String dataStore, String schema, String table)
    {
        super(arg0);
        this.dataStoreName = dataStore;
        this.schemaName = schema;
        this.tableName = table;
    }

    /**
     * Instantiates a new schema generation exception wrapping a cause.
     *
     * @param arg0
     *            the cause
     */
    public SchemaGenerationException(Throwable arg0)
    {
        super(arg0);
    }

    /**
     * Instantiates a new schema generation exception wrapping a cause, with
     * data-store context.
     *
     * @param arg0
     *            the cause
     * @param dataStore
     *            the data store
     */
    public SchemaGenerationException(Throwable arg0, String dataStore)
    {
        // Delegate to the cause + data store + schema constructor.
        this(arg0, dataStore, null);
    }

    /**
     * Instantiates a new schema generation exception wrapping a cause, with
     * data-store and schema context.
     *
     * @param arg0
     *            the cause
     * @param dataStore
     *            the data store
     * @param schema
     *            the schema
     */
    public SchemaGenerationException(Throwable arg0, String dataStore, String schema)
    {
        super(arg0);
        this.dataStoreName = dataStore;
        this.schemaName = schema;
    }

    /**
     * Instantiates a new schema generation exception with message, cause and
     * data-store context.
     *
     * @param arg0
     *            the detail message
     * @param arg1
     *            the cause
     * @param dataStore
     *            the data store
     */
    public SchemaGenerationException(String arg0, Throwable arg1, String dataStore)
    {
        super(arg0, arg1);
        this.dataStoreName = dataStore;
    }

    /**
     * Instantiates a new schema generation exception with message, cause,
     * data-store and database context.
     *
     * @param arg0
     *            the detail message
     * @param arg1
     *            the cause
     * @param dataStoreName
     *            the data store name
     * @param databaseName
     *            the database (schema) name
     */
    public SchemaGenerationException(String arg0, Throwable arg1, String dataStoreName, String databaseName)
    {
        super(arg0, arg1);
        this.dataStoreName = dataStoreName;
        this.schemaName = databaseName;
    }

    /**
     * @return name of the data store on which generation failed, or null.
     */
    public String getDataStoreName()
    {
        return dataStoreName;
    }

    /**
     * @return name of the schema/database involved, or null.
     */
    public String getSchemaName()
    {
        return schemaName;
    }

    /**
     * @return name of the table involved, or null.
     */
    public String getTableName()
    {
        return tableName;
    }
}
1,569
352
<reponame>bolt1502/yi-hack<gh_stars>100-1000
/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 3 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)

This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Lesser General Public License
for more details.

You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301  USA
**********/
// "liveMedia"
// Copyright (c) 1996-2020 Live Networks, Inc.  All rights reserved.
// Copyright (c) 2020 roleo. All rights reserved.
// Filters for converting between raw PCM audio and aLaw
// C++ header

#ifndef _ALAW_AUDIO_FILTER_HH
#define _ALAW_AUDIO_FILTER_HH

#ifndef _FRAMED_FILTER_HH
#include "FramedFilter.hh"
#endif

////////// 16-bit PCM (in various byte orderings) -> 8-bit a-Law //////////

// Filter that consumes 16-bit PCM frames from "inputSource" and delivers the
// corresponding 8-bit a-Law encoded frames.
class aLawFromPCMAudioSource: public FramedFilter {
public:
  static aLawFromPCMAudioSource*
  createNew(UsageEnvironment& env, FramedSource* inputSource,
	    int byteOrdering = 0);
  // "byteOrdering" == 0 => host order (the default)
  // "byteOrdering" == 1 => little-endian order
  // "byteOrdering" == 2 => network (i.e., big-endian) order

protected:
  aLawFromPCMAudioSource(UsageEnvironment& env, FramedSource* inputSource,
			 int byteOrdering);
      // called only by createNew()
  virtual ~aLawFromPCMAudioSource();

private:
  // Redefined virtual functions:
  virtual void doGetNextFrame();

private:
  // Static callback handed to the input source; "clientData" carries the
  // instance, which then handles the frame in afterGettingFrame1().
  static void afterGettingFrame(void* clientData, unsigned frameSize,
				unsigned numTruncatedBytes,
				struct timeval presentationTime,
				unsigned durationInMicroseconds);
  void afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes,
			  struct timeval presentationTime,
			  unsigned durationInMicroseconds);

private:
  int fByteOrdering;           // input byte order: 0 host, 1 little-endian, 2 big-endian
  unsigned char* fInputBuffer; // buffer holding the PCM frame read from the input source
  unsigned fInputBufferSize;   // allocated size of fInputBuffer, in bytes
};


////////// a-Law -> 16-bit PCM (in host order) //////////

// Filter that consumes 8-bit a-Law frames from "inputSource" and delivers the
// corresponding 16-bit PCM frames, in host byte order.
class PCMFromaLawAudioSource: public FramedFilter {
public:
  static PCMFromaLawAudioSource*
  createNew(UsageEnvironment& env, FramedSource* inputSource);

protected:
  PCMFromaLawAudioSource(UsageEnvironment& env,
			 FramedSource* inputSource);
      // called only by createNew()
  virtual ~PCMFromaLawAudioSource();

private:
  // Redefined virtual functions:
  virtual void doGetNextFrame();

private:
  // Static callback handed to the input source; "clientData" carries the
  // instance, which then handles the frame in afterGettingFrame1().
  static void afterGettingFrame(void* clientData, unsigned frameSize,
				unsigned numTruncatedBytes,
				struct timeval presentationTime,
				unsigned durationInMicroseconds);
  void afterGettingFrame1(unsigned frameSize, unsigned numTruncatedBytes,
			  struct timeval presentationTime,
			  unsigned durationInMicroseconds);

private:
  unsigned char* fInputBuffer; // buffer holding the a-Law frame read from the input source
  unsigned fInputBufferSize;   // allocated size of fInputBuffer, in bytes
};

#endif
964
2,151
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/gfx/paint_vector_icon.h" #include <gtest/gtest.h> #include <vector> #include "base/i18n/rtl.h" #include "cc/paint/paint_record.h" #include "cc/paint/paint_recorder.h" #include "third_party/skia/include/core/SkCanvas.h" #include "third_party/skia/include/core/SkPath.h" #include "ui/gfx/canvas.h" #include "ui/gfx/vector_icon_types.h" namespace gfx { namespace { SkColor GetColorAtTopLeft(const Canvas& canvas) { return canvas.GetBitmap().getColor(0, 0); } class MockCanvas : public SkCanvas { public: MockCanvas(int width, int height) : SkCanvas(width, height) {} // SkCanvas overrides: void onDrawPath(const SkPath& path, const SkPaint& paint) override { paths_.push_back(path); } const std::vector<SkPath>& paths() const { return paths_; } private: std::vector<SkPath> paths_; DISALLOW_COPY_AND_ASSIGN(MockCanvas); }; // Tests that a relative move to command (R_MOVE_TO) after a close command // (CLOSE) uses the correct starting point. See crbug.com/697497 TEST(VectorIconTest, RelativeMoveToAfterClose) { cc::PaintRecorder recorder; Canvas canvas(recorder.beginRecording(100, 100), 1.0f); const PathElement elements[] = { MOVE_TO, 4, 5, LINE_TO, 10, 11, CLOSE, // This move should use (4, 5) as the start point rather than (10, 11). 
R_MOVE_TO, 20, 21, R_LINE_TO, 50, 51}; const VectorIconRep rep_list[] = {{elements, arraysize(elements)}}; const VectorIcon icon = {rep_list, 1u}; PaintVectorIcon(&canvas, icon, 100, SK_ColorMAGENTA); sk_sp<cc::PaintRecord> record = recorder.finishRecordingAsPicture(); MockCanvas mock(100, 100); record->Playback(&mock); ASSERT_EQ(1U, mock.paths().size()); SkPoint last_point; EXPECT_TRUE(mock.paths()[0].getLastPt(&last_point)); EXPECT_EQ(SkIntToScalar(74), last_point.x()); EXPECT_EQ(SkIntToScalar(77), last_point.y()); } TEST(VectorIconTest, FlipsInRtl) { // Set the locale to a rtl language otherwise FLIPS_IN_RTL will do nothing. base::i18n::SetICUDefaultLocale("he"); ASSERT_TRUE(base::i18n::IsRTL()); const int canvas_size = 20; const SkColor color = SK_ColorWHITE; Canvas canvas(gfx::Size(canvas_size, canvas_size), 1.0f, true); // Create a 20x20 square icon which has FLIPS_IN_RTL, and CANVAS_DIMENSIONS // are twice as large as |canvas|. const PathElement elements[] = {CANVAS_DIMENSIONS, 2 * canvas_size, FLIPS_IN_RTL, MOVE_TO, 10, 10, R_H_LINE_TO, 20, R_V_LINE_TO, 20, R_H_LINE_TO, -20, CLOSE}; const VectorIconRep rep_list[] = {{elements, arraysize(elements)}}; const VectorIcon icon = {rep_list, 1u}; PaintVectorIcon(&canvas, icon, canvas_size, color); // Count the number of pixels in the canvas. auto bitmap = canvas.GetBitmap(); int colored_pixel_count = 0; for (int i = 0; i < bitmap.width(); ++i) { for (int j = 0; j < bitmap.height(); ++j) { if (bitmap.getColor(i, j) == color) colored_pixel_count++; } } // Verify that the amount of colored pixels on the canvas bitmap should be a // quarter of the original icon, since each side should be scaled down by a // factor of two. EXPECT_EQ(100, colored_pixel_count); } TEST(VectorIconTest, CorrectSizePainted) { // Create a set of 5 icons reps, sized {48, 32, 24, 20, 16} for the test icon. 
// Color each of them differently so they can be differentiated (the parts of // an icon painted with PATH_COLOR_ARGB will not be overwritten by the color // provided to it at creation time). // SK_ColorRED. const PathElement elements48[] = {CANVAS_DIMENSIONS, 48, PATH_COLOR_ARGB, 0xFF, 0xFF, 0x00, 0x00, MOVE_TO, 0, 0, H_LINE_TO, 48, V_LINE_TO, 48, H_LINE_TO, 0, V_LINE_TO, 0, CLOSE}; // SK_ColorGREEN. const PathElement elements32[] = {CANVAS_DIMENSIONS, 32, PATH_COLOR_ARGB, 0xFF, 0x00, 0xFF, 0x00, MOVE_TO, 0, 0, H_LINE_TO, 32, V_LINE_TO, 32, H_LINE_TO, 0, V_LINE_TO, 0, CLOSE}; // SK_ColorBLUE. const PathElement elements24[] = {CANVAS_DIMENSIONS, 24, PATH_COLOR_ARGB, 0xFF, 0x00, 0x00, 0xFF, MOVE_TO, 0, 0, H_LINE_TO, 24, V_LINE_TO, 24, H_LINE_TO, 0, V_LINE_TO, 0, CLOSE}; // SK_ColorYELLOW. const PathElement elements20[] = {CANVAS_DIMENSIONS, 20, PATH_COLOR_ARGB, 0xFF, 0xFF, 0xFF, 0x00, MOVE_TO, 0, 0, H_LINE_TO, 20, V_LINE_TO, 20, H_LINE_TO, 0, V_LINE_TO, 0, CLOSE}; // SK_ColorCYAN. const PathElement elements16[] = {CANVAS_DIMENSIONS, 16, PATH_COLOR_ARGB, 0xFF, 0x00, 0xFF, 0xFF, MOVE_TO, 0, 0, H_LINE_TO, 16, V_LINE_TO, 16, H_LINE_TO, 0, V_LINE_TO, 0, CLOSE}; // VectorIconReps are always sorted in descending order of size. const VectorIconRep rep_list[] = {{elements48, arraysize(elements48)}, {elements32, arraysize(elements32)}, {elements24, arraysize(elements24)}, {elements20, arraysize(elements20)}, {elements16, arraysize(elements16)}}; const VectorIcon icon = {rep_list, 5u}; // Test exact sizes paint the correctly sized icon, including the largest and // smallest icon. 
Canvas canvas_100(gfx::Size(100, 100), 1.0, true); PaintVectorIcon(&canvas_100, icon, 48, SK_ColorBLACK); EXPECT_EQ(SK_ColorRED, GetColorAtTopLeft(canvas_100)); PaintVectorIcon(&canvas_100, icon, 32, SK_ColorBLACK); EXPECT_EQ(SK_ColorGREEN, GetColorAtTopLeft(canvas_100)); PaintVectorIcon(&canvas_100, icon, 16, SK_ColorBLACK); EXPECT_EQ(SK_ColorCYAN, GetColorAtTopLeft(canvas_100)); // Only the largest icon may be upscaled to a size larger than what it was // designed for. PaintVectorIcon(&canvas_100, icon, 50, SK_ColorBLACK); EXPECT_EQ(SK_ColorRED, GetColorAtTopLeft(canvas_100)); // All other icons may never be upscaled. PaintVectorIcon(&canvas_100, icon, 27, SK_ColorBLACK); EXPECT_EQ(SK_ColorGREEN, GetColorAtTopLeft(canvas_100)); PaintVectorIcon(&canvas_100, icon, 8, SK_ColorBLACK); EXPECT_EQ(SK_ColorCYAN, GetColorAtTopLeft(canvas_100)); // Test icons at a scale factor < 100%, still with an exact size, paint the // correctly sized icon. Canvas canvas_75(gfx::Size(100, 100), 0.75, true); PaintVectorIcon(&canvas_75, icon, 32, SK_ColorBLACK); // 32 * 0.75 = 24. EXPECT_EQ(SK_ColorBLUE, GetColorAtTopLeft(canvas_75)); // Test icons at a scale factor > 100%, still with an exact size, paint the // correctly sized icon. Canvas canvas_125(gfx::Size(100, 100), 1.25, true); PaintVectorIcon(&canvas_125, icon, 16, SK_ColorBLACK); // 16 * 1.25 = 20. EXPECT_EQ(SK_ColorYELLOW, GetColorAtTopLeft(canvas_125)); // Inexact sizes at scale factors < 100%. PaintVectorIcon(&canvas_75, icon, 12, SK_ColorBLACK); // 12 * 0.75 = 9. EXPECT_EQ(SK_ColorCYAN, GetColorAtTopLeft(canvas_75)); PaintVectorIcon(&canvas_75, icon, 28, SK_ColorBLACK); // 28 * 0.75 = 21. EXPECT_EQ(SK_ColorBLUE, GetColorAtTopLeft(canvas_75)); // Inexact sizes at scale factors > 100%. PaintVectorIcon(&canvas_125, icon, 12, SK_ColorBLACK); // 12 * 1.25 = 15. EXPECT_EQ(SK_ColorCYAN, GetColorAtTopLeft(canvas_125)); PaintVectorIcon(&canvas_125, icon, 28, SK_ColorBLACK); // 28 * 1.25 = 35. 
EXPECT_EQ(SK_ColorRED, GetColorAtTopLeft(canvas_125)); // Painting without a requested size will default to the smallest icon rep. PaintVectorIcon(&canvas_100, icon, SK_ColorBLACK); EXPECT_EQ(SK_ColorCYAN, GetColorAtTopLeft(canvas_100)); // But doing this in another scale factor should assume the smallest icon rep // size, then scale it up by the DSF. PaintVectorIcon(&canvas_125, icon, SK_ColorBLACK); // 16 * 1.25 = 20. EXPECT_EQ(SK_ColorYELLOW, GetColorAtTopLeft(canvas_125)); } } // namespace } // namespace gfx
6,703
32,544
<reponame>DBatOWL/tutorials package com.baeldung.zerocode; import java.util.ArrayList; import java.util.List; import java.util.UUID; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.*; @SpringBootApplication @RestController @RequestMapping("/api/users") public class ZerocodeApplication { private List<User> users = new ArrayList<>(); public static void main(String[] args) { SpringApplication.run(ZerocodeApplication.class, args); } @PostMapping public ResponseEntity create(@RequestBody User user) { if (!StringUtils.hasText(user.getFirstName())) { return new ResponseEntity("firstName can't be empty!", HttpStatus.BAD_REQUEST); } if (!StringUtils.hasText(user.getLastName())) { return new ResponseEntity("lastName can't be empty!", HttpStatus.BAD_REQUEST); } user.setId(UUID.randomUUID() .toString()); users.add(user); return new ResponseEntity(user, HttpStatus.CREATED); } }
464
14,668
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "services/network/dns_config_change_manager.h" #include <utility> namespace network { DnsConfigChangeManager::DnsConfigChangeManager() { net::NetworkChangeNotifier::AddDNSObserver(this); } DnsConfigChangeManager::~DnsConfigChangeManager() { net::NetworkChangeNotifier::RemoveDNSObserver(this); } void DnsConfigChangeManager::AddReceiver( mojo::PendingReceiver<mojom::DnsConfigChangeManager> receiver) { receivers_.Add(this, std::move(receiver)); } void DnsConfigChangeManager::RequestNotifications( mojo::PendingRemote<mojom::DnsConfigChangeManagerClient> client) { clients_.Add(std::move(client)); } void DnsConfigChangeManager::OnDNSChanged() { for (const auto& client : clients_) client->OnDnsConfigChanged(); } } // namespace network
299
14,668
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/gtest_xml_unittest_result_printer.h"

#include "base/base64.h"
#include "base/command_line.h"
#include "base/files/file_util.h"
#include "base/strings/strcat.h"
#include "base/test/test_switches.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"

namespace base {

// Verifies that AddLink() emits a <link> element, keyed by test and fixture
// name, into the XML results file given via --test-launcher-output.
TEST(XmlUnitTestResultPrinterTest, LinkInXmlFile) {
  XmlUnitTestResultPrinter::Get()->AddLink("unique_link", "http://google.com");
  std::string file_path =
      base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
          switches::kTestLauncherOutput);
  std::string content;
  ASSERT_TRUE(
      base::ReadFileToString(FilePath::FromUTF8Unsafe(file_path), &content));
  std::string expected_content =
      base::StrCat({"<link name=\"LinkInXmlFile\" "
                    "classname=\"XmlUnitTestResultPrinterTest\" "
                    "link_name=\"unique_link\">",
                    "http://google.com", "</link>"});
  EXPECT_TRUE(content.find(expected_content) != std::string::npos)
      << expected_content << " not found in " << content;
}

// Verifies that XML-special characters in the link target are entity-escaped
// in the results file.
TEST(XmlUnitTestResultPrinterTest, EscapedLinkInXmlFile) {
  XmlUnitTestResultPrinter::Get()->AddLink(
      "unique_link", "http://google.com/path?id=\"'<>&\"");
  std::string file_path =
      base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
          switches::kTestLauncherOutput);
  std::string content;
  ASSERT_TRUE(
      base::ReadFileToString(FilePath::FromUTF8Unsafe(file_path), &content));
  std::string expected_content = base::StrCat(
      {"<link name=\"EscapedLinkInXmlFile\" "
       "classname=\"XmlUnitTestResultPrinterTest\" "
       "link_name=\"unique_link\">",
       "http://google.com/path?id=&quot;&apos;&lt;&gt;&amp;&quot;",
       "</link>"});
  EXPECT_TRUE(content.find(expected_content) != std::string::npos)
      << expected_content << " not found in " << content;
}

// Fixture whose suite teardown checks the <testcase> element written after
// the test case finishes.
class XmlUnitTestResultPrinterTimestampTest : public ::testing::Test {
 public:
  static void TearDownTestSuite() {
    // <testcase ...> should be generated after the test case finishes, i.e.
    // after TearDown().
    std::string file_path =
        base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
            switches::kTestLauncherOutput);
    if (file_path.empty()) {
      GTEST_SKIP() << "Test has to run with --" << switches::kTestLauncherOutput
                   << " switch.";
    }
    std::string content;
    ASSERT_TRUE(
        base::ReadFileToString(FilePath::FromUTF8Unsafe(file_path), &content));
    EXPECT_THAT(content, ::testing::ContainsRegex("<testcase.*timestamp="));
  }
};

// Verifies that the <x-teststart> element (written when the test begins)
// carries a timestamp attribute.
TEST_F(XmlUnitTestResultPrinterTimestampTest, TimestampInXmlFile) {
  // <x-teststart ... /> should be generated at this point.
  std::string file_path =
      base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
          switches::kTestLauncherOutput);
  if (file_path.empty()) {
    GTEST_SKIP() << "Test has to run with --" << switches::kTestLauncherOutput
                 << " switch.";
  }
  std::string content;
  ASSERT_TRUE(
      base::ReadFileToString(FilePath::FromUTF8Unsafe(file_path), &content));
  EXPECT_THAT(content, ::testing::ContainsRegex("<x-teststart.*timestamp="));
}

}  // namespace base
1,319
1,772
<reponame>mtcolman/django-DefectDojo { "errors": [], "mobsfscan_version": "0.0.6", "results": { "android_certificate_transparency": { "metadata": { "cwe": "CWE-295 Improper Certificate Validation", "description": "This app does not enforce TLS Certificate Transparency which helps to detect SSL certificates that have been mistakenly issued by a certificate authority or maliciously acquired from an otherwise unimpeachable certificate authority.", "masvs": "MSTG-NETWORK-4", "owasp-mobile": "M3: Insecure Communication", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05g-Testing-Network-Communication.md#testing-custom-certificate-stores-and-certificate-pinning-mstg-network-4", "severity": "INFO" } }, "android_kotlin_hardcoded": { "files": [ { "file_path": "app/src/main/java/com/routes/domain/analytics/event/Signatures.kt", "match_lines": [ 10, 10 ], "match_position": [ 243, 271 ], "match_string": "key = \"hmi_busroutes_health\"" } ], "metadata": { "cwe": "CWE-798 Use of Hard-coded Credentials", "description": "Files may contain hardcoded sensitive information like usernames, passwords, keys etc.", "masvs": "MSTG-STORAGE-14", "owasp-mobile": "M9: Reverse Engineering", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05d-Testing-Data-Storage.md#storing-a-key---example", "severity": "WARNING" } }, "android_prevent_screenshot": { "metadata": { "cwe": "CWE-200 Information Exposure", "description": "This app does not have capabilities to prevent against Screenshots from Recent Task History/ Now On Tap etc.", "masvs": "MSTG-STORAGE-9", "owasp-mobile": "M2: Insecure Data Storage", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05d-Testing-Data-Storage.md#finding-sensitive-information-in-auto-generated-screenshots-mstg-storage-9", "severity": "INFO" } }, "android_root_detection": { "metadata": { "cwe": "CWE-919 - Weaknesses in Mobile Applications", "description": "This app does not have root detection 
capabilities. Running a sensitive application on a rooted device questions the device integrity and affects users data.", "masvs": "MSTG-RESILIENCE-1", "owasp-mobile": "M8: Code Tampering", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05j-Testing-Resiliency-Against-Reverse-Engineering.md#testing-root-detection-mstg-resilience-1", "severity": "INFO" } }, "android_safetynet": { "metadata": { "cwe": "CWE-353 Missing Support for Integrity Check", "description": "This app does not uses SafetyNet Attestation API that provides cryptographically-signed attestation, assessing the device's integrity. This check helps to ensure that the servers are interacting with the genuine app running on a genuine Android device. ", "masvs": "MSTG-RESILIENCE-1", "owasp-mobile": "M8: Code Tampering", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05j-Testing-Resiliency-Against-Reverse-Engineering.md#testing-root-detection-mstg-resilience-1", "severity": "INFO" } }, "android_ssl_pinning": { "metadata": { "cwe": "CWE-295 Improper Certificate Validation", "description": "This app does not use TLS/SSL certificate or public key pinning to detect or prevent MITM attacks in secure communication channel.", "masvs": "MSTG-NETWORK-4", "owasp-mobile": "M3: Insecure Communication", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05g-Testing-Network-Communication.md#testing-custom-certificate-stores-and-certificate-pinning-mstg-network-4", "severity": "INFO" } }, "android_tapjacking": { "metadata": { "cwe": "CWE-200 Information Exposure", "description": "This app does not have capabilities to prevent tapjacking attacks.", "masvs": "MSTG-PLATFORM-9", "owasp-mobile": "M1: Improper Platform Usage", "reference": "https://github.com/MobSF/owasp-mstg/blob/master/Document/0x05h-Testing-Platform-Interaction.md#testing-for-overlay-attacks-mstg-platform-9", "severity": "INFO" } } } }
1,858
892
<filename>advisories/unreviewed/2022/05/GHSA-hq47-jrpj-2gwp/GHSA-hq47-jrpj-2gwp.json<gh_stars>100-1000 { "schema_version": "1.2.0", "id": "GHSA-hq47-jrpj-2gwp", "modified": "2022-05-01T18:23:19Z", "published": "2022-05-01T18:23:19Z", "aliases": [ "CVE-2007-4412" ], "details": "Multiple cross-site scripting (XSS) vulnerabilities in Headstart Solutions DeskPRO 3.0.2 allow remote authenticated users to inject arbitrary web script or HTML via unspecified parameters to (1) techs.php, (2) ticket_category.php, (3) ticket_priority.php, (4) ticket_workflow.php, (5) ticket_escalate.php, (6) fields_ticket.php, (7) ticket_rules_web.php, (8) ticket_displayfields.php, (9) ticket_rules_mail.php, (10) fields_user.php, (11) fields_faq.php, and (12) user_help.php, in (a) admincp/ and (b) possibly a directory on the \"User side.\"", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2007-4412" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/36023" }, { "type": "WEB", "url": "http://securityreason.com/securityalert/3029" }, { "type": "WEB", "url": "http://www.securityfocus.com/archive/1/476454/100/0/threaded" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/25325" } ], "database_specific": { "cwe_ids": [ ], "severity": "LOW", "github_reviewed": false } }
677
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "third_party/blink/renderer/core/frame/csp/csp_violation_report_body.h" namespace blink { void CSPViolationReportBody::BuildJSONValue(V8ObjectBuilder& builder) const { LocationReportBody::BuildJSONValue(builder); builder.AddString("documentURL", documentURL()); builder.AddStringOrNull("referrer", referrer()); builder.AddStringOrNull("blockedURL", blockedURL()); builder.AddString("effectiveDirective", effectiveDirective()); builder.AddString("originalPolicy", originalPolicy()); builder.AddStringOrNull("sample", sample()); builder.AddString("disposition", disposition()); builder.AddNumber("statusCode", statusCode()); } } // namespace blink
236
892
{ "schema_version": "1.2.0", "id": "GHSA-326g-ww7w-8qhj", "modified": "2022-05-13T01:32:29Z", "published": "2022-05-13T01:32:29Z", "aliases": [ "CVE-2018-1926" ], "details": "IBM WebSphere Application Server 7.0, 8.0, 8.5, and 9.0 Admin Console is vulnerable to cross-site request forgery, caused by improper validation of user-supplied input. By persuading a user to visit a malicious URL, a remote attacker could send a specially-crafted request. An attacker could exploit this vulnerability to perform CSRF attack and update available applications. IBM X-Force ID: 152992.", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.0/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H" } ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-1926" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/152992" }, { "type": "WEB", "url": "https://www.ibm.com/support/docview.wss?uid=ibm10742301" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/106204" } ], "database_specific": { "cwe_ids": [ "CWE-352" ], "severity": "HIGH", "github_reviewed": false } }
583
1,986
#include "BLEUtils.h"
#include "BLEServer.h"
#include <esp_log.h>
#include <string>
#include <stdio.h>
#include "BLEDevice.h"

#include "sdkconfig.h"

// See the following for generating UUIDs:
// https://www.uuidgenerator.net/

#define SERVICE_UUID        "4fafc201-1fb5-459e-8fcc-c5c9c331914b"
#define CHARACTERISTIC_UUID "beb5483e-36e1-4688-b7f5-ea07361b26a8"
#define CHARACTERISTIC_UUID2 "54059634-9448-404f-9af4-7d14556f3ad8"
#define CHARACTERISTIC_UUID3 "78f8a814-7b20-40ca-b970-0aba448c53b1"
#define CHARACTERISTIC_UUID4 "03a55273-c1ef-4eab-a6c0-7ff11509122f"
#define CHARACTERISTIC_UUID5 "0d19566d-2144-4443-9779-19d42e283439"

// UUIDs of the characteristics to create, in creation order. Replaces the
// five copy-pasted createCharacteristic() calls of the original sample.
static const char* kCharacteristicUUIDs[] = {
	CHARACTERISTIC_UUID,
	CHARACTERISTIC_UUID2,
	CHARACTERISTIC_UUID3,
	CHARACTERISTIC_UUID4,
	CHARACTERISTIC_UUID5,
};

// Brings up a BLE GATT server named "MYDEVICE" with one service containing
// five read/write characteristics, then starts advertising.
static void run() {
	BLEDevice::init("MYDEVICE");
	BLEServer* pServer = BLEDevice::createServer();
	BLEService* pService = pServer->createService(BLEUUID(SERVICE_UUID));

	BLECharacteristic* pCharacteristic = nullptr;
	for (const char* uuid : kCharacteristicUUIDs) {
		pCharacteristic = pService->createCharacteristic(
			BLEUUID(uuid),
			BLECharacteristic::PROPERTY_READ | BLECharacteristic::PROPERTY_WRITE
		);
	}

	// As in the original sample, only the last characteristic created
	// receives an initial value.
	pCharacteristic->setValue("Hello World says Neil");
	pService->start();

	BLEAdvertising* pAdvertising = pServer->getAdvertising();
	pAdvertising->start();
}

// Sample entry point; simply delegates to run().
void Sample1(void) {
	//esp_log_level_set("*", ESP_LOG_ERROR);
	run();
} // app_main
764
3,102
<reponame>clayne/DirectXShaderCompiler
// RUN: %clang_cc1 -fsyntax-only -verify -std=c++11 %s

// Checks the diagnostics for non-static data member initializers of a local
// class that refer to local variables of the enclosing function or enclosing
// lambda: binding a reference to the local 'x' is diagnosed, while
// initializing from the constant 'c' is accepted.
// NOTE: the // expected-* annotations are matched by -verify against the
// physical line they appear on; do not reflow the code below.

void f() {
  int x = 3; // expected-note{{'x' declared here}}
  const int c = 2;
  struct C {
    int& x2 = x; // expected-error{{reference to local variable 'x' declared in enclosing function 'f'}}
    int cc = c;
  };
  (void)[]() mutable {
    int x = 3; // expected-note{{'x' declared here}}
    struct C {
      int& x2 = x; // expected-error{{reference to local variable 'x' declared in enclosing lambda expression}}
    };
  };
  C();
}
207
479
// Copyright (C) 2017-2018 Dremio Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef GANDIVA_FIELDDESCRIPTOR_H #define GANDIVA_FIELDDESCRIPTOR_H #include <string> #include "gandiva/arrow.h" namespace gandiva { /// \brief Descriptor for an arrow field. Holds indexes into the flattened array of /// buffers that is passed to LLVM generated functions. class FieldDescriptor { public: static const int kInvalidIdx = -1; FieldDescriptor(FieldPtr field, int data_idx, int validity_idx = kInvalidIdx, int offsets_idx = kInvalidIdx) : field_(field), data_idx_(data_idx), validity_idx_(validity_idx), offsets_idx_(offsets_idx) {} /// Index of validity array in the array-of-buffers int validity_idx() const { return validity_idx_; } /// Index of data array in the array-of-buffers int data_idx() const { return data_idx_; } /// Index of offsets array in the array-of-buffers int offsets_idx() const { return offsets_idx_; } FieldPtr field() const { return field_; } const std::string &Name() const { return field_->name(); } DataTypePtr Type() const { return field_->type(); } bool HasOffsetsIdx() const { return offsets_idx_ != kInvalidIdx; } private: FieldPtr field_; int data_idx_; int validity_idx_; int offsets_idx_; }; } // namespace gandiva #endif // GANDIVA_FIELDDESCRIPTOR_H
637
530
<reponame>meghasfdc/jmc<filename>application/org.openjdk.jmc.joverflow/src/main/java/org/openjdk/jmc/joverflow/descriptors/AbstractLinkedCollectionDescriptor.java<gh_stars>100-1000 /* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at http://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package org.openjdk.jmc.joverflow.descriptors;

import org.openjdk.jmc.joverflow.heap.model.JavaClass;
import org.openjdk.jmc.joverflow.heap.model.JavaInt;
import org.openjdk.jmc.joverflow.heap.model.JavaObject;
import org.openjdk.jmc.joverflow.support.Constants;
import org.openjdk.jmc.joverflow.util.ClassUtils;

/**
 * Base descriptor for collection classes that keep their elements in a linked structure, such as
 * LinkedList, TreeMap, ConcurrentLinkedQueue etc. Such classes may or may not have the 'size'
 * field, so this descriptor supports different methods of determining collection size.
 */
public abstract class AbstractLinkedCollectionDescriptor extends AbstractCollectionDescriptor
		implements Constants {

	// Factory for this collection class; holds the resolved field indexes shared
	// by all instances of the described class.
	protected final Factory factory;

	protected AbstractLinkedCollectionDescriptor(JavaObject col, Factory factory) {
		super(col);
		this.factory = factory;
	}

	/**
	 * Returns the number of elements: read directly from the 'size' field when the described
	 * class has one, otherwise obtained by counting the elements.
	 */
	@Override
	public int getNumElements() {
		if (factory.sizeFieldIdx != -1) {
			// 'fields' is inherited state holding this instance's field values.
			return ((JavaInt) fields[factory.sizeFieldIdx]).getValue();
		} else {
			return getSizeByCountingElements();
		}
	}

	@Override
	AbstractCollectionDescriptor.Factory getFactory() {
		return factory;
	}

	/**
	 * Returns the size of the described collection determined by counting its elements. The
	 * implementation may, of course, cache the value and return it on subsequent invocations.
	 */
	protected abstract int getSizeByCountingElements();

	static abstract class Factory extends AbstractCollectionDescriptor.Factory {

		// sizeFieldIdx is -1 when the described class has no 'size' field;
		// rootFieldIdx locates the field named by rootFieldName.
		protected final int sizeFieldIdx, rootFieldIdx;
		private final String elementFieldName;
		// Lazily resolved by getElementFieldIdx(); -1 until first use.
		private int elementFieldIdx = -1;

		/**
		 * Note that sizeFieldName parameter may be null, which means that the described class does
		 * not have 'size' field. In that case, the descriptor subclass should provide the
		 * implementation of {@link AbstractLinkedCollectionDescriptor#getSizeByCountingElements()}
		 * method.
		 */
		Factory(JavaClass clazz, boolean isMap, String sizeFieldName, String rootFieldName,
				String elementFieldName, JavaClass[] implClasses) {
			super(clazz, isMap, implClasses, null, false, new String[] {rootFieldName});
			sizeFieldIdx = sizeFieldName != null ? clazz.getInstanceFieldIndex(sizeFieldName) : -1;
			rootFieldIdx = clazz.getInstanceFieldIndex(rootFieldName);
			this.elementFieldName = elementFieldName;
		}

		// Copy constructor used when a subclass factory wraps a superclass factory.
		protected Factory(JavaClass clazz, AbstractCollectionDescriptor.Factory superclassFactory) {
			super(clazz, superclassFactory);
			this.sizeFieldIdx = ((Factory) superclassFactory).sizeFieldIdx;
			this.rootFieldIdx = ((Factory) superclassFactory).rootFieldIdx;
			this.elementFieldName = ((Factory) superclassFactory).elementFieldName;
		}

		// Resolves and caches the index of the element field on the entry's class.
		// NOTE(review): the cached index is resolved from the first entry seen and
		// reused for all later entries — presumably all entries share one class;
		// confirm against callers.
		protected int getElementFieldIdx(JavaObject entry) {
			if (elementFieldIdx == -1) {
				JavaClass entryClass = entry.getClazz();
				String elFieldName = ClassUtils.getExactFieldName(elementFieldName, entryClass);
				elementFieldIdx = entry.getClazz().getInstanceFieldIndex(elFieldName);
			}
			return elementFieldIdx;
		}
	}
}
307
# WARNING: you are on the master branch, please refer to the examples on the branch that matches your `cortex version` import numpy as np import cv2 from .colors import get_color class BoundBox: def __init__(self, xmin, ymin, xmax, ymax, c=None, classes=None): self.xmin = xmin self.ymin = ymin self.xmax = xmax self.ymax = ymax self.c = c self.classes = classes self.label = -1 self.score = -1 def get_label(self): if self.label == -1: self.label = np.argmax(self.classes) return self.label def get_score(self): if self.score == -1: self.score = self.classes[self.get_label()] return self.score def draw_boxes(image, boxes, overlay_text, labels, obj_thresh, quiet=True): for box, overlay in zip(boxes, overlay_text): label_str = "" label = -1 for i in range(len(labels)): if box.classes[i] > obj_thresh: if label_str != "": label_str += ", " label_str += labels[i] + " " + str(round(box.get_score() * 100, 2)) + "%" label = i if not quiet: print(label_str) if label >= 0: if len(overlay) > 0: text = label_str + ": [" + " ".join(overlay) + "]" else: text = label_str text = text.upper() text_size = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, 1.1e-3 * image.shape[0], 5) width, height = text_size[0][0], text_size[0][1] region = np.array( [ [box.xmin - 3, box.ymin], [box.xmin - 3, box.ymin - height - 26], [box.xmin + width + 13, box.ymin - height - 26], [box.xmin + width + 13, box.ymin], ], dtype="int32", ) # cv2.rectangle(img=image, pt1=(box.xmin,box.ymin), pt2=(box.xmax,box.ymax), color=get_color(label), thickness=5) rec = (box.xmin, box.ymin, box.xmax - box.xmin, box.ymax - box.ymin) rec = tuple(int(i) for i in rec) cv2.rectangle(img=image, rec=rec, color=get_color(label), thickness=3) cv2.fillPoly(img=image, pts=[region], color=get_color(label)) cv2.putText( img=image, text=text, org=(box.xmin + 13, box.ymin - 13), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1e-3 * image.shape[0], color=(0, 0, 0), thickness=1, ) return image
1,460
1,184
<reponame>YJSoft/Talon-for-Twitter<filename>app/src/main/java/com/klinker/android/twitter/data/sq_lite/DMDataSource.java
/*
 * Copyright 2014 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.klinker.android.twitter.data.sq_lite;

import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;

import com.klinker.android.twitter.settings.AppSettings;
import com.klinker.android.twitter.utils.TweetLinkUtils;

import twitter4j.DirectMessage;
import twitter4j.DirectMessageEvent;
import twitter4j.MediaEntity;
import twitter4j.URLEntity;
import twitter4j.User;

/**
 * Data-access object for the direct-message SQLite table. Used as a
 * process-wide singleton (see {@link #getInstance(Context)}) so the database
 * is opened once and shared between threads/fragments.
 */
public class DMDataSource {

    // provides access to the database
    public static DMDataSource dataSource = null;

    /*
    This is used so that we don't have to open and close the database on different threads or fragments
    every time. This will facilitate it between all of them to avoid Illegal State Exceptions.
    */
    // NOTE(review): this lazy singleton is not synchronized — concurrent first
    // calls could race; confirm callers funnel through one thread.
    public static DMDataSource getInstance(Context context) {

        // if the datasource isn't open or it the object is null
        if (dataSource == null ||
                dataSource.getDatabase() == null ||
                !dataSource.getDatabase().isOpen()) {

            dataSource = new DMDataSource(context); // create the database
            dataSource.open(); // open the database
        }

        return dataSource;
    }

    // Database fields
    private SQLiteDatabase database;
    private DMSQLiteHelper dbHelper;
    // Columns fetched by every query; order must match the cursor consumers.
    public String[] allColumns = {DMSQLiteHelper.COLUMN_ID, DMSQLiteHelper.COLUMN_TWEET_ID,
            DMSQLiteHelper.COLUMN_ACCOUNT, DMSQLiteHelper.COLUMN_TYPE, DMSQLiteHelper.COLUMN_TEXT,
            DMSQLiteHelper.COLUMN_NAME, DMSQLiteHelper.COLUMN_PRO_PIC,
            DMSQLiteHelper.COLUMN_SCREEN_NAME, DMSQLiteHelper.COLUMN_TIME,
            DMSQLiteHelper.COLUMN_PIC_URL, DMSQLiteHelper.COLUMN_RETWEETER,
            DMSQLiteHelper.COLUMN_URL, HomeSQLiteHelper.COLUMN_USERS, HomeSQLiteHelper.COLUMN_HASHTAGS,
            DMSQLiteHelper.COLUMN_EXTRA_ONE, DMSQLiteHelper.COLUMN_EXTRA_TWO };

    public DMDataSource(Context context) {
        dbHelper = new DMSQLiteHelper(context);
    }

    // Opens a writable database; on failure, tears the singleton down so the
    // next getInstance() call rebuilds it.
    public void open() throws SQLException {

        try {
            database = dbHelper.getWritableDatabase();
        } catch (Exception e) {
            close();
        }
    }

    public void close() {
        try {
            dbHelper.close();
        } catch (Exception e) {

        }
        database = null;
        dataSource = null;
    }

    public SQLiteDatabase getDatabase() {
        return database;
    }

    public DMSQLiteHelper getHelper() {
        return dbHelper;
    }

    /**
     * Inserts a received direct message for the given account. If the insert
     * fails (e.g. the database was closed underneath us) it reopens and
     * retries once.
     */
    public synchronized void createDirectMessage(DirectMessage status, int account) {
        ContentValues values = new ContentValues();
        long time = status.getCreatedAt().getTime();

        values.put(DMSQLiteHelper.COLUMN_ACCOUNT, account);
        values.put(DMSQLiteHelper.COLUMN_TEXT, TweetLinkUtils.getLinksInStatus(status)[0]);
        values.put(DMSQLiteHelper.COLUMN_TWEET_ID, status.getId());
        values.put(DMSQLiteHelper.COLUMN_NAME, status.getSender().getName());
        values.put(DMSQLiteHelper.COLUMN_PRO_PIC, status.getSender().getOriginalProfileImageURL());
        values.put(DMSQLiteHelper.COLUMN_SCREEN_NAME, status.getSender().getScreenName());
        values.put(DMSQLiteHelper.COLUMN_TIME, time);
        values.put(DMSQLiteHelper.COLUMN_RETWEETER, status.getRecipientScreenName());
        values.put(DMSQLiteHelper.COLUMN_EXTRA_ONE, status.getRecipient().getOriginalProfileImageURL());
        values.put(DMSQLiteHelper.COLUMN_EXTRA_TWO, status.getRecipient().getName());
        // NOTE(review): this uses HomeSQLiteHelper.COLUMN_PIC_URL while the
        // media branch below uses DMSQLiteHelper.COLUMN_PIC_URL — presumably
        // both constants name the same column; confirm.
        values.put(HomeSQLiteHelper.COLUMN_PIC_URL, TweetLinkUtils.getLinksInStatus(status)[1]);

        MediaEntity[] entities = status.getMediaEntities();
        if (entities.length > 0) {
            // Prefer the first attached media URL over the link-derived one.
            values.put(DMSQLiteHelper.COLUMN_PIC_URL, entities[0].getMediaURL());
        }

        URLEntity[] urls = status.getURLEntities();
        for (URLEntity url : urls) {
            Log.v("inserting_dm", "url here: " + url.getExpandedURL());
            // Only the last URL entity survives; each put overwrites the previous.
            values.put(DMSQLiteHelper.COLUMN_URL, url.getExpandedURL());
        }

        try {
            database.insert(DMSQLiteHelper.TABLE_DM, null, values);
        } catch (Exception e) {
            open();
            database.insert(DMSQLiteHelper.TABLE_DM, null, values);
        }
    }

    /**
     * Inserts a direct message sent by the signed-in user (whose identity
     * comes from {@code settings}) to {@code recipient}. Retries the insert
     * once after reopening on failure.
     */
    public synchronized void createSentDirectMessage(DirectMessageEvent status, User recipient, AppSettings settings, int account) {
        ContentValues values = new ContentValues();
        long time = status.getCreatedTimestamp().getTime();

        // getLinksInStatus returns [text, media, url, hashtags, users].
        String[] html = TweetLinkUtils.getLinksInStatus(status);
        String text = html[0];
        String media = html[1];
        String url = html[2];
        String hashtags = html[3];
        String users = html[4];

        values.put(DMSQLiteHelper.COLUMN_ACCOUNT, account);
        values.put(DMSQLiteHelper.COLUMN_TEXT, text);
        values.put(DMSQLiteHelper.COLUMN_TWEET_ID, status.getId());
        values.put(DMSQLiteHelper.COLUMN_NAME, settings.myName);
        values.put(DMSQLiteHelper.COLUMN_PRO_PIC, settings.myProfilePicUrl);
        values.put(DMSQLiteHelper.COLUMN_SCREEN_NAME, settings.myScreenName);
        values.put(DMSQLiteHelper.COLUMN_TIME, time);
        values.put(DMSQLiteHelper.COLUMN_RETWEETER, recipient.getScreenName());
        values.put(DMSQLiteHelper.COLUMN_EXTRA_ONE, recipient.getOriginalProfileImageURL());
        values.put(DMSQLiteHelper.COLUMN_EXTRA_TWO, recipient.getName());
        values.put(HomeSQLiteHelper.COLUMN_PIC_URL, media);
        values.put(DMSQLiteHelper.COLUMN_EXTRA_THREE, TweetLinkUtils.getGIFUrl(status.getMediaEntities(), url));

        MediaEntity[] entities = status.getMediaEntities();
        if (entities.length > 0) {
            values.put(DMSQLiteHelper.COLUMN_PIC_URL, entities[0].getMediaURL());
        }

        URLEntity[] urls = status.getUrlEntities();
        for (URLEntity u : urls) {
            Log.v("inserting_dm", "url here: " + u.getExpandedURL());
            values.put(DMSQLiteHelper.COLUMN_URL, u.getExpandedURL());
        }

        try {
            database.insert(DMSQLiteHelper.TABLE_DM, null, values);
        } catch (Exception e) {
            open();
            database.insert(DMSQLiteHelper.TABLE_DM, null, values);
        }
    }

    // Deletes the single message row with the given id.
    public synchronized void deleteTweet(long tweetId) {
        long id = tweetId;

        try {
            database.delete(DMSQLiteHelper.TABLE_DM, DMSQLiteHelper.COLUMN_TWEET_ID + " = " + id, null);
        } catch (Exception e) {
            open();
            database.delete(DMSQLiteHelper.TABLE_DM, DMSQLiteHelper.COLUMN_TWEET_ID + " = " + id, null);
        }
    }

    // Deletes every message row belonging to the given account.
    public synchronized void deleteAllTweets(int account) {

        try {
            database.delete(DMSQLiteHelper.TABLE_DM, DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account, null);
        } catch (Exception e) {
            open();
            database.delete(DMSQLiteHelper.TABLE_DM, DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account, null);
        }
    }

    /**
     * Returns all messages for the account, de-duplicated by tweet id
     * (distinct + GROUP BY) and ordered oldest-first.
     */
    public synchronized Cursor getCursor(int account) {

        Cursor cursor;
        try {
            cursor = database.query(true, DMSQLiteHelper.TABLE_DM,
                    allColumns, DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account, null, DMSQLiteHelper.COLUMN_TWEET_ID, null, HomeSQLiteHelper.COLUMN_TWEET_ID + " ASC", null);
        } catch (Exception e) {
            open();
            cursor = database.query(true, DMSQLiteHelper.TABLE_DM,
                    allColumns, DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account, null, DMSQLiteHelper.COLUMN_TWEET_ID, null, HomeSQLiteHelper.COLUMN_TWEET_ID + " ASC", null);
        }

        return cursor;
    }

    /**
     * Returns the conversation with {@code name} for the account: rows where
     * that user is either the sender or the recipient, newest-first.
     */
    public synchronized Cursor getConvCursor(String name, int account) {
        Cursor cursor;
        try {
            cursor = database.query(true, DMSQLiteHelper.TABLE_DM,
                    allColumns, DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account + " AND (" + DMSQLiteHelper.COLUMN_SCREEN_NAME + " = ? OR " + DMSQLiteHelper.COLUMN_RETWEETER + " = ?)", new String[] {name, name}, DMSQLiteHelper.COLUMN_TWEET_ID, null, HomeSQLiteHelper.COLUMN_TWEET_ID + " DESC", null);
        } catch (Exception e) {
            open();
            cursor = database.query(true, DMSQLiteHelper.TABLE_DM,
                    allColumns, DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account + " AND (" + DMSQLiteHelper.COLUMN_SCREEN_NAME + " = ? OR " + DMSQLiteHelper.COLUMN_RETWEETER + " = ?)", new String[] {name, name}, DMSQLiteHelper.COLUMN_TWEET_ID, null, HomeSQLiteHelper.COLUMN_TWEET_ID + " DESC", null);
        }

        return cursor;
    }

    // Screen name of the most recent message for the account ("" if none).
    public synchronized String getNewestName(int account) {

        Cursor cursor = getCursor(account);
        String name = "";

        try {
            if (cursor.moveToLast()) {
                name = cursor.getString(cursor.getColumnIndex(DMSQLiteHelper.COLUMN_SCREEN_NAME));
            }
        } catch (Exception e) {

        }

        cursor.close();

        return name;
    }

    // Text of the most recent message for the account ("" if none).
    public synchronized String getNewestMessage(int account) {

        Cursor cursor = getCursor(account);
        String message = "";

        try {
            if (cursor.moveToLast()) {
                message = cursor.getString(cursor.getColumnIndex(DMSQLiteHelper.COLUMN_TEXT));
            }
        } catch (Exception e) {

        }

        cursor.close();

        return message;
    }

    // Removes duplicate rows for the account, keeping the row with the
    // smallest _id for each tweet id.
    public synchronized void deleteDups(int account) {

        try {
            database.execSQL("DELETE FROM " + DMSQLiteHelper.TABLE_DM +
                    " WHERE _id NOT IN (SELECT MIN(_id) FROM " + DMSQLiteHelper.TABLE_DM +
                    " GROUP BY " + DMSQLiteHelper.COLUMN_TWEET_ID + ") AND " + DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account);
        } catch (Exception e) {
            open();
            database.execSQL("DELETE FROM " + DMSQLiteHelper.TABLE_DM +
                    " WHERE _id NOT IN (SELECT MIN(_id) FROM " + DMSQLiteHelper.TABLE_DM +
                    " GROUP BY " + DMSQLiteHelper.COLUMN_TWEET_ID + ") AND " + DMSQLiteHelper.COLUMN_ACCOUNT + " = " + account);
        }
    }

    // Replaces the stored text of one message (used to strip HTML markup).
    public synchronized void removeHTML(long tweetId, String text) {
        ContentValues cv = new ContentValues();
        cv.put(DMSQLiteHelper.COLUMN_TEXT, text);

        if (database == null || !database.isOpen()) {
            open();
        }

        database.update(DMSQLiteHelper.TABLE_DM,
                cv,
                DMSQLiteHelper.COLUMN_TWEET_ID + " = ?",
                new String[] {tweetId + ""});
    }
}
4,795
4,303
#ifndef CONVERT_MODEL_H_
#define CONVERT_MODEL_H_

#include "Halide.h"
#include "onnx/onnx.pb.h"

#include <unordered_map>
#include <vector>

// A tensor in the Halide representation of an ONNX graph.
struct Tensor {
  std::string name;                  // ONNX tensor name
  onnx::TensorProto::DataType type;  // element type as declared by ONNX
  std::vector<Halide::Expr> shape;   // dimension sizes, possibly symbolic
  Halide::Func rep;                  // Halide function producing the values
};

// The Halide translation of a single ONNX node.
struct Node {
  std::vector<Tensor> inputs;
  std::vector<Tensor> outputs;
  // Constraints (e.g. on symbolic dimensions) that must hold for the
  // generated code to be valid.
  std::vector<Halide::Expr> requirements;
};

// Translates one ONNX node into Halide, given its already-converted inputs.
Node convert_node(
    const onnx::NodeProto &node,
    const std::vector<Tensor> &inputs);

// A fully converted ONNX model, keyed by tensor name.
struct Model {
  std::unordered_map<std::string, Halide::ImageParam> inputs;
  std::unordered_map<std::string, Tensor> outputs;
  std::unordered_map<std::string, Tensor> tensors;
  std::vector<Halide::Expr> requirements;
};

// Layout of the inputs and outputs to the model.
enum IOLayout {
  Native = 0,
  NumPy = 1,
};

// Converts a whole ONNX model. expected_dim_sizes fixes the sizes of named
// symbolic dimensions; layout selects how input/output buffers are laid out.
Model convert_model(const onnx::ModelProto &model,
                    const std::unordered_map<std::string, int> &expected_dim_sizes,
                    IOLayout layout);

// Halide element type corresponding to the tensor's declared ONNX type.
Halide::Type get_halide_type(const Tensor &tensor);

// Resolves the model's output shapes given concrete input shapes.
void compute_output_shapes(
    const Model &model,
    const std::map<std::string, std::vector<int>> &input_shapes,
    std::map<std::string, std::vector<int>> *output_shapes);

// Fills input_shapes with the shapes the converted model expects.
void extract_expected_input_shapes(
    const Model &model, std::map<std::string, std::vector<int>> *input_shapes);

// Fills output_shapes with the shapes the converted model will produce.
void compute_expected_output_shapes(
    const Model &model, std::map<std::string, std::vector<int>> *output_shapes);

#endif
566
551
<gh_stars>100-1000 package com.klinker.android.peekview; import android.animation.Animator; import android.animation.ObjectAnimator; import android.app.Activity; import android.graphics.Color; import android.graphics.Point; import android.support.annotation.FloatRange; import android.support.annotation.LayoutRes; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.view.Display; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.animation.DecelerateInterpolator; import android.view.animation.Interpolator; import android.widget.FrameLayout; import com.klinker.android.peekview.builder.PeekViewOptions; import com.klinker.android.peekview.callback.OnPeek; import com.klinker.android.peekview.util.DensityUtils; import com.klinker.android.peekview.util.NavigationUtils; import jp.wasabeef.blurry.Blurry; public class PeekView extends FrameLayout { private static final int ANIMATION_TIME = 300; private static final Interpolator INTERPOLATOR = new DecelerateInterpolator(); private static final int FINGER_SIZE_DP = 40; private int FINGER_SIZE; private View content; private ViewGroup.LayoutParams contentParams; private View dim; private PeekViewOptions options; private int distanceFromTop; private int distanceFromLeft; private int screenWidth; private int screenHeight; private ViewGroup androidContentView = null; private OnPeek callbacks; public PeekView(Activity context, PeekViewOptions options, @LayoutRes int layoutRes, @Nullable OnPeek callbacks) { super(context); init(context, options, LayoutInflater.from(context).inflate(layoutRes, this, false), callbacks); } public PeekView(Activity context, PeekViewOptions options, @NonNull View content, @Nullable OnPeek callbacks) { super(context); init(context, options, content, callbacks); } private void init(Activity context, PeekViewOptions options, @NonNull View content, @Nullable OnPeek 
callbacks) { this.options = options; this.callbacks = callbacks; FINGER_SIZE = DensityUtils.toPx(context, FINGER_SIZE_DP); // get the main content view of the display androidContentView = (FrameLayout) context.findViewById(android.R.id.content).getRootView(); // initialize the display size Display display = context.getWindowManager().getDefaultDisplay(); Point size = new Point(); display.getSize(size); screenHeight = size.y; screenWidth = size.x; // set up the content we want to show this.content = content; contentParams = content.getLayoutParams(); if (options.getAbsoluteHeight() != 0) { setHeight(DensityUtils.toPx(context, options.getAbsoluteHeight())); } else { setHeightByPercent(options.getHeightPercent()); } if (options.getAbsoluteWidth() != 0) { setWidth(DensityUtils.toPx(context, options.getAbsoluteWidth())); } else { setWidthByPercent(options.getWidthPercent()); } // tell the code that the view has been onInflated and let them use it to // set up the layout. if (callbacks != null) { callbacks.onInflated(content); } // add the background dim to the frame dim = new View(context); dim.setBackgroundColor(Color.BLACK); dim.setAlpha(options.getBackgroundDim()); FrameLayout.LayoutParams dimParams = new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); dim.setLayoutParams(dimParams); if (options.shouldBlurBackground()) { Blurry.with(context) .radius(2) .sampling(5) .animate() .color(options.getBlurOverlayColor()) .onto((ViewGroup) androidContentView.getRootView()); dim.setAlpha(0f); } // add the dim and the content view to the upper level frame layout addView(dim); addView(content); } /** * Sets how far away from the top of the screen the view should be displayed. * Distance should be the value in PX. * * @param distance the distance from the top in px. */ private void setDistanceFromTop(int distance) { this.distanceFromTop = options.fullScreenPeek() ? 
0 : distance; } /** * Sets how far away from the left side of the screen the view should be displayed. * Distance should be the value in PX. * * @param distance the distance from the left in px. */ private void setDistanceFromLeft(int distance) { this.distanceFromLeft = options.fullScreenPeek() ? 0 : distance; } /** * Sets the width of the view in PX. * * @param width the width of the circle in px */ private void setWidth(int width) { contentParams.width = options.fullScreenPeek() ? screenWidth : width; content.setLayoutParams(contentParams); } /** * Sets the height of the view in PX. * * @param height the height of the circle in px */ private void setHeight(int height) { contentParams.height = options.fullScreenPeek() ? screenHeight : height; content.setLayoutParams(contentParams); } /** * Sets the width of the window according to the screen width. * * @param percent of screen width */ public void setWidthByPercent(@FloatRange(from=0,to=1) float percent) { setWidth((int) (screenWidth * percent)); } /** * Sets the height of the window according to the screen height. * * @param percent of screen height */ public void setHeightByPercent(@FloatRange(from=0,to=1) float percent) { setHeight((int) (screenHeight * percent)); } /** * Places the peek view over the top of a motion event. This will translate the motion event's start points * so that the PeekView isn't covered by the finger. 
* * @param event event that activates the peek view */ public void setOffsetByMotionEvent(MotionEvent event) { int x = (int) event.getRawX(); int y = (int) event.getRawY(); if (x + contentParams.width + FINGER_SIZE < screenWidth) { setContentOffset(x, y, Translation.HORIZONTAL, FINGER_SIZE); } else if (x - FINGER_SIZE - contentParams.width > 0) { setContentOffset(x, y, Translation.HORIZONTAL, -1 * FINGER_SIZE); } else if (y + contentParams.height + FINGER_SIZE < screenHeight) { setContentOffset(x, y, Translation.VERTICAL, FINGER_SIZE); } else if (y - FINGER_SIZE - contentParams.height > 0) { setContentOffset(x, y, Translation.VERTICAL, -1 * FINGER_SIZE); } else { // it won't fit anywhere if (x < screenWidth / 2) { setContentOffset(x, y, Translation.HORIZONTAL, FINGER_SIZE); } else { setContentOffset(x, y, Translation.HORIZONTAL, -1 * FINGER_SIZE); } } } /** * Show the PeekView over the point of motion * * @param startX * @param startY */ private void setContentOffset(int startX, int startY, Translation translation, int movementAmount) { if (translation == Translation.VERTICAL) { // center the X around the start point int originalStartX = startX; startX -= contentParams.width / 2; // if Y is in the lower half, we want it to go up, otherwise, leave it the same boolean moveDown = true; if (startY + contentParams.height + FINGER_SIZE > screenHeight) { startY -= contentParams.height; moveDown = false; if (movementAmount > 0) { movementAmount *= -1; } } // when moving the peek view below the finger location, we want to offset it a bit to the right // or left as well, just so the hand doesn't cover it up. 
int extraXOffset = 0; if (moveDown) { extraXOffset = DensityUtils.toPx(getContext(), 200); if (originalStartX > screenWidth / 2) { extraXOffset = extraXOffset * -1; // move it a bit to the left } } // make sure they aren't outside of the layout bounds and move them with the movementAmount // I move the x just a bit to the right or left here as well, because it just makes things look better startX = ensureWithinBounds(startX + extraXOffset, screenWidth, contentParams.width); startY = ensureWithinBounds(startY + movementAmount, screenHeight, contentParams.height); } else { // center the Y around the start point startY -= contentParams.height / 2; // if X is in the right half, we want it to go left if (startX + contentParams.width + FINGER_SIZE > screenWidth) { startX -= contentParams.width; if (movementAmount > 0) { movementAmount *= -1; } } // make sure they aren't outside of the layout bounds and move them with the movementAmount startX = ensureWithinBounds(startX + movementAmount, screenWidth, contentParams.width); startY = ensureWithinBounds(startY, screenHeight, contentParams.height); } // check to see if the system bars are covering anything int statusBar = NavigationUtils.getStatusBarHeight(getContext()); if (startY < statusBar) { // if it is above the status bar and action bar startY = statusBar + 10; } else if (NavigationUtils.hasNavBar(getContext()) && startY + contentParams.height > screenHeight - NavigationUtils.getNavBarHeight(getContext())) { // if there is a nav bar and the popup is underneath it startY = screenHeight - contentParams.height - NavigationUtils.getNavBarHeight(getContext()) - DensityUtils.toDp(getContext(), 10); } else if (!NavigationUtils.hasNavBar(getContext()) && startY + contentParams.height > screenHeight) { startY = screenHeight - contentParams.height - DensityUtils.toDp(getContext(), 10); } // set the newly computed distances from the start and top sides setDistanceFromLeft(startX); setDistanceFromTop(startY); } private int 
ensureWithinBounds(int value, int screenSize, int contentSize) { // check these against the layout bounds if (value < 0) { // if it is off the left side value = 10; } else if (value > screenSize - contentSize) { // if it is off the right side value = screenSize - contentSize - 10; } return value; } /** * Show the content of the PeekView by adding it to the android.R.id.content FrameLayout. */ public void show() { androidContentView.addView(this); // set the translations for the content view content.setTranslationX(distanceFromLeft); content.setTranslationY(distanceFromTop); // animate the alpha of the PeekView ObjectAnimator animator = ObjectAnimator.ofFloat(this, View.ALPHA, 0.0f, 1.0f); animator.addListener(new AnimatorEndListener() { @Override public void onAnimationEnd(Animator animator) { if (callbacks != null) { callbacks.shown(); } } }); animator.setDuration(options.useFadeAnimation() ? ANIMATION_TIME : 0); animator.setInterpolator(INTERPOLATOR); animator.start(); } /** * Hide the PeekView and remove it from the android.R.id.content FrameLayout. */ public void hide() { // animate with a fade ObjectAnimator animator = ObjectAnimator.ofFloat(this, View.ALPHA, 1.0f, 0.0f); animator.addListener(new AnimatorEndListener() { @Override public void onAnimationEnd(Animator animator) { // remove the view from the screen androidContentView.removeView(PeekView.this); if (callbacks != null) { callbacks.dismissed(); } } }); animator.setDuration(options.useFadeAnimation() ? ANIMATION_TIME : 0); animator.setInterpolator(INTERPOLATOR); animator.start(); Blurry.delete((ViewGroup) androidContentView.getRootView()); } /** * Wrapper class so we only have to implement the onAnimationEnd method. 
*/ private abstract class AnimatorEndListener implements Animator.AnimatorListener { @Override public void onAnimationStart(Animator animator) { } @Override public void onAnimationCancel(Animator animator) { } @Override public void onAnimationRepeat(Animator animator) { } } private enum Translation { HORIZONTAL, VERTICAL } }
5,397
683
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

package io.opentelemetry.javaagent.instrumentation.spring.batch.chunk;

import org.springframework.batch.core.scope.context.ChunkContext;

/**
 * Immutable pairing of a Spring Batch {@link ChunkContext} with a builder class, carried together
 * through the chunk instrumentation. Both values are set once at construction and never change.
 */
class ChunkContextAndBuilder {
  /** Context of the chunk this pair describes. */
  final ChunkContext chunkContext;

  /** Builder class associated with this chunk (semantics defined by the code constructing the pair). */
  final Class<?> builderClass;

  ChunkContextAndBuilder(ChunkContext context, Class<?> builder) {
    this.chunkContext = context;
    this.builderClass = builder;
  }
}
149
6,828
#!/usr/bin/env python3
"""Repository hygiene check: flags files with Windows (CRLF) line endings,
trailing whitespace, or a missing newline at end of file.

Exits with the number of problems found (0 means the tree is clean)."""

import os

# Relative paths (without the leading './') that are exempt from all checks.
ignores = [
    '.git/',
    'misc/afl-fuzz-tokens/',
    'ninja_deps',
    'src/depfile_parser.cc',
    'src/lexer.cc',
]

error_count = 0


def error(path, msg):
    """Report one problem in red and bump the global error counter."""
    global error_count
    error_count += 1
    print('\x1b[1;31m{}\x1b[0;31m{}\x1b[0m'.format(path, msg))


for root, dirs, filenames in os.walk('.'):
    for filename in filenames:
        # Drop the leading './' so the prefixes in `ignores` can match.
        path = os.path.join(root, filename)[2:]
        if path.startswith(tuple(ignores)):
            continue
        with open(path, 'rb') as fh:
            try:
                # Decode everything up front: a decoding failure anywhere
                # marks the whole file as binary and skips it, exactly as the
                # eager list comprehension did.
                lines = [raw.decode() for raw in fh.readlines()]
            except UnicodeError:
                continue  # binary file
            for line_nr, line in enumerate(lines, start=1):
                if not line or line[-1] != '\n':
                    error(path, ' missing newline at end of file.')
                if len(line) > 1:
                    if line[-2] == '\r':
                        # One report per file is enough; stop scanning it.
                        error(path, ' has Windows line endings.')
                        break
                    if line[-2] in (' ', '\t'):
                        error(path, ':{} has trailing whitespace.'.format(line_nr))

exit(error_count)
457
1,144
<filename>backend/de.metas.adempiere.adempiere/base/src/test/java/de/metas/dataentry/data/DataEntryRecordFieldTest.java package de.metas.dataentry.data; import static de.metas.dataentry.data.DataEntryRecordTestConstants.CREATED_UPDATED_INFO; import static org.assertj.core.api.Assertions.assertThat; import java.math.BigDecimal; import java.time.LocalDate; import java.time.Month; import org.compiere.util.TimeUtil; import org.junit.Test; import com.google.common.collect.ImmutableCollection; import com.google.common.collect.ImmutableMultimap; import de.metas.dataentry.DataEntryFieldId; import de.metas.dataentry.DataEntryListValueId; import de.metas.dataentry.FieldType; import de.metas.dataentry.layout.DataEntryField; import de.metas.dataentry.layout.DataEntryListValue; import de.metas.i18n.TranslatableStrings; /* * #%L * de.metas.adempiere.adempiere.base * %% * Copyright (C) 2019 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. 
* #L% */ public class DataEntryRecordFieldTest { @Test(expected = NullPointerException.class) public void createDataEntryRecordField_nonNull() { DataEntryRecordField.createDataEntryRecordField(DataEntryFieldId.ofRepoId(1), CREATED_UPDATED_INFO, null); } @Test public void createDataEntryRecordField_String() { final DataEntryRecordField<String> result1 = DataEntryRecordField.createDataEntryRecordField(DataEntryFieldId.ofRepoId(1), CREATED_UPDATED_INFO, "string"); assertThat(result1.getValue()).isEqualTo("string"); } @Test public void createDataEntryRecordField_Bool() { final DataEntryRecordField<Boolean> result1 = DataEntryRecordField.createDataEntryRecordField(DataEntryFieldId.ofRepoId(1), CREATED_UPDATED_INFO, true); assertThat(result1.getValue()).isEqualTo(true); } @Test public void createDataEntryRecordField_BigDecimal() { final DataEntryRecordField<BigDecimal> result1 = DataEntryRecordField.createDataEntryRecordField(DataEntryFieldId.ofRepoId(1), CREATED_UPDATED_INFO, new BigDecimal("15")); assertThat(result1.getValue()).isEqualTo(new BigDecimal("15")); } @Test public void createDataEntryRecordField_LocalDate() { final DataEntryRecordField<LocalDate> result1 = DataEntryRecordField.createDataEntryRecordField(DataEntryFieldId.ofRepoId(1), CREATED_UPDATED_INFO, DataEntryRecordTestConstants.DATE); assertThat(result1.getValue()).isEqualTo(DataEntryRecordTestConstants.DATE); } @Test public void convertValueToFieldType_MakeSureCoversPrimitiveDataTypes() { final ImmutableMultimap<Class<?>, Object> testValuesByClass = ImmutableMultimap.<Class<?>, Object> builder() .put(Integer.class, 1234) .put(Integer.class, "1234") .put(Integer.class, BigDecimal.valueOf(1234)) // .put(String.class, "some dummy") .put(String.class, 111) .put(String.class, new BigDecimal("1234.55")) // .put(BigDecimal.class, new BigDecimal("123.456")) .put(BigDecimal.class, "123.456") .put(BigDecimal.class, 123) // .put(Boolean.class, Boolean.TRUE) .put(Boolean.class, "Y") .put(Boolean.class, "N") 
.put(Boolean.class, "true") .put(Boolean.class, "false") // .put(LocalDate.class, LocalDate.of(2019, Month.JULY, 3)) .put(LocalDate.class, "2019-07-03") .put(LocalDate.class, TimeUtil.asTimestamp(LocalDate.of(2019, Month.JULY, 3))) // .build(); for (final FieldType fieldType : FieldType.values()) { // Skip types not handled by this test if (FieldType.LIST.equals(fieldType)) { continue; } final DataEntryField field = DataEntryField.builder() .id(DataEntryFieldId.ofRepoId(123)) .caption(TranslatableStrings.anyLanguage("caption")) .description(TranslatableStrings.anyLanguage("description")) .type(fieldType) .build(); final Class<?> valueType = fieldType.getClazz(); final ImmutableCollection<Object> testValues = testValuesByClass.get(valueType); assertThat(testValues) .as("Test values shall exist for " + valueType + " - " + field) .isNotEmpty(); for (final Object value : testValues) { final Object valueConv = DataEntryRecordField.convertValueToFieldType(value, field); assertThat(valueConv).isInstanceOf(valueType); } } } @Test public void convertValueToFieldType_ListDataType() { final DataEntryField field = DataEntryField.builder() .id(DataEntryFieldId.ofRepoId(123)) .caption(TranslatableStrings.anyLanguage("caption")) .description(TranslatableStrings.anyLanguage("description")) .type(FieldType.LIST) .listValue(DataEntryListValue.builder() .id(DataEntryListValueId.ofRepoId(1)) .name(TranslatableStrings.anyLanguage("item1")) .build()) .listValue(DataEntryListValue.builder() .id(DataEntryListValueId.ofRepoId(2)) .name(TranslatableStrings.anyLanguage("item2")) .build()) .build(); assertThat(DataEntryRecordField.convertValueToFieldType(1, field)) .isEqualTo(DataEntryListValueId.ofRepoId(1)); assertThat(DataEntryRecordField.convertValueToFieldType("1", field)) .isEqualTo(DataEntryListValueId.ofRepoId(1)); assertThat(DataEntryRecordField.convertValueToFieldType("item1", field)) .isEqualTo(DataEntryListValueId.ofRepoId(1)); 
assertThat(DataEntryRecordField.convertValueToFieldType(2, field)) .isEqualTo(DataEntryListValueId.ofRepoId(2)); assertThat(DataEntryRecordField.convertValueToFieldType("2", field)) .isEqualTo(DataEntryListValueId.ofRepoId(2)); assertThat(DataEntryRecordField.convertValueToFieldType("item2", field)) .isEqualTo(DataEntryListValueId.ofRepoId(2)); } }
2,265
9,402
<filename>src/mono/dlls/mscordbi/cordb-assembly.cpp<gh_stars>1000+ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // // File: CORDB-ASSEMBLY.CPP // #include <cordb-appdomain.h> #include <cordb-assembly.h> #include <cordb-class.h> #include <cordb-function.h> #include <cordb-process.h> #include <cordb.h> #include "corerror.h" #include "metamodel.h" #include "metamodelpub.h" #include "rwutil.h" #include "stdafx.h" #include "stgio.h" #include "importhelper.h" #include <metamodelrw.h> #include "mdlog.h" #include "mdperf.h" #include "regmeta.h" #include "ex.h" using namespace std; CordbAssembly::CordbAssembly(Connection* conn, CordbProcess* process, CordbAppDomain* appDomain, int id_assembly) : CordbBaseMono(conn) { m_pProcess = process; m_pAppDomain = appDomain; m_pAppDomain->InternalAddRef(); m_debuggerId = id_assembly; m_pAssemblyName = NULL; } CordbAssembly::~CordbAssembly() { m_pAppDomain->InternalRelease(); if (m_pAssemblyName) free(m_pAssemblyName); } HRESULT CordbAssembly::IsFullyTrusted(BOOL* pbFullyTrusted) { *pbFullyTrusted = true; LOG((LF_CORDB, LL_INFO100000, "CorDebugAssembly - IsFullyTrusted - NOT IMPLEMENTED\n")); return S_OK; } HRESULT CordbAssembly::GetAppDomain(ICorDebugAppDomain** ppAppDomain) { LOG((LF_CORDB, LL_INFO1000000, "CorDebugAssembly - GetAppDomain - IMPLEMENTED\n")); m_pAppDomain->QueryInterface(IID_ICorDebugAppDomain, (void**)ppAppDomain); return S_OK; } HRESULT CordbAssembly::EnumerateModules(ICorDebugModuleEnum** ppModules) { LOG((LF_CORDB, LL_INFO100000, "CorDebugAssembly - EnumerateModules - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbAssembly::GetCodeBase(ULONG32 cchName, ULONG32* pcchName, WCHAR szName[]) { LOG((LF_CORDB, LL_INFO100000, "CorDebugAssembly - GetCodeBase - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbAssembly::GetName(ULONG32 cchName, ULONG32* pcchName, WCHAR szName[]) { HRESULT hr = S_OK; EX_TRY { if 
(!m_pAssemblyName) { LOG((LF_CORDB, LL_INFO1000000, "CordbAssembly - GetName - IMPLEMENTED\n")); MdbgProtBuffer localbuf; m_dbgprot_buffer_init(&localbuf, 128); m_dbgprot_buffer_add_id(&localbuf, m_debuggerId); int cmdId = conn->SendEvent(MDBGPROT_CMD_SET_ASSEMBLY, MDBGPROT_CMD_ASSEMBLY_GET_LOCATION, &localbuf); m_dbgprot_buffer_free(&localbuf); ReceivedReplyPacket* received_reply_packet = conn->GetReplyWithError(cmdId); CHECK_ERROR_RETURN_FALSE(received_reply_packet); MdbgProtBuffer* pReply = received_reply_packet->Buffer(); m_pAssemblyName = m_dbgprot_decode_string_with_len(pReply->p, &pReply->p, pReply->end, &m_nAssemblyNameLen); char* c_mobile_symbols_path = getenv("MOBILE_SYMBOLS_PATH"); if (strlen(c_mobile_symbols_path) > 0) { size_t size_path = strlen(m_pAssemblyName); size_t pos_separator = 0; for (pos_separator = size_path ; pos_separator > 0 ; pos_separator--) { if (m_pAssemblyName[pos_separator] == DIR_SEPARATOR) break; } m_nAssemblyNameLen = (int)(size_path + strlen(c_mobile_symbols_path)); char* symbols_full_path = (char*)malloc(m_nAssemblyNameLen); sprintf_s(symbols_full_path, m_nAssemblyNameLen, "%s%s", c_mobile_symbols_path , m_pAssemblyName + pos_separator + 1); free(m_pAssemblyName); m_pAssemblyName = symbols_full_path; m_nAssemblyNameLen = (int) strlen(m_pAssemblyName); } } if (cchName < (ULONG32) m_nAssemblyNameLen + 1) { *pcchName = m_nAssemblyNameLen + 1; } else { MultiByteToWideChar(CP_UTF8, 0, m_pAssemblyName, -1, szName, cchName); *pcchName = m_nAssemblyNameLen + 1; } } EX_CATCH_HRESULT(hr); return hr; } HRESULT CordbAssembly::QueryInterface(REFIID id, _COM_Outptr_ void __RPC_FAR* __RPC_FAR* ppInterface) { if (id == IID_ICorDebugAssembly) *ppInterface = static_cast<ICorDebugAssembly*>(this); else if (id == IID_ICorDebugAssembly2) *ppInterface = static_cast<ICorDebugAssembly2*>(this); else if (id == IID_IUnknown) *ppInterface = static_cast<IUnknown*>(static_cast<ICorDebugAssembly*>(this)); else { *ppInterface = NULL; return E_NOINTERFACE; } 
AddRef(); return S_OK; } HRESULT CordbAssembly::GetProcess(ICorDebugProcess** ppProcess) { LOG((LF_CORDB, LL_INFO1000000, "CorDebugAssembly - GetProcess - IMPLEMENTED\n")); conn->GetProcess()->QueryInterface(IID_ICorDebugProcess, (void**)ppProcess); return S_OK; } CordbModule::CordbModule(Connection* conn, CordbProcess* process, CordbAssembly* assembly, int id_assembly) : CordbBaseMono(conn) { m_pProcess = process; m_pRegMeta = NULL; m_pAssembly = assembly; m_debuggerId = id_assembly; m_pAssembly->InternalAddRef(); dwFlags = 0; conn->GetProcess()->AddModule(this); m_pPeImage = NULL; m_pAssemblyName = NULL; } CordbModule::~CordbModule() { if (m_pAssembly) m_pAssembly->InternalRelease(); /*if (m_pPeImage) free(m_pPeImage);*/ if (m_pAssemblyName) free(m_pAssemblyName); } HRESULT CordbModule::QueryInterface(REFIID id, void** pInterface) { if (id == IID_ICorDebugModule) { *pInterface = static_cast<ICorDebugModule*>(this); } else if (id == IID_ICorDebugModule2) { *pInterface = static_cast<ICorDebugModule2*>(this); } else if (id == IID_ICorDebugModule3) { *pInterface = static_cast<ICorDebugModule3*>(this); } else if (id == IID_ICorDebugModule4) { *pInterface = static_cast<ICorDebugModule4*>(this); } else if (id == IID_IUnknown) { *pInterface = static_cast<IUnknown*>(static_cast<ICorDebugModule*>(this)); } else { *pInterface = NULL; return E_NOINTERFACE; } AddRef(); return S_OK; } HRESULT CordbModule::IsMappedLayout(BOOL* pIsMapped) { *pIsMapped = FALSE; LOG((LF_CORDB, LL_INFO1000000, "CordbModule - IsMappedLayout - IMPLEMENTED\n")); return S_OK; } HRESULT CordbModule::CreateReaderForInMemorySymbols(REFIID riid, void** ppObj) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - CreateReaderForInMemorySymbols - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::SetJMCStatus(BOOL bIsJustMyCode, ULONG32 cOthers, mdToken pTokens[]) { if (cOthers != 0) { _ASSERTE(!"not yet impl for cOthers != 0"); return E_NOTIMPL; } LOG((LF_CORDB, LL_INFO100000, "CordbModule - 
SetJMCStatus - IMPLEMENTED\n")); //on mono JMC is not by module, for now receiving this for one module, will affect all. if (bIsJustMyCode) conn->GetProcess()->SetJMCStatus(bIsJustMyCode); return S_OK; } HRESULT CordbModule::ApplyChanges(ULONG cbMetadata, BYTE pbMetadata[], ULONG cbIL, BYTE pbIL[]) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - ApplyChanges - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::SetJITCompilerFlags(DWORD dwFlags) { this->dwFlags = dwFlags; LOG((LF_CORDB, LL_INFO100000, "CordbModule - SetJITCompilerFlags - NOT IMPLEMENTED\n")); return S_OK; } HRESULT CordbModule::GetJITCompilerFlags(DWORD* pdwFlags) { *pdwFlags = dwFlags; LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetJITCompilerFlags - NOT IMPLEMENTED\n")); return S_OK; } HRESULT CordbModule::ResolveAssembly(mdToken tkAssemblyRef, ICorDebugAssembly** ppAssembly) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - ResolveAssembly - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::GetProcess(ICorDebugProcess** ppProcess) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetProcess - IMPLEMENTED\n")); conn->GetProcess()->QueryInterface(IID_ICorDebugProcess, (void**)ppProcess); return S_OK; } HRESULT CordbModule::GetBaseAddress(CORDB_ADDRESS* pAddress) { HRESULT hr = S_OK; EX_TRY { if (!m_pPeImage) { MdbgProtBuffer localbuf; m_dbgprot_buffer_init(&localbuf, 128); m_dbgprot_buffer_add_id(&localbuf, GetDebuggerId()); int cmdId = conn->SendEvent(MDBGPROT_CMD_SET_ASSEMBLY, MDBGPROT_CMD_ASSEMBLY_GET_PEIMAGE_ADDRESS, &localbuf); m_dbgprot_buffer_free(&localbuf); ReceivedReplyPacket* received_reply_packet = conn->GetReplyWithError(cmdId); CHECK_ERROR_RETURN_FALSE(received_reply_packet); MdbgProtBuffer* pReply = received_reply_packet->Buffer(); m_pPeImage = m_dbgprot_decode_long(pReply->p, &pReply->p, pReply->end); m_nPeImageSize = m_dbgprot_decode_int(pReply->p, &pReply->p, pReply->end); } LOG((LF_CORDB, LL_INFO1000000, "CordbModule - GetBaseAddress - IMPLEMENTED\n")); 
*pAddress = (CORDB_ADDRESS)m_pPeImage; } EX_CATCH_HRESULT(hr); return hr; } HRESULT CordbModule::GetName(ULONG32 cchName, ULONG32* pcchName, WCHAR szName[]) { return m_pAssembly->GetName(cchName, pcchName, szName); } HRESULT CordbModule::EnableJITDebugging(BOOL bTrackJITInfo, BOOL bAllowJitOpts) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - EnableJITDebugging - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::EnableClassLoadCallbacks(BOOL bClassLoadCallbacks) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - EnableClassLoadCallbacks - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::GetFunctionFromToken(mdMethodDef methodDef, ICorDebugFunction** ppFunction) { HRESULT hr = S_OK; EX_TRY { LOG((LF_CORDB, LL_INFO1000000, "CordbModule - GetFunctionFromToken - IMPLEMENTED\n")); MdbgProtBuffer localbuf; m_dbgprot_buffer_init(&localbuf, 128); m_dbgprot_buffer_add_id(&localbuf, m_debuggerId); m_dbgprot_buffer_add_int(&localbuf, methodDef); int cmdId = conn->SendEvent(MDBGPROT_CMD_SET_ASSEMBLY, MDBGPROT_CMD_ASSEMBLY_GET_METHOD_FROM_TOKEN, &localbuf); m_dbgprot_buffer_free(&localbuf); ReceivedReplyPacket* received_reply_packet = conn->GetReplyWithError(cmdId); if (received_reply_packet->Error() == 0 && received_reply_packet->Error2() == 0) { MdbgProtBuffer* pReply = received_reply_packet->Buffer(); int id = m_dbgprot_decode_id(pReply->p, &pReply->p, pReply->end); CordbFunction* func = NULL; func = m_pProcess->FindFunction(id); if (func == NULL) { func = new CordbFunction(conn, methodDef, id, this); } func->QueryInterface(IID_ICorDebugFunction, (void**)ppFunction); } } EX_CATCH_HRESULT(hr); return hr; } HRESULT CordbModule::GetFunctionFromRVA(CORDB_ADDRESS rva, ICorDebugFunction** ppFunction) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetFunctionFromRVA - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::GetClassFromToken(mdTypeDef typeDef, ICorDebugClass** ppClass) { CordbClass* pClass = conn->GetProcess()->FindOrAddClass(typeDef, 
GetDebuggerId()); pClass->QueryInterface(IID_ICorDebugClass, (void**)ppClass); return S_OK; } HRESULT CordbModule::CreateBreakpoint(ICorDebugModuleBreakpoint** ppBreakpoint) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - CreateBreakpoint - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::GetEditAndContinueSnapshot(ICorDebugEditAndContinueSnapshot** ppEditAndContinueSnapshot) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetEditAndContinueSnapshot - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::GetMetaDataInterface(REFIID riid, IUnknown** ppObj) { if (m_pRegMeta == NULL) { OptionValue optionForNewScope; memset(&optionForNewScope, 0, sizeof(OptionValue)); optionForNewScope.m_ThreadSafetyOptions = MDThreadSafetyOn; m_pRegMeta = new RegMeta(); m_pRegMeta->SetOption(&optionForNewScope); m_pStgdbRW = new CLiteWeightStgdbRW(); ULONG32 pcchName = 0; GetName(0, &pcchName, NULL); WCHAR* full_path; full_path = (WCHAR*)malloc(sizeof(WCHAR) * pcchName); GetName(pcchName, &pcchName, full_path); HRESULT ret = m_pStgdbRW->OpenForRead(full_path, NULL, 0, 0); free(full_path); if (ret != S_OK) { delete m_pRegMeta; delete m_pStgdbRW; m_pRegMeta = NULL; m_pStgdbRW = NULL; return CORDBG_E_MISSING_METADATA; } m_pRegMeta->InitWithStgdb((ICorDebugModule*)this, m_pStgdbRW); } m_pRegMeta->QueryInterface(riid, (void**)ppObj); LOG((LF_CORDB, LL_INFO1000000, "CordbModule - GetMetaDataInterface - IMPLEMENTED\n")); return S_OK; } HRESULT CordbModule::GetToken(mdModule* pToken) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetToken - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::IsDynamic(BOOL* pDynamic) { LOG((LF_CORDB, LL_INFO1000000, "CordbModule - IsDynamic - IMPLEMENTED\n")); HRESULT hr = S_OK; EX_TRY { MdbgProtBuffer localbuf; m_dbgprot_buffer_init(&localbuf, 128); m_dbgprot_buffer_add_id(&localbuf, GetDebuggerId()); int cmdId = conn->SendEvent(MDBGPROT_CMD_SET_ASSEMBLY, MDBGPROT_CMD_ASSEMBLY_GET_IS_DYNAMIC, &localbuf); 
m_dbgprot_buffer_free(&localbuf); ReceivedReplyPacket* received_reply_packet = conn->GetReplyWithError(cmdId); CHECK_ERROR_RETURN_FALSE(received_reply_packet); MdbgProtBuffer* pReply = received_reply_packet->Buffer(); int m_bIsDynamic = m_dbgprot_decode_byte(pReply->p, &pReply->p, pReply->end); *pDynamic = m_bIsDynamic; } EX_CATCH_HRESULT(hr); return hr; } HRESULT CordbModule::GetGlobalVariableValue(mdFieldDef fieldDef, ICorDebugValue** ppValue) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetGlobalVariableValue - NOT IMPLEMENTED\n")); return E_NOTIMPL; } HRESULT CordbModule::GetSize(ULONG32* pcBytes) { LOG((LF_CORDB, LL_INFO100000, "CordbModule - GetSize -IMPLEMENTED\n")); *pcBytes = m_nPeImageSize; return S_OK; } HRESULT CordbModule::IsInMemory(BOOL* pInMemory) { LOG((LF_CORDB, LL_INFO1000000, "CordbModule - IsInMemory - IMPLEMENTED\n")); *pInMemory = FALSE; return S_OK; } HRESULT CordbModule::GetAssembly(ICorDebugAssembly** ppAssembly) { LOG((LF_CORDB, LL_INFO1000000, "CordbModule - GetAssembly - IMPLEMENTED\n")); m_pAssembly->QueryInterface(IID_ICorDebugAssembly, (void**)ppAssembly); return S_OK; }
6,824
3,513
<filename>src/SHADERed/Objects/Debug/Breakpoint.h<gh_stars>1000+ #pragma once #include <SHADERed/Engine/Model.h> #include <SHADERed/Objects/PipelineItem.h> #include <glm/glm.hpp> #include <unordered_map> namespace ed { namespace dbg { class Breakpoint { public: Breakpoint() { Line = 0; IsConditional = false; Condition = ""; } Breakpoint(int ln) { Line = ln; IsConditional = false; Condition = ""; } Breakpoint(int ln, const std::string& cond) { Line = ln; IsConditional = true; Condition = cond; } int Line; bool IsConditional; std::string Condition; }; } }
234
1,352
// pbrt is Copyright(c) 1998-2020 <NAME>, <NAME>, and <NAME>. // The pbrt source code is licensed under the Apache License, Version 2.0. // SPDX: Apache-2.0 #ifndef PBRT_UTIL_MESH_H #define PBRT_UTIL_MESH_H #include <pbrt/pbrt.h> #include <pbrt/util/containers.h> #include <pbrt/util/error.h> #include <pbrt/util/hash.h> #include <pbrt/util/parallel.h> #include <pbrt/util/pstd.h> #include <pbrt/util/vecmath.h> #include <array> #include <string> #include <vector> namespace pbrt { // TriangleMesh Definition class TriangleMesh { public: // TriangleMesh Public Methods TriangleMesh(const Transform &renderFromObject, bool reverseOrientation, std::vector<int> vertexIndices, std::vector<Point3f> p, std::vector<Vector3f> S, std::vector<Normal3f> N, std::vector<Point2f> uv, std::vector<int> faceIndices, Allocator alloc); std::string ToString() const; bool WritePLY(std::string filename) const; static void Init(Allocator alloc); // TriangleMesh Public Members int nTriangles, nVertices; const int *vertexIndices = nullptr; const Point3f *p = nullptr; const Normal3f *n = nullptr; const Vector3f *s = nullptr; const Point2f *uv = nullptr; const int *faceIndices = nullptr; bool reverseOrientation, transformSwapsHandedness; }; // BilinearPatchMesh Definition class BilinearPatchMesh { public: // BilinearPatchMesh Public Methods BilinearPatchMesh(const Transform &renderFromObject, bool reverseOrientation, std::vector<int> vertexIndices, std::vector<Point3f> p, std::vector<Normal3f> N, std::vector<Point2f> uv, std::vector<int> faceIndices, PiecewiseConstant2D *imageDist, Allocator alloc); std::string ToString() const; static void Init(Allocator alloc); // BilinearPatchMesh Public Members bool reverseOrientation, transformSwapsHandedness; int nPatches, nVertices; const int *vertexIndices = nullptr; const Point3f *p = nullptr; const Normal3f *n = nullptr; const Point2f *uv = nullptr; const int *faceIndices = nullptr; PiecewiseConstant2D *imageDistribution; }; // HashIntPair Definition 
struct HashIntPair { PBRT_CPU_GPU size_t operator()(std::pair<int, int> p) const { return MixBits(uint64_t(p.first) << 32 | p.second); }; }; struct TriQuadMesh { // TriQuadMesh Public Methods static TriQuadMesh ReadPLY(const std::string &filename); void ConvertToOnlyTriangles(); void ComputeNormals(); std::string ToString() const; template <typename Dist, typename Disp> TriQuadMesh Displace(Dist &&dist, Float maxDist, Disp &&displace, const FileLoc *loc = nullptr) const { if (uv.empty()) ErrorExit(loc, "Vertex uvs are currently required by Displace(). Sorry.\n"); // Prepare the output mesh TriQuadMesh outputMesh = *this; outputMesh.ConvertToOnlyTriangles(); if (outputMesh.n.empty()) outputMesh.ComputeNormals(); outputMesh.triIndices.clear(); // Refine HashMap<std::pair<int, int>, int, HashIntPair> edgeSplit({}); for (int i = 0; i < triIndices.size() / 3; ++i) outputMesh.Refine(dist, maxDist, triIndices[3 * i], triIndices[3 * i + 1], triIndices[3 * i + 2], edgeSplit); // Displace displace(outputMesh.p.data(), outputMesh.n.data(), outputMesh.uv.data(), outputMesh.p.size()); outputMesh.ComputeNormals(); return outputMesh; } std::vector<Point3f> p; std::vector<Normal3f> n; std::vector<Point2f> uv; std::vector<int> faceIndices; std::vector<int> triIndices, quadIndices; private: // TriQuadMesh Private Methods template <typename Dist> void Refine(Dist &&distance, Float maxDist, int v0, int v1, int v2, HashMap<std::pair<int, int>, int, HashIntPair> &edgeSplit) { Point3f p0 = p[v0], p1 = p[v1], p2 = p[v2]; Float d01 = distance(p0, p1), d12 = distance(p1, p2), d20 = distance(p2, p0); if (d01 < maxDist && d12 < maxDist && d20 < maxDist) { triIndices.push_back(v0); triIndices.push_back(v1); triIndices.push_back(v2); return; } // order so that the first two vertices have the longest edge std::array<int, 3> v; if (d01 > d12) { if (d01 > d20) v = {v0, v1, v2}; else v = {v2, v0, v1}; } else { if (d12 > d20) v = {v1, v2, v0}; else v = {v2, v0, v1}; } // has the edge been spilt 
before? std::pair<int, int> edge(v[0], v[1]); if (v[0] > v[1]) std::swap(edge.first, edge.second); int vmid; if (edgeSplit.HasKey(edge)) { vmid = edgeSplit[edge]; } else { vmid = p.size(); edgeSplit.Insert(edge, vmid); p.push_back((p[v[0]] + p[v[1]]) / 2); if (!n.empty()) { Normal3f nn = n[v[0]] + n[v[1]]; if (LengthSquared(nn) > 0) nn = Normalize(nn); n.push_back(nn); } if (!uv.empty()) uv.push_back((uv[v[0]] + uv[v[1]]) / 2); } Refine(distance, maxDist, v[0], vmid, v[2], edgeSplit); Refine(distance, maxDist, vmid, v[1], v[2], edgeSplit); } }; bool WritePLY(std::string filename, pstd::span<const int> triIndices, pstd::span<const int> quadIndices, pstd::span<const Point3f> p, pstd::span<const Normal3f> n, pstd::span<const Point2f> uv, pstd::span<const int> faceIndices); } // namespace pbrt #endif // PBRT_UTIL_MESH_H
2,824
364
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

/**
 * A "virtual stream" backed by another stream (which provides all the read data), which has been
 * previously written via {@code OutputSubstream}. This can be used to provide a stream to a
 * consumer that expects to "own" the stream, while still safely allowing for subsequent reads.
 *
 * The wire format is a sequence of chunks, each prefixed by a 4-byte big-endian length, terminated
 * by a zero-length chunk.
 *
 * {@link #close()} the stream to consume any remaining bytes of the substream and make the wrapped
 * stream ready to read subsequent data. Closing the substream does not close the underlying stream.
 */
public class InputSubstream extends InputStream {
  private final InputStream _wrapped;

  // Bytes of the current chunk not yet handed to the caller.
  private int _remainingInChunk;
  // True once the zero-length terminator chunk has been seen.
  private boolean _closed = false;
  // If true, close() is a no-op (remaining substream bytes are not consumed).
  private boolean _ignoreClose = false;

  /**
   * Creates a new instance that will be backed by the provided stream.
   *
   * @param wrapped the backing stream providing the read bytes
   */
  public InputSubstream(InputStream wrapped) {
    _wrapped = wrapped;
  }

  /**
   * Configures this stream to ignore (or not) calls to {@link #close()}.
   *
   * The stream will not allow reads beyond the end of the substream regardless of whether closing
   * is enabled.
   *
   * The default behavior is to not ignore a user-requested close.
   *
   * @param ignore whether to ignore calls to the close() method or not
   * @return this instance
   */
  public InputSubstream setIgnoreClose(boolean ignore) {
    _ignoreClose = ignore;
    return this;
  }

  /**
   * Reads the next 4-byte chunk-length header from the wrapped stream. A length of zero marks the
   * end of the substream and flips {@code _closed}.
   *
   * @throws EOFException if the wrapped stream ends before a complete header is read
   */
  private void nextChunk() throws IOException {
    if (!_closed) {
      byte[] buf = new byte[4];
      int readSoFar = 0;
      while (readSoFar < 4) {
        // Fix: the original added read()'s result unchecked; a -1 (EOF) return
        // would decrement readSoFar and spin forever on a truncated stream.
        int count = _wrapped.read(buf, readSoFar, 4 - readSoFar);
        if (count < 0) {
          throw new EOFException("Unexpected end of stream while reading a chunk header");
        }
        readSoFar += count;
      }
      _remainingInChunk = fromBytes(buf);

      if (_remainingInChunk == 0) {
        _closed = true;
      }
    }
  }

  @Override
  public int available() throws IOException {
    if (_closed) {
      return 0;
    }

    int wrappedAvailable = _wrapped.available();
    // If we're between chunks and a full header is already buffered, consume it
    // now so we can report the next chunk's availability.
    if (_remainingInChunk == 0 && wrappedAvailable >= 4) {
      nextChunk();
      wrappedAvailable -= 4;
    }

    // return the number of bytes we can be sure are available:
    return Math.min(_remainingInChunk, wrappedAvailable);
  }

  // convert bytes (encoded in most-significant-bit-first format) to int
  private static int fromBytes(byte[] bytes) {
    return ((bytes[0] & 0xFF) << 24) | ((bytes[1] & 0xFF) << 16) | ((bytes[2] & 0xFF) << 8) | (
        bytes[3] & 0xFF);
  }

  @Override
  public int read() throws IOException {
    byte[] buf = new byte[1];
    if (read(buf, 0, 1) == -1) {
      return -1;
    }
    // Fix: mask off sign extension. Returning the raw (signed) byte violated
    // the InputStream contract (result must be 0-255 or -1); any byte >= 0x80
    // was returned negative, and 0xFF was indistinguishable from EOF.
    return buf[0] & 0xFF;
  }

  @Override
  public int read(byte[] b, int off, int len) throws IOException {
    if (_closed) {
      return -1;
    }
    if (len == 0) {
      return 0; // per the InputStream contract; also avoids consuming a header needlessly
    }
    if (_remainingInChunk == 0) {
      nextChunk();
      if (_closed) {
        return -1;
      }
    }

    int read = _wrapped.read(b, off, Math.min(_remainingInChunk, len));
    // Fix: a premature EOF (-1) previously *incremented* _remainingInChunk via
    // the subtraction below and was returned to the caller as -1 mid-chunk.
    if (read < 0) {
      throw new EOFException("Unexpected end of stream within a chunk");
    }
    _remainingInChunk -= read;
    return read;
  }

  @Override
  public void close() throws IOException {
    if (!_ignoreClose && !_closed) {
      byte[] buffer = new byte[1024];
      while (read(buffer) >= 1) { } // exhaust the substream
      assert _closed;
    }
  }
}
1,127
5,169
<reponame>Ray0218/Specs { "name": "FPStepView", "version": "1.0.0", "summary": "Simple step view", "homepage": "https://github.com/FellowPlusDev/FPStepView", "license": "MIT", "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/FellowPlusDev/FPStepView.git", "tag": "1.0.0" }, "source_files": "FPStepView/Sources/**/*.{h,m}", "requires_arc": true, "platforms": { "ios": "7.0" } }
200
2,497
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * A counting permit holder paired with a single-owner "transaction" flag.
 *
 * The transaction flag serializes compound operations: {@link #tryAcquire()}
 * only takes a permit while the flag is held, making its check-then-decrement
 * sequence appear atomic to other users of the same discipline. Callers may
 * also drive the flag manually via {@link #tryStartTransaction()} /
 * {@link #endTransaction()} and use {@link #unconditionalAcquire()} inside.
 */
public class SizeLock {
    private static final int INITIAL_PERMITS = 1;

    /** Permits currently available for acquisition. */
    private AtomicInteger permits = new AtomicInteger(INITIAL_PERMITS);

    /* This lock is used for making tryAcquire() atomic. */
    private AtomicBoolean lock = new AtomicBoolean();

    /**
     * Attempts to take one permit, claiming the transaction flag for the
     * duration of the check. Returns false when no permit is available or the
     * flag is currently held by someone else.
     */
    public boolean tryAcquire() {
        if (!tryStartTransaction()) {
            return false;
        }
        try {
            return unconditionalAcquire();
        } finally {
            endTransaction();
        }
    }

    /**
     * Takes one permit if any is available, without touching the transaction
     * flag. Only safe when the caller already holds the flag.
     */
    public boolean unconditionalAcquire() {
        final boolean available = permits.get() > 0;
        if (available) {
            permits.getAndDecrement();
        }
        return available;
    }

    /** Adds one permit. */
    public void enlarge() {
        permits.getAndIncrement();
    }

    /** Claims the transaction flag; returns false if it is already held. */
    public boolean tryStartTransaction() {
        return lock.compareAndSet(false, true);
    }

    /** Releases the transaction flag. */
    public void endTransaction() {
        lock.set(false);
    }

    /** Reports the current permit count without modifying it. */
    public int peekPermits() {
        return permits.get();
    }

    /** Restores the permit count to its initial value. */
    public void reset() {
        permits.set(INITIAL_PERMITS);
    }
}
486
1,599
# Copyright (c) 2015-2018 Cisco Systems, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
import os

import pytest

from molecule import state, util


@pytest.fixture
def _instance(config_instance):
    # A fresh State object backed by the shared config fixture; each test gets
    # its own instance so state mutations do not leak between tests.
    return state.State(config_instance)


def test_state_file_property(_instance):
    # The state file lives in the scenario's ephemeral directory.
    x = os.path.join(_instance._config.scenario.ephemeral_directory, "state.yml")

    assert x == _instance.state_file


def test_converged(_instance):
    # All state flags default to falsy on a fresh instance.
    assert not _instance.converged


def test_created(_instance):
    assert not _instance.created


def test_driver(_instance):
    assert not _instance.driver


def test_prepared(_instance):
    assert not _instance.prepared


def test_reset(_instance):
    # reset() clears a previously-set flag back to its default.
    assert not _instance.converged

    _instance.change_state("converged", True)
    assert _instance.converged

    _instance.reset()
    assert not _instance.converged


def test_reset_persists(_instance):
    # reset() must be written through to the on-disk state file as well,
    # not just applied to the in-memory object.
    assert not _instance.converged

    _instance.change_state("converged", True)
    assert _instance.converged

    _instance.reset()
    assert not _instance.converged

    d = util.safe_load_file(_instance.state_file)

    assert not d.get("converged")


def test_change_state_converged(_instance):
    _instance.change_state("converged", True)

    assert _instance.converged


def test_change_state_created(_instance):
    _instance.change_state("created", True)

    assert _instance.created


def test_change_state_driver(_instance):
    _instance.change_state("driver", "foo")

    assert "foo" == _instance.driver


def test_change_state_prepared(_instance):
    _instance.change_state("prepared", True)

    assert _instance.prepared


def test_change_state_raises(_instance):
    # Unknown state keys must be rejected with the dedicated exception.
    with pytest.raises(state.InvalidState):
        _instance.change_state("invalid-state", True)


def test_get_data_loads_existing_state_file(_instance, molecule_data, config_instance):
    # Pre-write a state file on disk, then confirm that a brand-new State
    # instance for the same config picks the persisted values up.
    data = {"converged": False, "created": True, "driver": None, "prepared": None}
    util.write_file(_instance._state_file, util.safe_dump(data))

    s = state.State(config_instance)

    assert not s.converged
    assert s.created
    assert not s.driver
    assert not s.prepared
1,011
384
<reponame>hejamu/gromacs /* * This file is part of the GROMACS molecular simulation package. * * Copyright (c) 1991-2000, University of Groningen, The Netherlands. * Copyright (c) 2001-2004, The GROMACS development team. * Copyright (c) 2013,2014,2015,2017,2018 by the GROMACS development team. * Copyright (c) 2019,2020, by the GROMACS development team, led by * <NAME>, <NAME>, <NAME>, and <NAME>, * and including many others, as listed in the AUTHORS file in the * top-level source directory and at http://www.gromacs.org. * * GROMACS is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public License * as published by the Free Software Foundation; either version 2.1 * of the License, or (at your option) any later version. * * GROMACS is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with GROMACS; if not, see * http://www.gnu.org/licenses, or write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * If you want to redistribute modifications to GROMACS, please * consider that scientific software is very special. Version * control is crucial - bugs must be traceable. We will be happy to * consider code for inclusion in the official distribution, but * derived work must not be called official GROMACS. Details are found * in the README & COPYING files - if they are missing, get the * official version at http://www.gromacs.org. * * To help us fund GROMACS development, we humbly ask that you cite * the research papers on the package. Check out http://www.gromacs.org. 
*/ #include "gmxpre.h" #include "splitter.h" #include <cstdlib> #include <cstring> #include <algorithm> #include "gromacs/pbcutil/mshift.h" #include "gromacs/topology/block.h" #include "gromacs/topology/idef.h" #include "gromacs/utility/fatalerror.h" #include "gromacs/utility/gmxassert.h" #include "gromacs/utility/smalloc.h" typedef struct { int atom, sid; } t_sid; static bool sid_comp(const t_sid& sa, const t_sid& sb) { if (sa.sid == sb.sid) { return sa.atom < sb.atom; } else { return sa.sid < sb.sid; } } static int mk_grey(gmx::ArrayRef<egCol> edgeColor, const t_graph* g, int* AtomI, int maxsid, t_sid sid[]) { int ng, ai, g0; ng = 0; ai = *AtomI; g0 = g->edgeAtomBegin; /* Loop over all the bonds */ for (int aj : g->edges[ai]) { aj -= g0; /* If there is a white one, make it gray and set pbc */ if (edgeColor[aj] == egcolWhite) { if (aj < *AtomI) { *AtomI = aj; } edgeColor[aj] = egcolGrey; /* Check whether this one has been set before... */ range_check(aj + g0, 0, maxsid); range_check(ai + g0, 0, maxsid); if (sid[aj + g0].sid != -1) { gmx_fatal(FARGS, "sid[%d]=%d, sid[%d]=%d, file %s, line %d", ai, sid[ai + g0].sid, aj, sid[aj + g0].sid, __FILE__, __LINE__); } else { sid[aj + g0].sid = sid[ai + g0].sid; sid[aj + g0].atom = aj + g0; } ng++; } } return ng; } static int first_colour(const int fC, const egCol Col, const t_graph* g, gmx::ArrayRef<const egCol> edgeColor) /* Return the first node with colour Col starting at fC. * return -1 if none found. 
*/ { int i; for (i = fC; i < int(g->edges.size()); i++) { if (!g->edges[i].empty() && edgeColor[i] == Col) { return i; } } return -1; } static int mk_sblocks(FILE* fp, t_graph* g, int maxsid, t_sid sid[]) { int ng; int nW, nG, nB; /* Number of Grey, Black, White */ int fW, fG; /* First of each category */ int g0, nblock; if (!g->numConnectedAtoms) { return 0; } nblock = 0; std::vector<egCol> edgeColor(g->edges.size(), egcolWhite); g0 = g->edgeAtomBegin; nW = g->numConnectedAtoms; nG = 0; nB = 0; fW = 0; /* We even have a loop invariant: * nW+nG+nB == g->nbound */ if (fp) { fprintf(fp, "Walking down the molecule graph to make constraint-blocks\n"); } while (nW > 0) { /* Find the first white, this will allways be a larger * number than before, because no nodes are made white * in the loop */ if ((fW = first_colour(fW, egcolWhite, g, edgeColor)) == -1) { gmx_fatal(FARGS, "No WHITE nodes found while nW=%d\n", nW); } /* Make the first white node grey, and set the block number */ edgeColor[fW] = egcolGrey; range_check(fW + g0, 0, maxsid); sid[fW + g0].sid = nblock++; nG++; nW--; /* Initial value for the first grey */ fG = fW; if (debug) { fprintf(debug, "Starting G loop (nW=%d, nG=%d, nB=%d, total %d)\n", nW, nG, nB, nW + nG + nB); } while (nG > 0) { if ((fG = first_colour(fG, egcolGrey, g, edgeColor)) == -1) { gmx_fatal(FARGS, "No GREY nodes found while nG=%d\n", nG); } /* Make the first grey node black */ edgeColor[fG] = egcolBlack; nB++; nG--; /* Make all the neighbours of this black node grey * and set their block number */ ng = mk_grey(edgeColor, g, &fG, maxsid, sid); /* ng is the number of white nodes made grey */ nG += ng; nW -= ng; } } if (debug) { fprintf(debug, "Found %d shake blocks\n", nblock); } return nblock; } typedef struct { int first, last, sid; } t_merge_sid; static int ms_comp(const void* a, const void* b) { const t_merge_sid* ma = reinterpret_cast<const t_merge_sid*>(a); const t_merge_sid* mb = reinterpret_cast<const t_merge_sid*>(b); int d; d = 
ma->first - mb->first; if (d == 0) { return ma->last - mb->last; } else { return d; } } static int merge_sid(int at_start, int at_end, int nsid, t_sid sid[], t_blocka* sblock) { int i, j, k, n, isid, ndel; t_merge_sid* ms; /* We try to remdy the following problem: * Atom: 1 2 3 4 5 6 7 8 9 10 * Sid: 0 -1 1 0 -1 1 1 1 1 1 */ /* Determine first and last atom in each shake ID */ snew(ms, nsid); for (k = 0; (k < nsid); k++) { ms[k].first = at_end + 1; ms[k].last = -1; ms[k].sid = k; } for (i = at_start; (i < at_end); i++) { isid = sid[i].sid; range_check(isid, -1, nsid); if (isid >= 0) { ms[isid].first = std::min(ms[isid].first, sid[i].atom); ms[isid].last = std::max(ms[isid].last, sid[i].atom); } } qsort(ms, nsid, sizeof(ms[0]), ms_comp); /* Now merge the overlapping ones */ ndel = 0; for (k = 0; (k < nsid);) { for (j = k + 1; (j < nsid);) { if (ms[j].first <= ms[k].last) { ms[k].last = std::max(ms[k].last, ms[j].last); ms[k].first = std::min(ms[k].first, ms[j].first); ms[j].sid = -1; ndel++; j++; } else { k = j; j = k + 1; } } if (j == nsid) { k++; } } for (k = 0; (k < nsid); k++) { while ((k < nsid - 1) && (ms[k].sid == -1)) { for (j = k + 1; (j < nsid); j++) { std::memcpy(&(ms[j - 1]), &(ms[j]), sizeof(ms[0])); } nsid--; } } for (k = at_start; (k < at_end); k++) { sid[k].atom = k; sid[k].sid = -1; } sblock->nr = nsid; sblock->nalloc_index = sblock->nr + 1; snew(sblock->index, sblock->nalloc_index); sblock->nra = at_end - at_start; sblock->nalloc_a = sblock->nra; snew(sblock->a, sblock->nalloc_a); sblock->index[0] = 0; for (k = n = 0; (k < nsid); k++) { sblock->index[k + 1] = sblock->index[k] + ms[k].last - ms[k].first + 1; for (j = ms[k].first; (j <= ms[k].last); j++) { range_check(n, 0, sblock->nra); sblock->a[n++] = j; range_check(j, 0, at_end); if (sid[j].sid == -1) { sid[j].sid = k; } else { fprintf(stderr, "Double sids (%d, %d) for atom %d\n", sid[j].sid, k, j); } } } sblock->nra = n; GMX_RELEASE_ASSERT(sblock->index[k] == sblock->nra, "Internal inconsistency; 
sid merge failed"); sfree(ms); return nsid; } void gen_sblocks(FILE* fp, int at_end, const InteractionDefinitions& idef, t_blocka* sblock, gmx_bool bSettle) { t_graph* g; int i, i0; t_sid* sid; int nsid; g = mk_graph(nullptr, idef, at_end, TRUE, bSettle); if (debug) { p_graph(debug, "<NAME>", g); } snew(sid, at_end); for (i = 0; i < at_end; i++) { sid[i].atom = i; sid[i].sid = -1; } nsid = mk_sblocks(fp, g, at_end, sid); if (!nsid) { return; } /* Now sort the shake blocks... */ std::sort(sid, sid + at_end, sid_comp); if (debug) { fprintf(debug, "Sorted shake block\n"); for (i = 0; i < at_end; i++) { fprintf(debug, "sid[%5d] = atom:%5d sid:%5d\n", i, sid[i].atom, sid[i].sid); } } /* Now check how many are NOT -1, i.e. how many have to be shaken */ for (i0 = 0; i0 < at_end; i0++) { if (sid[i0].sid > -1) { break; } } /* Now we have the sids that have to be shaken. We'll check the min and * max atom numbers and this determines the shake block. DvdS 2007-07-19. * For the purpose of making boundaries all atoms in between need to be * part of the shake block too. There may be cases where blocks overlap * and they will have to be merged. */ merge_sid(0, at_end, nsid, sid, sblock); sfree(sid); /* Due to unknown reason this free generates a problem sometimes */ done_graph(g); if (debug) { fprintf(debug, "Done gen_sblocks\n"); } }
5,552
5,169
{ "name": "SwiftEvents", "version": "0.3.0", "homepage": "https://github.com/denissimon/SwiftEvents", "authors": { "<NAME>": "<EMAIL>" }, "summary": "A lightweight, pure-Swift library for implementing events in iOS and other platforms", "description": "SwiftEvents is a lightweight, pure-Swift library for implementing events. It has Delegation (one subscriber to the event), NotificationCenter (multiple subscribers to the event) and KVO (observing properties using events) functionality in one simple, not verbose and type-safe API.", "license": { "type": "MIT" }, "swift_versions": "4.2", "platforms": { "ios": "9.0", "osx": "10.10", "watchos": "3.0", "tvos": "10.0" }, "source": { "git": "https://github.com/denissimon/SwiftEvents.git", "tag": "0.3.0" }, "source_files": "Sources/**/*.swift", "frameworks": "Foundation", "swift_version": "4.2" }
336
345
package spring.study.componentprovider.interfaze;

/**
 * Empty marker interface with no methods; presumably used to tag "consumer"
 * components so they can be discovered by type during component scanning --
 * confirm against the classes that implement it.
 *
 * <p>Created by Format on 2017/6/11.
 */
public interface IConsumer {
}
39
313
import os import numpy as np import yaml from director import visualization as vis from director import objectmodel as om from director import transformUtils from director import ioUtils from director import filterUtils from director import vtkAll as vtk from director.debugVis import DebugData from director import segmentation from director import lcmUtils from director import lcmframe from director import robotstate from director.ikplanner import ConstraintSet from director.timercallback import TimerCallback from director.ikparameters import IkParameters from . import utils from .cameraposes import CameraPoses from .registration import GlobalRegistrationUtils from .camerafrustumvisualizer import CameraFrustumVisualizer class DataCollectionHelper(object): def __init__(self, robotSystem, openniDepthPointCloud): self.robotSystem = robotSystem self.openniDepthPointCloud = openniDepthPointCloud self.loadData() def loadData(self): logFolder = 'logs_test/moving-camera' self.pathDict = utils.getFilenames(logFolder) if self.pathDict is None: return self.cameraposes = CameraPoses(self.pathDict['cameraposes']) # load the elastic fusion reconstruction if we already know where to # put it self.savedTransformFilename = os.path.join(utils.getLabelFusionBaseDir(), 'sandbox', 'reconstruction_robot_frame.yaml') if os.path.exists(self.savedTransformFilename): firstFrameToWorld = utils.getFirstFrameToWorldTransform(self.savedTransformFilename) utils.loadElasticFusionReconstruction(self.pathDict['reconstruction'], transform=firstFrameToWorld) def loadReconstructedPointCloud(self): utime = self.openniDepthPointCloud.lastUtime cameraToFirstFrame = self.cameraposes.getCameraPoseAtUTime(utime) cameraToWorld = om.findObjectByName('camera frame').transform firstFrameToCamera = cameraToFirstFrame.GetLinearInverse() firstFrameToWorld = transformUtils.concatenateTransforms([firstFrameToCamera, cameraToWorld]) self.firstFrameToWorld = firstFrameToWorld 
utils.loadElasticFusionReconstruction(self.pathDict['reconstruction'], transform=firstFrameToWorld) def saveTransform(self): (pos, quat) = transformUtils.poseFromTransform(self.firstFrameToWorld) d = dict() d['firstFrameToWorld'] = [pos.tolist(), quat.tolist()] utils.saveDictToYaml(d, self.savedTransformFilename) class DataCollection(object): def __init__(self, robotSystem, openniDepthPointCloud, measurementPanel, imageManager): self.robotSystem = robotSystem self.openniDepthPointCloud = openniDepthPointCloud self.measurementPanel = measurementPanel self.imageManager = imageManager self.visFolder = om.getOrCreateContainer('data collection') self.cameraName = 'OPENNI_FRAME_LEFT' self.savedTransformFilename = os.path.join(utils.getLabelFusionBaseDir(), 'sandbox', 'reconstruction_robot_frame.yaml') self.frustumVis = dict() self.loadSavedData() self.setupDevTools() def loadSavedData(self): if not os.path.exists(self.savedTransformFilename): return d = utils.getDictFromYamlFilename(self.savedTransformFilename) if 'table frame' not in d: return (pos, quat) = d['table frame'] t = transformUtils.transformFromPose(pos, quat) self.tableFrame = vis.updateFrame(t, 'table frame', scale=0.15) def spawnTableFrame(self): pointOnCloseTableEdge = self.measurementPanel.pickPoints[0] pointOnTable = self.measurementPanel.pickPoints[1] pointAboveTable = self.measurementPanel.pickPoints[2] scenePolyData = self.openniDepthPointCloud.polyData d = GlobalRegistrationUtils.segmentTable(scenePolyData=scenePolyData, searchRadius=0.3, visualize=False, thickness=0.01, pointOnTable=pointOnTable, pointAboveTable=pointAboveTable) origin = d['pointOnTable'] normal = d['normal'] yaxis = -normal zaxis = pointOnTable - pointOnCloseTableEdge xaxis = np.cross(yaxis, zaxis) # frame = transformUtils.getTransformFromOriginAndNormal(origin, normal) frame = transformUtils.getTransformFromAxesAndOrigin(xaxis, yaxis, zaxis, origin) self.tableFrame = vis.updateFrame(frame, 'table frame', parent=self.visFolder, 
scale=0.15) def testCameraFrustrum(self): frame = om.findObjectByName('camera frame') self.cameraFrustumVisualizer = CameraFrustumVisualizer(self.imageManager, self.cameraName, frame) self.frustumVis['camera'] = self.cameraFrustumVisualizer def makeTargetCameraTransform(self, rotateX=-40, rotateY=0, translateZ=-0.8, visualize=True): t = transformUtils.copyFrame(self.tableFrame.transform) t.PreMultiply() t.RotateX(rotateX) t.RotateY(rotateY) t.Translate((0,0,translateZ)) if visualize: name = 'target camera frame' if om.findObjectByName(name) is None: frame = vis.updateFrame(t, name, scale=0.15) cameraFrustum = CameraFrustumVisualizer(self.imageManager, self.cameraName, frame, verbose=False, visFolder=frame) self.frustumVis['target camera'] = cameraFrustum else: frame = vis.updateFrame(t, name, scale=0.15) self.targetCameraFrame = frame return t def makeTargetCameraFrames(self, filename=None): self.targetFrames = [] if filename is None: filename = 'data_collection.yaml' fullFilename = os.path.join(utils.getLabelFusionBaseDir(), 'config', filename) frameData = utils.getDictFromYamlFilename(fullFilename)['frames'] graspToHandLinkFrame = utils.getCameraToKukaEndEffectorFrame() # d = dict() # d['rotateX'] = -40 # d['rotateY'] = 30 # d['translateZ'] = -0.8 # d['numFrames'] = 4 # frameData.append(d) rotationDirection = 1 for data in frameData: rotateX = data['rotateX'] translateZ = data['translateZ'] numFrames = data['numFrames'] rotateY = data['rotateY'] rotateYGrid = np.linspace(rotateY['min'], rotateY['max'], numFrames) for idx in xrange(0,numFrames): rotateY = rotateYGrid[idx]*rotationDirection transform = self.makeTargetCameraTransform(rotateX=rotateX, rotateY=rotateY, translateZ=translateZ, visualize=False) # check if feasible to reach that frame ikData = self.runIK(transform, makePlan=False, graspToHandLinkFrame=graspToHandLinkFrame) # if infeasible increase tolerance to 5 degs if ikData['info'] != 1: ikData = self.runIK(transform, makePlan=False, 
graspToHandLinkFrame=graspToHandLinkFrame, angleToleranceInDegrees=5.0) if ikData['info'] == 1: frameName = 'target frame ' + str(len(self.targetFrames)) frame = self.showTargetFrame(transform, frameName) self.targetFrames.append(frame) else: print "\n\n----------" print "infeasible frame" print "rotateX = ", rotateX print "rotateY = ", rotateY print "translateZ = ", translateZ print "-----------\n\n" # alternate the rotation direction each time rotationDirection = -rotationDirection def showTargetFrame(self, transform, frameName): visFolder = om.getOrCreateContainer('target camera frames') frame = vis.updateFrame(transform, frameName, parent='target camera frames', scale=0.15) cameraFrustum = CameraFrustumVisualizer(self.imageManager, self.cameraName, frame, verbose=False, visFolder=frame) return frame def saveTableFrame(self): # d = utils.getDictFromYamlFilename(self.savedTransformFilename) d = dict() (pos, quat) = transformUtils.poseFromTransform(self.tableFrame.transform) d['table frame'] = [pos.tolist(), quat.tolist()] utils.saveDictToYaml(d, self.savedTransformFilename) def setupDevTools(self): teleopCameraFrame = om.findObjectByName('camera frame teleop') teleopCameraFrameFrustumVis = CameraFrustumVisualizer(self.imageManager, self.cameraName, teleopCameraFrame, verbose=False) self.frustumVis['teleop'] = teleopCameraFrameFrustumVis self.testCameraFrustrum() # self.makeTargetCameraFrames() def runIK(self, targetFrame, startPose=None, graspToHandLinkFrame=None, makePlan=True, positionTolerance=0.0, angleToleranceInDegrees=5.0, maxDegreesPerSecond=60): """ Sets the cameraFrame to the targetFrame using IK :param targetFrame: :return: """ if startPose is None: startPose = self.getPlanningStartPose() ikPlanner = self.robotSystem.ikPlanner startPoseName = 'reach_start' endPoseName = 'reach_end' ikPlanner.addPose(startPose, startPoseName) side = ikPlanner.reachingSide constraints = [] constraints.append(KukaPlanningUtils.createLockedBasePostureConstraint(ikPlanner, 
startPoseName)) positionConstraint, orientationConstraint = ikPlanner.createPositionOrientationGraspConstraints(side, targetFrame, graspToHandLinkFrame, positionTolerance=positionTolerance, angleToleranceInDegrees=angleToleranceInDegrees) positionConstraint.tspan = [1.0, 1.0] orientationConstraint.tspan = [1.0, 1.0] constraints.append(positionConstraint) constraints.append(orientationConstraint) constraintSet = ConstraintSet(ikPlanner, constraints, 'reach_end', startPoseName) constraintSet.ikParameters = IkParameters(maxDegreesPerSecond=maxDegreesPerSecond) endPose, info = constraintSet.runIk() returnData = dict() returnData['info'] = info returnData['endPose'] = endPose if makePlan: plan = constraintSet.planEndPoseGoal() returnData['plan'] = plan return returnData def getPlanningStartPose(self): return self.robotSystem.robotStateJointController.q def testRunIK(self): targetFrame = self.targetCameraFrame.transform graspToHandLinkFrame = utils.getCameraToKukaEndEffectorFrame() return self.runIK(targetFrame, graspToHandLinkFrame=graspToHandLinkFrame) def reachToTargetFrame(self, frameNum): targetFrame = self.targetFrames[frameNum].transform graspToHandLinkFrame = utils.getCameraToKukaEndEffectorFrame() return self.runIK(targetFrame, graspToHandLinkFrame=graspToHandLinkFrame) def spawnTargetFrame(self): debugFolder = om.getOrCreateContainer('debug') om.removeFromObjectModel('target frame') handLink = str(self.robotSystem.ikPlanner.getHandLink()) handFrame = transformUtils.copyFrame(self.robotSystem.robotStateModel.getLinkFrame(handLink)) handFrame.PreMultiply() handFrame.Translate(0.02, 0, 0) self.targetFrame = vis.updateFrame(handFrame, 'target frame', parent=debugFolder, scale=0.15) return self.targetFrame def makePlanRunner(self): return DataCollectionPlanRunner(self, self.robotSystem, self.targetFrames) class DataCollectionPlanRunner(object): def __init__(self, dataCollection, robotSystem, targetFrames, configFilename=None): self.robotSystem = robotSystem 
self.dataCollection = dataCollection self.timer = TimerCallback(targetFps=5) self.timer.callback = self.callback self.targetFrames = targetFrames self.counter = 0 self.configFilename = configFilename self.initialized = False self.loadConfig(self.configFilename) def loadConfig(self, configFilename): if configFilename is None: configFilename = 'data_collection.yaml' fullFilename = os.path.join(utils.getLabelFusionBaseDir(), 'config', configFilename) self.config = utils.getDictFromYamlFilename(fullFilename) def start(self): print "starting data collection plan runner" self.timer.start() os.system("cd /home/robot-lab/newdata && sleep 4 && auto_start_data_collect &") def stop(self): print "stopping data collection plan runner" self.timer.stop() def callback(self): if self.initialized: utime = self.getUtime() if utime < self.planData['endUTime']: return self.initialized = True if self.counter >= len(self.targetFrames): print "finished reaching all target frames" self.stop() return planData = self.makeNextPlan() if planData['info'] == 1: self.commitNextPlan() else: self.stop() raise ValueError(' plan info was not 1, stopping execution') def makeNextPlan(self): targetFrame = self.targetFrames[self.counter].transform graspToHandLinkFrame = utils.getCameraToKukaEndEffectorFrame() maxDegreesPerSecond = self.config['planning']['maxDegreesPerSecond'] self.planData = self.dataCollection.runIK(targetFrame, graspToHandLinkFrame=graspToHandLinkFrame, maxDegreesPerSecond=maxDegreesPerSecond) return self.planData def getUtime(self): return self.robotSystem.robotStateJointController.lastRobotStateMessage.utime def commitNextPlan(self): print "committed a new plan" self.robotSystem.manipPlanner.commitManipPlan(self.planData['plan']) planDuration = self.planData['plan'].plan[-1].utime self.planData['endUTime'] = self.getUtime() + 1.1*planDuration self.counter += 1 class KukaPlanningUtils(object): @staticmethod def createLockedBasePostureConstraint(ikPlanner, startPoseName): return 
ikPlanner.createPostureConstraint(startPoseName, robotstate.matchJoints('base_'))
7,081
14,668
// Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef IOS_CHROME_BROWSER_UI_READING_LIST_IOS_ADD_TO_READING_LIST_INFOBAR_DELEGATE_H_ #define IOS_CHROME_BROWSER_UI_READING_LIST_IOS_ADD_TO_READING_LIST_INFOBAR_DELEGATE_H_ #include "components/infobars/core/confirm_infobar_delegate.h" namespace web { class WebState; } class ReadingListModel; // Shows an add to Reading List prompt in iOS class IOSAddToReadingListInfobarDelegate : public ConfirmInfoBarDelegate { public: IOSAddToReadingListInfobarDelegate(const GURL& URL, const std::u16string& title, int estimated_read_time_, double score, double long_score, ReadingListModel* model, web::WebState* web_state); ~IOSAddToReadingListInfobarDelegate() override; // Returns |delegate| as an IOSAddToReadingListInfobarDelegate, or nullptr // if it is of another type. static IOSAddToReadingListInfobarDelegate* FromInfobarDelegate( infobars::InfoBarDelegate* delegate); // Not copyable or moveable. IOSAddToReadingListInfobarDelegate( const IOSAddToReadingListInfobarDelegate&) = delete; IOSAddToReadingListInfobarDelegate& operator=( const IOSAddToReadingListInfobarDelegate&) = delete; const GURL& URL() const { return url_; } int estimated_read_time() { return estimated_read_time_; } // InfoBarDelegate implementation. InfoBarIdentifier GetIdentifier() const override; std::u16string GetMessageText() const override; void InfoBarDismissed() override; // ConfirmInfoBarDelegate implementation. bool Accept() override; // If called, sets the pref to never show the Reading List Message. virtual void NeverShow(); private: // The URL of the page to be saved to Reading List. GURL url_; // The title of the page to be saved to Reading List. const std::u16string& title_; // The estimated time to read of the page. 
int estimated_read_time_; // The score of the page measuring distilibility, a proxy for whether the // page is likely an article. double distilibility_score_; // The score of the page measuring length of the page. double length_score_; // Reference to save |url_| to Reading List. ReadingListModel* model_ = nullptr; // WebState pointer that is showing |url_|. web::WebState* web_state_ = nullptr; }; #endif // IOS_CHROME_BROWSER_UI_READING_LIST_IOS_ADD_TO_READING_LIST_INFOBAR_DELEGATE_H_
1,021
312
<filename>tools/federation/src/main/java/org/eclipse/rdf4j/federated/evaluation/iterator/GraphToBindingSetConversionIteration.java /******************************************************************************* * Copyright (c) 2019 Eclipse RDF4J contributors. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/org/documents/edl-v10.php. *******************************************************************************/ package org.eclipse.rdf4j.federated.evaluation.iterator; import java.util.NoSuchElementException; import org.eclipse.rdf4j.common.iteration.AbstractCloseableIteration; import org.eclipse.rdf4j.model.Statement; import org.eclipse.rdf4j.query.BindingSet; import org.eclipse.rdf4j.query.GraphQueryResult; import org.eclipse.rdf4j.query.QueryEvaluationException; import org.eclipse.rdf4j.query.algebra.evaluation.QueryBindingSet; /** * Converts graph results into a binding set iteration * * @author <NAME> */ public class GraphToBindingSetConversionIteration extends AbstractCloseableIteration<BindingSet, QueryEvaluationException> { protected final GraphQueryResult graph; public GraphToBindingSetConversionIteration(GraphQueryResult graph) { super(); this.graph = graph; } @Override public boolean hasNext() throws QueryEvaluationException { return graph.hasNext(); } @Override public BindingSet next() throws QueryEvaluationException { try { return convert(graph.next()); } catch (NoSuchElementException | IllegalStateException e) { throw e; } } @Override public void remove() throws QueryEvaluationException { try { graph.remove(); } catch (UnsupportedOperationException | IllegalStateException e) { throw e; } } protected BindingSet convert(Statement st) { QueryBindingSet result = new QueryBindingSet(); result.addBinding("subject", st.getSubject()); result.addBinding("predicate", 
st.getPredicate()); result.addBinding("object", st.getObject()); if (st.getContext() != null) { result.addBinding("context", st.getContext()); } return result; } }
672
2,855
<filename>jctools-core/src/main/java/org/jctools/queues/ConcurrentCircularArrayQueue.java /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jctools.queues; import org.jctools.queues.IndexedQueueSizeUtil.IndexedQueue; import org.jctools.util.Pow2; import java.util.AbstractQueue; import java.util.Iterator; import java.util.NoSuchElementException; import static org.jctools.util.UnsafeRefArrayAccess.*; abstract class ConcurrentCircularArrayQueueL0Pad<E> extends AbstractQueue<E> { byte b000,b001,b002,b003,b004,b005,b006,b007;// 8b byte b010,b011,b012,b013,b014,b015,b016,b017;// 16b byte b020,b021,b022,b023,b024,b025,b026,b027;// 24b byte b030,b031,b032,b033,b034,b035,b036,b037;// 32b byte b040,b041,b042,b043,b044,b045,b046,b047;// 40b byte b050,b051,b052,b053,b054,b055,b056,b057;// 48b byte b060,b061,b062,b063,b064,b065,b066,b067;// 56b byte b070,b071,b072,b073,b074,b075,b076,b077;// 64b byte b100,b101,b102,b103,b104,b105,b106,b107;// 72b byte b110,b111,b112,b113,b114,b115,b116,b117;// 80b byte b120,b121,b122,b123,b124,b125,b126,b127;// 88b byte b130,b131,b132,b133,b134,b135,b136,b137;// 96b byte b140,b141,b142,b143,b144,b145,b146,b147;//104b byte b150,b151,b152,b153,b154,b155,b156,b157;//112b byte b160,b161,b162,b163,b164,b165,b166,b167;//120b byte b170,b171,b172,b173,b174,b175,b176,b177;//128b } /** * Common functionality for array backed queues. The class is pre-padded and the array is padded on either side to help * with False Sharing prevention. 
It is expected that subclasses handle post padding. */ abstract class ConcurrentCircularArrayQueue<E> extends ConcurrentCircularArrayQueueL0Pad<E> implements MessagePassingQueue<E>, IndexedQueue, QueueProgressIndicators, SupportsIterator { protected final long mask; protected final E[] buffer; ConcurrentCircularArrayQueue(int capacity) { int actualCapacity = Pow2.roundToPowerOfTwo(capacity); mask = actualCapacity - 1; buffer = allocateRefArray(actualCapacity); } @Override public int size() { return IndexedQueueSizeUtil.size(this, IndexedQueueSizeUtil.PLAIN_DIVISOR); } @Override public boolean isEmpty() { return IndexedQueueSizeUtil.isEmpty(this); } @Override public String toString() { return this.getClass().getName(); } @Override public void clear() { while (poll() != null) { // if you stare into the void } } @Override public int capacity() { return (int) (mask + 1); } @Override public long currentProducerIndex() { return lvProducerIndex(); } @Override public long currentConsumerIndex() { return lvConsumerIndex(); } /** * Get an iterator for this queue. This method is thread safe. * <p> * The iterator provides a best-effort snapshot of the elements in the queue. * The returned iterator is not guaranteed to return elements in queue order, * and races with the consumer thread may cause gaps in the sequence of returned elements. * Like {link #relaxedPoll}, the iterator may not immediately return newly inserted elements. * * @return The iterator. 
*/ @Override public Iterator<E> iterator() { final long cIndex = lvConsumerIndex(); final long pIndex = lvProducerIndex(); return new WeakIterator(cIndex, pIndex, mask, buffer); } private static class WeakIterator<E> implements Iterator<E> { private final long pIndex; private final long mask; private final E[] buffer; private long nextIndex; private E nextElement; WeakIterator(long cIndex, long pIndex, long mask, E[] buffer) { this.nextIndex = cIndex; this.pIndex = pIndex; this.mask = mask; this.buffer = buffer; nextElement = getNext(); } @Override public void remove() { throw new UnsupportedOperationException("remove"); } @Override public boolean hasNext() { return nextElement != null; } @Override public E next() { final E e = nextElement; if (e == null) throw new NoSuchElementException(); nextElement = getNext(); return e; } private E getNext() { while (nextIndex < pIndex) { long offset = calcCircularRefElementOffset(nextIndex++, mask); E e = lvRefElement(buffer, offset); if (e != null) { return e; } } return null; } } }
2,249
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.mysqlflexibleserver; import com.azure.core.credential.TokenCredential; import com.azure.core.http.HttpClient; import com.azure.core.http.HttpPipeline; import com.azure.core.http.HttpPipelineBuilder; import com.azure.core.http.policy.AddDatePolicy; import com.azure.core.http.policy.HttpLogOptions; import com.azure.core.http.policy.HttpLoggingPolicy; import com.azure.core.http.policy.HttpPipelinePolicy; import com.azure.core.http.policy.HttpPolicyProviders; import com.azure.core.http.policy.RequestIdPolicy; import com.azure.core.http.policy.RetryPolicy; import com.azure.core.http.policy.UserAgentPolicy; import com.azure.core.management.http.policy.ArmChallengeAuthenticationPolicy; import com.azure.core.management.profile.AzureProfile; import com.azure.core.util.Configuration; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.mysqlflexibleserver.fluent.MySqlManagementClient; import com.azure.resourcemanager.mysqlflexibleserver.implementation.BackupsImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.CheckNameAvailabilitiesImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.CheckVirtualNetworkSubnetUsagesImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.ConfigurationsImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.DatabasesImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.FirewallRulesImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.GetPrivateDnsZoneSuffixesImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.LocationBasedCapabilitiesImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.MySqlManagementClientBuilder; import 
com.azure.resourcemanager.mysqlflexibleserver.implementation.OperationsImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.ReplicasImpl; import com.azure.resourcemanager.mysqlflexibleserver.implementation.ServersImpl; import com.azure.resourcemanager.mysqlflexibleserver.models.Backups; import com.azure.resourcemanager.mysqlflexibleserver.models.CheckNameAvailabilities; import com.azure.resourcemanager.mysqlflexibleserver.models.CheckVirtualNetworkSubnetUsages; import com.azure.resourcemanager.mysqlflexibleserver.models.Configurations; import com.azure.resourcemanager.mysqlflexibleserver.models.Databases; import com.azure.resourcemanager.mysqlflexibleserver.models.FirewallRules; import com.azure.resourcemanager.mysqlflexibleserver.models.GetPrivateDnsZoneSuffixes; import com.azure.resourcemanager.mysqlflexibleserver.models.LocationBasedCapabilities; import com.azure.resourcemanager.mysqlflexibleserver.models.Operations; import com.azure.resourcemanager.mysqlflexibleserver.models.Replicas; import com.azure.resourcemanager.mysqlflexibleserver.models.Servers; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.List; import java.util.Objects; /** * Entry point to MySqlManager. The Microsoft Azure management API provides create, read, update, and delete * functionality for Azure MySQL resources including servers, databases, firewall rules, VNET rules, log files and * configurations with new business model. 
*/ public final class MySqlManager { private Servers servers; private Replicas replicas; private Backups backups; private FirewallRules firewallRules; private Databases databases; private Configurations configurations; private LocationBasedCapabilities locationBasedCapabilities; private CheckVirtualNetworkSubnetUsages checkVirtualNetworkSubnetUsages; private CheckNameAvailabilities checkNameAvailabilities; private GetPrivateDnsZoneSuffixes getPrivateDnsZoneSuffixes; private Operations operations; private final MySqlManagementClient clientObject; private MySqlManager(HttpPipeline httpPipeline, AzureProfile profile, Duration defaultPollInterval) { Objects.requireNonNull(httpPipeline, "'httpPipeline' cannot be null."); Objects.requireNonNull(profile, "'profile' cannot be null."); this.clientObject = new MySqlManagementClientBuilder() .pipeline(httpPipeline) .endpoint(profile.getEnvironment().getResourceManagerEndpoint()) .subscriptionId(profile.getSubscriptionId()) .defaultPollInterval(defaultPollInterval) .buildClient(); } /** * Creates an instance of MySql service API entry point. * * @param credential the credential to use. * @param profile the Azure profile for client. * @return the MySql service API instance. */ public static MySqlManager authenticate(TokenCredential credential, AzureProfile profile) { Objects.requireNonNull(credential, "'credential' cannot be null."); Objects.requireNonNull(profile, "'profile' cannot be null."); return configure().authenticate(credential, profile); } /** * Gets a Configurable instance that can be used to create MySqlManager with optional configuration. * * @return the Configurable instance allowing configurations. */ public static Configurable configure() { return new MySqlManager.Configurable(); } /** The Configurable allowing configurations to be set. 
*/ public static final class Configurable { private final ClientLogger logger = new ClientLogger(Configurable.class); private HttpClient httpClient; private HttpLogOptions httpLogOptions; private final List<HttpPipelinePolicy> policies = new ArrayList<>(); private final List<String> scopes = new ArrayList<>(); private RetryPolicy retryPolicy; private Duration defaultPollInterval; private Configurable() { } /** * Sets the http client. * * @param httpClient the HTTP client. * @return the configurable object itself. */ public Configurable withHttpClient(HttpClient httpClient) { this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null."); return this; } /** * Sets the logging options to the HTTP pipeline. * * @param httpLogOptions the HTTP log options. * @return the configurable object itself. */ public Configurable withLogOptions(HttpLogOptions httpLogOptions) { this.httpLogOptions = Objects.requireNonNull(httpLogOptions, "'httpLogOptions' cannot be null."); return this; } /** * Adds the pipeline policy to the HTTP pipeline. * * @param policy the HTTP pipeline policy. * @return the configurable object itself. */ public Configurable withPolicy(HttpPipelinePolicy policy) { this.policies.add(Objects.requireNonNull(policy, "'policy' cannot be null.")); return this; } /** * Adds the scope to permission sets. * * @param scope the scope. * @return the configurable object itself. */ public Configurable withScope(String scope) { this.scopes.add(Objects.requireNonNull(scope, "'scope' cannot be null.")); return this; } /** * Sets the retry policy to the HTTP pipeline. * * @param retryPolicy the HTTP pipeline retry policy. * @return the configurable object itself. */ public Configurable withRetryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = Objects.requireNonNull(retryPolicy, "'retryPolicy' cannot be null."); return this; } /** * Sets the default poll interval, used when service does not provide "Retry-After" header. 
* * @param defaultPollInterval the default poll interval. * @return the configurable object itself. */ public Configurable withDefaultPollInterval(Duration defaultPollInterval) { this.defaultPollInterval = Objects.requireNonNull(defaultPollInterval, "'retryPolicy' cannot be null."); if (this.defaultPollInterval.isNegative()) { throw logger.logExceptionAsError(new IllegalArgumentException("'httpPipeline' cannot be negative")); } return this; } /** * Creates an instance of MySql service API entry point. * * @param credential the credential to use. * @param profile the Azure profile for client. * @return the MySql service API instance. */ public MySqlManager authenticate(TokenCredential credential, AzureProfile profile) { Objects.requireNonNull(credential, "'credential' cannot be null."); Objects.requireNonNull(profile, "'profile' cannot be null."); StringBuilder userAgentBuilder = new StringBuilder(); userAgentBuilder .append("azsdk-java") .append("-") .append("com.azure.resourcemanager.mysqlflexibleserver") .append("/") .append("1.0.0-beta.1"); if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) { userAgentBuilder .append(" (") .append(Configuration.getGlobalConfiguration().get("java.version")) .append("; ") .append(Configuration.getGlobalConfiguration().get("os.name")) .append("; ") .append(Configuration.getGlobalConfiguration().get("os.version")) .append("; auto-generated)"); } else { userAgentBuilder.append(" (auto-generated)"); } if (scopes.isEmpty()) { scopes.add(profile.getEnvironment().getManagementEndpoint() + "/.default"); } if (retryPolicy == null) { retryPolicy = new RetryPolicy("Retry-After", ChronoUnit.SECONDS); } List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy(userAgentBuilder.toString())); policies.add(new RequestIdPolicy()); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(retryPolicy); policies.add(new AddDatePolicy()); policies.add(new 
ArmChallengeAuthenticationPolicy(credential, scopes.toArray(new String[0]))); policies.addAll(this.policies); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpPipeline httpPipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); return new MySqlManager(httpPipeline, profile, defaultPollInterval); } } /** @return Resource collection API of Servers. */ public Servers servers() { if (this.servers == null) { this.servers = new ServersImpl(clientObject.getServers(), this); } return servers; } /** @return Resource collection API of Replicas. */ public Replicas replicas() { if (this.replicas == null) { this.replicas = new ReplicasImpl(clientObject.getReplicas(), this); } return replicas; } /** @return Resource collection API of Backups. */ public Backups backups() { if (this.backups == null) { this.backups = new BackupsImpl(clientObject.getBackups(), this); } return backups; } /** @return Resource collection API of FirewallRules. */ public FirewallRules firewallRules() { if (this.firewallRules == null) { this.firewallRules = new FirewallRulesImpl(clientObject.getFirewallRules(), this); } return firewallRules; } /** @return Resource collection API of Databases. */ public Databases databases() { if (this.databases == null) { this.databases = new DatabasesImpl(clientObject.getDatabases(), this); } return databases; } /** @return Resource collection API of Configurations. */ public Configurations configurations() { if (this.configurations == null) { this.configurations = new ConfigurationsImpl(clientObject.getConfigurations(), this); } return configurations; } /** @return Resource collection API of LocationBasedCapabilities. 
*/ public LocationBasedCapabilities locationBasedCapabilities() { if (this.locationBasedCapabilities == null) { this.locationBasedCapabilities = new LocationBasedCapabilitiesImpl(clientObject.getLocationBasedCapabilities(), this); } return locationBasedCapabilities; } /** @return Resource collection API of CheckVirtualNetworkSubnetUsages. */ public CheckVirtualNetworkSubnetUsages checkVirtualNetworkSubnetUsages() { if (this.checkVirtualNetworkSubnetUsages == null) { this.checkVirtualNetworkSubnetUsages = new CheckVirtualNetworkSubnetUsagesImpl(clientObject.getCheckVirtualNetworkSubnetUsages(), this); } return checkVirtualNetworkSubnetUsages; } /** @return Resource collection API of CheckNameAvailabilities. */ public CheckNameAvailabilities checkNameAvailabilities() { if (this.checkNameAvailabilities == null) { this.checkNameAvailabilities = new CheckNameAvailabilitiesImpl(clientObject.getCheckNameAvailabilities(), this); } return checkNameAvailabilities; } /** @return Resource collection API of GetPrivateDnsZoneSuffixes. */ public GetPrivateDnsZoneSuffixes getPrivateDnsZoneSuffixes() { if (this.getPrivateDnsZoneSuffixes == null) { this.getPrivateDnsZoneSuffixes = new GetPrivateDnsZoneSuffixesImpl(clientObject.getGetPrivateDnsZoneSuffixes(), this); } return getPrivateDnsZoneSuffixes; } /** @return Resource collection API of Operations. */ public Operations operations() { if (this.operations == null) { this.operations = new OperationsImpl(clientObject.getOperations(), this); } return operations; } /** * @return Wrapped service client MySqlManagementClient providing direct access to the underlying auto-generated API * implementation, based on Azure REST API. */ public MySqlManagementClient serviceClient() { return this.clientObject; } }
5,679
14,668
<gh_stars>1000+ // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef UI_OZONE_PLATFORM_WAYLAND_TEST_TEST_VIEWPORTER_H_ #define UI_OZONE_PLATFORM_WAYLAND_TEST_TEST_VIEWPORTER_H_ #include "ui/ozone/platform/wayland/test/global_object.h" namespace wl { // Manage wl_viewporter object. class TestViewporter : public GlobalObject { public: TestViewporter(); ~TestViewporter() override; TestViewporter(const TestViewporter& rhs) = delete; TestViewporter& operator=(const TestViewporter& rhs) = delete; }; } // namespace wl #endif // UI_OZONE_PLATFORM_WAYLAND_TEST_TEST_VIEWPORTER_H_
255
403
<filename>tests/nipaptest.py<gh_stars>100-1000 #!/usr/bin/env python # -*- coding: utf-8 -*- # # Most of the tests here are performed via Pynipap which makes it a lot easier # to test things given that we receive python objects and not just basic data # structures like those returned in xmlrpc.py. If you want to write a new test, # it is recommended that you place it here rather than in xmlrpc.py. # import datetime import logging import unittest import sys import os import time MODULE_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, MODULE_DIR + '/..') sys.path.insert(0, MODULE_DIR + '/../pynipap') sys.path.insert(0, MODULE_DIR + '/../nipap') sys.path.insert(0, MODULE_DIR + '/../nipap-cli') import nipap.backend from nipap.backend import Nipap from nipap.authlib import SqliteAuth from nipap.nipapconfig import NipapConfig from pynipap import AuthOptions, VRF, Pool, Prefix, NipapNonExistentError, NipapDuplicateError, NipapValueError import pynipap pynipap.xmlrpc_uri = 'http://unittest:[email protected]:1337' o = AuthOptions({ 'authoritative_source': 'nipap' }) # disable caching of objects in Pynipap pynipap.CACHE = False class TestHelper: @classmethod def clear_database(cls): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() # have to delete hosts before we can delete the rest n._execute("DELETE FROM ip_net_plan WHERE masklen(prefix) = 32") # the rest n._execute("DELETE FROM ip_net_plan") # delete all except for the default VRF with id 0 n._execute("DELETE FROM ip_net_vrf WHERE id > 0") # set default info for VRF 0 n._execute("UPDATE ip_net_vrf SET name = 'default', description = 'The default VRF, typically the Internet.' 
WHERE id = 0") n._execute("DELETE FROM ip_net_pool") n._execute("DELETE FROM ip_net_asn") def add_prefix(self, prefix, type, description, tags=None, pool_id=None): if tags is None: tags = [] p = Prefix() p.prefix = prefix p.type = type p.status = 'assigned' p.description = description p.tags = tags if pool_id: pool = Pool.get(pool_id) p.pool = pool p.save() return p def add_prefix_from_pool(self, pool, family, description): p = Prefix() args = {} args['from-pool'] = pool args['family'] = family p.type = pool.default_type p.status = 'assigned' p.save(args) return p def add_pool(self, name, default_type, ipv4_default_prefix_length, ipv6_default_prefix_length): pool = Pool() pool.name = name pool.default_type = default_type pool.ipv4_default_prefix_length = ipv4_default_prefix_length pool.ipv6_default_prefix_length = ipv6_default_prefix_length pool.save() return pool class TestPrefixExpires(unittest.TestCase): """ Test expires related stuff """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_expires1(self): th = TestHelper() # make sure default is infinite expiry time p1 = th.add_prefix('1.3.0.0/16', 'reservation', 'test') self.assertEqual(p1.expires, None) # test absolute time by creating local datetime object and sending. 
# set expires to now but skip the microseconds as the backend doesn't # support that precision now = datetime.datetime.now().replace(microsecond = 0) p1.expires = now p1.save() self.assertEqual(p1.expires, now) # test the relative time parsing of the backend by setting "tomorrow", # which parsedatetime interprets as 09:00 the next day # tomorrow = datetime.datetime.now().replace(hour = 9, minute = 0, second = 0, microsecond = 0) + datetime.timedelta(days = 1) p1.expires = "tomorrow" p1.save() self.assertEqual(p1.expires, tomorrow) class TestParentPrefix(unittest.TestCase): """ Test parent prefix related stuff """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_parent_prefix(self): """ Verify that listing with parent_prefix returns match for 'foo' """ expected = [] parent = self.add_prefix('1.3.0.0/16', 'reservation', 'test') expected.append([parent.prefix, False]) expected.append([self.add_prefix('1.3.1.0/24', 'assignment', 'foo').prefix, True]) expected.append([self.add_prefix('1.3.2.0/24', 'assignment', 'test').prefix, False]) expected.append([self.add_prefix('1.3.3.0/24', 'assignment', 'test').prefix, False]) expected.append([self.add_prefix('1.3.4.0/24', 'assignment', 'test').prefix, False]) self.add_prefix('1.2.4.0/24', 'assignment', 'test') res = Prefix.smart_search('foo', { 'parent_prefix': parent.id }) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.match]) self.assertEqual(expected, result) def test_parent_prefix2(self): """ Verify that listing with parent_prefix returns a list with no matches Nothing matches foo but we should still get a list of prefixes """ expected = [] parent = self.add_prefix('1.3.0.0/16', 'reservation', 'test') expected.append([parent.prefix, False]) expected.append([self.add_prefix('1.3.1.0/24', 'assignment', 'test').prefix, False]) expected.append([self.add_prefix('1.3.2.0/24', 'assignment', 'test').prefix, False]) 
expected.append([self.add_prefix('1.3.3.0/24', 'assignment', 'test').prefix, False]) expected.append([self.add_prefix('1.3.4.0/24', 'assignment', 'test').prefix, False]) self.add_prefix('1.2.4.0/24', 'assignment', 'test') res = Prefix.smart_search('foo', { 'parent_prefix': parent.id }) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.match]) self.assertEqual(expected, result) def add_prefix(self, prefix, type, description): p = Prefix() p.prefix = prefix p.type = type p.status = 'assigned' p.description = description p.save() return p class TestPrefixDisplayPrefix(unittest.TestCase): """ Test calculation of display_prefix on child prefixes """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_prefix_edit(self): """ Make sure display_prefix is correctly updated on modification of parent """ # we ran into display_prefix not being updated correctly in #515 th = TestHelper() # add a few prefixes p1 = th.add_prefix('192.168.0.0/24', 'assignment', 'test') p2 = th.add_prefix('192.168.0.1/32', 'host', 'test') # now edit the "middle prefix" so that it now covers 192.168.1.0/24 p1.prefix = '192.168.0.0/23' p1.save() # check that display_prefix of host is as expected res = Prefix.smart_search('192.168.0.1/32', {}) self.assertEqual('192.168.0.1/23', res['result'][0].display_prefix) class TestPrefixIndent(unittest.TestCase): """ Test prefix indent calculation """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_prefix_edit(self): """ Verify indent is correct after prefix edit """ th = TestHelper() # add a few prefixes p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'test') p2 = th.add_prefix('192.168.0.0/24', 'reservation', 'test') p3 = th.add_prefix('192.168.1.0/24', 'reservation', 'test') # now edit the "middle prefix" so that it now covers 192.168.1.0/24 p3.prefix = '192.168.0.0/20' p3.save() expected = 
[] # expected result is a list of list, each row is a prefix, first value is prefix, next is indent level # notice how p2 and p3 switch places efter the edit expected.append([p1.prefix, 0]) expected.append([p3.prefix, 1]) expected.append([p2.prefix, 2]) res = Prefix.smart_search('0.0.0.0/0', {}) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.indent]) self.assertEqual(expected, result) class TestPrefixTags(unittest.TestCase): """ Test prefix tag calculation """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_prefix_edit(self): """ Verify tags are correct after prefix edit """ # ran into this issue in #507 th = TestHelper() # add to "top level" prefix, each with a unique tag p1 = th.add_prefix('1.0.0.0/8', 'reservation', 'test', tags=['a']) p2 = th.add_prefix('2.0.0.0/8', 'reservation', 'test', tags=['b']) # add a subnet of p1 p3 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') # p3 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(['a'], res['result'][0].inherited_tags.keys()) # edit p3 to become subnet of p2 p3.prefix = '2.0.0.0/24' p3.save() # p3 should have inherited_tags = ['b'] from p2 res = Prefix.smart_search('2.0.0.0/24', {}) self.assertEqual(['b'], res['result'][0].inherited_tags.keys()) def test_tags1(self): """ Verify tags are correctly inherited """ th = TestHelper() # add to "top level" prefix, each with a unique tag p1 = th.add_prefix('1.0.0.0/8', 'reservation', 'test', tags=['a']) p2 = th.add_prefix('1.0.0.0/9', 'reservation', 'test') p3 = th.add_prefix('1.0.0.0/10', 'reservation', 'test') # p3 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/10', {}) self.assertEqual(['a'], res['result'][0].inherited_tags.keys()) p4 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p5 = th.add_prefix('1.0.0.0/23', 'reservation', 'test') p6 = th.add_prefix('1.0.0.0/22', 
'reservation', 'test') # p4 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(['a'], res['result'][0].inherited_tags.keys()) # change tags on top level prefix p1.tags = ['b'] p1.save() # p4 should have inherited_tags = ['a'] from p1 res = Prefix.smart_search('1.0.0.0/8', {}) self.assertEqual([], res['result'][0].inherited_tags.keys()) self.assertEqual(['b'], res['result'][1].inherited_tags.keys()) self.assertEqual(['b'], res['result'][2].inherited_tags.keys()) self.assertEqual(['b'], res['result'][3].inherited_tags.keys()) self.assertEqual(['b'], res['result'][4].inherited_tags.keys()) self.assertEqual(['b'], res['result'][5].inherited_tags.keys()) class TestPrefixChildren(unittest.TestCase): """ Test calculation of children prefixes """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_children1(self): """ Add some prefixes and make sure number of children is correct """ th = TestHelper() # add a few prefixes p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'test') p2 = th.add_prefix('192.168.0.0/20', 'reservation', 'test') p3 = th.add_prefix('192.168.0.0/24', 'reservation', 'test') p4 = th.add_prefix('192.168.1.0/24', 'reservation', 'test') p5 = th.add_prefix('192.168.2.0/24', 'reservation', 'test') p6 = th.add_prefix('192.168.32.0/20', 'reservation', 'test') p7 = th.add_prefix('192.168.32.0/24', 'reservation', 'test') expected = [] # expected result is a list of list, each row is a prefix, first value # is prefix, next is number of children expected.append([p1.prefix, 2]) expected.append([p2.prefix, 3]) expected.append([p3.prefix, 0]) expected.append([p4.prefix, 0]) expected.append([p5.prefix, 0]) expected.append([p6.prefix, 1]) expected.append([p7.prefix, 0]) res = Prefix.smart_search('0.0.0.0/0', {}) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.children]) self.assertEqual(expected, result) p5.prefix = 
'192.0.2.0/24' p5.save() expected = [] expected.append([p5.prefix, 0]) expected.append([p1.prefix, 2]) expected.append([p2.prefix, 2]) expected.append([p3.prefix, 0]) expected.append([p4.prefix, 0]) expected.append([p6.prefix, 1]) expected.append([p7.prefix, 0]) res = Prefix.smart_search('0.0.0.0/0', {}) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.children]) self.assertEqual(expected, result) # p4 192.168.1.0/24 => 192.168.0.0/21 p4.prefix = '192.168.0.0/21' p4.save() expected = [] expected.append([p5.prefix, 0]) expected.append([p1.prefix, 2]) expected.append([p2.prefix, 1]) expected.append([p4.prefix, 1]) expected.append([p3.prefix, 0]) expected.append([p6.prefix, 1]) expected.append([p7.prefix, 0]) res = Prefix.smart_search('0.0.0.0/0', {}) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.children]) self.assertEqual(expected, result) p1.remove() expected = [] expected.append([p5.prefix, 0]) expected.append([p2.prefix, 1]) expected.append([p4.prefix, 1]) expected.append([p3.prefix, 0]) expected.append([p6.prefix, 1]) expected.append([p7.prefix, 0]) res = Prefix.smart_search('0.0.0.0/0', {}) result = [] for prefix in res['result']: result.append([prefix.prefix, prefix.children]) self.assertEqual(expected, result) def test_children2(self): """ Add an assignment and a host and make children calculation works after modifying the assignment """ # we ran into children not being updated correctly in #515 th = TestHelper() # add a few prefixes p1 = th.add_prefix('192.168.0.0/24', 'assignment', 'test') p2 = th.add_prefix('192.168.0.1/32', 'host', 'test') # now edit the "middle prefix" so that it now covers 192.168.1.0/24 p1.prefix = '192.168.0.0/23' p1.save() # check that children of parent is as expected res = Prefix.smart_search('192.168.0.0/23', {}) self.assertEqual(1, res['result'][0].children) def test_children3(self): """ Check children are correct when adding prefix """ th = TestHelper() # add a top 
level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(0, res['result'][0].children) # add a covering supernet around p1 p2 = th.add_prefix('1.0.0.0/20', 'reservation', 'bar') # check stats for p2, our new top level prefix res = Prefix.smart_search('1.0.0.0/20', {}) self.assertEqual(1, res['result'][0].children) def test_children4(self): """ Check children are correct when enlarging prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') p2 = th.add_prefix('1.0.7.0/24', 'assignment', 'test') # add a covering supernet around p1 p3 = th.add_prefix('1.0.0.0/22', 'reservation', 'bar') # check that p3 looks good res = Prefix.smart_search('1.0.0.0/22', {}) self.assertEqual(1, res['result'][0].children) # now move our supernet, so we see that the update thingy works p3.prefix = '1.0.0.0/21' p3.save() # check stats for p2, our new top level prefix res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2, res['result'][0].children) def test_children5(self): """ Check children are correct when shrinking prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') p2 = th.add_prefix('1.0.7.0/24', 'assignment', 'test') # add a covering supernet around p1 and p2 p3 = th.add_prefix('1.0.0.0/21', 'reservation', 'bar') # check that p3 looks good res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2, res['result'][0].children) # shrink our supernet, so it only covers p1 p3.prefix = '1.0.0.0/22' p3.save() # check that p3 only covers p1 res = Prefix.smart_search('1.0.0.0/22', {}) self.assertEqual(1, res['result'][0].children) def test_children6(self): """ Check children are correct when moving prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') p2 = th.add_prefix('2.0.0.0/25', 'reservation', 'bar') # now move our 
supernet, so we see that the update thingy works p2.prefix = '2.0.0.0/22' p2.save() # check stats for p2, we shouldn't see children based on our old # position (2.0.0.0/25) res = Prefix.smart_search('2.0.0.0/22', {}) self.assertEqual(0, res['result'][0].children) # now move our supernet, so we see that the update thingy works p2.prefix = '1.0.0.0/22' p2.save() # check stats for p2, we should get p1 as child res = Prefix.smart_search('1.0.0.0/22', {}) self.assertEqual(1, res['result'][0].children) def test_children7(self): """ Add prefixes within other prefix and verify parent prefix has correct children """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(0, res['result'][0].children) # add a host in our top prefix p2 = th.add_prefix('1.0.0.1/32', 'host', 'bar') # check stats for p1, our top level prefix res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(1, res['result'][0].children) # check stats for p2, our new host prefix res = Prefix.smart_search('1.0.0.1/32', {}) self.assertEqual(0, res['result'][0].children) def test_children8(self): """ Remove prefix and check old parent is correctly updated """ th = TestHelper() # p1 children are p2 (which covers p3 and p4) and p5 p1 = th.add_prefix('1.0.0.0/20', 'reservation', 'test') p2 = th.add_prefix('1.0.0.0/22', 'reservation', 'test') p3 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p4 = th.add_prefix('1.0.1.0/24', 'reservation', 'test') p5 = th.add_prefix('1.0.7.0/24', 'reservation', 'test') # moving p2 means that p1 get p3, p4 and p5 as children p2.prefix = '2.0.0.0/22' p2.save() # check stats for p1 res = Prefix.smart_search('1.0.0.0/20', {}) self.assertEqual(3, res['result'][0].children) # moving back p2 which means that p1 get p2 and p5 as children p2.prefix = '1.0.0.0/22' p2.save() # check stats for p1 res = Prefix.smart_search('1.0.0.0/20', {}) self.assertEqual(2, 
res['result'][0].children) def test_children9(self): """ Move prefix several indent steps and check children is correct """ th = TestHelper() # tree of prefixes p1 = th.add_prefix('1.0.0.0/20', 'reservation', 'test') p2 = th.add_prefix('1.0.0.0/21', 'reservation', 'test') p3 = th.add_prefix('1.0.0.0/22', 'reservation', 'test') p4 = th.add_prefix('1.0.0.0/23', 'reservation', 'test') p5 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p6 = th.add_prefix('1.0.2.0/24', 'reservation', 'test') p7 = th.add_prefix('1.0.4.0/22', 'reservation', 'test') # check stats for p2 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2, res['result'][0].children) # move p3 outside of the tree p3.prefix = '2.0.0.0/22' p3.save() # check stats for p2 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(3, res['result'][0].children) # move p3 into the tree again p3.prefix = '1.0.0.0/22' p3.save() # check stats for p2 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2, res['result'][0].children) class TestCountryCodeValue(unittest.TestCase): """ Test sanity for country value - should be ISO 3166-1 alpha-2 compliant """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_country_code_length(self): """ Make sure only two character country codes are allowed """ p = Prefix() p.prefix = '1.3.3.0/24' p.type = 'assignment' p.status = 'assigned' # try to input one character - should fail - this will be a INSERT operation p.country = 'a' with self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input one character - should fail - this will be an UPDATE operation p.country = 'a' with self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input three character - should fail p.country = 'aaa' with self.assertRaisesRegexp(NipapValueError, 'Please 
enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input a number character - should fail p.country = 'a1' with self.assertRaisesRegexp(NipapValueError, 'Please enter a two letter country code according to ISO 3166-1 alpha-2'): p.save() # try to input two character - should succeed p.country = 'se' p.save() # output should be capitalized self.assertEqual('SE', p.country) class TestPoolStatistics(unittest.TestCase): """ Test calculation of statistics for pools """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_stats1(self): """ Check total stats are correct when adding and removing member prefix """ th = TestHelper() # add a pool pool1 = th.add_pool('test', 'assignment', 31, 112) # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(0, res[0].member_prefixes_v4) self.assertEqual(0, res[0].used_prefixes_v4) self.assertEqual(None, res[0].free_prefixes_v4) self.assertEqual(None, res[0].total_prefixes_v4) self.assertEqual(0, res[0].total_addresses_v4) self.assertEqual(0, res[0].used_addresses_v4) self.assertEqual(0, res[0].free_addresses_v4) # ipv6 self.assertEqual(0, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(None, res[0].free_prefixes_v6) self.assertEqual(None, res[0].total_prefixes_v6) self.assertEqual(0, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(0, res[0].free_addresses_v6) # add some members to the pool p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test', pool_id=pool1.id) p2 = th.add_prefix('2.0.0.0/24', 'assignment', 'test', pool_id=pool1.id) p3 = th.add_prefix('2001:db8::/48', 'assignment', 'test', pool_id=pool1.id) p4 = th.add_prefix('2001:db8:1::/48', 'assignment', 'test', pool_id=pool1.id) # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(2, res[0].member_prefixes_v4) self.assertEqual(0, 
res[0].used_prefixes_v4) self.assertEqual(256, res[0].free_prefixes_v4) self.assertEqual(256, res[0].total_prefixes_v4) self.assertEqual(512, res[0].total_addresses_v4) self.assertEqual(0, res[0].used_addresses_v4) self.assertEqual(512, res[0].free_addresses_v4) # ipv6 self.assertEqual(2, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(36893488147419103232, res[0].free_prefixes_v6) self.assertEqual(36893488147419103232, res[0].total_prefixes_v6) self.assertEqual(2417851639229258349412352, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(2417851639229258349412352, res[0].free_addresses_v6) # remove one IPv4 and one IPv6 member from the pool p1.remove() p3.remove() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(0, res[0].used_prefixes_v4) self.assertEqual(128, res[0].free_prefixes_v4) self.assertEqual(128, res[0].total_prefixes_v4) self.assertEqual(256, res[0].total_addresses_v4) self.assertEqual(0, res[0].used_addresses_v4) self.assertEqual(256, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(18446744073709551616, res[0].free_prefixes_v6) self.assertEqual(18446744073709551616, res[0].total_prefixes_v6) self.assertEqual(1208925819614629174706176, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(1208925819614629174706176, res[0].free_addresses_v6) pool1.ipv4_default_prefix_length = 30 pool1.ipv6_default_prefix_length = 96 pool1.save() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(0, res[0].used_prefixes_v4) self.assertEqual(64, res[0].free_prefixes_v4) self.assertEqual(64, res[0].total_prefixes_v4) self.assertEqual(256, res[0].total_addresses_v4) self.assertEqual(0, 
res[0].used_addresses_v4) self.assertEqual(256, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(281474976710656, res[0].free_prefixes_v6) self.assertEqual(281474976710656, res[0].total_prefixes_v6) self.assertEqual(1208925819614629174706176, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(1208925819614629174706176, res[0].free_addresses_v6) def test_stats2(self): """ Check total stats are correct when updating member prefix """ th = TestHelper() # add a pool pool1 = th.add_pool('test', 'assignment', 31, 112) # add some members to the pool p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p2 = th.add_prefix('2001:db8::/48', 'reservation', 'test', pool_id=pool1.id) p1.prefix = '1.0.0.0/25' p1.save() p2.prefix = '2001:db8::/64' p2.save() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(0, res[0].used_prefixes_v4) self.assertEqual(64, res[0].free_prefixes_v4) self.assertEqual(64, res[0].total_prefixes_v4) self.assertEqual(128, res[0].total_addresses_v4) self.assertEqual(0, res[0].used_addresses_v4) self.assertEqual(128, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(281474976710656, res[0].free_prefixes_v6) self.assertEqual(281474976710656, res[0].total_prefixes_v6) self.assertEqual(18446744073709551616, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(18446744073709551616, res[0].free_addresses_v6) def test_stats3(self): """ Check total stats are correct when adding and removing child prefixes from pool """ th = TestHelper() # add a pool pool1 = th.add_pool('test', 'assignment', 31, 112) # add some members to the pool p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p2 = 
th.add_prefix('2001:db8::/48', 'reservation', 'test', pool_id=pool1.id) # add child from pool pc1 = th.add_prefix_from_pool(pool1, 4, 'foo') pc2 = th.add_prefix_from_pool(pool1, 6, 'foo') # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(1, res[0].used_prefixes_v4) self.assertEqual(127, res[0].free_prefixes_v4) self.assertEqual(128, res[0].total_prefixes_v4) self.assertEqual(256, res[0].total_addresses_v4) self.assertEqual(2, res[0].used_addresses_v4) self.assertEqual(254, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(1, res[0].used_prefixes_v6) self.assertEqual(18446744073709551615, res[0].free_prefixes_v6) self.assertEqual(18446744073709551616, res[0].total_prefixes_v6) self.assertEqual(1208925819614629174706176, res[0].total_addresses_v6) self.assertEqual(65536, res[0].used_addresses_v6) self.assertEqual(1208925819614629174640640, res[0].free_addresses_v6) # remove child prefixes pc1.remove() pc2.remove() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(0, res[0].used_prefixes_v4) self.assertEqual(128, res[0].free_prefixes_v4) self.assertEqual(128, res[0].total_prefixes_v4) self.assertEqual(256, res[0].total_addresses_v4) self.assertEqual(0, res[0].used_addresses_v4) self.assertEqual(256, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(18446744073709551616, res[0].free_prefixes_v6) self.assertEqual(18446744073709551616, res[0].total_prefixes_v6) self.assertEqual(1208925819614629174706176, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(1208925819614629174706176, res[0].free_addresses_v6) def test_stats4(self): """ Check total stats are correct when modifying child prefixes in pool """ th = TestHelper() # add a pool 
pool1 = th.add_pool('test', 'assignment', 31, 112) # add some members to the pool p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p2 = th.add_prefix('2001:db8::/48', 'reservation', 'test', pool_id=pool1.id) # add child from pool pc1 = th.add_prefix_from_pool(pool1, 4, 'foo') pc2 = th.add_prefix_from_pool(pool1, 6, 'foo') # change child prefix and size and make sure stats are updated correctly pc1.prefix = '1.0.0.128/25' pc1.save() pc2.prefix = '2001:db8:0:1::/64' pc2.save() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(1, res[0].used_prefixes_v4) self.assertEqual(64, res[0].free_prefixes_v4) self.assertEqual(65, res[0].total_prefixes_v4) self.assertEqual(256, res[0].total_addresses_v4) self.assertEqual(128, res[0].used_addresses_v4) self.assertEqual(128, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(1, res[0].used_prefixes_v6) self.assertEqual(18446462598732840960, res[0].free_prefixes_v6) self.assertEqual(18446462598732840961, res[0].total_prefixes_v6) self.assertEqual(1208925819614629174706176, res[0].total_addresses_v6) self.assertEqual(18446744073709551616, res[0].used_addresses_v6) self.assertEqual(1208907372870555465154560, res[0].free_addresses_v6) def test_stats5(self): """ Check total stats are correct when adding and removing member prefix with childs from pool This is trickier as there is now a child in the pool that needs to be accounted for. 
""" th = TestHelper() # add a pool pool1 = th.add_pool('test', 'assignment', 31, 112) # add some members to the pool p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p2 = th.add_prefix('2.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p3 = th.add_prefix('2001:db8:1::/48', 'reservation', 'test', pool_id=pool1.id) p4 = th.add_prefix('2001:db8:2::/48', 'reservation', 'test', pool_id=pool1.id) # add child from pool pc1 = th.add_prefix_from_pool(pool1, 4, 'foo') pc2 = th.add_prefix_from_pool(pool1, 6, 'foo') # remove first member prefixes from pool p1.pool = None p1.save() p3.pool = None p3.save() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(1, res[0].member_prefixes_v4) self.assertEqual(0, res[0].used_prefixes_v4) self.assertEqual(128, res[0].free_prefixes_v4) self.assertEqual(128, res[0].total_prefixes_v4) self.assertEqual(256, res[0].total_addresses_v4) self.assertEqual(0, res[0].used_addresses_v4) self.assertEqual(256, res[0].free_addresses_v4) # ipv6 self.assertEqual(1, res[0].member_prefixes_v6) self.assertEqual(0, res[0].used_prefixes_v6) self.assertEqual(18446744073709551616, res[0].free_prefixes_v6) self.assertEqual(18446744073709551616, res[0].total_prefixes_v6) self.assertEqual(1208925819614629174706176, res[0].total_addresses_v6) self.assertEqual(0, res[0].used_addresses_v6) self.assertEqual(1208925819614629174706176, res[0].free_addresses_v6) # readd prefixes to pool p1.pool = pool1 p1.save() p3.pool = pool1 p3.save() # check stats for pool1 res = Pool.list({ 'id': pool1.id }) # ipv4 self.assertEqual(2, res[0].member_prefixes_v4) self.assertEqual(1, res[0].used_prefixes_v4) self.assertEqual(255, res[0].free_prefixes_v4) self.assertEqual(256, res[0].total_prefixes_v4) self.assertEqual(512, res[0].total_addresses_v4) self.assertEqual(2, res[0].used_addresses_v4) self.assertEqual(510, res[0].free_addresses_v4) # ipv6 self.assertEqual(2, res[0].member_prefixes_v6) self.assertEqual(1, 
res[0].used_prefixes_v6) self.assertEqual(36893488147419103231, res[0].free_prefixes_v6) self.assertEqual(36893488147419103232, res[0].total_prefixes_v6) self.assertEqual(2417851639229258349412352, res[0].total_addresses_v6) self.assertEqual(65536, res[0].used_addresses_v6) self.assertEqual(2417851639229258349346816, res[0].free_addresses_v6) def test_stats6(self): """ Check total stats are correct when adding member prefix with childs to pool """ th = TestHelper() # add a pool pool1 = th.add_pool('test', 'assignment', 31, 112) # add some members to the pool p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p2 = th.add_prefix('2.0.0.0/24', 'reservation', 'test', pool_id=pool1.id) p3 = th.add_prefix('2001:db8::/48', 'reservation', 'test', pool_id=pool1.id) p4 = th.add_prefix('2001:db8:1::/48', 'reservation', 'test', pool_id=pool1.id) # add child from pool pc1 = th.add_prefix_from_pool(pool1, 4, 'foo') pc2 = th.add_prefix_from_pool(pool1, 6, 'foo') class TestPrefixStatistics(unittest.TestCase): """ Test calculation of statistics for prefixes """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_stats1(self): """ Check stats are correct when adding prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(256, res['result'][0].total_addresses) self.assertEqual(0, res['result'][0].used_addresses) self.assertEqual(256, res['result'][0].free_addresses) # add a covering supernet around p1 p2 = th.add_prefix('1.0.0.0/20', 'reservation', 'bar') # check stats for p2, our new top level prefix res = Prefix.smart_search('1.0.0.0/20', {}) self.assertEqual(4096, res['result'][0].total_addresses) self.assertEqual(256, res['result'][0].used_addresses) self.assertEqual(3840, res['result'][0].free_addresses) def test_stats2(self): """ Check stats are correct 
when enlarging prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') p2 = th.add_prefix('1.0.7.0/24', 'assignment', 'test') # add a covering supernet around p1 p3 = th.add_prefix('1.0.0.0/22', 'reservation', 'bar') # check that p3 looks good res = Prefix.smart_search('1.0.0.0/22', {}) self.assertEqual(1024, res['result'][0].total_addresses) self.assertEqual(256, res['result'][0].used_addresses) self.assertEqual(768, res['result'][0].free_addresses) # now move our supernet, so we see that the update thingy works p3.prefix = '1.0.0.0/21' p3.save() # check stats for p2, our new top level prefix res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2048, res['result'][0].total_addresses) self.assertEqual(512, res['result'][0].used_addresses) self.assertEqual(1536, res['result'][0].free_addresses) def test_stats3(self): """ Check stats are correct when shrinking prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') p2 = th.add_prefix('1.0.7.0/24', 'assignment', 'test') # add a covering supernet around p1 and p2 p3 = th.add_prefix('1.0.0.0/21', 'reservation', 'bar') # check that p3 looks good res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2048, res['result'][0].total_addresses) self.assertEqual(512, res['result'][0].used_addresses) self.assertEqual(1536, res['result'][0].free_addresses) # now move our supernet, so we see that the update thingy works p3.prefix = '1.0.0.0/22' p3.save() # check that p3 only covers p1 res = Prefix.smart_search('1.0.0.0/22', {}) self.assertEqual(1024, res['result'][0].total_addresses) self.assertEqual(256, res['result'][0].used_addresses) self.assertEqual(768, res['result'][0].free_addresses) def test_stats4(self): """ Check stats are correct when moving prefix """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') p2 = th.add_prefix('2.0.0.0/25', 'reservation', 
'bar') # now move our supernet, so we see that the update thingy works p2.prefix = '2.0.0.0/22' p2.save() # check stats for p2, we shouldn't see stats based on our old position # (2.0.0.0/25) res = Prefix.smart_search('2.0.0.0/22', {}) self.assertEqual(1024, res['result'][0].total_addresses) self.assertEqual(0, res['result'][0].used_addresses) self.assertEqual(1024, res['result'][0].free_addresses) def test_stats5(self): """ Add prefixes within other prefix and verify parent prefix has correct statistics """ th = TestHelper() # add a top level prefix p1 = th.add_prefix('1.0.0.0/24', 'assignment', 'test') # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(256, res['result'][0].total_addresses) self.assertEqual(0, res['result'][0].used_addresses) self.assertEqual(256, res['result'][0].free_addresses) # add a host in our top prefix p2 = th.add_prefix('1.0.0.1/32', 'host', 'bar') # check stats for p1, our top level prefix res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(256, res['result'][0].total_addresses) self.assertEqual(1, res['result'][0].used_addresses) self.assertEqual(255, res['result'][0].free_addresses) # check stats for p2, our new host prefix res = Prefix.smart_search('1.0.0.1/32', {}) self.assertEqual(1, res['result'][0].total_addresses) self.assertEqual(1, res['result'][0].used_addresses) self.assertEqual(0, res['result'][0].free_addresses) def test_stats6(self): """ Remove prefix and check old parent is correctly updated """ th = TestHelper() # p1 children are p2 (which covers p3 and p4) and p5 p1 = th.add_prefix('1.0.0.0/20', 'reservation', 'test') p2 = th.add_prefix('1.0.0.0/22', 'reservation', 'test') p3 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p4 = th.add_prefix('1.0.1.0/24', 'reservation', 'test') p5 = th.add_prefix('1.0.7.0/24', 'reservation', 'test') # moving p2 means that p1 get p3, p4 and p5 as children p2.prefix = '2.0.0.0/22' p2.save() # check stats for p1 res = 
Prefix.smart_search('1.0.0.0/20', {}) self.assertEqual(4096, res['result'][0].total_addresses) self.assertEqual(768, res['result'][0].used_addresses) self.assertEqual(3328, res['result'][0].free_addresses) # moving back p2 which means that p1 get p2 and p5 as children p2.prefix = '1.0.0.0/22' p2.save() # check stats for p1 res = Prefix.smart_search('1.0.0.0/20', {}) self.assertEqual(4096, res['result'][0].total_addresses) self.assertEqual(1280, res['result'][0].used_addresses) self.assertEqual(2816, res['result'][0].free_addresses) def test_stats7(self): """ Move prefix several indent steps and check stats are correct """ th = TestHelper() # tree of prefixes p1 = th.add_prefix('1.0.0.0/20', 'reservation', 'test') p2 = th.add_prefix('1.0.0.0/21', 'reservation', 'test') p3 = th.add_prefix('1.0.0.0/22', 'reservation', 'test') p4 = th.add_prefix('1.0.0.0/23', 'reservation', 'test') p5 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p6 = th.add_prefix('1.0.2.0/24', 'reservation', 'test') p7 = th.add_prefix('1.0.4.0/22', 'reservation', 'test') # check stats for p2 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2048, res['result'][0].total_addresses) self.assertEqual(2048, res['result'][0].used_addresses) self.assertEqual(0, res['result'][0].free_addresses) # move p3 outside of the tree p3.prefix = '2.0.0.0/22' p3.save() # check stats for p2 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2048, res['result'][0].total_addresses) self.assertEqual(1792, res['result'][0].used_addresses) self.assertEqual(256, res['result'][0].free_addresses) # move p3 into the tree again p3.prefix = '1.0.0.0/22' p3.save() # check stats for p2 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2048, res['result'][0].total_addresses) self.assertEqual(2048, res['result'][0].used_addresses) self.assertEqual(0, res['result'][0].free_addresses) def test_stats7(self): """ Enlarge / shrink prefix over several indent levels """ th = TestHelper() # p1 children 
are p2 (which covers p3 and p4) and p5 p1 = th.add_prefix('1.0.0.0/16', 'reservation', 'test') p2 = th.add_prefix('1.0.0.0/22', 'reservation', 'test') p3 = th.add_prefix('1.0.0.0/23', 'reservation', 'FOO') p4 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p5 = th.add_prefix('1.0.1.0/24', 'reservation', 'test') p6 = th.add_prefix('1.0.2.0/24', 'reservation', 'test') p7 = th.add_prefix('1.0.3.0/24', 'reservation', 'test') # enlarge p3 so that it covers p2, ie moved up several indent levels p3.prefix = '1.0.0.0/21' p3.save() # check stats for p3 res = Prefix.smart_search('1.0.0.0/21', {}) self.assertEqual(2048, res['result'][0].total_addresses) self.assertEqual(1024, res['result'][0].used_addresses) self.assertEqual(1024, res['result'][0].free_addresses) # move back p3 p3.prefix = '1.0.0.0/23' p3.save() # check stats for p3 res = Prefix.smart_search('1.0.0.0/23', {}) self.assertEqual(512, res['result'][0].total_addresses) self.assertEqual(512, res['result'][0].used_addresses) self.assertEqual(0, res['result'][0].free_addresses) def test_stats8(self): """ Make sure stats are correct """ # we ran into this problem with #590 th = TestHelper() p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p2 = th.add_prefix('1.0.0.0/32', 'reservation', 'test') # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(256, res['result'][0].total_addresses) self.assertEqual(1, res['result'][0].used_addresses) self.assertEqual(255, res['result'][0].free_addresses) p3 = th.add_prefix('1.0.0.2/31', 'reservation', 'test') # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(256, res['result'][0].total_addresses) self.assertEqual(3, res['result'][0].used_addresses) self.assertEqual(253, res['result'][0].free_addresses) p3.prefix = '1.0.0.2/32' p3.save() # check stats for p1 res = Prefix.smart_search('1.0.0.0/24', {}) self.assertEqual(256, res['result'][0].total_addresses) self.assertEqual(2, res['result'][0].used_addresses) 
self.assertEqual(254, res['result'][0].free_addresses)


class TestVrf(unittest.TestCase):
    """ Test various VRF related things
    """

    def setUp(self):
        """ Test setup, which essentially means to empty the database
        """
        TestHelper.clear_database()


    def test_vrf1(self):
        """ Test VRF RT input values
        """
        v = VRF()
        v.name = "test-vrf"

        # malformed route target values - every one of these must be
        # rejected by the rt column's input validation (note the two
        # entries containing whitespace inside / before the address part)
        # NOTE(review): assertRaisesRegexp is deprecated in Python 3;
        # assertRaisesRegex is the modern name - left as-is for
        # consistency with the rest of this file.
        broken_values = [ "foo", "123:foo", "foo:123", "123.456.789.123:123", "123.123.200. 1:123", " 123.456.789.123:123" ]
        for bv in broken_values:
            with self.assertRaisesRegexp(pynipap.NipapValueError, 'Invalid input for column rt'):
                v.rt = bv
                v.save()

        # valid value
        v.rt = "123:456"
        v.save()
        self.assertEqual("123:456", VRF.list({"name": "test-vrf"})[0].rt)

        # valid value but with whitespace which should be stripped
        v.rt = " 123:456"
        v.save()
        self.assertEqual("123:456", VRF.list({"name": "test-vrf"})[0].rt)

        # valid IP:id value
        v.rt = "172.16.31.10:456"
        v.save()
        self.assertEqual("172.16.31.10:456", VRF.list({"name": "test-vrf"})[0].rt)


class TestVrfStatistics(unittest.TestCase):
    """ Test calculation of statistics for VRFs
    """

    def setUp(self):
        """ Test setup, which essentially means to empty the database
        """
        TestHelper.clear_database()


    def test_stats1(self):
        """ Check stats are correct when adding and removing prefixes
        """
        th = TestHelper()

        # add some top level prefixes to the default VRF (id 0)
        p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test')
        p2 = th.add_prefix('2.0.0.0/24', 'reservation', 'test')
        p3 = th.add_prefix('2001:db8:1::/48', 'reservation', 'test')
        p4 = th.add_prefix('2001:db8:2::/48', 'reservation', 'test')

        # check stats for VRF
        res = VRF.get(0)

        # ipv4: two /24s, nothing used inside them
        self.assertEqual(2, res.num_prefixes_v4)
        self.assertEqual(512, res.total_addresses_v4)
        self.assertEqual(0, res.used_addresses_v4)
        self.assertEqual(512, res.free_addresses_v4)

        # ipv6: two /48s (2 * 2^80 addresses), nothing used inside them
        self.assertEqual(2, res.num_prefixes_v6)
        self.assertEqual(2417851639229258349412352, res.total_addresses_v6)
        self.assertEqual(0, res.used_addresses_v6)
        self.assertEqual(2417851639229258349412352, res.free_addresses_v6)
# remove some prefixes p1.remove() p3.remove() # check stats for VRF res = VRF.get(0) # ipv4 self.assertEqual(1, res.num_prefixes_v4) self.assertEqual(256, res.total_addresses_v4) self.assertEqual(0, res.used_addresses_v4) self.assertEqual(256, res.free_addresses_v4) # ipv6 self.assertEqual(1, res.num_prefixes_v6) self.assertEqual(1208925819614629174706176, res.total_addresses_v6) self.assertEqual(0, res.used_addresses_v6) self.assertEqual(1208925819614629174706176, res.free_addresses_v6) def test_stats2(self): """ Check stats are correct when adding and removing prefixes """ th = TestHelper() # add some top level prefixes to the default VRF p1 = th.add_prefix('1.0.0.0/24', 'reservation', 'test') p2 = th.add_prefix('172.16.31.10/25', 'assignment', 'test') p3 = th.add_prefix('2001:db8:1::/48', 'reservation', 'test') p4 = th.add_prefix('2001:db8:1:1::/64', 'reservation', 'test') # check stats for VRF res = VRF.get(0) # ipv4 self.assertEqual(2, res.num_prefixes_v4) self.assertEqual(256, res.total_addresses_v4) self.assertEqual(128, res.used_addresses_v4) self.assertEqual(128, res.free_addresses_v4) # ipv6 self.assertEqual(2, res.num_prefixes_v6) self.assertEqual(1208925819614629174706176, res.total_addresses_v6) self.assertEqual(18446744073709551616, res.used_addresses_v6) self.assertEqual(1208907372870555465154560, res.free_addresses_v6) # remove some prefixes p1.remove() p3.remove() # check stats for VRF res = VRF.get(0) # ipv4 self.assertEqual(1, res.num_prefixes_v4) self.assertEqual(128, res.total_addresses_v4) self.assertEqual(0, res.used_addresses_v4) self.assertEqual(128, res.free_addresses_v4) # ipv6 self.assertEqual(1, res.num_prefixes_v6) self.assertEqual(18446744073709551616, res.total_addresses_v6) self.assertEqual(0, res.used_addresses_v6) self.assertEqual(18446744073709551616, res.free_addresses_v6) class TestAddressListing(unittest.TestCase): """ """ def setUp(self): """ Test setup, which essentially means to empty the database """ 
        TestHelper.clear_database()


    def testPrefixInclusion(self):
        """ Test prefix inclusion like include_neighbors, include_parents and include_children
        """
        th = TestHelper()

        # add a few prefixes
        p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'root')
        p2 = th.add_prefix('192.168.0.0/20', 'reservation', 'test')
        p3 = th.add_prefix('192.168.0.0/24', 'reservation', 'foo')
        p4 = th.add_prefix('192.168.1.0/24', 'reservation', 'test')
        p5 = th.add_prefix('192.168.2.0/24', 'reservation', 'test')
        p6 = th.add_prefix('192.168.32.0/20', 'reservation', 'bar')
        p7 = th.add_prefix('192.168.32.0/24', 'assignment', 'test')
        p8 = th.add_prefix('192.168.32.1/32', 'host', 'test')
        p9 = th.add_prefix('192.168.32.2/32', 'host', 'xyz')
        p10 = th.add_prefix('192.168.32.3/32', 'host', 'test')

        # searching 0.0.0.0/0 should return every prefix, in order
        expected = []
        # expected result is a list where each row is a prefix
        expected.append(p1.prefix)
        expected.append(p2.prefix)
        expected.append(p3.prefix)
        expected.append(p4.prefix)
        expected.append(p5.prefix)
        expected.append(p6.prefix)
        expected.append(p7.prefix)
        expected.append(p8.prefix)
        expected.append(p9.prefix)
        expected.append(p10.prefix)
        res = Prefix.smart_search('0.0.0.0/0', {})
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # free-text match on 'root' should only return p1
        expected = []
        # expected result is a list where each row is a prefix
        expected.append(p1.prefix)
        res = Prefix.smart_search('root', {})
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # free-text match on 'foo' should only return p3
        expected = []
        # expected result is a list where each row is a prefix
        expected.append(p3.prefix)
        res = Prefix.smart_search('foo', {})
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # children_depth -1 should pull in all children of the match
        expected = []
        # expected result is a list where each row is a prefix
        expected.append(p1.prefix)
        expected.append(p2.prefix)
        expected.append(p3.prefix)
        expected.append(p4.prefix)
        expected.append(p5.prefix)
        expected.append(p6.prefix)
        expected.append(p7.prefix)
        expected.append(p8.prefix)
        expected.append(p9.prefix)
        expected.append(p10.prefix)
        res = Prefix.smart_search('root', { 'children_depth': -1 })
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # parents_depth -1 should pull in all parents of the match
        expected = []
        # expected result is a list where each row is a prefix
        expected.append(p1.prefix)
        expected.append(p2.prefix)
        expected.append(p3.prefix)
        res = Prefix.smart_search('foo', { 'parents_depth': -1 })
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # include_neighbors should add siblings of the matching prefix
        expected = []
        # expected result is a list where each row is a prefix
        expected.append(p8.prefix)
        expected.append(p9.prefix)
        expected.append(p10.prefix)
        res = Prefix.smart_search('xyz', { 'include_neighbors': True })
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)


    def testTags(self):
        """ Verify that search matches tags
        """
        th = TestHelper()

        # add a few prefixes
        p1 = th.add_prefix('192.168.0.0/16', 'reservation', 'root', [ 'tag1' ])
        p2 = th.add_prefix('192.168.0.0/20', 'reservation', 'test', [ 'tag2' ])
        p3 = th.add_prefix('192.168.0.0/24', 'reservation', 'foo', ['tag3'])

        expected = []
        # match a tag
        expected.append(p3.prefix)
        res = Prefix.smart_search('#tag3')
        result = []
        for prefix in res['result']:
            result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # match an inherited tag -- only count prefixes flagged as a match
        expected = [ p2.prefix ] + expected
        res = Prefix.smart_search('#tag2')
        result = []
        for prefix in res['result']:
            if prefix.match is True:
                result.append(prefix.prefix)
        self.assertEqual(expected, result)

        # match two levels of inherited tags
        expected = [ p1.prefix ] + expected
        res = Prefix.smart_search('#tag1')
        result = []
        for prefix in res['result']:
            if prefix.match is True:
                result.append(prefix.prefix)
        self.assertEqual(expected, result)


class TestPrefixLastModified(unittest.TestCase):
    """ Test updates of the last modified value
    """

    def setUp(self):
        """ Test setup, which essentially
            means to empty the database
        """
        TestHelper.clear_database()


    def test1(self):
        """ The last_modified timestamp should be updated when the prefix is edited
        """
        th = TestHelper()
        p1 = th.add_prefix('1.3.0.0/16', 'reservation', 'test')

        # make sure added and last_modified are equal
        self.assertEqual(p1.added, p1.last_modified)

        # this is a bit silly, but as the last_modified time is returned with a
        # precision of seconds, we need to make sure that we fall on the next
        # second to actually notice that last_modified is not equal to added
        time.sleep(1)

        p1.description = 'updated description'
        p1.save()

        # last_modified should have a later timestamp than added
        self.assertNotEqual(p1.added, p1.last_modified)


class TestCli(unittest.TestCase):
    """ CLI tests
    """

    def test_extra_args(self):
        """ Extra arg should raise exception
        """
        from nipap_cli.command import Command, InvalidCommand
        from nipap_cli import nipap_cli
        from pynipap import NipapError

        # 'FOO' should not be there and should raise an exception
        with self.assertRaisesRegexp(InvalidCommand, 'Invalid argument:'):
            cmd = Command(nipap_cli.cmds, ['address', 'modify', '1.3.3.1/32', 'vrf_rt', 'none', 'set', 'FOO' ])


class TestCliPrefixAutoType(unittest.TestCase):
    """ Test CLI prefix auto type guessing
    """

    def setUp(self):
        """ Test setup, which essentially means to empty the database
        """
        TestHelper.clear_database()

    def mock_cfg(self):
        # minimal in-memory stand-in for the nipap CLI configuration file
        import ConfigParser
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('global')
        cfg.set('global', 'default_vrf_rt', '-')
        cfg.set('global', 'default_list_vrf_rt', 'all')
        return cfg

    def test_auto_type1(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        opts = { 'prefix':
            '10.0.0.0/8', 'type': 'reservation', 'description': 'root' }
        # an explicitly typed covering reservation should be added as-is,
        # sorting before the existing prefixes
        expected.insert(0, [opts['prefix'], opts['type']])
        nipap_cli.add_prefix({}, opts, {})
        result = [[p.prefix, p.type] for p in Prefix.smart_search('')['result']]
        self.assertEqual(expected, result)

    def test_auto_type2(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        # a /24 added inside the assignment is expected to end up as a /32 host
        opts = { 'prefix': '10.0.0.0/24', 'description': 'host' }
        expected.append(['10.0.0.0/32', 'host'])
        nipap_cli.add_prefix({}, opts, {})
        result = [[p.prefix, p.type] for p in Prefix.smart_search('')['result']]
        self.assertEqual(expected, result)

    def test_auto_type3(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        # a bare address is expected to end up as a /32 host
        opts = { 'prefix': '10.0.0.0', 'description': 'host' }
        expected.append([opts['prefix'] + '/32', 'host'])
        nipap_cli.add_prefix({}, opts, {})
        result = [[p.prefix, p.type] for p in Prefix.smart_search('')['result']]
        self.assertEqual(expected, result)

    def test_auto_type4(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        # an address written with a /24 mask still becomes a /32 host
        opts = { 'prefix': '10.0.0.1/24',
            'description': 'host' }
        expected.append(['10.0.0.1/32', 'host'])
        nipap_cli.add_prefix({}, opts, {})
        result = [[p.prefix, p.type] for p in Prefix.smart_search('')['result']]
        self.assertEqual(expected, result)

    def test_auto_type5(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        # a bare non-network address also becomes a /32 host
        opts = { 'prefix': '10.0.0.1', 'description': 'host' }
        expected.append([opts['prefix'] + '/32', 'host'])
        nipap_cli.add_prefix({}, opts, {})
        result = [[p.prefix, p.type] for p in Prefix.smart_search('')['result']]
        self.assertEqual(expected, result)

    def test_auto_type6(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        # an explicit /32 inside the assignment is kept verbatim as a host
        opts = { 'prefix': '10.0.0.1/32', 'description': 'host' }
        expected.append([opts['prefix'], 'host'])
        nipap_cli.add_prefix({}, opts, {})
        result = [[p.prefix, p.type] for p in Prefix.smart_search('')['result']]
        self.assertEqual(expected, result)

    def test_auto_type7(self):
        """ Test automatic prefix type guessing
        """
        from nipap_cli import nipap_cli
        from pynipap import NipapError
        nipap_cli.cfg = self.mock_cfg()

        th = TestHelper()
        expected = []
        # add a few prefixes
        expected.append([th.add_prefix('10.0.0.0/16', 'reservation', 'test').prefix, 'reservation'])
        expected.append([th.add_prefix('10.0.0.0/24', 'assignment', 'test').prefix, 'assignment'])

        # a /25 inside the assignment fits no rule -- the CLI should exit(1)
        opts = { 'prefix': '10.0.0.1/25', 'description': 'host' }
        with
        self.assertRaisesRegexp(SystemExit, "^1$"):
            nipap_cli.add_prefix({}, opts, {})


class TestNipapHelper(unittest.TestCase):
    """ Test the nipap helper app
    """

    def test_test1(self):
        from nipap_cli.command import Command, InvalidCommand
        from nipap_cli import nipap_cli
        from pynipap import NipapError

        # a partial word completes to the single matching command
        cmd = Command(nipap_cli.cmds, ['pool', 'res'])
        self.assertEqual(['resize'], sorted(cmd.complete()))

        # a fully typed word still completes to itself
        cmd = Command(nipap_cli.cmds, ['pool', 'resize'])
        self.assertEqual(['resize'], sorted(cmd.complete()))


class TestSmartParser(unittest.TestCase):
    """ Test the smart parsing functions
    """
    # show full diffs for the large expected query structures
    maxDiff = None

    def test_prefix1(self):
        # plain text is matched with regex against the free-text attributes
        cfg = NipapConfig('/etc/nipap/nipap.conf')
        n = Nipap()

        success, query = n._parse_prefix_query('foo')
        exp_query = {
            'interpretation': {
                'attribute': 'description or comment or node or order_id or customer_id',
                'interpretation': 'text',
                'operator': 'regex',
                'string': 'foo',
                'error': False
            },
            'operator': 'or',
            'val1': {
                'operator': 'or',
                'val1': {
                    'operator': 'or',
                    'val1': {
                        'operator': 'or',
                        'val1': {
                            'operator': 'regex_match',
                            'val1': 'comment',
                            'val2': u'foo'
                        },
                        'val2': {
                            'operator': 'regex_match',
                            'val1': 'description',
                            'val2': u'foo'
                        }
                    },
                    'val2': {
                        'operator': 'regex_match',
                        'val1': 'node',
                        'val2': u'foo'
                    }
                },
                'val2': {
                    'operator': 'regex_match',
                    'val1': 'order_id',
                    'val2': u'foo'
                }
            },
            'val2': {
                'operator': 'regex_match',
                'val1': 'customer_id',
                'val2': u'foo'
            }
        }
        self.assertEqual(success, True)
        self.assertEqual(query, exp_query)

    def test_prefix2(self):
        # an IPv4 prefix is interpreted as contained_within_equals on prefix
        cfg = NipapConfig('/etc/nipap/nipap.conf')
        n = Nipap()

        success, query = n._parse_prefix_query('1.3.3.0/24')
        exp_query = {
            'interpretation': {
                'attribute': 'prefix',
                'interpretation': 'IPv4 prefix',
                'operator': 'contained_within_equals',
                'string': '1.3.3.0/24',
                'error': False
            },
            'operator': 'contained_within_equals',
            'val1': 'prefix',
            'val2': '1.3.3.0/24'
        }
        self.assertEqual(success, True)
        self.assertEqual(query, exp_query)

    def test_prefix3(self):
        # a prefix combined with free text becomes an AND of both queries
        cfg = NipapConfig('/etc/nipap/nipap.conf')
        n = Nipap()

        success,
query = n._parse_prefix_query('1.3.3.0/24 foo') exp_query = { 'interpretation': { 'interpretation': 'and', 'operator': 'and', 'error': False }, 'operator': 'and', 'val1': { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv4 prefix', 'operator': 'contained_within_equals', 'string': u'1.3.3.0/24', 'error': False }, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': '1.3.3.0/24' }, 'val2': { 'interpretation': { 'attribute': 'description or comment or node or order_id or customer_id', 'interpretation': 'text', 'operator': 'regex', 'string': u'foo', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'comment', 'val2': u'foo' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'foo' } }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 'val2': u'foo' } }, 'val2': { 'operator': 'regex_match', 'val1': 'order_id', 'val2': u'foo' } }, 'val2': { 'operator': 'regex_match', 'val1': 'customer_id', 'val2': u'foo' } } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix4(self): """ Test unclosed quotes """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': None, 'val1': None, 'val2': None, 'interpretation': { 'interpretation': None, 'string': None, 'attribute': 'text', 'operator': None, 'error': True, 'error_message': 'unclosed quote' } } success, query = n._parse_vrf_query('"') expected['interpretation']['string'] = '"' self.assertEqual(success, False) self.assertEquals(query, expected) success, query = n._parse_prefix_query('\'') expected['interpretation']['string'] = '\'' self.assertEqual(success, False) self.assertEquals(query, expected) def test_prefix5(self): """ Test unclosed parentheses """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': None, 'val1': None, 'val2': None, 'interpretation': { 'interpretation': None, 
'string': None, 'attribute': 'text', 'operator': None, 'error': True, 'error_message': 'unclosed parentheses' } } success, query = n._parse_prefix_query('(') expected['interpretation']['string'] = '(' self.assertEqual(success, False) self.assertEquals(query, expected) success, query = n._parse_prefix_query(')') expected['interpretation']['string'] = ')' self.assertEqual(success, False) self.assertEquals(query, expected) def test_prefix6(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('foo-agg-1 vlan>100 vlan< 200') exp_query = { 'interpretation': {'interpretation': 'and', 'operator': 'and', 'error': False}, 'operator': 'and', 'val1': {'interpretation': {'interpretation': 'and', 'operator': 'and', 'error': False}, 'operator': 'and', 'val1': {'interpretation': {'attribute': 'description or comment or node or order_id or customer_id', 'interpretation': 'text', 'operator': 'regex', 'string': 'foo-agg-1', 'error': False}, 'operator': 'or', 'val1': {'operator': 'or', 'val1': {'operator': 'or', 'val1': {'operator': 'or', 'val1': {'operator': 'regex_match', 'val1': 'comment', 'val2': 'foo-agg-1'}, 'val2': {'operator': 'regex_match', 'val1': 'description', 'val2': 'foo-agg-1'}}, 'val2': {'operator': 'regex_match', 'val1': 'node', 'val2': 'foo-agg-1'}}, 'val2': {'operator': 'regex_match', 'val1': 'order_id', 'val2': 'foo-agg-1'}}, 'val2': {'operator': 'regex_match', 'val1': 'customer_id', 'val2': 'foo-agg-1'}}, 'val2': { 'interpretation': { 'interpretation': 'expression', 'attribute': 'vlan', 'operator': '>', 'string': 'vlan>100', 'error': False }, 'operator': '>', 'val1': 'vlan', 'val2': '100' } }, 'val2': { 'interpretation': { 'interpretation': 'expression', 'attribute': 'vlan', 'operator': '<', 'string': 'vlan<200', 'error': False }, 'operator': '<', 'val1': 'vlan', 'val2': '200' } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix7(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = 
Nipap() success, query = n._parse_prefix_query('123:456') exp_query = { 'interpretation': { 'attribute': 'VRF RT', 'string': '123:456', 'interpretation': 'vrf_rt', 'operator': 'equals', 'error': False }, 'operator': 'equals', 'val1': 'vrf_rt', 'val2': u'123:456' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix8(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('2001:1000::/32') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv6 prefix', 'operator': 'contained_within_equals', 'string': '2001:1000::/32', 'error': False }, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': '2001:1000::/32' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix9(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('2001:1000:1234::/32') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv6 prefix', 'operator': 'contained_within_equals', 'string': '2001:1000:1234::/32', 'strict_prefix': '2001:1000::/32', 'error': False }, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': '2001:1000::/32' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix10(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('2001:1000::') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv6 address', 'operator': 'contains_equals', 'string': '2001:1000::', 'error': False }, 'operator': 'contains_equals', 'val1': 'prefix', 'val2': '2001:1000::' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix11(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('1.3.3.0') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv4 address', 'operator': 'contains_equals', 
'string': '1.3.3.0', 'error': False }, 'operator': 'contains_equals', 'val1': 'prefix', 'val2': '1.3.3.0' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix12(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('1.3.3.0/16') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv4 prefix', 'operator': 'contained_within_equals', 'string': '1.3.3.0/16', 'strict_prefix': '1.3.0.0/16', 'error': False }, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': '1.3.0.0/16' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix13(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('1.3.3/16') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'IPv4 prefix', 'operator': 'contained_within_equals', 'string': '1.3.3/16', 'strict_prefix': '1.3.0.0/16', 'expanded': '1.3.3.0/16', 'error': False }, 'operator': 'contained_within_equals', 'val1': 'prefix', 'val2': '1.3.0.0/16' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix14(self): """ Match against invalid attribute """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': "=", 'val1': "foo", 'val2': "bar", 'interpretation': { 'interpretation': 'expression', 'string': 'foo=bar', 'attribute': 'foo', 'operator': '=', 'error': True, 'error_message': 'unknown attribute' } } success, query = n._parse_vrf_query('foo=bar') self.assertEqual(success, False) self.assertEquals(expected, query) def test_prefix15(self): """ Match invalid prefix type """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': "=", 'val1': "type", 'val2': "foo", 'interpretation': { 'interpretation': 'expression', 'string': 'type=foo', 'attribute': 'type', 'operator': '=', 'error': True, 'error_message': 'invalid value' } } success, query = 
n._parse_prefix_query('type=foo') self.assertEqual(success, False) self.assertEquals(expected, query) def test_prefix16(self): """ Single quoted string, double quotes - "foo bar" """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('"foo bar"') expected = { 'interpretation': { 'string': 'foo bar', 'interpretation': 'text', 'operator': 'regex', 'attribute': 'description or comment or node or order_id or customer_id', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'comment', 'val2': 'foo bar', }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': 'foo bar' } }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 'val2': 'foo bar' } }, 'val2': { 'operator': 'regex_match', 'val1': 'order_id', 'val2': 'foo bar' } }, 'val2': { 'operator': 'regex_match', 'val1': 'customer_id', 'val2': 'foo bar' } } self.assertEqual(success, True) self.assertEqual(query, expected) def test_prefix17(self): """ Mixed quoted and un-quoted strings, single quotes - 'foo bar' baz """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('\'foo bar\' baz') expected = { 'interpretation': { 'interpretation': 'and', 'operator': 'and', 'error': False }, 'operator': 'and', 'val1': { 'interpretation': { 'string': 'foo bar', 'interpretation': 'text', 'operator': 'regex', 'attribute': 'description or comment or node or order_id or customer_id', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'comment', 'val2': 'foo bar' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': 'foo bar' } }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 'val2': 'foo bar' }, }, 'val2': { 'operator': 'regex_match', 'val1': 'order_id', 'val2': 'foo bar' }, }, 'val2': { 
'operator': 'regex_match', 'val1': 'customer_id', 'val2': 'foo bar' } }, 'val2': { 'interpretation': { 'string': 'baz', 'interpretation': 'text', 'operator': 'regex', 'attribute': 'description or comment or node or order_id or customer_id', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'comment', 'val2': 'baz' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': 'baz' } }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 'val2': 'baz' } }, 'val2': { 'operator': 'regex_match', 'val1': 'order_id', 'val2': 'baz' } }, 'val2': { 'val2': 'baz', 'val1': 'customer_id', 'operator': 'regex_match' } } } self.assertEqual(success, True) self.assertEqual(query, expected) def test_prefix18(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('#foo') expected = { 'interpretation': { 'attribute': 'tag', 'error': False, 'interpretation': '(inherited) tag', 'operator': 'equals_any', 'string': '#foo' }, 'operator': 'or', 'val1': { 'operator': 'equals_any', 'val1': 'tags', 'val2': 'foo' }, 'val2': { 'operator': 'equals_any', 'val1': 'inherited_tags', 'val2': 'foo' } } self.assertEqual(success, True) self.assertEqual(query, expected) def test_prefix19(self): """ Test smart parser using unicode characters """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query(u'åäö') exp_query = { 'interpretation': { 'attribute': 'description or comment or node or order_id or customer_id', 'interpretation': 'text', 'operator': 'regex', 'string': u'åäö', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'comment', 'val2': u'åäö' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'åäö' } }, 'val2': { 'operator': 'regex_match', 'val1': 'node', 
'val2': u'åäö' } }, 'val2': { 'operator': 'regex_match', 'val1': 'order_id', 'val2': u'åäö' } }, 'val2': { 'operator': 'regex_match', 'val1': 'customer_id', 'val2': u'åäö' } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_prefix20(self): """ Test smart parsing with a "contained by" operator (<<=) on the prefix attribute """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_prefix_query('prefix<<=1.3.0.0/16') exp_query = { 'interpretation': { 'attribute': 'prefix', 'interpretation': 'expression', 'operator': '<<=', 'string': 'prefix<<=1.3.0.0/16', 'error': False }, 'operator': '<<=', 'val1': 'prefix', 'val2': u'1.3.0.0/16' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_vrf1(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_vrf_query('foo') exp_query = { 'interpretation': { 'attribute': 'vrf or name or description', 'interpretation': 'text', 'operator': 'regex', 'string': u'foo', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', 'val2': u'foo' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'foo' } }, 'val2': { 'operator': 'regex_match', 'val1': 'rt', 'val2': u'foo' } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_vrf2(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_vrf_query('123:456') exp_query = { 'interpretation': { 'attribute': 'vrf or name or description', 'interpretation': 'text', 'operator': 'regex', 'string': u'123:456', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', 'val2': u'123:456' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'123:456' } }, 'val2': { 'operator': 'regex_match', 'val1': 'rt', 'val2': u'123:456' } } self.assertEqual(success, True) self.assertEqual(query, 
exp_query) def test_vrf3(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_vrf_query('#bar') exp_query = { 'interpretation': { 'attribute': 'tag', 'interpretation': 'tag', 'operator': 'equals_any', 'string': u'#bar', 'error': False }, 'operator': 'equals_any', 'val1': 'tags', 'val2': u'bar' } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_vrf4(self): """ Unclosed quotes """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': None, 'val1': None, 'val2': None, 'interpretation': { 'interpretation': None, 'string': None, 'attribute': 'text', 'operator': None, 'error': True, 'error_message': 'unclosed quote' } } success, query = n._parse_vrf_query('"') expected['interpretation']['string'] = '"' self.assertEqual(success, False) self.assertEqual(query, expected) success, query = n._parse_vrf_query('\'') expected['interpretation']['string'] = '\'' self.assertEqual(success, False) self.assertEqual(query, expected) def test_vrf5(self): """ Unclosed parentheses """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': None, 'val1': None, 'val2': None, 'interpretation': { 'interpretation': None, 'string': None, 'attribute': 'text', 'operator': None, 'error': True, 'error_message': 'unclosed parentheses' } } success, query = n._parse_vrf_query('(') expected['interpretation']['string'] = '(' self.assertEqual(success, False) self.assertEqual(query, expected) success, query = n._parse_vrf_query(')') expected['interpretation']['string'] = ')' self.assertEqual(success, False) self.assertEqual(query, expected) def test_vrf6(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_vrf_query('foo bar') exp_query = { 'interpretation': { 'interpretation': 'and', 'operator': 'and', 'error': False }, 'operator': 'and', 'val1': { 'interpretation': { 'attribute': 'vrf or name or description', 'interpretation': 'text', 'operator': 'regex', 
'string': u'foo', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', 'val2': u'foo' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'foo' } }, 'val2': { 'operator': 'regex_match', 'val1': 'rt', 'val2': u'foo' } }, 'val2': { 'interpretation': { 'attribute': 'vrf or name or description', 'interpretation': 'text', 'operator': 'regex', 'string': u'bar', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', 'val2': u'bar' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'bar' } }, 'val2': { 'operator': 'regex_match', 'val1': 'rt', 'val2': u'bar' } } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_vrf7(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_vrf_query('#foo') expected = { 'interpretation': { 'attribute': 'tag', 'error': False, 'interpretation': 'tag', 'operator': 'equals_any', 'string': '#foo' }, 'operator': 'equals_any', 'val1': 'tags', 'val2': 'foo' } self.assertEqual(success, True) self.assertEqual(query, expected) def test_pool1(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_pool_query('foo') exp_query = { 'interpretation': { 'attribute': 'name or description', 'interpretation': 'text', 'operator': 'regex', 'string': u'foo', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', 'val2': u'foo' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'foo' } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_pool2(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_pool_query('123:456') exp_query = { 'interpretation': { 'attribute': 'name or description', 'interpretation': 'text', 'operator': 'regex', 'string': u'123:456', 'error': False }, 'operator': 'or', 'val1': { 
'operator': 'regex_match', 'val1': 'name', 'val2': u'123:456' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'123:456' } } self.assertEqual(success, True) self.assertEqual(exp_query, query) def test_pool3(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_pool_query('#bar') exp_query = { 'interpretation': { 'attribute': 'tag', 'interpretation': 'tag', 'operator': 'equals_any', 'string': '#bar', 'error': False }, 'operator': 'equals_any', 'val1': 'tags', 'val2': 'bar' } self.assertEqual(success, True) self.assertEqual(exp_query, query) def test_pool4(self): """ Unclosed quote """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': None, 'val1': None, 'val2': None, 'interpretation': { 'interpretation': None, 'string': None, 'attribute': 'text', 'operator': None, 'error': True, 'error_message': 'unclosed quote' } } success, query = n._parse_pool_query('"') expected['interpretation']['string'] = '"' self.assertEqual(success, False) self.assertEqual(query, expected) success, query = n._parse_pool_query('\'') expected['interpretation']['string'] = '\'' self.assertEqual(success, False) self.assertEqual(query, expected) def test_pool5(self): """ Unclosed parentheses """ cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() expected = { 'operator': None, 'val1': None, 'val2': None, 'interpretation': { 'interpretation': None, 'string': None, 'attribute': 'text', 'operator': None, 'error': True, 'error_message': 'unclosed parentheses' } } success, query = n._parse_pool_query('(') expected['interpretation']['string'] = '(' self.assertEqual(success, False) self.assertEqual(query, expected) success, query = n._parse_pool_query(')') expected['interpretation']['string'] = ')' self.assertEqual(success, False) self.assertEqual(query, expected) def test_pool6(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_pool_query('#foo and bar') exp_query = { 
'interpretation': { 'interpretation': 'and', 'operator': 'and', 'error': False }, 'operator': 'and', 'val1': { 'interpretation': { 'attribute': 'tag', 'interpretation': 'tag', 'operator': 'equals_any', 'string': '#foo', 'error': False }, 'operator': 'equals_any', 'val1': 'tags', 'val2': 'foo' }, 'val2': { 'interpretation': { 'attribute': 'name or description', 'interpretation': 'text', 'operator': 'regex', 'string': u'bar', 'error': False }, 'operator': 'or', 'val1': { 'operator': 'regex_match', 'val1': 'name', 'val2': u'bar' }, 'val2': { 'operator': 'regex_match', 'val1': 'description', 'val2': u'bar' } } } self.assertEqual(success, True) self.assertEqual(query, exp_query) def test_pool7(self): cfg = NipapConfig('/etc/nipap/nipap.conf') n = Nipap() success, query = n._parse_pool_query('#foo') expected = { 'interpretation': { 'attribute': 'tag', 'error': False, 'interpretation': 'tag', 'operator': 'equals_any', 'string': '#foo' }, 'operator': 'equals_any', 'val1': 'tags', 'val2': 'foo' } self.assertEqual(success, True) self.assertEqual(query, expected) class TestAvpEmptyName(unittest.TestCase): """ Test AVP with empty name """ def setUp(self): """ Test setup, which essentially means to empty the database """ TestHelper.clear_database() def test_pool_add_avp(self): p = Pool() p.name = 'test AVP with empty name' p.avps = { '': '1337' } with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): p.save() def test_pool_edit_avp(self): th = TestHelper() # add a pool p = th.add_pool('test', 'assignment', 31, 112) p.avps = { '': '1337' } with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): p.save() def test_prefix_add_avp(self): p = Prefix() p.prefix = '1.2.3.0/24' p.type = 'assignment' p.status = 'assigned' p.description = 'test AVP with empty name' p.avps = { '': '1337' } with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"): p.save() def test_prefix_edit_avp(self): th = TestHelper() 
        p = th.add_prefix('192.0.2.0/24', 'assignment', 'test AVP with empty name')
        p.avps = { '': '1337' }
        with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"):
            p.save()

    def test_vrf_add_avp(self):
        # an empty AVP name must be rejected when adding a VRF
        v = VRF()
        v.rt = '123:456'
        v.name = 'test AVP with empty name'
        v.avps = { '': '1337' }
        with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"):
            v.save()

    def test_vrf_edit_avp(self):
        # an empty AVP name must be rejected when editing an existing VRF
        v = VRF()
        v.rt = '123:456'
        v.name = 'test AVP with empty name'
        v.save()

        v.avps = { '': '1337' }
        with self.assertRaisesRegexp(NipapValueError, "AVP with empty name is not allowed"):
            v.save()


class TestDatabaseConstraints(unittest.TestCase):
    """ Test if the database constraints are correctly implemented
    """

    def setUp(self):
        """ Test setup, which essentially means to empty the database
        """
        TestHelper.clear_database()

    def test_constraints(self):
        """Testing of database constraints
        """
        th = TestHelper()

        d = "test description"
        th.add_prefix('1.3.0.0/16', 'reservation', d)

        with self.assertRaisesRegexp(NipapDuplicateError, "Duplicate"):
            # exact duplicate
            th.add_prefix('1.3.0.0/16', 'reservation', d)

        p2 = th.add_prefix('1.3.3.0/24', 'reservation', d)
        p3 = th.add_prefix('1.3.3.0/27', 'assignment', d)
        th.add_prefix('1.3.3.0/32', 'host', d)
        th.add_prefix('1.3.3.1/32', 'host', d)

        with self.assertRaisesRegexp(NipapValueError, "Prefix of type host must have all bits set in netmask"):
            # do not allow /31 as type 'host'
            th.add_prefix('1.3.3.2/31', 'host', d)

        with self.assertRaisesRegexp(NipapValueError, "Parent prefix .* is of type assignment"):
            # unable to create assignment within assignment
            th.add_prefix('1.3.3.3/32', 'assignment', d)

        with self.assertRaisesRegexp(NipapValueError, "contains hosts"):
            # unable to remove assignment containing hosts
            p3.remove()

        with self.assertRaisesRegexp(NipapValueError, "'assignment' must not have any subnets other than of type 'host'"):
            p2.type = 'assignment'
            p2.save()


if __name__ == '__main__':
    # set up logging
    log = logging.getLogger()
    logging.basicConfig()
    log.setLevel(logging.INFO)

    if sys.version_info >= (2,7):
        unittest.main(verbosity=2)
    else:
        unittest.main()
59,606
2,180
<reponame>alonebehappy/kafka-manager package com.xiaojukeji.kafka.manager.dao.impl; import com.xiaojukeji.kafka.manager.dao.HeartbeatDao; import com.xiaojukeji.kafka.manager.common.entity.pojo.HeartbeatDO; import org.mybatis.spring.SqlSessionTemplate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import java.util.Date; import java.util.List; /** * @author zengqiao * @date 20/8/10 */ @Repository("heartbeatDao") public class HeartbeatDaoImpl implements HeartbeatDao { @Autowired private SqlSessionTemplate sqlSession; public void setSqlSession(SqlSessionTemplate sqlSession) { this.sqlSession = sqlSession; } @Override public int replace(HeartbeatDO heartbeatDO) { return sqlSession.insert("HeartbeatDao.replace", heartbeatDO); } @Override public List<HeartbeatDO> selectActiveHosts(Date afterTime) { return sqlSession.selectList("HeartbeatDao.selectActiveHosts", afterTime); } }
367
2,990
/* * GDevelop Core * Copyright 2008-2016 <NAME> (<EMAIL>). All rights reserved. * This project is released under the MIT License. */ #ifndef COMMONTOOLS_H #define COMMONTOOLS_H #include <algorithm> #include <cmath> #include <iterator> #include <sstream> #include <string> #include <vector> #include "Utf8/utf8.h" #include <SFML/System/String.hpp> #ifdef __GNUC__ #define GD_DEPRECATED __attribute__((deprecated)) #else #define GD_DEPRECATED #endif namespace gd { inline double Pi() { return 3.141592653589793238; } #ifdef __GNUC__ /** * Round the number to the nearest integer * \ingroup CommonProgrammingTools */ inline int Round(float x) { return round(x); } #else /** * Round the number to the nearest integer * \ingroup CommonProgrammingTools */ inline double Round( double d ) { return ( d >= 0 ? floor(d+0.5) : ceil(d-0.5) ); } #endif } #endif // COMMONTOOLS_H
397
713
package org.infinispan.server.router.routes; import javax.net.ssl.SSLContext; public interface SniRouteSource extends RouteSource { SSLContext getSslContext(); String getSniHostName(); }
68
335
{ "word": "Pup", "definitions": [ "(of bitches and certain other female animals) give birth to young." ], "parts-of-speech": "Verb" }
66
787
<reponame>pietelite/SpongeAPI /* * This file is part of SpongeAPI, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.api.event.command; import org.checkerframework.checker.nullness.qual.Nullable; import org.spongepowered.api.command.CommandCause; import org.spongepowered.api.command.CommandResult; import org.spongepowered.api.event.Cancellable; import org.spongepowered.api.event.Cause; import org.spongepowered.api.event.Event; import java.util.Optional; /** * Events that fire when commands are executed. */ public interface ExecuteCommandEvent extends Event { /** * Gets the {@link CommandCause} that is involved in this event. 
* * <p>{@link CommandCause#cause()} returns the same {@link Cause} as * {@link #cause()}.</p> * * @return The {@link CommandCause} */ CommandCause commandCause(); /** * Gets the command that were requested by the {@link Cause} before any * events were fired, without any sort of command prefix. * * <p>For example, if the message was {@code /example bob 3 -f}, then * the command would be {@code example}.</p> * * @return The original command */ String originalCommand(); /** * Gets the command that will be/has been executed, without any prefix. * * @return The command */ String command(); /** * Gets the arguments that were requested by the {@link Cause} before any * events were fired. * * <p>For example, if the message was {@code /example bob 3 -f}, then * the arguments would be {@code bob 3 -f}.</p> * * @return The original arguments */ String originalArguments(); /** * Gets the arguments as a string. * * @return The arguments */ String arguments(); /** * Fired before the command is executed. */ interface Pre extends ExecuteCommandEvent, Cancellable { /** * Sets the command as a string, without any sort of command prefix. * * <p>For example, if the message was {@code /example bob 3 -f}, then * the command would be {@code example}.</p> * * @param command The command */ void setCommand(String command); /** * Sets the arguments as a string. * * <p>For example, if the message was {@code /example bob 3 -f}, then * the arguments would be {@code bob 3 -f}.</p> * * @param arguments The arguments */ void setArguments(String arguments); /** * The result of the command. * * <p>If set, this indicates cancellation of the command.</p> * * @return The result of the command, if set. */ Optional<CommandResult> result(); /** * Sets the result of the command. * * @param result The result of the command */ void setResult(@Nullable CommandResult result); /** * If true, the command will not run and the {@link CommandResult} * given by {@link #result()} will be returned. 
* * @return The cancellation status of the event. */ @Override boolean isCancelled(); /** * Sets whether the command will run. If the command is cancelled using * this method, an appropriate {@link CommandResult} will be set (and * returned from {@link #result()}). * * @param cancel The new cancelled state */ @Override void setCancelled(boolean cancel); } /** * Fired after the command is executed. */ interface Post extends ExecuteCommandEvent { /** * The result of the command. * * @return The result */ CommandResult result(); } }
1,852
750
<gh_stars>100-1000 import enum import random import string import typing as t import uuid from datetime import date, datetime, time, timedelta class RandomBuilder: @classmethod def next_bool(cls) -> bool: return random.choice([True, False]) @classmethod def next_bytes(cls, length=8) -> bytes: return random.getrandbits(length * 8).to_bytes(length, "little") @classmethod def next_date(cls) -> date: return date( year=random.randint(2000, 2050), month=random.randint(1, 12), day=random.randint(1, 28), ) @classmethod def next_datetime(cls) -> datetime: return datetime( year=random.randint(2000, 2050), month=random.randint(1, 12), day=random.randint(1, 28), hour=random.randint(0, 23), minute=random.randint(0, 59), second=random.randint(0, 59), ) @classmethod def next_enum(cls, e: t.Type[enum.Enum]) -> t.Any: return random.choice([item.value for item in e]) @classmethod def next_float(cls, minimum=0, maximum=2147483647, scale=5) -> float: return round(random.uniform(minimum, maximum), scale) @classmethod def next_int(cls, minimum=0, maximum=2147483647) -> int: return random.randint(minimum, maximum) @classmethod def next_str(cls, length=16) -> str: return "".join( random.choice(string.ascii_letters) for _ in range(length) ) @classmethod def next_time(cls) -> time: return time( hour=random.randint(0, 23), minute=random.randint(0, 59), second=random.randint(0, 59), ) @classmethod def next_timedelta(cls) -> timedelta: return timedelta( days=random.randint(1, 7), hours=random.randint(1, 23), minutes=random.randint(0, 59), ) @classmethod def next_uuid(cls) -> uuid.UUID: return uuid.uuid4()
950
18,599
<reponame>tanhx2008/GSYVideoPlayer package com.example.gsyvideoplayer.effect; import android.graphics.Bitmap; import android.opengl.GLSurfaceView; import com.shuyu.gsyvideoplayer.render.view.GSYVideoGLView.ShaderInterface; /** * 水印效果 */ public class BitmapIconEffect implements ShaderInterface { private final static int NEVER_SET = -5555; private GLSurfaceView mGlSurfaceViewl; private Bitmap mBitmap; private int mWidth = -1; private int mHeight = -1; private float mAlpha = 1.0f; private float mPositionOffset = 1.0f; private float mPositionX = NEVER_SET; private float mPositionY = NEVER_SET; public BitmapIconEffect(Bitmap bitmap) { this(bitmap, bitmap.getWidth(), bitmap.getHeight()); } public BitmapIconEffect(Bitmap bitmap, int width, int height) { this(bitmap, width, height, 1); } public BitmapIconEffect(Bitmap bitmap, int width, int height, float alpha) { this.mBitmap = bitmap; this.mWidth = width; this.mHeight = height; this.mAlpha = alpha; } @Override public String getShader(GLSurfaceView mGlSurfaceView) { this.mGlSurfaceViewl = mGlSurfaceView; String shader = "#extension GL_OES_EGL_image_external : require\n" + "precision mediump float;\n" + "varying vec2 vTextureCoord;\n" + "uniform samplerExternalOES sTexture;\n" + "uniform sampler2D sTexture2;\n" + "void main() {\n" + " vec4 c1 = texture2D(sTexture2, vTextureCoord);\n" + " gl_FragColor = vec4(c1.rgb, c1.a *" + mAlpha + ");\n" + "}\n"; return shader; } public void setPositionX(float positionX) { this.mPositionX = positionX; } public void setPositionY(float positionY) { this.mPositionY = positionY; } public float getAlpha() { return mAlpha; } public float getPositionOffset() { return mPositionOffset; } public float getWidth() { return (float) mWidth; } public float getHeight() { return (float) mHeight; } /** * 水印图的默认比例 */ public float getScaleW() { return getWidth() / mGlSurfaceViewl.getWidth(); } /** * 水印图的默认比例 */ public float getScaleH() { return getHeight() / mGlSurfaceViewl.getHeight(); } /** * 
水印图的起始位置,默认右边 */ public float getPositionX() { if (mPositionX != NEVER_SET) { return mPositionX; } return -(mGlSurfaceViewl.getWidth() / (getWidth()) - mPositionOffset); } /** * 水印图的起始位置,默认上 */ public float getPositionY() { if (mPositionY != NEVER_SET) { return mPositionY; } return -(mGlSurfaceViewl.getHeight() / (getHeight()) - mPositionOffset); } public float getMaxPositionX() { return mGlSurfaceViewl.getWidth() / (getWidth()) - mPositionOffset; } public float getMaxPositionY() { return mGlSurfaceViewl.getHeight() / (getHeight()) - mPositionOffset; } public float getMinPositionX() { return -(mGlSurfaceViewl.getWidth() / (getWidth()) - mPositionOffset); } public float getMinPositionY() { return -(mGlSurfaceViewl.getHeight() / (getHeight()) - mPositionOffset); } public Bitmap getBitmap() { return mBitmap; } }
1,693
355
package net.tomp2p.message; import net.tomp2p.message.Message.Content; /** * Describes the index of a {@code Message.Content} enum in a {@code Message}. * <b>Note:</b> Each {@code Message} can contain up to 8 contents, so indices range from 0 to 7. * * @author <NAME> * */ public class MessageContentIndex { private final int index; private final Content content; public MessageContentIndex(int index, Content content) { this.index = index; this.content = content; } /** * The index of the associated content. * * @return The index of the associated content. */ public int index() { return index; } /** * The content of the associated index. * * @return The content of the associated index. */ public Content content() { return content; } }
326
357
/* * Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, without * warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the * License for the specific language governing permissions and limitations * under the License. */ package com.vmware.identity.interop.ldap; import java.util.Arrays; import java.util.List; import org.apache.commons.lang.SystemUtils; import com.sun.jna.Structure; import com.sun.jna.win32.W32APITypeMapper; enum SecWinntAuthFlags { SEC_WINNT_AUTH_ID_ANSI(0x1), SEC_WINNT_AUTH_ID_UNICODE(0x2); private int _code; private SecWinntAuthFlags(int code) { _code = code; } public int getCode() { return _code; } } /* SecWinntAuthId constructs SEC_WINNT_AUTH_ID * which encapsulates credentials information for * ldap_bind_s on Windows * Intended not to be used by ldap package on Linux */ public final class SecWinntAuthId extends Structure { public String _pszUserName; public int _userNameLength; public String _pszDomain; public int _domainNameLength; public String _password; public int _passwordLength; public int _Flags; public SecWinntAuthId(String pszUserName, String pszDomain, String password) { super(SystemUtils.IS_OS_WINDOWS ? W32APITypeMapper.UNICODE : null); this._pszUserName = pszUserName; this._userNameLength = pszUserName == null ? 0 : pszUserName.length(); this._pszDomain = pszDomain; this._domainNameLength = pszDomain == null ? 0 : pszDomain.length(); this._password = password; this._passwordLength = password == null ? 
0 : <PASSWORD>(); this._Flags = SecWinntAuthFlags.SEC_WINNT_AUTH_ID_UNICODE.getCode(); write(); } @Override protected List<String> getFieldOrder() { return Arrays.asList(new String[] { "_pszUserName", "_userNameLength", "_pszDomain", "_domainNameLength", "_password", "_passwordLength", "_Flags" }); } }
883
892
<gh_stars>100-1000 { "schema_version": "1.2.0", "id": "GHSA-6j47-h927-wmwr", "modified": "2022-05-01T17:57:56Z", "published": "2022-05-01T17:57:56Z", "aliases": [ "CVE-2007-1865" ], "details": "** DISPUTED ** The ipv6_getsockopt_sticky function in the kernel in Red Hat Enterprise Linux (RHEL) Beta 5.1.0 allows local users to obtain sensitive information (kernel memory contents) via a negative value of the len parameter. NOTE: this issue has been disputed in a bug comment, stating that \"len is ignored when copying header info to the user's buffer.\"", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2007-1865" }, { "type": "WEB", "url": "https://bugzilla.redhat.com/show_bug.cgi?id=232045" }, { "type": "WEB", "url": "http://osvdb.org/45909" } ], "database_specific": { "cwe_ids": [ ], "severity": "LOW", "github_reviewed": false } }
437
349
#ifndef _IVW_MODULE_OPENGL_DEFINE_H_ #define _IVW_MODULE_OPENGL_DEFINE_H_ #ifdef INVIWO_ALL_DYN_LINK // DYNAMIC // If we are building DLL files we must declare dllexport/dllimport #ifdef IVW_MODULE_OPENGL_EXPORTS #ifdef _WIN32 #define IVW_MODULE_OPENGL_API __declspec(dllexport) #else // UNIX (GCC) #define IVW_MODULE_OPENGL_API __attribute__((visibility("default"))) #endif #else #ifdef _WIN32 #define IVW_MODULE_OPENGL_API __declspec(dllimport) #else #define IVW_MODULE_OPENGL_API #endif #endif #else // STATIC #define IVW_MODULE_OPENGL_API #endif #endif /* _IVW_MODULE_OPENGL_DEFINE_H_ */
264
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.lsp.client.bindings.refactoring; import java.io.IOException; import java.io.OutputStream; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import org.eclipse.lsp4j.CreateFile; import org.eclipse.lsp4j.DeleteFile; import org.eclipse.lsp4j.InitializeParams; import org.eclipse.lsp4j.InitializeResult; import org.eclipse.lsp4j.Position; import org.eclipse.lsp4j.Range; import org.eclipse.lsp4j.RenameFile; import org.eclipse.lsp4j.RenameOptions; import org.eclipse.lsp4j.RenameParams; import org.eclipse.lsp4j.ResourceOperation; import org.eclipse.lsp4j.ServerCapabilities; import org.eclipse.lsp4j.TextDocumentEdit; import org.eclipse.lsp4j.TextDocumentIdentifier; import org.eclipse.lsp4j.TextEdit; import org.eclipse.lsp4j.VersionedTextDocumentIdentifier; import org.eclipse.lsp4j.WorkspaceEdit; import org.eclipse.lsp4j.jsonrpc.messages.Either; import org.eclipse.lsp4j.services.LanguageServer; import 
org.eclipse.lsp4j.services.TextDocumentService; import org.eclipse.lsp4j.services.WorkspaceService; import org.junit.Test; import org.netbeans.junit.MockServices; import org.netbeans.modules.lsp.client.Utils; import org.openide.cookies.EditorCookie; import org.openide.filesystems.FileObject; import org.openide.filesystems.FileUtil; import org.openide.text.CloneableEditorSupport; import static org.junit.Assert.*; import org.netbeans.api.editor.mimelookup.MimePath; import org.netbeans.junit.NbTestCase; import org.netbeans.modules.editor.NbEditorKit; import org.netbeans.modules.lsp.client.LSPBindings; import org.netbeans.modules.lsp.client.TestUtils.BaseTextDocumentServiceImpl; import org.netbeans.modules.lsp.client.TestUtils.BaseWorkspaceServiceImpl; import static org.netbeans.modules.lsp.client.TestUtils.MIME_TYPE; import org.netbeans.modules.lsp.client.TestUtils.MimeDataProviderImpl; import org.netbeans.modules.lsp.client.TestUtils.MockLSP; import org.netbeans.modules.lsp.client.TestUtils.MockMimeResolver; import org.netbeans.modules.lsp.client.bindings.refactoring.Refactoring.LSPCreateFile; import org.netbeans.modules.lsp.client.bindings.refactoring.Refactoring.LSPDeleteFile; import org.netbeans.modules.lsp.client.bindings.refactoring.Refactoring.LSPRenameFile; import org.netbeans.modules.lsp.client.bindings.refactoring.tree.DiffElement; import org.netbeans.modules.refactoring.api.Problem; import org.netbeans.modules.refactoring.api.RefactoringElement; import org.netbeans.modules.refactoring.api.RefactoringSession; import org.netbeans.modules.refactoring.api.RenameRefactoring; import org.netbeans.modules.refactoring.api.impl.APIAccessor; import org.netbeans.modules.refactoring.spi.RefactoringElementImplementation; import org.netbeans.modules.refactoring.spi.impl.UndoableWrapper; import org.netbeans.spi.editor.mimelookup.MimeDataProvider; import org.openide.text.PositionRef; import org.openide.util.Lookup; import org.openide.util.lookup.Lookups; /** * * @author 
lahvac */ public class RenameRefactoringTest { @Test public void testSimpleRename() throws Exception { MockLSP.createServer = () -> new TestLanguageServer(); MockServices.setServices(MimeDataProviderImpl.class, MockMimeResolver.class, RootMimeDataProviderImpl.class); FileObject folder = FileUtil.createMemoryFileSystem().getRoot().createFolder("myfolder"); FileObject file1 = createFile(folder, "data1.mock-txt"); try (OutputStream out = file1.getOutputStream()) { out.write((" test other\n" + " other test\n").getBytes(StandardCharsets.UTF_8)); } FileObject file2 = createFile(folder, "data2.mock-txt"); try (OutputStream out = file2.getOutputStream()) { out.write((" 2test other\n" + " 2other test\n").getBytes(StandardCharsets.UTF_8)); } String uri = Utils.toURI(file1); LSPBindings bindings = LSPBindings.getBindings(file1); RenameParams renameParams = new RenameParams(new TextDocumentIdentifier(uri), new Position(1, 8), "newName"); List<Function<RenameParams, WorkspaceEdit>> renameFunctions = Arrays.asList( params -> { assertEquals(uri, params.getTextDocument().getUri()); assertEquals("newName", params.getNewName()); WorkspaceEdit result = new WorkspaceEdit(); String file1URI = params.getTextDocument().getUri(); TextDocumentEdit file1Edits = new TextDocumentEdit(new VersionedTextDocumentIdentifier(file1URI, -1), Arrays.asList(new TextEdit(new Range(new Position(0, 2), new Position(0, 6)), "newName"), new TextEdit(new Range(new Position(1, 8), new Position(1, 12)), "newName"))); String file2URI = file1URI.replace("data1", "data2"); TextDocumentEdit file2Edits = new TextDocumentEdit(new VersionedTextDocumentIdentifier(file2URI, -1), Arrays.asList(new TextEdit(new Range(new Position(0, 3), new Position(0, 7)), "newName"), new TextEdit(new Range(new Position(1, 9), new Position(1, 13)), "newName"))); result.setDocumentChanges(Arrays.asList(Either.forLeft(file1Edits), Either.forLeft(file2Edits))); return result; }, params -> { assertEquals(uri, 
params.getTextDocument().getUri()); assertEquals("newName", params.getNewName()); WorkspaceEdit result = new WorkspaceEdit(); Map<String, List<TextEdit>> file2Edits = new HashMap<>(); String file1URI = params.getTextDocument().getUri(); file2Edits.put(file1URI, Arrays.asList(new TextEdit(new Range(new Position(0, 2), new Position(0, 6)), "newName"), new TextEdit(new Range(new Position(1, 8), new Position(1, 12)), "newName"))); String file2URI = file1URI.replace("data1", "data2"); file2Edits.put(file2URI, Arrays.asList(new TextEdit(new Range(new Position(0, 3), new Position(0, 7)), "newName"), new TextEdit(new Range(new Position(1, 9), new Position(1, 13)), "newName"))); result.setChanges(file2Edits); return result; } ); for (Function<RenameParams, WorkspaceEdit> renameFunc : renameFunctions) { renameFunction = renameFunc; RenameRefactoring refactoring = new RenameRefactoring(Lookups.fixed(bindings, renameParams)); RefactoringSession session = RefactoringSession.create("test rename"); assertNull(refactoring.checkParameters()); assertNull(refactoring.preCheck()); assertNull(refactoring.prepare(session)); Set<String> elements = new HashSet<>(); for (RefactoringElement re : session.getRefactoringElements()) { RefactoringElementImplementation impl = APIAccessor.DEFAULT.getRefactoringElementImplementation(re); Method getNewFileContent = impl.getClass().getDeclaredMethod("getNewFileContent"); getNewFileContent.setAccessible(true); String newFileContent = (String) getNewFileContent.invoke(impl); String element = positionToString(re.getPosition().getBegin()) + "-" + positionToString(re.getPosition().getEnd()) + ":" + newFileContent; elements.add(element); } Set<String> expectedElements = new HashSet<>(Arrays.asList( "1:9-1:13: 2newName other\n" + " 2other newName\n", "0:3-0:7: 2newName other\n" + " 2other newName\n", "1:8-1:12: newName other\n" + " other newName\n", "0:2-0:6: newName other\n" + " other newName\n" )); assertEquals(expectedElements, elements); 
session.doRefactoring(true); assertFile(file1, " newName other\n" + " other newName\n"); assertFile(file2, " 2newName other\n" + " 2other newName\n"); session.undoRefactoring(true); assertFile(file1, " test other\n" + " other test\n"); assertFile(file2, " 2test other\n" + " 2other test\n"); } } @Test public void testFileOperations() throws Exception { MockLSP.createServer = () -> new TestLanguageServer(); MockServices.setServices(MimeDataProviderImpl.class, MockMimeResolver.class, RootMimeDataProviderImpl.class); FileObject folder = FileUtil.createMemoryFileSystem().getRoot().createFolder("myfolder"); FileObject file1 = createFile(folder, "data1.mock-txt"); try (OutputStream out = file1.getOutputStream()) { out.write((" test other\n" + " other test\n").getBytes(StandardCharsets.UTF_8)); } FileObject file2 = createFile(folder, "data2.mock-txt"); try (OutputStream out = file2.getOutputStream()) { out.write((" 2test other\n" + " 2other test\n").getBytes(StandardCharsets.UTF_8)); } String uri = Utils.toURI(file1); LSPBindings bindings = LSPBindings.getBindings(file1); RenameParams renameParams = new RenameParams(new TextDocumentIdentifier(uri), new Position(1, 8), "newName"); renameFunction = params -> { assertEquals(uri, params.getTextDocument().getUri()); assertEquals("newName", params.getNewName()); WorkspaceEdit result = new WorkspaceEdit(); String file1URI = params.getTextDocument().getUri(); TextDocumentEdit file1Edits = new TextDocumentEdit(new VersionedTextDocumentIdentifier(file1URI, -1), Arrays.asList(new TextEdit(new Range(new Position(1, 8), new Position(1, 12)), "newName"))); String file1aURI = file1URI.replace("data1", "data1a"); ResourceOperation file1Operation = new RenameFile(file1URI, file1aURI); TextDocumentEdit file1aEdits = new TextDocumentEdit(new VersionedTextDocumentIdentifier(file1aURI, -1), Arrays.asList(new TextEdit(new Range(new Position(0, 2), new Position(0, 6)), "newName"))); String file2URI = file1URI.replace("data1", "data2"); 
ResourceOperation file2Operation = new DeleteFile(file2URI); String file3URI = file1URI.replace("data1", "data3"); ResourceOperation file3Operation = new CreateFile(file3URI); TextDocumentEdit file3Edits = new TextDocumentEdit(new VersionedTextDocumentIdentifier(file3URI, -1), Arrays.asList(new TextEdit(new Range(new Position(0, 0), new Position(0, 0)), "newName content\n"))); result.setDocumentChanges(Arrays.asList(Either.forLeft(file1Edits), Either.forRight(file1Operation), Either.forLeft(file1aEdits), Either.forRight(file2Operation), Either.forRight(file3Operation), Either.forLeft(file3Edits)) ); return result; }; RenameRefactoring refactoring = new RenameRefactoring(Lookups.fixed(bindings, renameParams)); RefactoringSession session = RefactoringSession.create("test rename"); assertNull(refactoring.checkParameters()); assertNull(refactoring.preCheck()); Problem problem = refactoring.prepare(session); assertNull(problem2String(problem), problem); Set<String> elements = new HashSet<>(); for (RefactoringElement re : session.getRefactoringElements()) { RefactoringElementImplementation impl = APIAccessor.DEFAULT.getRefactoringElementImplementation(re); if (impl instanceof DiffElement) { Method getNewFileContent = impl.getClass().getDeclaredMethod("getNewFileContent"); getNewFileContent.setAccessible(true); String newFileContent = (String) getNewFileContent.invoke(impl); String element = positionToString(re.getPosition().getBegin()) + "-" + positionToString(re.getPosition().getEnd()) + ":" + newFileContent; elements.add(element); } else if (impl instanceof LSPRenameFile || impl instanceof LSPDeleteFile || impl instanceof LSPCreateFile) { elements.add(impl.toString()); } else { fail("Unknown element class: " + impl.getClass()); } } Set<String> expectedElements = new HashSet<>(Arrays.asList( "data2.mock-txt=>", "data1.mock-txt=>data1a.mock-txt", "1:8-1:12: newName other\n" + " other newName\n", "0:2-0:6: newName other\n" + " other newName\n", "=>data3.mock-txt(newName 
content\n)" )); assertEquals(expectedElements, elements); session.doRefactoring(true); assertEquals("data1a.mock-txt", file1.getNameExt()); assertFile(file1, " newName other\n" + " other newName\n"); assertFalse(file2.isValid()); //Backup facility does not handle non file:// URLs: // session.undoRefactoring(true); // // assertFile(file1, " test other\n" + // " other test\n"); // assertFile(file2, " 2test other\n" + // " 2other test\n"); } private FileObject createFile(FileObject folder, String name) throws Exception { FileObject file = folder.createData(name); EditorCookie ec = file.getLookup().lookup(EditorCookie.class); ((CloneableEditorSupport) ec).setMIMEType(MIME_TYPE); return file; } private String positionToString(PositionRef p) throws IOException { return "" + p.getLine() + ":" + p.getColumn(); } private void assertFile(FileObject file, String expectedContent) throws IOException { assertEquals(expectedContent, file.asText()); } private String problem2String(Problem p) { if (p == null){ return null; } return p.getMessage() + ":" + p.isFatal() + (p.getNext() != null ? 
"[" + problem2String(p.getNext()) + "]" : ""); } private static Function<RenameParams, WorkspaceEdit> renameFunction; private static final class TestLanguageServer implements LanguageServer { @Override public CompletableFuture<InitializeResult> initialize(InitializeParams params) { ServerCapabilities caps = new ServerCapabilities(); RenameOptions renameOptions = new RenameOptions(); caps.setRenameProvider(renameOptions); InitializeResult initResult = new InitializeResult(caps); return CompletableFuture.completedFuture(initResult); } @Override public CompletableFuture<Object> shutdown() { return CompletableFuture.completedFuture(null); } @Override public void exit() { } @Override public TextDocumentService getTextDocumentService() { return new BaseTextDocumentServiceImpl() { @Override public CompletableFuture<WorkspaceEdit> rename(RenameParams params) { WorkspaceEdit result = renameFunction.apply(params); return CompletableFuture.completedFuture(result); } }; } @Override public WorkspaceService getWorkspaceService() { return new BaseWorkspaceServiceImpl(); } } public static final class RootMimeDataProviderImpl implements MimeDataProvider { @Override public Lookup getLookup(MimePath mp) { if ("".equals(mp.getPath())) { return Lookups.fixed(new UndoableWrapper()); } return Lookup.EMPTY; } } }
8,119
2,151
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.databinding; /** * Utility class for managing Observable callbacks. */ public class PropertyChangeRegistry extends CallbackRegistry<Observable.OnPropertyChangedCallback, Observable, Void> { private static final CallbackRegistry.NotifierCallback<Observable.OnPropertyChangedCallback, Observable, Void> NOTIFIER_CALLBACK = new CallbackRegistry.NotifierCallback<Observable.OnPropertyChangedCallback, Observable, Void>() { @Override public void onNotifyCallback(Observable.OnPropertyChangedCallback callback, Observable sender, int arg, Void notUsed) { callback.onPropertyChanged(sender, arg); } }; public PropertyChangeRegistry() { super(NOTIFIER_CALLBACK); } /** * Notifies registered callbacks that a specific property has changed. * * @param observable The Observable that has changed. * @param propertyId The BR id of the property that has changed or BR._all if the entire * Observable has changed. */ public void notifyChange(Observable observable, int propertyId) { notifyCallbacks(observable, propertyId, null); } }
570
429
<reponame>Andy-K-Sparklight/ponyos<gh_stars>100-1000 #include <stdlib.h> #include <stdint.h> #include <stdio.h> #include <syscall.h> #include <syscall_nums.h> #include <signal.h> #include <pthread.h> #include <errno.h> #include <sys/wait.h> #define ACQUIRE_LOCK() do { while (__sync_lock_test_and_set(&lock->atomic_lock, 0x01)) { syscall_yield(); } } while (0) #define RELEASE_LOCK() do { __sync_lock_release(&lock->atomic_lock); } while (0) int pthread_rwlock_init(pthread_rwlock_t * lock, void * args) { lock->readers = 0; lock->atomic_lock = 0; if (args != NULL) { fprintf(stderr, "pthread: pthread_rwlock_init arg unsupported\n"); return 1; } return 0; } int pthread_rwlock_wrlock(pthread_rwlock_t * lock) { ACQUIRE_LOCK(); while (1) { if (lock->readers == 0) { lock->readers = -1; lock->writerPid = syscall_getpid(); RELEASE_LOCK(); return 0; } syscall_yield(); } } int pthread_rwlock_rdlock(pthread_rwlock_t * lock) { ACQUIRE_LOCK(); while (1) { if (lock->readers >= 0) { lock->readers++; RELEASE_LOCK(); return 0; } syscall_yield(); } } int pthread_rwlock_unlock(pthread_rwlock_t * lock) { ACQUIRE_LOCK(); if (lock->readers > 0) lock->readers--; else if (lock->readers < 0) lock->readers = 0; else fprintf(stderr, "pthread: bad lock state detected\n"); RELEASE_LOCK(); return 0; } int pthread_rwlock_destroy(pthread_rwlock_t * lock) { return 0; }
618
368
package io.charlescd.villager.infrastructure.integration.registry.configuration;

import io.charlescd.villager.infrastructure.integration.registry.authentication.CommonBasicAuthenticator;
import io.charlescd.villager.infrastructure.persistence.DockerRegistryConfigurationEntity;

/**
 * Builds the authenticator used for Azure Container Registry connections.
 */
public final class AzureConfig {

    private AzureConfig() {
        // Static-only holder; not instantiable.
    }

    /**
     * Creates a basic-auth authenticator from the Azure connection data carried
     * by the given configuration parameters.
     *
     * @param config wrapper whose configuration is an AzureDockerRegistryConnectionData
     * @return a CommonBasicAuthenticator built from the stored username/password
     */
    public static Object execute(ConfigParameters config) {
        var connectionData =
                (DockerRegistryConfigurationEntity.AzureDockerRegistryConnectionData)
                        config.getConfiguration();
        var user = connectionData.username;
        var secret = connectionData.password;
        return new CommonBasicAuthenticator(user, secret);
    }
}
192
9,367
//
//  MIKMIDIOutputPort.h
//  MIDI Testbed
//
//  Created by <NAME> on 3/8/13.
//  Copyright (c) 2013 Mixed In Key. All rights reserved.
//

#import "MIKMIDIPort.h"
#import "MIKMIDICompilerCompatibility.h"

@class MIKMIDICommand;
@class MIKMIDIDestinationEndpoint;

NS_ASSUME_NONNULL_BEGIN

/**
 *  MIKMIDIOutputPort is an Objective-C wrapper for CoreMIDI's MIDIPort class, and is only for destination ports.
 *  It is not intended for use by clients/users of of MIKMIDI. Rather, it should be thought of as an
 *  MIKMIDI private class.
 */
@interface MIKMIDIOutputPort : MIKMIDIPort

/**
 *  Sends the given MIDI commands out this port to the specified destination endpoint.
 *
 *  @param commands    The MIKMIDICommand instances to send.
 *  @param destination The destination endpoint that should receive the commands.
 *  @param error       On failure, upon return contains an NSError describing the problem.
 *
 *  @return YES on success, NO on failure (presumably with *error populated — confirm in the implementation).
 */
- (BOOL)sendCommands:(MIKArrayOf(MIKMIDICommand *) *)commands toDestination:(MIKMIDIDestinationEndpoint *)destination error:(NSError **)error;

@end

NS_ASSUME_NONNULL_END
289
388
/** * Definition for a binary tree node. * struct TreeNode { * int val; * TreeNode *left; * TreeNode *right; * TreeNode() : val(0), left(nullptr), right(nullptr) {} * TreeNode(int x) : val(x), left(nullptr), right(nullptr) {} * TreeNode(int x, TreeNode *left, TreeNode *right) : val(x), left(left), right(right) {} * }; */ class Solution { public: int ans=0; unordered_set<TreeNode*> covered; void dfs(TreeNode* root,TreeNode* parent){ if(root==NULL) return; dfs(root->left,root); dfs(root->right,root); if((parent==NULL && covered.find(root)==covered.end()) || covered.find(root->left)==covered.end() || covered.find(root->right)==covered.end()){ ans++; covered.insert(root); covered.insert(parent); covered.insert(root->left); covered.insert(root->right); } } int minCameraCover(TreeNode* root) { covered.insert(NULL); dfs(root,NULL); return ans; } };
475
5,169
{ "name": "FormBuilder", "version": "0.5.8", "summary": "A swift library to make building data entry forms fast and simple.", "description": "This is a library written in swift which is intended to make the creation of data entry forms in iOS fast and easy. I have tried to abstract out all of the\nboiler plate code so that you can focus on what makes your project unique and avoid unnecessary headaches and duplicated code. It is highly recommended that you read the instructions document included with the example project so that you can understand how this library works and how to use it in your project.", "homepage": "https://github.com/n8glenn/FormBuilder", "screenshots": [ "https://dl.dropboxusercontent.com/s/517byyh4riws6qc/Screen1.png", "https://dl.dropboxusercontent.com/s/r5u4sdno3pv2ovs/Screen2.png", "https://dl.dropboxusercontent.com/s/8r6jkr4viv0ci9j/Screen3.png", "https://dl.dropboxusercontent.com/s/y757ga2aq0wthnj/Screen4.png", "https://dl.dropboxusercontent.com/s/i1u261uamwcj5bn/Screen5.png" ], "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "n8glenn": "<EMAIL>" }, "source": { "git": "https://github.com/n8glenn/FormBuilder.git", "tag": "0.5.8" }, "social_media_url": "https://twitter.com/n8glenn", "platforms": { "ios": "9.0" }, "source_files": "FormBuilder/Classes/**/*", "resources": [ "FormBuilder/Assets/**/*.storyboard", "FormBuilder/Assets/*" ] }
542
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.xml.xdm.visitor;

/**
 * Convenience base implementation of {@link XMLNodeVisitor}: every typed
 * {@code visit} call funnels into {@link #visitNode}, so subclasses can
 * override only the hooks they care about.
 *
 * @author ChrisWebster
 */
public class DefaultVisitor implements XMLNodeVisitor {

    /** Delegates attribute visits to {@link #visitNode}. */
    public void visit(org.netbeans.modules.xml.xdm.nodes.Attribute attr) {
        visitNode(attr);
    }

    /** Delegates document visits to {@link #visitNode}. */
    public void visit(org.netbeans.modules.xml.xdm.nodes.Document doc) {
        visitNode(doc);
    }

    /** Delegates element visits to {@link #visitNode}. */
    public void visit(org.netbeans.modules.xml.xdm.nodes.Element e) {
        visitNode(e);
    }

    /** Delegates text visits to {@link #visitNode}. */
    public void visit(org.netbeans.modules.xml.xdm.nodes.Text txt) {
        visitNode(txt);
    }

    /** Catch-all hook; the default implementation does nothing. */
    protected void visitNode(org.netbeans.modules.xml.xdm.nodes.Node node) {
    }
}
560
23,901
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Postprocessing mnist and cifar10/100 outputs for simple, precond, dropout.

Each postprocess function reshards per-run `proba_tab_<i>.npy` prediction
arrays into per-shard `proba_<k>.npy` files, concatenating the runs along the
last axis. All functions assume 10 shards of 1000 examples each (10k examples
per dataset) -- TODO confirm against the producers of proba_tab_*.npy.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import numpy as np
import tensorflow.compat.v1 as tf


def postprocess_mnist(workingdir):
  """Postprocesses mnist and notmnist outputs.

  Splits each proba_tab_<i>.npy into mnist (first 10000 rows) and notmnist
  (remaining rows), shards both into 10 chunks of 1000, then concatenates the
  chunks across runs along the last axis into proba_<k>.npy.

  Args:
    workingdir: path to the working directory
  """
  path = os.path.join(workingdir, 'proba_tab_*.npy')
  # Start from a clean temp area for both datasets.
  if tf.gfile.IsDirectory(os.path.join(workingdir, 'mnist/temp')):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, 'mnist/temp'))
  if tf.gfile.IsDirectory(os.path.join(workingdir, 'notmnist/temp')):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, 'notmnist/temp'))
  tf.gfile.MakeDirs(os.path.join(workingdir, 'mnist/temp'))
  tf.gfile.MakeDirs(os.path.join(workingdir, 'notmnist/temp'))
  files_list = tf.gfile.Glob(path)
  n = len(files_list)
  for i in np.arange(n):
    path = os.path.join(workingdir, 'proba_tab_' + str(i) + '.npy')
    with tf.gfile.Open(path, 'rb') as f:
      p = np.load(f)
    # First 10000 rows are mnist, the rest notmnist.
    p_mnist = p[:10000, :, :]
    p_notmnist = p[10000:, :, :]
    for k in np.arange(10):
      path = os.path.join(workingdir, 'mnist/temp',
                          'proba_' + str(i) + '_' + str(k) + '.npy')
      with tf.gfile.Open(path, 'wb') as f:
        np.save(f, p_mnist[k*1000:(k+1)*1000, :, :])
      path = os.path.join(workingdir, 'notmnist/temp',
                          'proba_' + str(i) + '_' + str(k) + '.npy')
      with tf.gfile.Open(path, 'wb') as f:
        np.save(f, p_notmnist[k*1000:(k+1)*1000, :, :])
  # Merge the per-run chunks of each shard along the last axis.
  for dataset in ['mnist', 'notmnist']:
    for k in np.arange(10):
      p_list = []
      for i in np.arange(n):
        path = os.path.join(workingdir, dataset, 'temp',
                            'proba_' + str(i) + '_' + str(k) + '.npy')
        with tf.gfile.Open(path, 'rb') as f:
          p = np.load(f)
        p_list.append(p)
      proba = np.concatenate(tuple(p_list), axis=-1)
      path = os.path.join(workingdir, dataset, 'proba_' + str(k) + '.npy')
      with tf.gfile.Open(path, 'wb') as f:
        np.save(f, proba)
    tf.gfile.DeleteRecursively(os.path.join(workingdir, dataset, 'temp'))


def postprocess_cifar(workingdir, dataset):
  """Postprocesses cifar10/cifar100 outputs.

  Same resharding as postprocess_mnist but for a single dataset: shards each
  proba_tab_<i>.npy into 10 chunks of 1000 rows and merges the chunks across
  runs along the last axis.

  Args:
    workingdir: path to the working directory
    dataset: string, 'cifar10' or 'cifar100'
  """
  path = os.path.join(workingdir, 'proba_tab_*.npy')
  if tf.gfile.IsDirectory(os.path.join(workingdir, dataset)):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, dataset))
  if tf.gfile.IsDirectory(os.path.join(workingdir, 'temp')):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, 'temp'))
  tf.gfile.MakeDirs(os.path.join(workingdir, dataset))
  tf.gfile.MakeDirs(os.path.join(workingdir, 'temp'))
  files_list = tf.gfile.Glob(path)
  n = len(files_list)
  for i in np.arange(n):
    path = os.path.join(workingdir, 'proba_tab_' + str(i) + '.npy')
    with tf.gfile.Open(path, 'rb') as f:
      p = np.load(f)
    for k in np.arange(10):
      path = os.path.join(workingdir, 'temp',
                          'proba_' + str(i) + '_' + str(k) + '.npy')
      with tf.gfile.Open(path, 'wb') as f:
        np.save(f, p[k*1000:(k+1)*1000, :, :])
  for k in np.arange(10):
    p_list = []
    for i in np.arange(n):
      path = os.path.join(workingdir, 'temp',
                          'proba_' + str(i) + '_' + str(k) + '.npy')
      with tf.gfile.Open(path, 'rb') as f:
        p = np.load(f)
      p_list.append(p)
    proba = np.concatenate(tuple(p_list), axis=-1)
    path = os.path.join(workingdir, dataset, 'proba_' + str(k) + '.npy')
    with tf.gfile.Open(path, 'wb') as f:
      np.save(f, proba)
  tf.gfile.DeleteRecursively(os.path.join(workingdir, 'temp'))


def postprocess_bootstrap_mnist(workingdir):
  """Postprocesses mnist bootstrap outputs.

  Bootstrap runs live in task_<i>/proba_tab_<i-1>.npy subdirectories; one
  sample per task. Rows are split into mnist (first 10000) and notmnist
  (rest), sharded by 1000, and samples are concatenated along the last axis.

  Args:
    workingdir: path to the working directory
  """
  if tf.gfile.IsDirectory(os.path.join(workingdir, 'mnist')):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, 'mnist'))
  if tf.gfile.IsDirectory(os.path.join(workingdir, 'notmnist')):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, 'notmnist'))
  # Every entry in workingdir is assumed to be a task_<i> directory --
  # TODO confirm no stray files end up here, num_samples counts all entries.
  list_tasks = tf.gfile.ListDirectory(workingdir)
  num_samples = len(list_tasks)
  tf.gfile.MakeDirs(os.path.join(workingdir, 'mnist'))
  tf.gfile.MakeDirs(os.path.join(workingdir, 'notmnist'))
  for k in np.arange(10):
    p_mnist_list = []
    p_notmnist_list = []
    for i in np.arange(1, num_samples + 1):
      path_task = os.path.join(workingdir, 'task_' + str(i),
                               'proba_tab_' + str(i-1) + '.npy')
      with tf.gfile.Open(path_task, 'rb') as f:
        p = np.load(f)
      p_mnist = p[:10000, :, :]
      p_notmnist = p[10000:, :, :]
      p_mnist_list.append(p_mnist[k*1000:(k+1)*1000, :, :])
      p_notmnist_list.append(p_notmnist[k*1000:(k+1)*1000, :, :])
    proba_mnist = np.concatenate(tuple(p_mnist_list), axis=-1)
    proba_notmnist = np.concatenate(tuple(p_notmnist_list), axis=-1)
    path = os.path.join(workingdir, 'mnist', 'proba_' + str(k) + '.npy')
    with tf.gfile.Open(path, 'wb') as f:
      np.save(f, proba_mnist)
    path = os.path.join(workingdir, 'notmnist', 'proba_' + str(k) + '.npy')
    with tf.gfile.Open(path, 'wb') as f:
      np.save(f, proba_notmnist)


def postprocess_bootstrap_cifar(workingdir, dataset):
  """Postprocesses cifar10/cifar100 bootstrap outputs.

  Single-dataset variant of postprocess_bootstrap_mnist: reads one sample per
  task_<i> directory, shards by 1000 rows, concatenates along the last axis.

  Args:
    workingdir: path to the working directory
    dataset: string, 'cifar10' or 'cifar100'
  """
  if tf.gfile.IsDirectory(os.path.join(workingdir, dataset)):
    tf.gfile.DeleteRecursively(os.path.join(workingdir, dataset))
  list_tasks = tf.gfile.ListDirectory(workingdir)
  num_samples = len(list_tasks)
  tf.gfile.MakeDirs(os.path.join(workingdir, dataset))
  for k in np.arange(10):
    p_list = []
    for i in np.arange(1, num_samples + 1):
      path_task = os.path.join(workingdir, 'task_' + str(i),
                               'proba_tab_' + str(i-1) + '.npy')
      with tf.gfile.Open(path_task, 'rb') as f:
        p = np.load(f)
      p_list.append(p[k*1000:(k+1)*1000, :, :])
    proba = np.concatenate(tuple(p_list), axis=-1)
    path = os.path.join(workingdir, dataset, 'proba_' + str(k) + '.npy')
    with tf.gfile.Open(path, 'wb') as f:
      np.save(f, proba)
368
package org.nem.core.model;

import org.nem.core.crypto.*;
import org.nem.core.serialization.*;

/**
 * Simple key pair view model.
 */
public class KeyPairViewModel implements SerializableEntity {
	private final KeyPair keyPair;
	private final byte networkVersion;

	/**
	 * Creates a key pair view model.
	 *
	 * @param keyPair The key pair.
	 * @param networkVersion The network version.
	 */
	public KeyPairViewModel(final KeyPair keyPair, final byte networkVersion) {
		this.keyPair = keyPair;
		this.networkVersion = networkVersion;
	}

	/**
	 * Deserializes a key pair view model.
	 * <p>
	 * Reads "privateKey", "publicKey" and "address" in that exact order (the read order
	 * is part of the wire contract). The network version is derived from the encoded
	 * address, and the model is rejected unless the address derives from the public key
	 * and the public key derives from the private key.
	 *
	 * @param deserializer The deserializer.
	 * @throws IllegalArgumentException If the public key does not match the address or
	 *         the private key does not match the public key.
	 */
	public KeyPairViewModel(final Deserializer deserializer) {
		final PrivateKey privateKey = PrivateKey.fromHexString(deserializer.readString("privateKey"));
		final PublicKey publicKey = PublicKey.fromHexString(deserializer.readOptionalString("publicKey"));
		final Address address = Address.fromEncoded(deserializer.readString("address"));

		// the address encodes the network, so the version can be recovered from it
		this.networkVersion = NetworkInfos.fromAddress(address).getVersion();
		if (!addressIsDerivedFromPublicKey(publicKey, this.networkVersion, address)) {
			throw new IllegalArgumentException("public key and address mismatch");
		}

		// cross-check: the supplied public key must match the one derived from the private key
		this.keyPair = new KeyPair(privateKey);
		if (!this.keyPair.getPublicKey().equals(publicKey)) {
			throw new IllegalArgumentException("private key and public key mismatch");
		}
	}

	/**
	 * Gets the key pair.
	 *
	 * @return The key pair.
	 */
	public KeyPair getKeyPair() {
		return this.keyPair;
	}

	/**
	 * Gets the network version.
	 *
	 * @return The network version.
	 */
	public byte getNetworkVersion() {
		return this.networkVersion;
	}

	// Returns true when `address` equals the address derived from `publicKey` for the given network.
	private static boolean addressIsDerivedFromPublicKey(final PublicKey publicKey, final byte networkVersion, final Address address) {
		final Address derivedAddress = Address.fromPublicKey(networkVersion, publicKey);
		return derivedAddress.equals(address);
	}

	@Override
	public void serialize(final Serializer serializer) {
		// field order mirrors the deserializing constructor
		serializer.writeString("privateKey", this.keyPair.getPrivateKey().toString());
		serializer.writeString("publicKey", this.keyPair.getPublicKey().toString());
		serializer.writeString("address", Address.fromPublicKey(this.networkVersion, this.keyPair.getPublicKey()).getEncoded());
	}
}
709
358
// Copyright(c) 2015-present, <NAME> & spdlog contributors.
// Distributed under the MIT License (http://opensource.org/licenses/MIT)

#pragma once

#ifndef SPDLOG_HEADER_ONLY
#include "spdlog/sinks/wincolor_sink.h"
#endif

#include "spdlog/common.h"
#include "spdlog/details/pattern_formatter.h"

namespace spdlog {
namespace sinks {

// Console sink that colorizes the level portion of each message using the
// Win32 console attribute API. Falls back to plain WriteFile when the handle
// is redirected to a file or pipe.
template<typename ConsoleMutex>
SPDLOG_INLINE wincolor_sink<ConsoleMutex>::wincolor_sink(HANDLE out_handle, color_mode mode)
    : out_handle_(out_handle)
    , mutex_(ConsoleMutex::mutex())
    , formatter_(details::make_unique<spdlog::pattern_formatter>())
{
    // check if out_handle is points to the actual console.
    // ::GetConsoleMode() should return 0 if it is redirected or not valid console handle.
    DWORD console_mode;
    in_console_ = ::GetConsoleMode(out_handle, &console_mode) != 0;

    set_color_mode(mode);
    colors_[level::trace] = WHITE;
    colors_[level::debug] = CYAN;
    colors_[level::info] = GREEN;
    colors_[level::warn] = YELLOW | BOLD;
    colors_[level::err] = RED | BOLD;                         // red bold
    colors_[level::critical] = BACKGROUND_RED | WHITE | BOLD; // white bold on red background
    colors_[level::off] = 0;
}

template<typename ConsoleMutex>
SPDLOG_INLINE wincolor_sink<ConsoleMutex>::~wincolor_sink()
{
    this->flush();
}

// change the color for the given level
template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::set_color(level::level_enum level, WORD color)
{
    std::lock_guard<mutex_t> lock(mutex_);
    colors_[level] = color;
}

// Format the message and write it out. When attached to a real console, the
// [color_range_start, color_range_end) span of the formatted buffer is printed
// with the level's color attributes; everything else keeps current attributes.
template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::log(const details::log_msg &msg)
{
    std::lock_guard<mutex_t> lock(mutex_);
    memory_buf_t formatted;
    formatter_->format(msg, formatted);
    if (!in_console_)
    {
        // redirected handle: no console attributes, plain file write
        write_to_file_(formatted);
        return;
    }
    if (should_do_colors_ && msg.color_range_end > msg.color_range_start)
    {
        // before color range
        print_range_(formatted, 0, msg.color_range_start);

        // in color range
        auto orig_attribs = set_foreground_color_(colors_[msg.level]);
        print_range_(formatted, msg.color_range_start, msg.color_range_end);
        // reset to orig colors
        ::SetConsoleTextAttribute(out_handle_, orig_attribs);
        print_range_(formatted, msg.color_range_end, formatted.size());
    }
    else // print without colors if color range is invalid (or color is disabled)
    {
        print_range_(formatted, 0, formatted.size());
    }
}

template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::flush()
{
    // windows console always flushed?
}

template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::set_pattern(const std::string &pattern)
{
    std::lock_guard<mutex_t> lock(mutex_);
    formatter_ = std::unique_ptr<spdlog::formatter>(new pattern_formatter(pattern));
}

template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::set_formatter(std::unique_ptr<spdlog::formatter> sink_formatter)
{
    std::lock_guard<mutex_t> lock(mutex_);
    formatter_ = std::move(sink_formatter);
}

// NOTE: `automatic` behaves like `always` here; the in_console_ check in log()
// already suppresses attributes for redirected handles.
template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::set_color_mode(color_mode mode)
{
    switch (mode)
    {
    case color_mode::always:
    case color_mode::automatic:
        should_do_colors_ = true;
        break;
    case color_mode::never:
        should_do_colors_ = false;
        break;
    default:
        should_do_colors_ = true;
    }
}

// set foreground color and return the orig console attributes (for resetting later)
template<typename ConsoleMutex>
WORD SPDLOG_INLINE wincolor_sink<ConsoleMutex>::set_foreground_color_(WORD attribs)
{
    CONSOLE_SCREEN_BUFFER_INFO orig_buffer_info;
    ::GetConsoleScreenBufferInfo(out_handle_, &orig_buffer_info);
    WORD back_color = orig_buffer_info.wAttributes;
    // retrieve the current background color
    back_color &= static_cast<WORD>(~(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY));
    // keep the background color unchanged
    ::SetConsoleTextAttribute(out_handle_, attribs | back_color);
    return orig_buffer_info.wAttributes; // return orig attribs
}

// print a range of formatted message to console
template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::print_range_(const memory_buf_t &formatted, size_t start, size_t end)
{
    auto size = static_cast<DWORD>(end - start);
    ::WriteConsoleA(out_handle_, formatted.data() + start, size, nullptr, nullptr);
}

// Write the whole buffer with WriteFile, looping until every byte is written
// (WriteFile may perform partial writes); throws on write failure.
template<typename ConsoleMutex>
void SPDLOG_INLINE wincolor_sink<ConsoleMutex>::write_to_file_(const memory_buf_t &formatted)
{
    if (out_handle_ == nullptr) // no console and no file redirect
    {
        return;
    }
    auto size = static_cast<DWORD>(formatted.size());
    if (size == 0)
    {
        return;
    }
    DWORD total_written = 0;
    do
    {
        DWORD bytes_written = 0;
        bool ok = ::WriteFile(out_handle_, formatted.data() + total_written, size - total_written, &bytes_written, nullptr) != 0;
        if (!ok || bytes_written == 0)
        {
            SPDLOG_THROW(spdlog_ex("wincolor_sink: write_to_file_ failed. GetLastError(): " + std::to_string(::GetLastError())));
        }
        total_written += bytes_written;
    } while (total_written < size);
}

// wincolor_stdout_sink
template<typename ConsoleMutex>
SPDLOG_INLINE wincolor_stdout_sink<ConsoleMutex>::wincolor_stdout_sink(color_mode mode)
    : wincolor_sink<ConsoleMutex>(::GetStdHandle(STD_OUTPUT_HANDLE), mode)
{}

// wincolor_stderr_sink
template<typename ConsoleMutex>
SPDLOG_INLINE wincolor_stderr_sink<ConsoleMutex>::wincolor_stderr_sink(color_mode mode)
    : wincolor_sink<ConsoleMutex>(::GetStdHandle(STD_ERROR_HANDLE), mode)
{}

} // namespace sinks
} // namespace spdlog
2,324
3,405
#include "diagnostic_macros.hpp"

BEGIN_IOKIT_INCLUDE;
#include <IOKit/IOLib.h>
END_IOKIT_INCLUDE;

#include "EventOutputQueue.hpp"
#include "FlagStatus.hpp"
#include "KeyCodeModifierFlagPairs.hpp"
#include "VK_LOCK.hpp"

namespace org_pqrs_Karabiner {
// Handles the virtual VK_LOCK / VK_NEGATIVE_LOCK key codes: mutates the global
// modifier-flag lock state and consumes the event. Returns false when the key
// is not one of the lock virtual keys (the event then falls through to other
// handlers); true when it was handled here.
bool VirtualKey::VK_LOCK::handle(const Params_KeyboardEventCallBack& params, AutogenId autogenId, PhysicalEventType physicalEventType) {
  // State changes apply only on the initial key-down; auto-repeats and key-ups
  // still jump to `remapped` so the event is consumed consistently.
  bool isFirstKeyDownEvent = (params.ex_iskeydown && params.repeat == false);

  // If params.key maps to a modifier flag of KEYCODETYPE, apply METHOD to the
  // global flag status (on first key-down only) and jump to `remapped`.
#define MODIFY_FLAGSTATUS(KEYCODETYPE, METHOD)                                                         \
  {                                                                                                    \
    ModifierFlag modifierFlag = KeyCodeModifierFlagPairs::getModifierFlag(params.key, KEYCODETYPE);    \
    if (modifierFlag != ModifierFlag::ZERO) {                                                          \
      if (isFirstKeyDownEvent) {                                                                       \
        FlagStatus::globalFlagStatus().METHOD(modifierFlag);                                           \
      }                                                                                                \
      goto remapped;                                                                                   \
    }                                                                                                  \
  }

  // VK_LOCK
  MODIFY_FLAGSTATUS(KeyCodeModifierFlagPairs::KeyCodeType::VK_LOCK, lock_toggle);
  MODIFY_FLAGSTATUS(KeyCodeModifierFlagPairs::KeyCodeType::VK_LOCK_FORCE_ON, lock_increase);
  MODIFY_FLAGSTATUS(KeyCodeModifierFlagPairs::KeyCodeType::VK_LOCK_FORCE_OFF, lock_decrease);

  // VK_NEGATIVE_LOCK
  MODIFY_FLAGSTATUS(KeyCodeModifierFlagPairs::KeyCodeType::VK_NEGATIVE_LOCK, negative_lock_toggle);
  MODIFY_FLAGSTATUS(KeyCodeModifierFlagPairs::KeyCodeType::VK_NEGATIVE_LOCK_FORCE_ON, negative_lock_increase);
  MODIFY_FLAGSTATUS(KeyCodeModifierFlagPairs::KeyCodeType::VK_NEGATIVE_LOCK_FORCE_OFF, negative_lock_decrease);

  // "All off" variants clear every lock (or negative lock) at once.
  if (params.key == KeyCode::VK_LOCK_ALL_FORCE_OFF) {
    if (isFirstKeyDownEvent) {
      FlagStatus::globalFlagStatus().lock_clear();
    }
    goto remapped;
  }

  if (params.key == KeyCode::VK_NEGATIVE_LOCK_ALL_FORCE_OFF) {
    if (isFirstKeyDownEvent) {
      FlagStatus::globalFlagStatus().negative_lock_clear();
    }
    goto remapped;
  }

  return false;

remapped:
  // Re-emit the (possibly changed) modifier state to downstream consumers.
  EventOutputQueue::FireModifiers::fire(autogenId, physicalEventType);
  return true;
}
}
1,339
3,358
/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */

/*
 Copyright (C) 2004 <NAME>
 Copyright (C) 2007 StatPro Italia srl

 This file is part of QuantLib, a free-software/open-source library
 for financial quantitative analysts and developers - http://quantlib.org/

 QuantLib is free software: you can redistribute it and/or modify it
 under the terms of the QuantLib license.  You should have received a
 copy of the license along with this program; if not, please email
 <<EMAIL>>. The license is also available online at
 <http://quantlib.org/license.shtml>.

 This program is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 FOR A PARTICULAR PURPOSE.  See the license for more details.
*/

/*! \file stulzengine.hpp
    \brief 2D European Basket formulae, due to Stulz (1982)
*/

#ifndef quantlib_stulz_engine_hpp
#define quantlib_stulz_engine_hpp

#include <ql/instruments/basketoption.hpp>
#include <ql/processes/blackscholesprocess.hpp>

namespace QuantLib {

    //! Pricing engine for 2D European Baskets
    /*! This class implements formulae from
        "Options on the Minimum or the Maximum of Two Risky Assets",
        <NAME>,
        Journal of Financial Ecomomics (1982) 10, 161-185.

        \ingroup basketengines

        \test the correctness of the returned value is tested by
              reproducing results available in literature.
    */
    class StulzEngine : public BasketOption::engine {
      public:
        //! \param correlation correlation between the two underlying processes
        StulzEngine(ext::shared_ptr<GeneralizedBlackScholesProcess> process1,
                    ext::shared_ptr<GeneralizedBlackScholesProcess> process2,
                    Real correlation);
        void calculate() const override;
      private:
        ext::shared_ptr<GeneralizedBlackScholesProcess> process1_; //!< first underlying process
        ext::shared_ptr<GeneralizedBlackScholesProcess> process2_; //!< second underlying process
        Real rho_;                                                 //!< correlation supplied at construction
    };

}

#endif
689
391
import sys
from unittest.mock import MagicMock


def mock_anki_modules(modules=None):
    """Install MagicMock stand-ins for Anki modules in ``sys.modules``.

    I'd like to get rid of the situation when this is required, but for now
    this helps with the situation that anki modules are not available during
    test runtime.

    Args:
        modules: optional iterable of module names to mock; defaults to the
            standard set of ``anki``/``aqt`` modules used by this project.
    """
    if modules is None:
        # NOTE: the original list contained the typo "'aqt'''" — two adjacent
        # string literals that silently concatenated to 'aqt'. Fixed here.
        modules = [
            'anki', 'anki.hooks', 'anki.exporting', 'anki.decks',
            'anki.utils', 'anki.cards', 'anki.models', 'anki.notes',
            'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils',
        ]

    for module in modules:
        sys.modules[module] = MagicMock()
208
30,023
"""The smtp component.""" from homeassistant.const import Platform DOMAIN = "smtp" PLATFORMS = [Platform.NOTIFY]
40
335
<gh_stars>100-1000 # ifndef CPPAD_CORE_FUN_EVAL_HPP # define CPPAD_CORE_FUN_EVAL_HPP /* -------------------------------------------------------------------------- CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-17 <NAME> CppAD is distributed under multiple licenses. This distribution is under the terms of the GNU General Public License Version 3. A copy of this license is included in the COPYING file of this distribution. Please visit http://www.coin-or.org/CppAD/ for information on other licenses. -------------------------------------------------------------------------- */ # include <cppad/core/forward.hpp> # include <cppad/core/reverse.hpp> # include <cppad/core/sparse.hpp> # endif
213
838
/*
 * MIT License
 *
 * Copyright (c) 2017 <NAME>
 * Copyright (c) 2017 <NAME> <<EMAIL>>
 * Copyright (c) 2018 LoBo (https://github.com/loboris)
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

/**
 * @file
 * 1-Wire bus (OWB) driver-independent core: reset/read/write helpers,
 * ROM commands, device search, and Dallas/Maxim CRC8 utilities.
 */

#include <stddef.h>
#include <stdbool.h>
#include <inttypes.h>
#include <string.h>
#include <stdlib.h>

#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "esp_log.h"
#include "sdkconfig.h"
#include "driver/gpio.h"

#include "owb.h"

static const char * TAG = "owb";

// Returns true when `bus` is non-NULL and has a driver attached; logs an error otherwise.
static bool _is_init(const OneWireBus * bus)
{
    bool ok = false;
    if (bus != NULL)
    {
        if (bus->driver)
        {
            // OK
            ok = true;
        }
        else
        {
            ESP_LOGE(TAG, "bus is not initialised");
        }
    }
    else
    {
        ESP_LOGE(TAG, "bus is NULL");
    }
    return ok;
}

/**
 * @brief 1-Wire 8-bit CRC lookup.
 * @param[in] crc Starting CRC value. Pass in prior CRC to accumulate.
 * @param[in] data Byte to feed into CRC.
 * @return Resultant CRC value.
 */
static uint8_t _calc_crc(uint8_t crc, uint8_t data)
{
    // https://www.maximintegrated.com/en/app-notes/index.mvp/id/27
    // Precomputed Dallas/Maxim CRC8 (polynomial X^8 + X^5 + X^4 + 1) table.
    static const uint8_t table[256] = {
        0, 94, 188, 226, 97, 63, 221, 131, 194, 156, 126, 32, 163, 253, 31, 65,
        157, 195, 33, 127, 252, 162, 64, 30, 95, 1, 227, 189, 62, 96, 130, 220,
        35, 125, 159, 193, 66, 28, 254, 160, 225, 191, 93, 3, 128, 222, 60, 98,
        190, 224, 2, 92, 223, 129, 99, 61, 124, 34, 192, 158, 29, 67, 161, 255,
        70, 24, 250, 164, 39, 121, 155, 197, 132, 218, 56, 102, 229, 187, 89, 7,
        219, 133, 103, 57, 186, 228, 6, 88, 25, 71, 165, 251, 120, 38, 196, 154,
        101, 59, 217, 135, 4, 90, 184, 230, 167, 249, 27, 69, 198, 152, 122, 36,
        248, 166, 68, 26, 153, 199, 37, 123, 58, 100, 134, 216, 91, 5, 231, 185,
        140, 210, 48, 110, 237, 179, 81, 15, 78, 16, 242, 172, 47, 113, 147, 205,
        17, 79, 173, 243, 112, 46, 204, 146, 211, 141, 111, 49, 178, 236, 14, 80,
        175, 241, 19, 77, 206, 144, 114, 44, 109, 51, 209, 143, 12, 82, 176, 238,
        50, 108, 142, 208, 83, 13, 239, 177, 240, 174, 76, 18, 145, 207, 45, 115,
        202, 148, 118, 40, 171, 245, 23, 73, 8, 86, 180, 234, 105, 55, 213, 139,
        87, 9, 235, 181, 54, 104, 138, 212, 149, 203, 41, 119, 244, 170, 72, 22,
        233, 183, 85, 11, 136, 214, 52, 106, 43, 117, 151, 201, 74, 20, 246, 168,
        116, 42, 200, 150, 21, 75, 169, 247, 182, 232, 10, 84, 215, 137, 107, 53};

    return table[crc ^ data];
}

// Accumulates the CRC8 over `len` bytes of `buffer` starting from `crc`.
// NOTE(review): assumes len > 0 (do-while reads at least one byte) — callers must not pass 0.
static uint8_t _calc_crc_block(uint8_t crc, const uint8_t * buffer, size_t len)
{
    do
    {
        crc = _calc_crc(crc, *buffer++);
        ESP_LOGD(TAG, "crc 0x%02x, len %d", (int)crc, (int)len);
    }
    while (--len > 0);
    return crc;
}

/**
 * @param[out] is_found true if a device was found, false if not
 * @return status
 */
static owb_status _search(const OneWireBus * bus, OneWireBus_SearchState * state, bool *is_found)
{
    // Based on https://www.maximintegrated.com/en/app-notes/index.mvp/id/187

    // initialize for search
    int id_bit_number = 1;
    int last_zero = 0;
    int rom_byte_number = 0;
    uint8_t id_bit = 0;
    uint8_t cmp_id_bit = 0;
    uint8_t rom_byte_mask = 1;
    uint8_t search_direction = 0;
    bool search_result = false;
    uint8_t crc8 = 0;
    owb_status status;

    // if the last call was not the last one
    if (!state->last_device_flag)
    {
        // 1-Wire reset
        bool is_present;
        bus->driver->reset(bus, &is_present);
        if (!is_present)
        {
            // reset the search
            state->last_discrepancy = 0;
            state->last_device_flag = false;
            state->last_family_discrepancy = 0;
            *is_found = false;
            return OWB_STATUS_OK;
        }

        // issue the search command
        bus->driver->write_bits(bus, OWB_ROM_SEARCH, 8);

        // loop to do the search
        do
        {
            id_bit = cmp_id_bit = 0;

            // read a bit and its complement
            bus->driver->read_bits(bus, &id_bit, 1);
            bus->driver->read_bits(bus, &cmp_id_bit, 1);

            // check for no devices on 1-wire (signal level is high in both bit reads)
            if (id_bit && cmp_id_bit)
            {
                break;
            }
            else
            {
                // all devices coupled have 0 or 1
                if (id_bit != cmp_id_bit)
                {
                    search_direction = (id_bit) ? 1 : 0;  // bit write value for search
                }
                else
                {
                    // if this discrepancy if before the Last Discrepancy
                    // on a previous next then pick the same as last time
                    if (id_bit_number < state->last_discrepancy)
                        search_direction = ((state->rom_code.bytes[rom_byte_number] & rom_byte_mask) > 0);
                    else
                        // if equal to last pick 1, if not then pick 0
                        search_direction = (id_bit_number == state->last_discrepancy);

                    // if 0 was picked then record its position in LastZero
                    if (search_direction == 0)
                    {
                        last_zero = id_bit_number;

                        // check for Last discrepancy in family
                        if (last_zero < 9)
                            state->last_family_discrepancy = last_zero;
                    }
                }

                // set or clear the bit in the ROM byte rom_byte_number
                // with mask rom_byte_mask
                if (search_direction == 1)
                    state->rom_code.bytes[rom_byte_number] |= rom_byte_mask;
                else
                    state->rom_code.bytes[rom_byte_number] &= ~rom_byte_mask;

                // serial number search direction write bit
                bus->driver->write_bits(bus, search_direction, 1);

                // increment the byte counter id_bit_number
                // and shift the mask rom_byte_mask
                id_bit_number++;
                rom_byte_mask <<= 1;

                // if the mask is 0 then go to new SerialNum byte rom_byte_number and reset mask
                if (rom_byte_mask == 0)
                {
                    crc8 = owb_crc8_byte(crc8, state->rom_code.bytes[rom_byte_number]);  // accumulate the CRC
                    rom_byte_number++;
                    rom_byte_mask = 1;
                }
            }
        }
        while (rom_byte_number < 8);  // loop until through all ROM bytes 0-7

        // if the search was successful then
        if (!((id_bit_number < 65) || (crc8 != 0)))
        {
            // search successful so set LastDiscrepancy,LastDeviceFlag,search_result
            state->last_discrepancy = last_zero;

            // check for last device
            if (state->last_discrepancy == 0)
                state->last_device_flag = true;

            search_result = true;
        }
    }

    // if no device found then reset counters so next 'search' will be like a first
    if (!search_result || !state->rom_code.bytes[0])
    {
        state->last_discrepancy = 0;
        state->last_device_flag = false;
        state->last_family_discrepancy = 0;
        search_result = false;
    }

    status = OWB_STATUS_OK;
    *is_found = search_result;

    return status;
}

// Public API

// Tears the bus down via its driver; safe only on an initialised bus.
owb_status owb_uninitialize(OneWireBus * bus)
{
    owb_status status;

    if(!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        bus->driver->uninitialize(bus);
        status = OWB_STATUS_OK;
    }

    return status;
}

// Enables or disables CRC checking of ROM codes read from the bus.
owb_status owb_use_crc(OneWireBus * bus, bool use_crc)
{
    owb_status status;

    if(!bus)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        bus->use_crc = use_crc;
        ESP_LOGD(TAG, "use_crc %d", bus->use_crc);

        status = OWB_STATUS_OK;
    }

    return status;
}

// Reads the 64-bit ROM code of the single device on the bus (READ ROM only
// works with exactly one device attached). Optionally validates the CRC.
owb_status owb_read_rom(const OneWireBus * bus, OneWireBus_ROMCode *rom_code)
{
    owb_status status;

    memset(rom_code, 0, sizeof(OneWireBus_ROMCode));

    if(!bus || !rom_code)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        bool is_present;
        bus->driver->reset(bus, &is_present);
        if (is_present)
        {
            uint8_t value = OWB_ROM_READ;
            bus->driver->write_bits(bus, value, 8);
            owb_read_bytes(bus, rom_code->bytes, sizeof(OneWireBus_ROMCode));

            if (bus->use_crc)
            {
                // a correct full ROM (payload + CRC byte) folds to 0
                if (owb_crc8_bytes(0, rom_code->bytes, sizeof(OneWireBus_ROMCode)) != 0)
                {
                    ESP_LOGE(TAG, "CRC failed");
                    memset(rom_code->bytes, 0, sizeof(OneWireBus_ROMCode));
                    status = OWB_STATUS_CRC_FAILED;
                }
                else
                {
                    status = OWB_STATUS_OK;
                }
            }
            else
            {
                status = OWB_STATUS_OK;
            }
            char rom_code_s[17];
            owb_string_from_rom_code(*rom_code, rom_code_s, sizeof(rom_code_s));
            ESP_LOGD(TAG, "rom_code %s", rom_code_s);
        }
        else
        {
            status = OWB_STATUS_DEVICE_NOT_RESPONDING;
            ESP_LOGE(TAG, "ds18b20 device not responding");
        }
    }

    return status;
}

// Checks whether a device with the given ROM code is currently present by
// running a directed search seeded to converge on that code.
owb_status owb_verify_rom(const OneWireBus * bus, OneWireBus_ROMCode rom_code, bool* is_present)
{
    owb_status status;
    bool result = false;

    if(!bus || !is_present)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        // last_discrepancy = 64 forces the search down the supplied ROM code path
        OneWireBus_SearchState state = {
            .last_discrepancy = 64,
            .last_device_flag = false,
        };

        bool is_found;
        _search(bus, &state, &is_found);
        if (is_found)
        {
            result = true;
            for (int i = 0; i < sizeof(state.rom_code.bytes) && result; ++i)
            {
                result = rom_code.bytes[i] == state.rom_code.bytes[i];
                ESP_LOGD(TAG, "%02x %02x", rom_code.bytes[i], state.rom_code.bytes[i]);
            }
            ESP_LOGD(TAG, "rom code %sfound", result ? "" : "not ");
        }
        status = OWB_STATUS_OK;
        *is_present = result;
    }

    return status;
}

// Issues a bus reset; *a_device_present reports whether any device answered.
owb_status owb_reset(const OneWireBus * bus, bool* a_device_present)
{
    owb_status status;

    if(!bus || !a_device_present)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if(!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        bus->driver->reset(bus, a_device_present);
        status = OWB_STATUS_OK;
    }

    return status;
}

// Writes a single byte (8 bits, LSB first per 1-Wire convention via the driver).
owb_status owb_write_byte(const OneWireBus * bus, uint8_t data)
{
    owb_status status;

    if(!bus)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        bus->driver->write_bits(bus, data, 8);
        status = OWB_STATUS_OK;
    }

    return status;
}

// Reads a single byte from the bus into *out.
owb_status owb_read_byte(const OneWireBus * bus, uint8_t *out)
{
    owb_status status;

    if(!bus || !out)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        bus->driver->read_bits(bus, out, 8);
        status = OWB_STATUS_OK;
    }

    return status;
}

// Reads `len` bytes from the bus into `buffer`.
owb_status owb_read_bytes(const OneWireBus * bus, uint8_t * buffer, unsigned int len)
{
    owb_status status;

    if(!bus || !buffer)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        for (int i = 0; i < len; ++i)
        {
            uint8_t out;
            bus->driver->read_bits(bus, &out, 8);
            buffer[i] = out;
        }

        status = OWB_STATUS_OK;
    }

    return status;
}

// Writes `len` bytes from `buffer` to the bus.
owb_status owb_write_bytes(const OneWireBus * bus, const uint8_t * buffer, unsigned int len)
{
    owb_status status;

    if(!bus || !buffer)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        for (int i = 0; i < len; i++)
        {
            bus->driver->write_bits(bus, buffer[i], 8);
        }

        status = OWB_STATUS_OK;
    }

    return status;
}

// Writes the 8-byte ROM code to the bus (e.g. after a MATCH ROM command).
owb_status owb_write_rom_code(const OneWireBus * bus, OneWireBus_ROMCode rom_code)
{
    owb_status status;

    if(!bus)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        owb_write_bytes(bus, (uint8_t*)&rom_code, sizeof(rom_code));
        status = OWB_STATUS_OK;
    }

    return status;
}

// Public wrappers around the Dallas/Maxim CRC8 helpers.
uint8_t owb_crc8_byte(uint8_t crc, uint8_t data)
{
    return _calc_crc(crc, data);
}

uint8_t owb_crc8_bytes(uint8_t crc, const uint8_t * data, size_t len)
{
    return _calc_crc_block(crc, data, len);
}

// Resets the search state and finds the first device on the bus.
owb_status owb_search_first(const OneWireBus * bus, OneWireBus_SearchState * state, bool* found_device)
{
    bool result;
    owb_status status;

    if(!bus || !state || !found_device)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        memset(&state->rom_code, 0, sizeof(state->rom_code));
        state->last_discrepancy = 0;
        state->last_family_discrepancy = 0;
        state->last_device_flag = false;
        _search(bus, state, &result);
        status = OWB_STATUS_OK;

        *found_device = result;
    }

    return status;
}

// Continues a search started by owb_search_first, advancing to the next device.
owb_status owb_search_next(const OneWireBus * bus, OneWireBus_SearchState * state, bool* found_device)
{
    owb_status status;
    bool result = false;

    if(!bus || !state || !found_device)
    {
        status = OWB_STATUS_PARAMETER_NULL;
    }
    else if (!_is_init(bus))
    {
        status = OWB_STATUS_NOT_INITIALIZED;
    }
    else
    {
        _search(bus, state, &result);
        status = OWB_STATUS_OK;

        *found_device = result;
    }

    return status;
}

// Renders the ROM code as lowercase hex, most significant byte first.
// NOTE(review): returns a pointer just past the last written character, not
// the start of `buffer` — confirm callers rely on this before "fixing" it.
char * owb_string_from_rom_code(OneWireBus_ROMCode rom_code, char * buffer, size_t len)
{
    int idx = len;
    *buffer = '\0';
    for (int i = sizeof(rom_code.bytes) - 1; i >= 0; i--)
    {
        idx -= 2;
        if (idx < 0)
            break;
        sprintf(buffer, "%02x", rom_code.bytes[i]);
        buffer += 2;
    }
    return buffer;
}
7,884
575
<gh_stars>100-1000 // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_FEEDBACK_SYSTEM_LOGS_LOG_SOURCES_USER_LOG_FILES_LOG_SOURCE_H_ #define CHROME_BROWSER_FEEDBACK_SYSTEM_LOGS_LOG_SOURCES_USER_LOG_FILES_LOG_SOURCE_H_ #include <string> #include "base/files/file_path.h" #include "base/memory/weak_ptr.h" #include "components/feedback/system_logs/system_logs_source.h" namespace system_logs { // This class gathers log data from user log files. class UserLogFilesLogSource : public SystemLogsSource { public: UserLogFilesLogSource(const base::FilePath& log_file_path, const std::string& log_key); UserLogFilesLogSource(const UserLogFilesLogSource&) = delete; UserLogFilesLogSource& operator=(const UserLogFilesLogSource&) = delete; ~UserLogFilesLogSource() override; // SystemLogsSource override: void Fetch(SysLogsSourceCallback callback) override; private: void ReadFile(const base::FilePath& log_file_path, const std::string& log_key, SystemLogsResponse* response); const base::FilePath log_file_path_; const std::string log_key_; base::WeakPtrFactory<UserLogFilesLogSource> weak_ptr_factory_{this}; }; } // namespace system_logs #endif // CHROME_BROWSER_FEEDBACK_SYSTEM_LOGS_LOG_SOURCES_USER_LOG_FILES_LOG_SOURCE_H_
532
335
<gh_stars>100-1000 { "word": "Alternation", "definitions": [ "The repeated occurrence of two things in turn." ], "parts-of-speech": "Noun" }
70
892
<filename>advisories/unreviewed/2022/01/GHSA-x4ff-3f7v-q327/GHSA-x4ff-3f7v-q327.json { "schema_version": "1.2.0", "id": "GHSA-x4ff-3f7v-q327", "modified": "2022-01-29T00:01:21Z", "published": "2022-01-20T00:02:02Z", "aliases": [ "CVE-2022-22160" ], "details": "An Unchecked Error Condition vulnerability in the subscriber management daemon (smgd) of Juniper Networks Junos OS allows an unauthenticated adjacent attacker to cause a crash of and thereby a Denial of Service (DoS). In a subscriber management / broadband edge environment if a single session group configuration contains dual-stack and a pp0 interface, smgd will crash and restart every time a PPPoE client sends a specific message. This issue affects Juniper Networks Junos OS on MX Series: 16.1 version 16.1R1 and later versions prior to 18.4R3-S10; 19.1 versions prior to 19.1R2-S3, 19.1R3-S7; 19.2 versions prior to 19.2R1-S8, 19.2R3-S4; 19.3 versions prior to 19.3R3-S4; 19.4 versions prior to 19.4R3-S5; 20.1 versions prior to 20.1R3-S3; 20.2 versions prior to 20.2R3-S3; 20.3 versions prior to 20.3R3-S2; 20.4 versions prior to 20.4R3; 21.1 versions prior to 21.1R3; 21.2 versions prior to 21.2R2. This issue does not affect Juniper Networks Junos OS versions prior to 16.1R1.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-22160" }, { "type": "WEB", "url": "https://kb.juniper.net/JSA11268" } ], "database_specific": { "cwe_ids": [ "CWE-391" ], "severity": "MODERATE", "github_reviewed": false } }
653
1,968
#ifndef Rtt_Linux_Clear_Sandbox_Dialog #define Rtt_Linux_Clear_Sandbox_Dialog #include "wx/wx.h" #include "wx/image.h" #include "wx/statline.h" namespace Rtt { class LinuxClearProjectSandboxDialog: public wxDialog { public: LinuxClearProjectSandboxDialog(wxWindow *parent, wxWindowID id, const wxString &title, const wxPoint &pos = wxDefaultPosition, const wxSize &size = wxDefaultSize, long style = wxDEFAULT_DIALOG_STYLE); void OnConfirmClicked(wxCommandEvent &event); void OnCancelClicked(wxCommandEvent &event); private: void SetProperties(); void DoLayout(); protected: wxButton *okButton; wxButton *cancelButton; DECLARE_EVENT_TABLE(); }; }; #endif // Rtt_Linux_Clear_Sandbox_Dialog
266
622
<filename>Duke-Java-Programming-Principles-of-Software-Design/week2- Earthquakes Sorting Algorithms/EfficientSortStarterProgram/src/TitleLastAndMagnitudeComparator.java<gh_stars>100-1000 /** * @author: salimt */ import java.util.Comparator; public class TitleLastAndMagnitudeComparator implements Comparator <QuakeEntry> { public int compare(QuakeEntry q1, QuakeEntry q2){ String lastWord1 = q1.getInfo().substring(q1.getInfo().lastIndexOf(", ") + 1).trim(); String lastWord2 = q2.getInfo().substring(q2.getInfo().lastIndexOf(", ") + 1).trim(); int titleComp = lastWord1.compareTo(lastWord2); if(titleComp!=0){ return titleComp; } return Double.compare(q1.getMagnitude(), q2.getMagnitude()); } }
296
13,249
<gh_stars>1000+ /* * Copyright 2015-2019 The OpenZipkin Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package zipkin2.server.internal.throttle; import io.micrometer.core.instrument.Counter; import io.micrometer.core.instrument.MeterRegistry; import zipkin2.collector.CollectorMetrics; /** Follows the same naming convention as {@link CollectorMetrics} */ final class LimiterMetrics { final Counter requests, requestsSucceeded, requestsIgnored, requestsDropped; LimiterMetrics(MeterRegistry registry) { requests = Counter.builder("zipkin_storage.throttle.requests") .description("cumulative amount of limiter requests acquired") .register(registry); requestsSucceeded = Counter.builder("zipkin_storage.throttle.requests_succeeded") .description("cumulative amount of limiter requests acquired that later succeeded") .register(registry); requestsDropped = Counter.builder("zipkin_storage.throttle.requests_dropped") .description( "cumulative amount of limiter requests acquired that later dropped due to capacity") .register(registry); requestsIgnored = Counter.builder("zipkin_storage.throttle.requests_ignored") .description( "cumulative amount of limiter requests acquired that later dropped not due to capacity") .register(registry); } }
564
482
package io.cattle.platform.engine.eventing.impl; import io.cattle.platform.engine.eventing.ProcessEventListener; import io.cattle.platform.engine.manager.ProcessManager; import io.cattle.platform.engine.server.ProcessInstanceDispatcher; import io.cattle.platform.engine.server.ProcessInstanceReference; import io.cattle.platform.eventing.model.Event; import javax.inject.Inject; public class ProcessEventListenerImpl implements ProcessEventListener { @Inject ProcessInstanceDispatcher dispatcher; @Inject ProcessManager processManager; @Override public void processExecute(Event event) { if (event.getResourceId() == null) return; ProcessInstanceReference ref = processManager.loadReference(new Long(event.getResourceId())); if (ref != null) { ref.setEvent(true); dispatcher.dispatch(ref); } } }
315
2,805
<reponame>ziveo/ckan<filename>ckan/migration/versions/006_c83955e7acb6_add_ratings.py # encoding: utf-8 """006 Add ratings Revision ID: c83955e7acb6 Revises: <PASSWORD> Create Date: 2018-09-04 17:39:11.520922 """ from alembic import op import sqlalchemy as sa from ckan.migration import skip_based_on_legacy_engine_version # revision identifiers, used by Alembic. revision = 'c83955e7acb6' down_revision = '<PASSWORD>2<PASSWORD>' branch_labels = None depends_on = None def upgrade(): if skip_based_on_legacy_engine_version(op, __name__): return op.create_table( 'rating', sa.Column('id', sa.UnicodeText, primary_key=True), sa.Column('user_id', sa.UnicodeText, sa.ForeignKey('user.id')), sa.Column('user_ip_address', sa.UnicodeText), # alternative to user_id if not logged in sa.Column('package_id', sa.Integer, sa.ForeignKey('package.id')), sa.Column('rating', sa.Float) ) def downgrade(): op.drop_table('rating')
436
2,209
#!/usr/bin/env python2 """ hexstring.py """ from __future__ import print_function import sys def main(argv): hexdigits = '0123456789abcdef' for c in hexdigits: for d in hexdigits: for e in hexdigits: hexbyte = c + d + e #+ f byte = hexbyte byte = byte.replace('0', '0000') byte = byte.replace('1', '0001') byte = byte.replace('2', '0010') byte = byte.replace('3', '0011') byte = byte.replace('4', '0100') byte = byte.replace('5', '0101') byte = byte.replace('6', '0110') byte = byte.replace('7', '0111') byte = byte.replace('8', '1000') byte = byte.replace('9', '1001') byte = byte.replace('a', '1010') byte = byte.replace('b', '1011') byte = byte.replace('c', '1100') byte = byte.replace('d', '1101') byte = byte.replace('e', '1110') byte = byte.replace('f', '1111') #print(byte) ones = byte.replace('0', '') if len(ones) == 11: print(hexbyte, byte) if __name__ == '__main__': try: main(sys.argv) except RuntimeError as e: print('FATAL: %s' % e, file=sys.stderr) sys.exit(1)
564
5,975
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.buck_project_builder.cache; import com.facebook.buck_project_builder.SimpleLogger; import com.facebook.buck_project_builder.targets.ThriftLibraryTarget; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.gson.Gson; import com.google.gson.JsonSyntaxException; import org.apache.commons.codec.digest.DigestUtils; import javax.annotation.Nullable; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.nio.file.Paths; import java.util.Objects; import java.util.Set; import java.util.concurrent.TimeUnit; public class BuilderCache { private final long lastBuiltTime; private final Set<ThriftLibraryTarget> thriftCaches; public BuilderCache(long lastBuiltTime, Set<ThriftLibraryTarget> thriftCaches) { this.lastBuiltTime = lastBuiltTime; this.thriftCaches = thriftCaches; } public BuilderCache() { this(0, ImmutableSet.of()); } public static String getCachePath( ImmutableList<String> targets, String buckRoot, @Nullable String projectName) throws IOException { String escapedTargets = DigestUtils.md5Hex(String.join(";", targets)); if (escapedTargets.length() > 255) { // 255 is the Linux filename length limit for EXT4. // Most target list is not crazily long, and collision is unlikely to happen. 
escapedTargets = escapedTargets.substring(0, 255); } return Paths.get(BuilderCache.getBuckBuilderCachePath(buckRoot, projectName), escapedTargets) .toString(); } private static File getCacheJsonFile( ImmutableList<String> targets, String buckRoot, @Nullable String projectName) throws IOException { return Paths.get(getCachePath(targets, buckRoot, projectName), "cache.json").toFile(); } @VisibleForTesting static BuilderCache readFromCache(Reader reader) { try { return new Gson().fromJson(reader, BuilderCache.class); } catch (JsonSyntaxException exception) { SimpleLogger.warning("Buck builder cache is corrupted. Rebuilding everything..."); // Return a cache that will invalidate everything. return new BuilderCache(); } } public static BuilderCache readFromCache( ImmutableList<String> targets, String buckRoot, @Nullable String projectName) throws IOException { File cacheJson = getCacheJsonFile(targets, buckRoot, projectName); try (FileReader reader = new FileReader(cacheJson)) { return readFromCache(reader); } catch (IOException exception) { SimpleLogger.warning("Buck builder cache not found. Rebuilding everything..."); // Return a cache that will invalidate everything. 
return new BuilderCache(); } } @VisibleForTesting void writeToCache(Writer writer) { new Gson().toJson(this, writer); } public void writeToCache( ImmutableList<String> targets, String buckRoot, @Nullable String projectName) throws IOException { File cacheJsonFile = getCacheJsonFile(targets, buckRoot, projectName); if (cacheJsonFile.exists()) { cacheJsonFile.delete(); } cacheJsonFile.getParentFile().mkdirs(); try (FileWriter writer = new FileWriter(cacheJsonFile)) { writeToCache(writer); } catch (IOException exception) { SimpleLogger.warning("Failed to update builder cache."); } } public long getLastBuiltTime() { return lastBuiltTime; } public Set<ThriftLibraryTarget> getThriftCaches() { return thriftCaches; } public static String getBuckBuilderCachePath(String buckRoot, @Nullable String projectName) throws IOException { String suffix = projectName != null ? String.format("_%s", projectName) : ""; String cacheDirectoryName = String.format(".buck_builder_cache%s", suffix); return Paths.get(ScratchPath.getScratchPath(buckRoot), cacheDirectoryName).toString(); } public static String getLockPath(String buckRoot, @Nullable String projectName) throws IOException { return Paths.get(getBuckBuilderCachePath(buckRoot, projectName), "builder.lock").toString(); } public static String getThriftCachePath(String buckRoot, @Nullable String projectName) throws IOException { return Paths.get(getBuckBuilderCachePath(buckRoot, projectName), "thrift-gen").toString(); } public static String getWheelCachePath(String buckRoot, @Nullable String projectName) throws IOException { return Paths.get(getBuckBuilderCachePath(buckRoot, projectName), "downloaded-wheels") .toString(); } @Override public String toString() { return String.format("{lastBuiltTime=%d, getThriftCaches=%s}", lastBuiltTime, thriftCaches); } @Override public boolean equals(@Nullable Object other) { if (this == other) { return true; } if (other == null || getClass() != other.getClass()) { return false; } BuilderCache 
builderCache = (BuilderCache) other; return lastBuiltTime == builderCache.lastBuiltTime && thriftCaches.equals(builderCache.thriftCaches); } @Override public int hashCode() { return Objects.hash(lastBuiltTime, thriftCaches); } }
1,812
974
<filename>3rdparty/blend2d/src/blend2d/opentype/otname_p.h // Blend2D - 2D Vector Graphics Powered by a JIT Compiler // // * Official Blend2D Home Page: https://blend2d.com // * Official Github Repository: https://github.com/blend2d/blend2d // // Copyright (c) 2017-2020 The Blend2D Authors // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // 3. This notice may not be removed or altered from any source distribution. #ifndef BLEND2D_OPENTYPE_OTNAME_P_H_INCLUDED #define BLEND2D_OPENTYPE_OTNAME_P_H_INCLUDED #include "../opentype/otdefs_p.h" //! \cond INTERNAL //! \addtogroup blend2d_internal_opentype //! \{ namespace BLOpenType { // ============================================================================ // [BLOpenType::NameTable] // ============================================================================ //! OpenType 'name' table. //! //! External Resources: //! - https://docs.microsoft.com/en-us/typography/opentype/spec/name //! 
- https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6name.html struct NameTable { enum : uint32_t { kMinSize = 6 }; struct NameRecord { UInt16 platformId; UInt16 specificId; UInt16 languageId; UInt16 nameId; UInt16 length; Offset16 offset; }; struct LangTagRecord { UInt16 length; Offset16 offset; }; UInt16 format; UInt16 recordCount; Offset16 stringOffset; /* NameRecord nameRecords[count]; UInt16 langTagCount; LangTagRecord langTagRecords[langTagCount]; */ BL_INLINE bool hasLangTags() const noexcept { return format() >= 1; } //! The name records where count is the number of records. BL_INLINE const NameRecord* nameRecords() const noexcept { return blOffsetPtr<const NameRecord>(this, 6); } BL_INLINE uint16_t langTagCount(size_t recordCount_) const noexcept { return blOffsetPtr<const UInt16>(this, 6 + recordCount_ * sizeof(NameRecord))->value(); } BL_INLINE const LangTagRecord* langTagRecords(size_t recordCount_) const noexcept { return blOffsetPtr<const LangTagRecord>(this, 6 + recordCount_ * sizeof(NameRecord) + 2); } }; // ============================================================================ // [BLOpenType::NameImpl] // ============================================================================ namespace NameImpl { BL_HIDDEN BLResult init(BLOTFaceImpl* faceI, const BLFontData* fontData) noexcept; } // {NameImpl} } // {BLOpenType} //! \} //! \endcond #endif // BLEND2D_OPENTYPE_OTNAME_P_H_INCLUDED
1,003
371
<gh_stars>100-1000 // This file was adapted from the ze_oss project: https://github.com/zurich-eye/ze_oss/blob/master/ze_data_provider/include/ze/data_provider/data_provider_rosbag.hpp // Copyright (C) ETH Zurich, <NAME>, Zurich Eye - All Rights Reserved #pragma once #include <map> #include <string> #include <memory> #include <vector> #include<gflags/gflags.h> #include <rosbag/bag.h> #include <rosbag/view.h> #include <sensor_msgs/Image.h> #include <esim/data_provider/data_provider_base.hpp> namespace event_camera_simulator { class DataProviderRosbag : public DataProviderBase { public: DataProviderRosbag( const std::string& bag_filename, const std::map<std::string, size_t>& camera_topics); virtual ~DataProviderRosbag() = default; virtual bool spinOnce() override; virtual bool ok() const override; virtual size_t numCameras() const; size_t size() const; private: void loadRosbag(const std::string& bag_filename); void initBagView(const std::vector<std::string>& topics); inline bool cameraSpin(const sensor_msgs::ImageConstPtr m_img, const rosbag::MessageInstance& m); std::unique_ptr<rosbag::Bag> bag_; std::unique_ptr<rosbag::View> bag_view_; rosbag::View::iterator bag_view_it_; int n_processed_images_ = 0; // subscribed topics: std::map<std::string, size_t> img_topic_camidx_map_; // camera_topic --> camera_id SimulatorData sim_data_; }; } // namespace event_camera_simulator
541
435
{ "description": "Tytu\u0142/Topic: How to make killer robots with Python and Raspberry Pi\nPrelegent/Speaker: <NAME>\n\nKubik is a quadruped robot build around a Raspberry Pi. I will tell you what I learned, what challenges I encoutered building it. I will also tell you how I used the PyGame library to program in. If everything works, you will get to see the robot in action. Survivors will be assimilated.\n\nhttp://pl.pycon.org/2014/pl/agenda", "duration": 3251, "language": "eng", "recorded": "2014-10-18", "speakers": [ "<NAME>" ], "thumbnail_url": "https://i.ytimg.com/vi/-PisXGVe-lE/hqdefault.jpg", "title": "How to make killer robots with Python and Raspberry Pi", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=-PisXGVe-lE" } ] }
292
691
/*============================================================================= Copyright (c) 2011-2019 <NAME> https://github.com/bolero-MURAKAMI/Sprout Distributed under the sprout Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.sprout.org/LICENSE_1_0.txt) =============================================================================*/ #ifndef SPROUT_PREDEF_LIBRARY_STD_VACPP_HPP #define SPROUT_PREDEF_LIBRARY_STD_VACPP_HPP #include <sprout/config.hpp> #include <sprout/predef/library/std/prefix.hpp> #define SPROUT_LIB_STD_IBM 0 #if defined(__IBMCPP__) # undef SPROUT_LIB_STD_IBM # define SPROUT_LIB_STD_IBM 1 #endif #if SPROUT_LIB_STD_IBM # define SPROUT_LIB_STD_IBM_AVAILABLE #endif #define SPROUT_LIB_STD_IBM_NAME "IBM VACPP" #endif //#ifndef SPROUT_PREDEF_LIBRARY_STD_VACPP_HPP
341
378
<reponame>aleasto/AppImageUpdate #pragma once // system headers #include <algorithm> #include <climits> #include <fstream> #include <sstream> #include <string> #include <vector> #include <unistd.h> #ifdef FLTK_UI #include <FL/Fl.H> #endif #ifdef QT_UI #include <QMessageBox> #endif // library includes #include <zsutil.h> // AppImageKit includes extern "C" { #include "appimage/appimage_shared.h" } namespace appimage { namespace update { static void removeNewlineCharacters(std::string& str) { str.erase(std::remove(str.begin(), str.end(), '\n'), str.end()); } static inline bool ltrim(std::string& s, char to_trim = ' ') { // TODO: find more efficient way to check whether elements have been removed size_t initialLength = s.length(); s.erase(s.begin(), std::find_if(s.begin(), s.end(), [to_trim](int ch) { return ch != to_trim; })); return s.length() < initialLength; } static inline bool rtrim(std::string& s, char to_trim = ' ') { // TODO: find more efficient way to check whether elements have been removed auto initialLength = s.length(); s.erase(std::find_if(s.rbegin(), s.rend(), [to_trim](int ch) { return ch != to_trim; }).base(), s.end()); return s.length() < initialLength; } static inline bool trim(std::string& s, char to_trim = ' ') { // returns true if either modifies s auto ltrim_result = ltrim(s, to_trim); return rtrim(s, to_trim) && ltrim_result; } static bool callProgramAndGrepForLine(const std::string& command, const std::string& pattern, std::string& output) { FILE *stream = popen(command.c_str(), "r"); if (stream == nullptr) return false; char *line; size_t lineSize = 0; while(getline(&line, &lineSize, stream)) { // check whether line matches pattern std::string lineString = line; if (lineString.find(pattern) != std::string::npos) { if (pclose(stream) != 0) { free(line); return false; } output = line; removeNewlineCharacters(output); return true; } } if (pclose(stream) != 0) { free(line); return false; } return false; } static std::vector<std::string> 
split(const std::string& s, char delim = ' ') { std::vector<std::string> result; std::stringstream ss(s); std::string item; while (std::getline(ss, item, delim)) { result.push_back(item); } return result; } static inline std::string toLower(std::string s) { std::transform(s.begin(), s.end(), s.begin(), [](unsigned char c) { return std::tolower(c); }); return s; } static inline bool toLong(const std::string& str, long& retval, int base = 10) { char* end = nullptr; const auto* cstr = str.c_str(); auto rv = std::strtol(cstr, &end, base); if (errno == ERANGE || cstr == end || retval > LONG_MAX || retval < LONG_MIN) return false; retval = rv; return true; } static inline bool isFile(const std::string& path) { std::ifstream ifs(path); return (bool) ifs && ifs.good(); } static void copyPermissions(const std::string& oldPath, const std::string& newPath) { mode_t oldPerms; auto errCode = zsync2::getPerms(oldPath, oldPerms); if (errCode != 0) { std::ostringstream ss; ss << "Error calling stat(): " << strerror(errCode); #ifdef FLTK_UI fl_message("%s", ss.str().c_str()); #endif #ifdef QT_UI QMessageBox::critical(nullptr, "Error", QString::fromStdString(ss.str()), QMessageBox::Close); #endif exit(1); } chmod(newPath.c_str(), oldPerms); } static void runApp(const std::string& path) { // make executable mode_t newPerms; auto errCode = zsync2::getPerms(path, newPerms); if (errCode != 0) { std::ostringstream ss; ss << "Error calling stat(): " << strerror(errCode); #ifdef FLTK_UI fl_message("%s", ss.str().c_str()); #endif #ifdef QT_UI QMessageBox::critical(nullptr, "Error", QString::fromStdString(ss.str()), QMessageBox::Close); #endif exit(1); } chmod(path.c_str(), newPerms | S_IXUSR); // full path to AppImage, required for execl char* realPathToAppImage; if ((realPathToAppImage = realpath(path.c_str(), nullptr)) == nullptr) { auto error = errno; std::ostringstream ss; ss << "Error resolving full path of AppImage: code " << error << ": " << strerror(error) << std::endl; #ifdef 
FLTK_UI fl_message("%s", ss.str().c_str()); #endif #ifdef QT_UI QMessageBox::critical(nullptr, "Error", QString::fromStdString(ss.str()), QMessageBox::Close); #endif exit(1); } if (fork() == 0) { putenv(strdup("STARTED_BY_APPIMAGEUPDATE=1")); std::cerr << "Running " << realPathToAppImage << std::endl; // make sure to deactivate updater contained in the AppImage when running from AppImageUpdate execl(realPathToAppImage, realPathToAppImage, nullptr); // execle should never return, so if this code is reached, there must be an error auto error = errno; std::cerr << "Error executing AppImage " << realPathToAppImage << ": code " << error << ": " << strerror(error) << std::endl; exit(1); } } // Reads an ELF file section and returns its contents. static std::string readElfSection(const std::string& filePath, const std::string& sectionName) { unsigned long offset = 0, length = 0; auto rv = appimage_get_elf_section_offset_and_length(filePath.c_str(), sectionName.c_str(), &offset, &length); if (!rv || offset == 0 || length == 0) return ""; std::ifstream ifs(filePath); ifs.seekg(offset); std::vector<char> buffer(length+1, 0); ifs.read(buffer.data(), length); return buffer.data(); } static std::string findInPATH(const std::string& name) { const std::string PATH = getenv("PATH"); for (const auto& path : split(PATH, ':')) { std::ostringstream oss; oss << path << "/" << name; auto fullPath = oss.str(); // TODO: check whether file is actually executable if (isFile(fullPath)) return fullPath; } return ""; } static bool stringStartsWith(const std::string& string, const std::string& prefix) { return strncmp(string.c_str(), prefix.c_str(), prefix.size()) == 0; } static std::string abspath(const std::string& path) { char* fullPath = nullptr; if ((fullPath = realpath(path.c_str(), nullptr)) == nullptr) { auto error = errno; std::cerr << "Failed to resolve full path to AppImage: " << strerror(error) << std::endl; return ""; } std::string rv = fullPath; // clean up free(fullPath); fullPath 
= nullptr; return rv; } static std::string pathToOldAppImage(const std::string& oldPath, const std::string& newPath) { if (oldPath == newPath) { return newPath + ".zs-old"; } return abspath(oldPath); }; // workaround for AppImageLauncher limitation, see https://github.com/AppImage/AppImageUpdate/issues/131 static std::string ailfsRealpath(const std::string& path) { std::stringstream ailfsBasePath; ailfsBasePath << "/run/user/" << getuid() << "/appimagelauncherfs/"; if (path.find(ailfsBasePath.str()) == std::string::npos) return path; std::stringstream mapFilePath; mapFilePath << ailfsBasePath.str() << "/map"; std::ifstream ifs(mapFilePath.str()); if (!ifs) throw std::runtime_error("Could not open appimagelauncherfs map file"); std::string pathFileName; { std::unique_ptr<char> pathCStr(strdup(path.c_str())); pathFileName = basename(pathCStr.get()); } std::string currentLine; while (std::getline(ifs, currentLine)) { const std::string delim = " -> "; const auto delimiterPos = currentLine.find(delim); const auto ailfsFileName = currentLine.substr(0, delimiterPos); const auto targetFilePath = currentLine.substr(delimiterPos + delim.length()); if (ailfsFileName == pathFileName) return targetFilePath; } throw std::runtime_error("Could not resolve path in appimagelauncherfs map file"); } }; // namespace update } // namespace appimage
4,933
4,140
<reponame>FANsZL/hive<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.llap.cli.status; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; /** * Representing the state of an Llap instance monitored. */ class LlapInstance { private final String hostname; private final String containerId; private String logUrl; // Only for live instances. 
private String statusUrl; private String webUrl; private Integer rpcPort; private Integer mgmtPort; private Integer shufflePort; // For completed instances private String diagnostics; private int yarnContainerExitStatus; // TODO HIVE-13454 Add additional information such as #executors, container size, etc LlapInstance(String hostname, String containerId) { this.hostname = hostname; this.containerId = containerId; } LlapInstance setLogUrl(String logUrl) { this.logUrl = logUrl; return this; } LlapInstance setStatusUrl(String statusUrl) { this.statusUrl = statusUrl; return this; } LlapInstance setWebUrl(String webUrl) { this.webUrl = webUrl; return this; } LlapInstance setRpcPort(int rpcPort) { this.rpcPort = rpcPort; return this; } LlapInstance setMgmtPort(int mgmtPort) { this.mgmtPort = mgmtPort; return this; } LlapInstance setShufflePort(int shufflePort) { this.shufflePort = shufflePort; return this; } LlapInstance setDiagnostics(String diagnostics) { this.diagnostics = diagnostics; return this; } LlapInstance setYarnContainerExitStatus(int yarnContainerExitStatus) { this.yarnContainerExitStatus = yarnContainerExitStatus; return this; } String getHostname() { return hostname; } String getContainerId() { return containerId; } String getLogUrl() { return logUrl; } String getStatusUrl() { return statusUrl; } String getWebUrl() { return webUrl; } Integer getRpcPort() { return rpcPort; } Integer getMgmtPort() { return mgmtPort; } Integer getShufflePort() { return shufflePort; } String getDiagnostics() { return diagnostics; } int getYarnContainerExitStatus() { return yarnContainerExitStatus; } @Override public String toString() { return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE); } }
1,014
1,125
<reponame>chefmramos85/monster-mash // Copyright (c) 2017 <NAME> // Licensed under the MIT License. #include "imageUtils.h" #include <stack> using namespace std; using namespace Eigen; void floodFill(Imguc &I, int startX, int startY, unsigned char targetColor, unsigned char replacementColor) { stack<pair<int,int>> S; S.push(make_pair(startX,startY)); while (!S.empty()) { const pair<int,int> &front = S.top(); const int x = front.first, y = front.second; S.pop(); if (x < 0 || x >= I.w || y < 0 || y >= I.h) continue; unsigned char &sample = I(x, y, 0); if (targetColor == replacementColor || sample != targetColor) continue; sample = replacementColor; S.push(make_pair(x, y+1)); S.push(make_pair(x, y-1)); S.push(make_pair(x-1, y)); S.push(make_pair(x+1, y)); } } void floodFill(const Imguc &I, Imguc &O, int startX, int startY, unsigned char targetColor, unsigned char replacementColor) { stack<pair<int,int>> S; S.push(make_pair(startX,startY)); while (!S.empty()) { const pair<int,int> &front = S.top(); const int x = front.first, y = front.second; S.pop(); if (x < 0 || x >= I.w || y < 0 || y >= I.h) continue; const unsigned char &sampleI = I(x, y, 0); unsigned char &sampleO = O(x, y, 0); if (sampleI != targetColor || sampleO == replacementColor) continue; // sampleI == targetColor && sampleO != replacementColor sampleO = replacementColor; S.push(make_pair(x, y+1)); S.push(make_pair(x, y-1)); S.push(make_pair(x-1, y)); S.push(make_pair(x+1, y)); } } void scanlineFillOutline(const Imguc &outline, Imguc &out, unsigned char outlineColor) { const Imguc &O = outline; fora(y,1,out.h-1) { bool inside = false; unsigned char prev = O(0,0,0); int stepUp = 0, stepDown = 0; fora(x,1,out.w-1) { const unsigned char sample = O(x,y,0); if (sample != prev) { if (sample == outlineColor) { stepUp = 0; stepDown = 0; } if (sample == outlineColor && (O(x-1,y+1,0)==outlineColor || O(x,y+1,0)==outlineColor)) stepUp++; // start outline, step up if (sample != outlineColor && 
(O(x-1,y-1,0)==outlineColor || O(x,y-1,0)==outlineColor)) stepUp++; // end outline, step up if (sample == outlineColor && (O(x-1,y-1,0)==outlineColor || O(x,y-1,0)==outlineColor)) stepDown++; // start outline, step down if (sample != outlineColor && (O(x-1,y+1,0)==outlineColor || O(x,y+1,0)==outlineColor)) stepDown++; // end outline, step down if (sample != outlineColor && !(stepUp==stepDown && stepUp == 1)) inside = !inside; } if (inside) out(x,y,0) = outlineColor; prev = sample; } } }
1,118
310
<filename>Projects/Editor/Source/Editor/Space/Assets/CTreeFSController.h /* !@ MIT License Copyright (c) 2020 Skylicht Technology CO., LTD Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the Rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. This file is part of the "Skylicht Engine". 
https://github.com/skylicht-lab/skylicht-engine !# */ #pragma once #include "GUI/GUI.h" #include "AssetManager/CAssetManager.h" namespace Skylicht { namespace Editor { class CListFSController; class CSearchAssetController; class CTreeFSController { protected: GUI::CCanvas* m_canvas; GUI::CTreeControl* m_treeFS; CAssetManager* m_assetManager; GUI::CTreeNode* m_nodeAssets; GUI::CTreeNode* m_renameNode; std::wstring m_renameRevert; CListFSController* m_listController; CSearchAssetController* m_searchController; GUI::CMessageBox* m_msgBox; public: CTreeFSController(GUI::CCanvas* canvas, GUI::CTreeControl* treeFS); virtual ~CTreeFSController(); void OnExpand(GUI::CBase* node); void OnCollapse(GUI::CBase* node); void OnSelected(GUI::CBase* node); void OnKeyPress(GUI::CBase* control, int key, bool press); void OnRename(GUI::CBase* control); void OnCancelRename(GUI::CBase* control); void setListController(CListFSController* listController) { m_listController = listController; } void setSearchController(CSearchAssetController* searchController) { m_searchController = searchController; } void rename(GUI::CTreeNode* node); void removePath(const char* path); bool removePath(GUI::CTreeNode* node, const char* path); public: void expand(const std::string& folder); void add(GUI::CTreeNode* node, std::vector<SFileInfo>& files); void refresh(GUI::CTreeNode* node); void refresh(); protected: void initDragDrop(GUI::CTreeNode* node); }; } }
924
15,577
<filename>src/Disks/IVolume.h<gh_stars>1000+ #pragma once #include <Disks/IDisk.h> #include <Disks/DiskSelector.h> #include <Poco/Util/AbstractConfiguration.h> namespace DB { enum class VolumeType { JBOD, RAID1, SINGLE_DISK, UNKNOWN }; class IVolume; using VolumePtr = std::shared_ptr<IVolume>; using Volumes = std::vector<VolumePtr>; /** * Disks group by some (user) criteria. For example, * - VolumeJBOD("slow_disks", [d1, d2], 100) * - VolumeJBOD("fast_disks", [d3, d4], 200) * * Here VolumeJBOD is one of implementations of IVolume. * * Different of implementations of this interface implement different reserve behaviour — * VolumeJBOD reserves space on the next disk after the last used, other future implementations * will reserve, for example, equal spaces on all disks. */ class IVolume : public Space { public: IVolume(String name_, Disks disks_, size_t max_data_part_size_ = 0, bool perform_ttl_move_on_insert_ = true) : disks(std::move(disks_)) , name(name_) , max_data_part_size(max_data_part_size_) , perform_ttl_move_on_insert(perform_ttl_move_on_insert_) { } IVolume( String name_, const Poco::Util::AbstractConfiguration & config, const String & config_prefix, DiskSelectorPtr disk_selector ); virtual ReservationPtr reserve(UInt64 bytes) override = 0; /// Volume name from config const String & getName() const override { return name; } virtual VolumeType getType() const = 0; /// Return biggest unreserved space across all disks UInt64 getMaxUnreservedFreeSpace() const; DiskPtr getDisk() const { return getDisk(0); } virtual DiskPtr getDisk(size_t i) const { return disks[i]; } const Disks & getDisks() const { return disks; } /// Returns effective value of whether merges are allowed on this volume (true) or not (false). virtual bool areMergesAvoided() const { return false; } /// User setting for enabling and disabling merges on volume. 
virtual void setAvoidMergesUserOverride(bool /*avoid*/) {} protected: Disks disks; const String name; public: /// Max size of reservation, zero means unlimited size UInt64 max_data_part_size = 0; /// Should a new data part be synchronously moved to a volume according to ttl on insert /// or move this part in background task asynchronously after insert. bool perform_ttl_move_on_insert = true; }; /// Reservation for multiple disks at once. Can be used in RAID1 implementation. class MultiDiskReservation : public IReservation { public: MultiDiskReservation(Reservations & reservations, UInt64 size); UInt64 getSize() const override { return size; } DiskPtr getDisk(size_t i) const override { return reservations[i]->getDisk(); } Disks getDisks() const override; void update(UInt64 new_size) override; private: Reservations reservations; UInt64 size; }; }
1,013
332
/*
 *
 * Copyright 2020 WeBank
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.webank.wedatasphere.exchangis.datasource.conns.cache;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;

/**
 * Factory abstraction over Guava caches: builds named, string-keyed caches
 * and registers a removal listener on each.
 *
 * @author davidhua
 * 2019/1/9
 */
public interface CacheManager {
    /**
     * Build simple cache
     * @param cacheId identifier of the cache to build (presumably also used for lookup/registration by implementations — verify)
     * @param removalListener callback invoked when an entry is evicted or removed
     * @param <V> type of the cached values
     * @return a manually-populated {@link Cache} keyed by string
     */
    <V>Cache<String, V> buildCache(String cacheId, RemovalListener<String, V> removalListener);

    /**
     * Build loading cache
     * @param cacheId identifier of the cache to build
     * @param loader loader used to compute values on cache miss
     * @param removalListener callback invoked when an entry is evicted or removed
     * @param <V> type of the cached values
     * @return a self-populating {@link LoadingCache} keyed by string
     */
    <V>LoadingCache<String, V> buildCache(String cacheId, CacheLoader<String, V> loader,
                                          RemovalListener<String, V> removalListener);
}
440
767
"""Tests for fuel.transformers.image transformers.

Covers ImagesFromBytes (decoding encoded image bytes), MinimumImageDimensions
(upscaling small images), RandomFixedSizeCrop and Random2DRotation.
"""
from collections import OrderedDict
from io import BytesIO

import numpy
from numpy.testing import assert_raises, assert_equal
from PIL import Image
from picklable_itertools.extras import partition_all
from six.moves import zip

from fuel import config
from fuel.datasets.base import IndexableDataset
from fuel.schemes import ShuffledScheme, SequentialExampleScheme
from fuel.streams import DataStream
from fuel.transformers.image import (ImagesFromBytes,
                                     MinimumImageDimensions,
                                     RandomFixedSizeCrop,
                                     Random2DRotation)


def reorder_axes(shp):
    """Map an image shape to channels-first form: HxWxC -> CxHxW, HxW -> 1xHxW."""
    if len(shp) == 3:
        shp = (shp[-1],) + shp[:-1]
    elif len(shp) == 2:
        shp = (1,) + shp
    return shp


class ImageTestingMixin(object):
    """Shared setup: builds an example stream and a batch stream over self.dataset."""

    def common_setup(self):
        ex_scheme = SequentialExampleScheme(self.dataset.num_examples)
        self.example_stream = DataStream(self.dataset,
                                         iteration_scheme=ex_scheme)
        self.batch_size = 2
        scheme = ShuffledScheme(self.dataset.num_examples,
                                batch_size=self.batch_size)
        self.batch_stream = DataStream(self.dataset, iteration_scheme=scheme)


class TestImagesFromBytes(ImageTestingMixin):
    """Tests decoding of in-memory encoded images (PNG/JPEG/BMP/GIF)."""

    def setUp(self):
        # Six random images in different PIL modes, serialized into bytes.
        rng = numpy.random.RandomState(config.default_seed)
        self.shapes = [
            (10, 12, 3),
            (9, 8, 4),
            (12, 14, 3),
            (4, 7),
            (9, 8, 4),
            (7, 9, 3)
        ]
        pil1 = Image.fromarray(rng.random_integers(0, 255,
                                                   size=self.shapes[0])
                               .astype('uint8'), mode='RGB')
        pil2 = Image.fromarray(rng.random_integers(0, 255,
                                                   size=self.shapes[1])
                               .astype('uint8'), mode='CMYK')
        pil3 = Image.fromarray(rng.random_integers(0, 255,
                                                   size=self.shapes[2])
                               .astype('uint8'), mode='RGB')
        pil4 = Image.fromarray(rng.random_integers(0, 255,
                                                   size=self.shapes[3])
                               .astype('uint8'), mode='L')
        pil5 = Image.fromarray(rng.random_integers(0, 255,
                                                   size=self.shapes[4])
                               .astype('uint8'), mode='RGBA')
        pil6 = Image.fromarray(rng.random_integers(0, 255,
                                                   size=self.shapes[5])
                               .astype('uint8'), mode='YCbCr')
        source1 = [pil1, pil2, pil3]
        source2 = [pil4, pil5, pil6]
        bytesio1 = [BytesIO() for _ in range(3)]
        bytesio2 = [BytesIO() for _ in range(3)]
        formats1 = ['PNG', 'JPEG', 'BMP']
        formats2 = ['GIF', 'PNG', 'JPEG']
        for s, b, f in zip(source1, bytesio1, formats1):
            s.save(b, format=f)
        for s, b, f in zip(source2, bytesio2, formats2):
            s.save(b, format=f)
        self.dataset = IndexableDataset(
            OrderedDict([('source1', [b.getvalue() for b in bytesio1]),
                         ('source2', [b.getvalue() for b in bytesio2])]),
            axis_labels={'source1': ('batch', 'bytes'),
                         'source2': ('batch', 'bytes')})
        self.common_setup()

    def test_images_from_bytes_example_stream(self):
        # Decoded examples should have channels-first shapes of the originals.
        stream = ImagesFromBytes(self.example_stream,
                                 which_sources=('source1', 'source2'),
                                 color_mode=None)
        s1, s2 = list(zip(*list(stream.get_epoch_iterator())))
        s1_shape = set(s.shape for s in s1)
        s2_shape = set(s.shape for s in s2)
        actual_s1 = set(reorder_axes(s) for s in self.shapes[:3])
        actual_s2 = set(reorder_axes(s) for s in self.shapes[3:])
        assert actual_s1 == s1_shape
        assert actual_s2 == s2_shape

    def test_images_from_bytes_batch_stream(self):
        stream = ImagesFromBytes(self.batch_stream,
                                 which_sources=('source1', 'source2'),
                                 color_mode=None)
        s1, s2 = list(zip(*list(stream.get_epoch_iterator())))
        # Flatten the list-of-batches into a flat list of examples.
        s1 = sum(s1, [])
        s2 = sum(s2, [])
        s1_shape = set(s.shape for s in s1)
        s2_shape = set(s.shape for s in s2)
        actual_s1 = set(reorder_axes(s) for s in self.shapes[:3])
        actual_s2 = set(reorder_axes(s) for s in self.shapes[3:])
        assert actual_s1 == s1_shape
        assert actual_s2 == s2_shape

    def test_images_from_bytes_example_stream_convert_rgb(self):
        # Forcing RGB should yield 3 channels regardless of the source mode.
        stream = ImagesFromBytes(self.example_stream,
                                 which_sources=('source1'),
                                 color_mode='RGB')
        s1, s2 = list(zip(*list(stream.get_epoch_iterator())))
        actual_s1_gen = (reorder_axes(s) for s in self.shapes[:3])
        actual_s1 = set((3,) + s[1:] for s in actual_s1_gen)
        s1_shape = set(s.shape for s in s1)
        assert actual_s1 == s1_shape

    def test_images_from_bytes_example_stream_convert_l(self):
        # Forcing grayscale ('L') should yield a single channel.
        stream = ImagesFromBytes(self.example_stream,
                                 which_sources=('source2'),
                                 color_mode='L')
        s1, s2 = list(zip(*list(stream.get_epoch_iterator())))
        actual_s2_gen = (reorder_axes(s) for s in self.shapes[3:])
        actual_s2 = set((1,) + s[1:] for s in actual_s2_gen)
        s2_shape = set(s.shape for s in s2)
        assert actual_s2 == s2_shape

    def test_axis_labels(self):
        # Transformed sources get image axis labels; untouched sources keep theirs.
        stream = ImagesFromBytes(self.example_stream,
                                 which_sources=('source2',))
        assert stream.axis_labels['source1'] == ('bytes',)
        assert stream.axis_labels['source2'] == ('channel', 'height',
                                                 'width')
        bstream = ImagesFromBytes(self.batch_stream,
                                  which_sources=('source1',))
        assert bstream.axis_labels['source1'] == ('batch', 'channel',
                                                  'height', 'width')
        assert bstream.axis_labels['source2'] == ('batch', 'bytes')

    def test_bytes_type_exception(self):
        # Non-bytes input must raise TypeError.
        stream = ImagesFromBytes(self.example_stream,
                                 which_sources=('source2',))
        assert_raises(TypeError, stream.transform_source_example, 54321,
                      'source2')


class TestMinimumDimensions(ImageTestingMixin):
    """Tests upscaling images below a minimum height/width."""

    def setUp(self):
        rng = numpy.random.RandomState(config.default_seed)
        source1 = []
        source2 = []
        source3 = []
        self.shapes = [(5, 9), (4, 6), (4, 3), (6, 4), (2, 5), (4, 8),
                       (8, 3)]
        for i, shape in enumerate(self.shapes):
            source1.append(rng.normal(size=shape))
            # source2 uses the transposed shapes and is left untransformed.
            source2.append(rng.normal(size=shape[::-1]))
            source3.append(rng.random_integers(0, 255, size=(3,) + shape)
                           .astype('uint8'))
        self.dataset = IndexableDataset(OrderedDict([('source1', source1),
                                                     ('source2', source2),
                                                     ('source3', source3)]),
                                        axis_labels={'source1':
                                                     ('batch', 'channel',
                                                      'height', 'width'),
                                                     'source3':
                                                     ('batch', 'channel',
                                                      'height', 'width')})
        self.common_setup()

    def test_minimum_dimensions_example_stream(self):
        stream = MinimumImageDimensions(self.example_stream, (4, 5),
                                        which_sources=('source1',
                                                       'source3'))
        it = stream.get_epoch_iterator()
        for example, shp in zip(it, self.shapes):
            # Transformed sources meet the minimum; source2 is untouched.
            assert example[0].shape[0] >= 4 and example[0].shape[1] >= 5
            assert (example[1].shape[1] == shp[0] and
                    example[1].shape[0] == shp[1])
            assert example[2].shape[0] == 3
            assert example[2].shape[1] >= 4 and example[2].shape[2] >= 5

    def test_minimum_dimensions_batch_stream(self):
        stream = MinimumImageDimensions(self.batch_stream, (4, 5),
                                        which_sources=('source1',))
        it = stream.get_epoch_iterator()
        for batch, shapes in zip(it, partition_all(self.batch_size,
                                                   self.shapes)):
            # NOTE(review): these assert bare generator expressions, which are
            # always truthy — the per-example conditions are never evaluated.
            assert (example.shape[0] >= 4 and example.shape[1] >= 5
                    for example in batch[0])
            assert (example.shape[1] == shp[0] and
                    example.shape[0] == shp[1]
                    for example, shp in zip(batch[1], shapes))

    def test_axes_exception(self):
        # A 4D (batched) array passed as a single example must be rejected.
        stream = MinimumImageDimensions(self.example_stream, (4, 5),
                                        which_sources=('source1',))
        assert_raises(NotImplementedError,
                      stream.transform_source_example,
                      numpy.empty((2, 3, 4, 2)),
                      'source1')

    def test_resample_exception(self):
        assert_raises(ValueError, MinimumImageDimensions,
                      self.example_stream,
                      (4, 5), resample='notarealresamplingmode')


class TestFixedSizeRandomCrop(ImageTestingMixin):
    """Tests random fixed-size crops on ndarray and list batch sources."""

    def setUp(self):
        # source1: 9 identical 3x7x5 images whose pixel values encode position.
        source1 = numpy.zeros((9, 3, 7, 5), dtype='uint8')
        source1[:] = numpy.arange(3 * 7 * 5, dtype='uint8').reshape(3, 7, 5)
        shapes = [(5, 9), (6, 8), (5, 6), (5, 5), (6, 4), (7, 4), (9, 4),
                  (5, 6), (6, 5)]
        source2 = []
        biggest = 0
        num_channels = 2
        for shp in shapes:
            biggest = max(biggest, shp[0] * shp[1] * 2)
            ex = numpy.arange(shp[0] * shp[1] * num_channels).reshape(
                (num_channels,) + shp).astype('uint8')
            source2.append(ex)
        self.source2_biggest = biggest
        axis_labels = {'source1': ('batch', 'channel', 'height', 'width'),
                       'source2': ('batch', 'channel', 'height', 'width')}
        self.dataset = IndexableDataset(OrderedDict([('source1', source1),
                                                     ('source2', source2)]),
                                        axis_labels=axis_labels)
        self.common_setup()

    def test_ndarray_batch_source(self):
        # Make sure that with enough epochs we sample everything.
        stream = RandomFixedSizeCrop(self.batch_stream, (5, 4),
                                     which_sources=('source1',))
        seen_indices = numpy.array([], dtype='uint8')
        for i in range(30):
            for batch in stream.get_epoch_iterator():
                assert batch[0].shape[1:] == (3, 5, 4)
                assert batch[0].shape[0] in (1, 2)
                seen_indices = numpy.union1d(seen_indices,
                                             batch[0].flatten())
            if 3 * 7 * 5 == len(seen_indices):
                break
        else:
            assert False

    def test_list_batch_source(self):
        # Make sure that with enough epochs we sample everything.
        stream = RandomFixedSizeCrop(self.batch_stream, (5, 4),
                                     which_sources=('source2',))
        seen_indices = numpy.array([], dtype='uint8')
        for i in range(30):
            for batch in stream.get_epoch_iterator():
                for example in batch[1]:
                    assert example.shape == (2, 5, 4)
                    seen_indices = numpy.union1d(seen_indices,
                                                 example.flatten())
                assert len(batch[1]) in (1, 2)
            if self.source2_biggest == len(seen_indices):
                break
        else:
            assert False

    def test_format_exceptions(self):
        # 2D inputs (no channel axis) must be rejected.
        estream = RandomFixedSizeCrop(self.example_stream, (5, 4),
                                      which_sources=('source2',))
        bstream = RandomFixedSizeCrop(self.batch_stream, (5, 4),
                                      which_sources=('source2',))
        assert_raises(ValueError, estream.transform_source_example,
                      numpy.empty((5, 6)), 'source2')
        assert_raises(ValueError, bstream.transform_source_batch,
                      [numpy.empty((7, 6))], 'source2')
        assert_raises(ValueError, bstream.transform_source_batch,
                      [numpy.empty((8, 6))], 'source2')

    def test_window_too_big_exceptions(self):
        # Crop window larger than the image must raise.
        stream = RandomFixedSizeCrop(self.example_stream, (5, 4),
                                     which_sources=('source2',))
        assert_raises(ValueError, stream.transform_source_example,
                      numpy.empty((3, 4, 2)), 'source2')
        bstream = RandomFixedSizeCrop(self.batch_stream, (5, 4),
                                      which_sources=('source1',))
        assert_raises(ValueError, bstream.transform_source_batch,
                      numpy.empty((5, 3, 4, 2)), 'source1')


class TestRandom2DRotation(ImageTestingMixin):
    """Tests random 2D rotations with fixed RNG seeds and exact expectations."""

    def setUp(self):
        source1 = numpy.zeros((2, 3, 4, 6), dtype='uint8')
        source1[:] = numpy.arange(3 * 4 * 6, dtype='uint8').reshape((3, 4,
                                                                     6))
        # source2: object array holding images of differing widths.
        source2 = numpy.empty(2, dtype=object)
        source2[0] = numpy.arange(3 * 4 * 6, dtype='uint8').reshape((3, 4,
                                                                     6))
        source2[1] = numpy.arange(3 * 4 * 7, dtype='uint8').reshape((3, 4,
                                                                     7))
        # source3: the same images as a plain Python list.
        source3 = [source2[0], source2[1]]
        self.source1 = source1
        self.source2 = source2
        self.source3 = source3
        axis_labels = {'source1': ('batch', 'channel', 'height', 'width'),
                       'source2': ('batch', 'channel', 'height', 'width'),
                       'source3': ('batch', 'channel', 'height', 'width')}
        self.dataset = \
            IndexableDataset(OrderedDict([('source1', source1),
                                          ('source2', source2),
                                          ('source3', source3)]),
                             axis_labels=axis_labels)
        self.common_setup()

    def test_format_exceptions(self):
        estream = Random2DRotation(self.example_stream,
                                   which_sources=('source2',))
        bstream = Random2DRotation(self.batch_stream,
                                   which_sources=('source2',))
        assert_raises(ValueError, estream.transform_source_example,
                      numpy.empty((5, 6)), 'source2')
        assert_raises(ValueError, bstream.transform_source_batch,
                      [numpy.empty((7, 6))], 'source2')
        assert_raises(ValueError, bstream.transform_source_batch,
                      [numpy.empty((8, 6))], 'source2')

    def test_maximum_rotation_invalid_exception(self):
        # maximum_rotation must lie in (0, pi).
        assert_raises(ValueError, Random2DRotation, self.example_stream,
                      maximum_rotation=0.0,
                      which_sources=('source2',))
        assert_raises(ValueError, Random2DRotation, self.example_stream,
                      maximum_rotation=3.1416,
                      which_sources=('source2',))

    def test_invalid_resample_exception(self):
        assert_raises(ValueError, Random2DRotation, self.example_stream,
                      resample='nonexisting')

    def test_random_2D_rotation_example_stream(self):
        maximum_rotation = 0.5
        rng = numpy.random.RandomState(123)
        estream = Random2DRotation(self.example_stream,
                                   maximum_rotation,
                                   rng=rng,
                                   which_sources=('source1',))
        # the C x X x Y image should have equal rotation for all c in C
        out = estream.transform_source_example(self.source1[0], 'source1')
        expected = numpy.array([[[0, 1, 2, 3, 4, 11],
                                 [6, 7, 8, 9, 10, 11],
                                 [12, 13, 14, 15, 16, 17],
                                 [12, 19, 20, 21, 22, 23]],
                                [[24, 25, 26, 27, 28, 35],
                                 [30, 31, 32, 33, 34, 35],
                                 [36, 37, 38, 39, 40, 41],
                                 [36, 43, 44, 45, 46, 47]],
                                [[48, 49, 50, 51, 52, 59],
                                 [54, 55, 56, 57, 58, 59],
                                 [60, 61, 62, 63, 64, 65],
                                 [60, 67, 68, 69, 70, 71]]],
                               dtype="uint8")
        assert_equal(out, expected)

    def test_random_2D_rotation_batch_stream(self):
        rng = numpy.random.RandomState(123)
        bstream = Random2DRotation(self.batch_stream,
                                   maximum_rotation=0.5,
                                   rng=rng,
                                   which_sources=('source1',))
        # each C x X x Y image should have equal rotation for all c in C
        out = bstream.transform_source_batch(self.source1, 'source1')
        expected = numpy.array([[[[0, 1, 2, 3, 4, 11],
                                  [6, 7, 8, 9, 10, 11],
                                  [12, 13, 14, 15, 16, 17],
                                  [12, 19, 20, 21, 22, 23]],
                                 [[24, 25, 26, 27, 28, 35],
                                  [30, 31, 32, 33, 34, 35],
                                  [36, 37, 38, 39, 40, 41],
                                  [36, 43, 44, 45, 46, 47]],
                                 [[48, 49, 50, 51, 52, 59],
                                  [54, 55, 56, 57, 58, 59],
                                  [60, 61, 62, 63, 64, 65],
                                  [60, 67, 68, 69, 70, 71]]],
                                [[[6, 1, 2, 3, 4, 5],
                                  [12, 7, 8, 9, 10, 5],
                                  [18, 13, 14, 15, 16, 11],
                                  [18, 19, 20, 21, 22, 17]],
                                 [[30, 25, 26, 27, 28, 29],
                                  [36, 31, 32, 33, 34, 29],
                                  [42, 37, 38, 39, 40, 35],
                                  [42, 43, 44, 45, 46, 41]],
                                 [[54, 49, 50, 51, 52, 53],
                                  [60, 55, 56, 57, 58, 53],
                                  [66, 61, 62, 63, 64, 59],
                                  [66, 67, 68, 69, 70, 65]]]],
                               dtype='uint8')
        assert_equal(out, expected)

        # Object arrays / lists with differing image widths share the same
        # expected output; the RNG is reseeded so the rotations repeat.
        expected = \
            [numpy.array([[[0, 1, 2, 3, 4, 11],
                           [6, 7, 8, 9, 10, 11],
                           [12, 13, 14, 15, 16, 17],
                           [12, 19, 20, 21, 22, 23]],
                          [[24, 25, 26, 27, 28, 35],
                           [30, 31, 32, 33, 34, 35],
                           [36, 37, 38, 39, 40, 41],
                           [36, 43, 44, 45, 46, 47]],
                          [[48, 49, 50, 51, 52, 59],
                           [54, 55, 56, 57, 58, 59],
                           [60, 61, 62, 63, 64, 65],
                           [60, 67, 68, 69, 70, 71]]], dtype='uint8'),
             numpy.array([[[7, 1, 2, 3, 4, 5, 0],
                           [14, 8, 9, 10, 11, 12, 6],
                           [21, 15, 16, 17, 18, 19, 13],
                           [0, 22, 23, 24, 25, 26, 20]],
                          [[35, 29, 30, 31, 32, 33, 0],
                           [42, 36, 37, 38, 39, 40, 34],
                           [49, 43, 44, 45, 46, 47, 41],
                           [0, 50, 51, 52, 53, 54, 48]],
                          [[63, 57, 58, 59, 60, 61, 0],
                           [70, 64, 65, 66, 67, 68, 62],
                           [77, 71, 72, 73, 74, 75, 69],
                           [0, 78, 79, 80, 81, 82, 76]]], dtype='uint8')]
        rng = numpy.random.RandomState(123)
        bstream = Random2DRotation(self.batch_stream,
                                   maximum_rotation=0.5,
                                   rng=rng,
                                   which_sources=('source2',))
        out = bstream.transform_source_batch(self.source2, 'source2')
        assert_equal(out[0], expected[0])
        assert_equal(out[1], expected[1])

        rng = numpy.random.RandomState(123)
        bstream = Random2DRotation(self.batch_stream,
                                   maximum_rotation=0.5,
                                   rng=rng,
                                   which_sources=('source3',))
        out = bstream.transform_source_batch(self.source3, 'source3')
        assert_equal(out[0], expected[0])
        assert_equal(out[1], expected[1])
12,302
1,947
#include "baldr/admin.h"

#include <string>
#include <unordered_map>

namespace {

// For transforming ISO 3166-1 country codes from alpha2 to alpha3.
// Read-only lookup table; declared const so the global cannot be mutated.
// Note: includes user-assigned/transitional codes (XK Kosovo, CS Serbia and
// Montenegro, AN Netherlands Antilles) alongside the official assignments.
const std::unordered_map<std::string, std::string> iso2_to_iso3 =
    {{"AD", "AND"}, {"AE", "ARE"}, {"AF", "AFG"}, {"AG", "ATG"}, {"AI", "AIA"}, {"AL", "ALB"},
     {"AM", "ARM"}, {"AO", "AGO"}, {"AQ", "ATA"}, {"AR", "ARG"}, {"AS", "ASM"}, {"AT", "AUT"},
     {"AU", "AUS"}, {"AW", "ABW"}, {"AX", "ALA"}, {"AZ", "AZE"}, {"BA", "BIH"}, {"BB", "BRB"},
     {"BD", "BGD"}, {"BE", "BEL"}, {"BF", "BFA"}, {"BG", "BGR"}, {"BH", "BHR"}, {"BI", "BDI"},
     {"BJ", "BEN"}, {"BL", "BLM"}, {"BM", "BMU"}, {"BN", "BRN"}, {"BO", "BOL"}, {"BQ", "BES"},
     {"BR", "BRA"}, {"BS", "BHS"}, {"BT", "BTN"}, {"BV", "BVT"}, {"BW", "BWA"}, {"BY", "BLR"},
     {"BZ", "BLZ"}, {"CA", "CAN"}, {"CC", "CCK"}, {"CD", "COD"}, {"CF", "CAF"}, {"CG", "COG"},
     {"CH", "CHE"}, {"CI", "CIV"}, {"CK", "COK"}, {"CL", "CHL"}, {"CM", "CMR"}, {"CN", "CHN"},
     {"CO", "COL"}, {"CR", "CRI"}, {"CU", "CUB"}, {"CV", "CPV"}, {"CW", "CUW"}, {"CX", "CXR"},
     {"CY", "CYP"}, {"CZ", "CZE"}, {"DE", "DEU"}, {"DJ", "DJI"}, {"DK", "DNK"}, {"DM", "DMA"},
     {"DO", "DOM"}, {"DZ", "DZA"}, {"EC", "ECU"}, {"EE", "EST"}, {"EG", "EGY"}, {"EH", "ESH"},
     {"ER", "ERI"}, {"ES", "ESP"}, {"ET", "ETH"}, {"FI", "FIN"}, {"FJ", "FJI"}, {"FK", "FLK"},
     {"FM", "FSM"}, {"FO", "FRO"}, {"FR", "FRA"}, {"GA", "GAB"}, {"GB", "GBR"}, {"GD", "GRD"},
     {"GE", "GEO"}, {"GF", "GUF"}, {"GG", "GGY"}, {"GH", "GHA"}, {"GI", "GIB"}, {"GL", "GRL"},
     {"GM", "GMB"}, {"GN", "GIN"}, {"GP", "GLP"}, {"GQ", "GNQ"}, {"GR", "GRC"}, {"GS", "SGS"},
     {"GT", "GTM"}, {"GU", "GUM"}, {"GW", "GNB"}, {"GY", "GUY"}, {"HK", "HKG"}, {"HM", "HMD"},
     {"HN", "HND"}, {"HR", "HRV"}, {"HT", "HTI"}, {"HU", "HUN"}, {"ID", "IDN"}, {"IE", "IRL"},
     {"IL", "ISR"}, {"IM", "IMN"}, {"IN", "IND"}, {"IO", "IOT"}, {"IQ", "IRQ"}, {"IR", "IRN"},
     {"IS", "ISL"}, {"IT", "ITA"}, {"JE", "JEY"}, {"JM", "JAM"}, {"JO", "JOR"}, {"JP", "JPN"},
     {"KE", "KEN"}, {"KG", "KGZ"}, {"KH", "KHM"}, {"KI", "KIR"}, {"KM", "COM"}, {"KN", "KNA"},
     {"KP", "PRK"}, {"KR", "KOR"}, {"XK", "XKX"}, {"KW", "KWT"}, {"KY", "CYM"}, {"KZ", "KAZ"},
     {"LA", "LAO"}, {"LB", "LBN"}, {"LC", "LCA"}, {"LI", "LIE"}, {"LK", "LKA"}, {"LR", "LBR"},
     {"LS", "LSO"}, {"LT", "LTU"}, {"LU", "LUX"}, {"LV", "LVA"}, {"LY", "LBY"}, {"MA", "MAR"},
     {"MC", "MCO"}, {"MD", "MDA"}, {"ME", "MNE"}, {"MF", "MAF"}, {"MG", "MDG"}, {"MH", "MHL"},
     {"MK", "MKD"}, {"ML", "MLI"}, {"MM", "MMR"}, {"MN", "MNG"}, {"MO", "MAC"}, {"MP", "MNP"},
     {"MQ", "MTQ"}, {"MR", "MRT"}, {"MS", "MSR"}, {"MT", "MLT"}, {"MU", "MUS"}, {"MV", "MDV"},
     {"MW", "MWI"}, {"MX", "MEX"}, {"MY", "MYS"}, {"MZ", "MOZ"}, {"NA", "NAM"}, {"NC", "NCL"},
     {"NE", "NER"}, {"NF", "NFK"}, {"NG", "NGA"}, {"NI", "NIC"}, {"NL", "NLD"}, {"NO", "NOR"},
     {"NP", "NPL"}, {"NR", "NRU"}, {"NU", "NIU"}, {"NZ", "NZL"}, {"OM", "OMN"}, {"PA", "PAN"},
     {"PE", "PER"}, {"PF", "PYF"}, {"PG", "PNG"}, {"PH", "PHL"}, {"PK", "PAK"}, {"PL", "POL"},
     {"PM", "SPM"}, {"PN", "PCN"}, {"PR", "PRI"}, {"PS", "PSE"}, {"PT", "PRT"}, {"PW", "PLW"},
     {"PY", "PRY"}, {"QA", "QAT"}, {"RE", "REU"}, {"RO", "ROU"}, {"RS", "SRB"}, {"RU", "RUS"},
     {"RW", "RWA"}, {"SA", "SAU"}, {"SB", "SLB"}, {"SC", "SYC"}, {"SD", "SDN"}, {"SS", "SSD"},
     {"SE", "SWE"}, {"SG", "SGP"}, {"SH", "SHN"}, {"SI", "SVN"}, {"SJ", "SJM"}, {"SK", "SVK"},
     {"SL", "SLE"}, {"SM", "SMR"}, {"SN", "SEN"}, {"SO", "SOM"}, {"SR", "SUR"}, {"ST", "STP"},
     {"SV", "SLV"}, {"SX", "SXM"}, {"SY", "SYR"}, {"SZ", "SWZ"}, {"TC", "TCA"}, {"TD", "TCD"},
     {"TF", "ATF"}, {"TG", "TGO"}, {"TH", "THA"}, {"TJ", "TJK"}, {"TK", "TKL"}, {"TL", "TLS"},
     {"TM", "TKM"}, {"TN", "TUN"}, {"TO", "TON"}, {"TR", "TUR"}, {"TT", "TTO"}, {"TV", "TUV"},
     {"TW", "TWN"}, {"TZ", "TZA"}, {"UA", "UKR"}, {"UG", "UGA"}, {"UM", "UMI"}, {"US", "USA"},
     {"UY", "URY"}, {"UZ", "UZB"}, {"VA", "VAT"}, {"VC", "VCT"}, {"VE", "VEN"}, {"VG", "VGB"},
     {"VI", "VIR"}, {"VN", "VNM"}, {"VU", "VUT"}, {"WF", "WLF"}, {"WS", "WSM"}, {"YE", "YEM"},
     {"YT", "MYT"}, {"ZA", "ZAF"}, {"ZM", "ZMB"}, {"ZW", "ZWE"}, {"CS", "SCG"}, {"AN", "ANT"}};

} // namespace

namespace valhalla {
namespace baldr {

// Returns the 3-char equivalent of the 2-char country code (iso_3166_1_alpha2)
// or an empty string if the 2-char code is unknown.
std::string get_iso_3166_1_alpha3(const std::string& iso_3166_1_alpha2) {
  auto iter = iso2_to_iso3.find(iso_3166_1_alpha2);
  return iter == iso2_to_iso3.end() ? std::string() : iter->second;
}

// Constructor given parameters.
//
// The ISO codes are stored in fixed-width char buffers (sizes kCountryIso and
// kStateIso, declared in admin.h). A code that exactly fills its buffer is
// stored without a null terminator; the accessors below compensate by never
// reading past the buffer width. A shorter or otherwise unexpected code is
// stored as an empty string.
Admin::Admin(const uint32_t country_offset,
             const uint32_t state_offset,
             const std::string& country_iso,
             const std::string& state_iso)
    : country_offset_(country_offset), state_offset_(state_offset) {
  // Example: GB or US. Exactly kCountryIso chars fills the buffer;
  // anything else is treated as "no country code".
  if (country_iso.size() == kCountryIso) {
    country_iso.copy(country_iso_, kCountryIso);
  } else {
    country_iso_[0] = '\0';
  }

  // Example: PA (2 chars) — terminated; WLS (3 chars) — fills the buffer.
  if (state_iso.size() == kStateIso - 1) {
    std::size_t length = state_iso.copy(state_iso_, kStateIso - 1);
    state_iso_[length] = '\0';
  } else if (state_iso.size() == kStateIso) {
    state_iso.copy(state_iso_, kStateIso);
  } else {
    state_iso_[0] = '\0';
  }
}

// Get the offset within the text/names list for the state text.
uint32_t Admin::state_offset() const {
  return state_offset_;
}

// Get the offset within the text/names list for the country text.
uint32_t Admin::country_offset() const {
  return country_offset_;
}

// country ISO3166-1. Reconstructs a std::string from the fixed-width buffer,
// stopping at a terminator if one is present (shorter codes) or at the buffer
// width otherwise (codes that exactly fill it).
std::string Admin::country_iso() const {
  std::string str;
  for (int i = 0; i < kCountryIso; i++) {
    if (country_iso_[i] == '\0') {
      break;
    }
    str.append(1, country_iso_[i]);
  }
  return str;
}

// country ISO + dash + state ISO will give you ISO3166-2 for state.
// Same fixed-width reconstruction as country_iso() above.
std::string Admin::state_iso() const {
  std::string str;
  for (int i = 0; i < kStateIso; i++) {
    if (state_iso_[i] == '\0') {
      break;
    }
    str.append(1, state_iso_[i]);
  }
  return str;
}

} // namespace baldr
} // namespace valhalla
2,863
4,140
<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.accumulo.predicate; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Range; import org.apache.hadoop.hive.accumulo.columns.ColumnEncoding; import org.apache.hadoop.hive.accumulo.columns.ColumnMapper; import org.apache.hadoop.hive.accumulo.predicate.compare.CompareOp; import org.apache.hadoop.hive.accumulo.predicate.compare.DoubleCompare; import org.apache.hadoop.hive.accumulo.predicate.compare.Equal; import org.apache.hadoop.hive.accumulo.predicate.compare.GreaterThan; import org.apache.hadoop.hive.accumulo.predicate.compare.GreaterThanOrEqual; import 
org.apache.hadoop.hive.accumulo.predicate.compare.IntCompare; import org.apache.hadoop.hive.accumulo.predicate.compare.LessThan; import org.apache.hadoop.hive.accumulo.predicate.compare.LessThanOrEqual; import org.apache.hadoop.hive.accumulo.predicate.compare.LongCompare; import org.apache.hadoop.hive.accumulo.predicate.compare.NotEqual; import org.apache.hadoop.hive.accumulo.predicate.compare.PrimitiveComparison; import org.apache.hadoop.hive.accumulo.predicate.compare.StringCompare; import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters; import org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.SerializationUtilities; import org.apache.hadoop.hive.ql.index.IndexSearchCondition; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.lazy.LazyUtils; import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaIntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import 
org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.util.StringUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Mockito; import com.google.common.base.Joiner; import com.google.common.collect.Lists; public class TestAccumuloPredicateHandler { private AccumuloPredicateHandler handler = AccumuloPredicateHandler.getInstance(); private JobConf conf; private ColumnMapper columnMapper; @Before public void setup() throws TooManyAccumuloColumnsException { FunctionRegistry.getFunctionNames(); conf = new JobConf(); List<String> columnNames = Arrays.asList("field1", "rid"); List<TypeInfo> columnTypes = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.stringTypeInfo); conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames)); conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string"); String columnMappingStr = "cf:f1,:rowID"; conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr); columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes); } @Test public void testGetRowIDSearchCondition() { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "hi"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf); assertEquals(sConditions.size(), 1); } @Test() public void testRangeEqual() throws SerDeException { ExprNodeDesc 
column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); Collection<Range> ranges = handler.getRanges(conf, columnMapper); assertEquals(ranges.size(), 1); Range range = ranges.iterator().next(); assertTrue(range.isStartKeyInclusive()); assertFalse(range.isEndKeyInclusive()); assertTrue(range.contains(new Key(new Text("aaa")))); assertTrue(range.afterEndKey(new Key(new Text("aab")))); assertTrue(range.beforeStartKey(new Key(new Text("aa")))); } @Test() public void testRangeGreaterThan() throws SerDeException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); Collection<Range> ranges = handler.getRanges(conf, columnMapper); assertEquals(ranges.size(), 1); Range range = ranges.iterator().next(); assertTrue(range.isStartKeyInclusive()); assertFalse(range.isEndKeyInclusive()); assertFalse(range.contains(new Key(new Text("aaa")))); assertFalse(range.afterEndKey(new Key(new Text("ccccc")))); assertTrue(range.contains(new Key(new Text("aab")))); assertTrue(range.beforeStartKey(new 
Key(new Text("aa")))); assertTrue(range.beforeStartKey(new Key(new Text("aaa")))); } @Test public void rangeGreaterThanOrEqual() throws SerDeException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); Collection<Range> ranges = handler.getRanges(conf, columnMapper); assertEquals(ranges.size(), 1); Range range = ranges.iterator().next(); assertTrue(range.isStartKeyInclusive()); assertFalse(range.isEndKeyInclusive()); assertTrue(range.contains(new Key(new Text("aaa")))); assertFalse(range.afterEndKey(new Key(new Text("ccccc")))); assertTrue(range.contains(new Key(new Text("aab")))); assertTrue(range.beforeStartKey(new Key(new Text("aa")))); } @Test public void rangeLessThan() throws SerDeException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPLessThan(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); Collection<Range> ranges = handler.getRanges(conf, columnMapper); assertEquals(ranges.size(), 1); Range range = ranges.iterator().next(); assertTrue(range.isStartKeyInclusive()); 
assertFalse(range.isEndKeyInclusive()); assertFalse(range.contains(new Key(new Text("aaa")))); assertTrue(range.afterEndKey(new Key(new Text("ccccc")))); assertTrue(range.contains(new Key(new Text("aa")))); assertTrue(range.afterEndKey(new Key(new Text("aab")))); assertTrue(range.afterEndKey(new Key(new Text("aaa")))); } @Test public void rangeLessThanOrEqual() throws SerDeException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); Collection<Range> ranges = handler.getRanges(conf, columnMapper); assertEquals(ranges.size(), 1); Range range = ranges.iterator().next(); assertTrue(range.isStartKeyInclusive()); assertFalse(range.isEndKeyInclusive()); assertTrue(range.contains(new Key(new Text("aaa")))); assertTrue(range.afterEndKey(new Key(new Text("ccccc")))); assertTrue(range.contains(new Key(new Text("aa")))); assertTrue(range.afterEndKey(new Key(new Text("aab")))); assertFalse(range.afterEndKey(new Key(new Text("aaa")))); } @Test public void testDisjointRanges() throws SerDeException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children); assertNotNull(node); ExprNodeDesc column2 = new 
ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb"); List<ExprNodeDesc> children2 = Lists.newArrayList(); children2.add(column2); children2.add(constant2); ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2); assertNotNull(node2); List<ExprNodeDesc> bothFilters = Lists.newArrayList(); bothFilters.add(node); bothFilters.add(node2); ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters); String filterExpr = SerializationUtilities.serializeExpression(both); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); Collection<Range> ranges = handler.getRanges(conf, columnMapper); // Impossible to get ranges for row <= 'aaa' and row >= 'bbb' assertEquals(0, ranges.size()); } @Test public void testMultipleRanges() throws SerDeException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrGreaterThan(), children); assertNotNull(node); ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb"); List<ExprNodeDesc> children2 = Lists.newArrayList(); children2.add(column2); children2.add(constant2); ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPLessThan(), children2); assertNotNull(node2); List<ExprNodeDesc> bothFilters = Lists.newArrayList(); bothFilters.add(node); bothFilters.add(node2); ExprNodeGenericFuncDesc both = 
new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters); String filterExpr = SerializationUtilities.serializeExpression(both); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); List<Range> ranges = handler.getRanges(conf, columnMapper); assertEquals(1, ranges.size()); Range range = ranges.get(0); assertEquals(new Range(new Key("aaa"), true, new Key("bbb"), false), range); } @Test public void testPushdownTuple() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field1", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf); assertEquals(sConditions.size(), 1); IndexSearchCondition sc = sConditions.get(0); PushdownTuple tuple = new PushdownTuple(sConditions.get(0), handler.getPrimitiveComparison(sc .getColumnDesc().getTypeString(), sc), handler.getCompareOp(sc.getComparisonOp(), sc)); byte[] expectedVal = new byte[4]; ByteBuffer.wrap(expectedVal).putInt(5); assertArrayEquals(tuple.getConstVal(), expectedVal); assertEquals(tuple.getcOpt().getClass(), Equal.class); assertEquals(tuple.getpCompare().getClass(), IntCompare.class); } @Test(expected = NoSuchPrimitiveComparisonException.class) public void testPushdownColumnTypeNotSupported() throws SerDeException, NoSuchPrimitiveComparisonException, NoSuchCompareOpException { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.floatTypeInfo, "field1", null, false); ExprNodeDesc 
constant = new ExprNodeConstantDesc(TypeInfoFactory.floatTypeInfo, 5.5f); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqual(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf); assertEquals(sConditions.size(), 1); IndexSearchCondition sc = sConditions.get(0); handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc); } @Test public void testPushdownComparisonOptNotSupported() { try { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPNotNull(), children); assertNotNull(node); String filterExpr = SerializationUtilities.serializeExpression(node); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); List<IndexSearchCondition> sConditions = handler.getSearchConditions(conf); assertEquals(sConditions.size(), 1); IndexSearchCondition sc = sConditions.get(0); new PushdownTuple(sc, handler.getPrimitiveComparison(sc.getColumnDesc().getTypeString(), sc), handler.getCompareOp(sc.getComparisonOp(), sc)); fail("Should fail: compare op not registered for index analyzer. 
Should leave undesirable residual predicate"); } catch (RuntimeException e) { assertTrue(e.getMessage().contains("Unexpected residual predicate: field1 is not null")); } catch (Exception e) { fail(StringUtils.stringifyException(e)); } } @Test public void testIteratorIgnoreRowIDFields() { ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children); assertNotNull(node); ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false); ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "bbb"); List<ExprNodeDesc> children2 = Lists.newArrayList(); children2.add(column2); children2.add(constant2); ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2); assertNotNull(node2); List<ExprNodeDesc> bothFilters = Lists.newArrayList(); bothFilters.add(node); bothFilters.add(node2); ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters); String filterExpr = SerializationUtilities.serializeExpression(both); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); try { List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper); assertEquals(iterators.size(), 0); } catch (SerDeException e) { StringUtils.stringifyException(e); } } @Test public void testIgnoreIteratorPushdown() throws TooManyAccumuloColumnsException { // Override what's placed in the Configuration by setup() conf = new JobConf(); List<String> columnNames = Arrays.asList("field1", "field2", "rid"); List<TypeInfo> columnTypes = Arrays.<TypeInfo> 
asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo); conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames)); conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string"); String columnMappingStr = "cf:f1,cf:f2,:rowID"; conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr); columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes); ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children); assertNotNull(node); ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false); ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5); List<ExprNodeDesc> children2 = Lists.newArrayList(); children2.add(column2); children2.add(constant2); ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2); assertNotNull(node2); List<ExprNodeDesc> bothFilters = Lists.newArrayList(); bothFilters.add(node); bothFilters.add(node2); ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters); String filterExpr = SerializationUtilities.serializeExpression(both); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); conf.setBoolean(AccumuloSerDeParameters.ITERATOR_PUSHDOWN_KEY, false); try { List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper); assertEquals(iterators.size(), 0); } catch (Exception e) { fail(StringUtils.stringifyException(e)); } } @Test public void testCreateIteratorSettings() 
throws Exception { // Override what's placed in the Configuration by setup() conf = new JobConf(); List<String> columnNames = Arrays.asList("field1", "field2", "rid"); List<TypeInfo> columnTypes = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.stringTypeInfo); conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames)); conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,int,string"); conf.set(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName()); String columnMappingStr = "cf:f1,cf:f2,:rowID"; conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr); columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(), columnNames, columnTypes); ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "field1", null, false); ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "aaa"); List<ExprNodeDesc> children = Lists.newArrayList(); children.add(column); children.add(constant); ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPEqualOrLessThan(), children); assertNotNull(node); ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "field2", null, false); ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5); List<ExprNodeDesc> children2 = Lists.newArrayList(); children2.add(column2); children2.add(constant2); ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPGreaterThan(), children2); assertNotNull(node2); List<ExprNodeDesc> bothFilters = Lists.newArrayList(); bothFilters.add(node); bothFilters.add(node2); ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo, new GenericUDFOPAnd(), bothFilters); String filterExpr = SerializationUtilities.serializeExpression(both); conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr); 
// NOTE(review): the statements below are the tail end of a test method whose
// opening lines appear earlier in the file. They verify the two IteratorSettings
// that AccumuloPredicateHandler.getIterators() built from a filter configured
// above (not visible here): one string predicate on cf:f1 and one int predicate
// on cf:f2, judging by the assertions.
    List<IteratorSetting> iterators = handler.getIterators(conf, columnMapper);
    // NOTE(review): JUnit convention is assertEquals(expected, actual); the
    // arguments here (and throughout this block) are reversed, which only
    // affects the failure message, not correctness.
    assertEquals(iterators.size(), 2);
    IteratorSetting is1 = iterators.get(0);
    IteratorSetting is2 = iterators.get(1);
    boolean foundQual = false;
    boolean foundPCompare = false;
    boolean foundCOpt = false;
    boolean foundConst = false;
    // First iterator: expects column cf:f1, StringCompare, LessThanOrEqual, const "aaa".
    for (Map.Entry<String,String> option : is1.getOptions().entrySet()) {
      String optKey = option.getKey();
      if (optKey.equals(PrimitiveComparisonFilter.COLUMN)) {
        foundQual = true;
        assertEquals(option.getValue(), "cf:f1");
      } else if (optKey.equals(PrimitiveComparisonFilter.CONST_VAL)) {
        foundConst = true;
        // The constant is carried in the iterator options Base64-encoded.
        assertEquals(option.getValue(), Base64.getEncoder().encodeToString("aaa".getBytes()));
      } else if (optKey.equals(PrimitiveComparisonFilter.COMPARE_OPT_CLASS)) {
        foundCOpt = true;
        assertEquals(option.getValue(), LessThanOrEqual.class.getName());
      } else if (optKey.equals(PrimitiveComparisonFilter.P_COMPARE_CLASS)) {
        foundPCompare = true;
        assertEquals(option.getValue(), StringCompare.class.getName());
      }
    }
    // Non-short-circuit '&' on booleans is equivalent to '&&' here (no side effects).
    assertTrue(foundConst & foundCOpt & foundPCompare & foundQual);
    foundQual = false;
    foundPCompare = false;
    foundCOpt = false;
    foundConst = false;
    // Second iterator: expects column cf:f2, IntCompare, GreaterThan, const 5.
    for (Map.Entry<String,String> option : is2.getOptions().entrySet()) {
      String optKey = option.getKey();
      if (optKey.equals(PrimitiveComparisonFilter.COLUMN)) {
        foundQual = true;
        assertEquals(option.getValue(), "cf:f2");
      } else if (optKey.equals(PrimitiveComparisonFilter.CONST_VAL)) {
        foundConst = true;
        // The int constant is serialized as its 4-byte representation, then Base64-encoded.
        byte[] intVal = new byte[4];
        ByteBuffer.wrap(intVal).putInt(5);
        assertEquals(option.getValue(), Base64.getEncoder().encodeToString(intVal));
      } else if (optKey.equals(PrimitiveComparisonFilter.COMPARE_OPT_CLASS)) {
        foundCOpt = true;
        assertEquals(option.getValue(), GreaterThan.class.getName());
      } else if (optKey.equals(PrimitiveComparisonFilter.P_COMPARE_CLASS)) {
        foundPCompare = true;
        assertEquals(option.getValue(), IntCompare.class.getName());
      }
    }
    assertTrue(foundConst & foundCOpt & foundPCompare & foundQual);
  }

  /**
   * Verifies that the handler's comparison-op registry resolves every registered key and that all
   * six expected {@code CompareOp} implementations (Equal, NotEqual, GreaterThan,
   * GreaterThanOrEqual, LessThan, LessThanOrEqual) are present.
   */
  @Test
  public void testBasicOptLookup() throws NoSuchCompareOpException {
    boolean foundEqual = false;
    boolean foundNotEqual = false;
    boolean foundGreaterThanOrEqual = false;
    boolean foundGreaterThan = false;
    boolean foundLessThanOrEqual = false;
    boolean foundLessThan = false;
    for (String opt : handler.cOpKeyset()) {
      Class<? extends CompareOp> compOpt = handler.getCompareOpClass(opt);
      if (compOpt.getName().equals(Equal.class.getName())) {
        foundEqual = true;
      } else if (compOpt.getName().equals(NotEqual.class.getName())) {
        foundNotEqual = true;
      } else if (compOpt.getName().equals(GreaterThan.class.getName())) {
        foundGreaterThan = true;
      } else if (compOpt.getName().equals(GreaterThanOrEqual.class.getName())) {
        foundGreaterThanOrEqual = true;
      } else if (compOpt.getName().equals(LessThan.class.getName())) {
        foundLessThan = true;
      } else if (compOpt.getName().equals(LessThanOrEqual.class.getName())) {
        foundLessThanOrEqual = true;
      }
    }
    assertTrue("Did not find Equal comparison op", foundEqual);
    assertTrue("Did not find NotEqual comparison op", foundNotEqual);
    assertTrue("Did not find GreaterThan comparison op", foundGreaterThan);
    assertTrue("Did not find GreaterThanOrEqual comparison op", foundGreaterThanOrEqual);
    assertTrue("Did not find LessThan comparison op", foundLessThan);
    assertTrue("Did not find LessThanOrEqual comparison op", foundLessThanOrEqual);
  }

  /** Looking up an unregistered comparison-op key must throw NoSuchCompareOpException. */
  @Test(expected = NoSuchCompareOpException.class)
  public void testNoOptFound() throws NoSuchCompareOpException {
    handler.getCompareOpClass("blah");
  }

  /**
   * Verifies that the primitive-comparison registry contains the four expected
   * {@code PrimitiveComparison} implementations (Double, Long, Int, String).
   * NOTE(review): "Comparsion" in the method name is a typo; kept as-is since test
   * method names may be referenced by test selectors.
   */
  @Test
  public void testPrimitiveComparsionLookup() throws NoSuchPrimitiveComparisonException {
    boolean foundLong = false;
    boolean foundString = false;
    boolean foundInt = false;
    boolean foundDouble = false;
    for (String type : handler.pComparisonKeyset()) {
      Class<? extends PrimitiveComparison> pCompare = handler.getPrimitiveComparisonClass(type);
      if (pCompare.getName().equals(DoubleCompare.class.getName())) {
        foundDouble = true;
      } else if (pCompare.getName().equals(LongCompare.class.getName())) {
        foundLong = true;
      } else if (pCompare.getName().equals(IntCompare.class.getName())) {
        foundInt = true;
      } else if (pCompare.getName().equals(StringCompare.class.getName())) {
        foundString = true;
      }
    }
    assertTrue("Did not find DoubleCompare op", foundDouble);
    assertTrue("Did not find LongCompare op", foundLong);
    assertTrue("Did not find IntCompare op", foundInt);
    assertTrue("Did not find StringCompare op", foundString);
  }

  /**
   * Two bounds on the rowID column ('rid &gt;= f' AND 'rid &lt;= m') must intersect into the
   * single Accumulo row range ['f', 'm\0') — the upper bound is made inclusive by appending
   * a null byte and using an exclusive end key.
   */
  @Test
  public void testRowRangeIntersection() throws SerDeException {
    // rowId >= 'f'
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "f");
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(column);
    children.add(constant);
    ExprNodeDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrGreaterThan(), children);
    assertNotNull(node);
    // rowId <= 'm'
    ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "rid", null, false);
    ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "m");
    List<ExprNodeDesc> children2 = Lists.newArrayList();
    children2.add(column2);
    children2.add(constant2);
    ExprNodeDesc node2 = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPEqualOrLessThan(), children2);
    assertNotNull(node2);
    // AND the two predicates together and plant the serialized expression in the conf,
    // which is where getRanges() reads it from.
    List<ExprNodeDesc> bothFilters = Lists.newArrayList();
    bothFilters.add(node);
    bothFilters.add(node2);
    ExprNodeGenericFuncDesc both = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPAnd(), bothFilters);
    String filterExpr = SerializationUtilities.serializeExpression(both);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Should make ['f', 'm\0')
    List<Range> ranges = handler.getRanges(conf, columnMapper);
    assertEquals(1, ranges.size());
    assertEquals(new Range(new Key("f"), true, new Key("m\0"), false), ranges.get(0));
  }

  /**
   * A one-sided predicate '100 &lt; key' on a string-encoded rowID must produce the single
   * open-ended range ("100", +inf).
   */
  @Test
  public void testRowRangeGeneration() throws SerDeException {
    List<String> columnNames = Arrays.asList("key", "column");
    List<TypeInfo> columnTypes = Arrays.<TypeInfo> asList(TypeInfoFactory.stringTypeInfo,
        TypeInfoFactory.stringTypeInfo);
    conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string");
    String columnMappingStr = ":rowID,cf:f1";
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
    columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(),
        columnNames, columnTypes);
    // 100 < key  (constant is the first child, so the column is on the right-hand side)
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 100);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(constant);
    children.add(column);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPLessThan(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Should make (100, +inf)
    List<Range> ranges = handler.getRanges(conf, columnMapper);
    Assert.assertEquals(1, ranges.size());
    Assert.assertEquals(new Range(new Text("100"), false, null, false), ranges.get(0));
  }

  /**
   * Same shape as testRowRangeGeneration(), but with a binary-encoded rowID mapping
   * (":rowID#b"), so the range boundary is the raw 4-byte int rather than "100".
   * NOTE(review): this method continues past this point in the file.
   */
  @Test
  public void testBinaryRangeGeneration() throws Exception {
    List<String> columnNames = Arrays.asList("key", "column");
    List<TypeInfo> columnTypes = Arrays.<TypeInfo> asList(TypeInfoFactory.intTypeInfo,
        TypeInfoFactory.stringTypeInfo);
    conf.set(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(columnNames));
    conf.set(serdeConstants.LIST_COLUMN_TYPES, "int,string");
    String columnMappingStr = ":rowID#b,cf:f1";
// NOTE(review): continuation of testBinaryRangeGeneration(), whose opening lines
// appear just above; it checks range generation for a binary-encoded rowID.
    conf.set(AccumuloSerDeParameters.COLUMN_MAPPINGS, columnMappingStr);
    columnMapper = new ColumnMapper(columnMappingStr, ColumnEncoding.STRING.getName(),
        columnNames, columnTypes);
    int intValue = 100;
    // Make binary integer value in the bytearray (LazyUtils writes it via the int OI)
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    JavaIntObjectInspector intOI = (JavaIntObjectInspector) PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(TypeInfoFactory
            .getPrimitiveTypeInfo(serdeConstants.INT_TYPE_NAME));
    LazyUtils.writePrimitive(baos, intValue, intOI);
    // 100 < key  (constant first, column second)
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, intValue);
    List<ExprNodeDesc> children = Lists.newArrayList();
    children.add(constant);
    children.add(column);
    ExprNodeGenericFuncDesc node = new ExprNodeGenericFuncDesc(TypeInfoFactory.stringTypeInfo,
        new GenericUDFOPLessThan(), children);
    assertNotNull(node);
    String filterExpr = SerializationUtilities.serializeExpression(node);
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
    // Should make (100, +inf) — the start key is the binary bytes written above
    List<Range> ranges = handler.getRanges(conf, columnMapper);
    Assert.assertEquals(1, ranges.size());
    Assert.assertEquals(new Range(new Text(baos.toByteArray()), false, null, false), ranges.get(0));
  }

  /**
   * A null return from generateRanges() must be interpreted by getRanges() as "scan
   * everything", i.e. a single unbounded Range.
   */
  @Test
  public void testNullRangeGeneratorOutput() throws SerDeException {
    // The AccumuloRangeGenerator produces an Object (due to the limitations of the
    // traversal interface) which requires interpretation of that Object into Ranges.
    // Changes in the return object from the AccumuloRangeGenerator must also represent
    // a change in the AccumuloPredicateHandler.
    // Partial mock: getRanges() runs for real, its collaborators are stubbed.
    AccumuloPredicateHandler mockHandler = Mockito.mock(AccumuloPredicateHandler.class);
    ExprNodeDesc root = Mockito.mock(ExprNodeDesc.class);
    String hiveRowIdColumnName = "rid";
    Mockito.when(mockHandler.getRanges(conf, columnMapper)).thenCallRealMethod();
    Mockito.when(mockHandler.generateRanges(conf, columnMapper, hiveRowIdColumnName, root)).thenReturn(null);
    Mockito.when(mockHandler.getExpression(conf)).thenReturn(root);
    // A null result from AccumuloRangeGenerator is all ranges
    Assert.assertEquals(Arrays.asList(new Range()), mockHandler.getRanges(conf, columnMapper));
  }

  /**
   * An empty list from generateRanges() must be passed through by getRanges() unchanged
   * (no ranges to scan).
   */
  @Test
  public void testEmptyListRangeGeneratorOutput() throws SerDeException {
    // The AccumuloRangeGenerator produces an Object (due to the limitations of the
    // traversal interface) which requires interpretation of that Object into Ranges.
    // Changes in the return object from the AccumuloRangeGenerator must also represent
    // a change in the AccumuloPredicateHandler.
    // Partial mock: getRanges() runs for real, its collaborators are stubbed.
    AccumuloPredicateHandler mockHandler = Mockito.mock(AccumuloPredicateHandler.class);
    ExprNodeDesc root = Mockito.mock(ExprNodeDesc.class);
    String hiveRowIdColumnName = "rid";
    Mockito.when(mockHandler.getRanges(conf, columnMapper)).thenCallRealMethod();
    Mockito.when(mockHandler.generateRanges(conf, columnMapper, hiveRowIdColumnName, root))
        .thenReturn(Collections.emptyList());
    Mockito.when(mockHandler.getExpression(conf)).thenReturn(root);
    // An empty result from AccumuloRangeGenerator stays empty
    Assert.assertEquals(Collections.emptyList(), mockHandler.getRanges(conf, columnMapper));
  }

  /**
   * A single bare Range from generateRanges() must be wrapped by getRanges() into a
   * singleton list.
   */
  @Test
  public void testSingleRangeGeneratorOutput() throws SerDeException {
    // The AccumuloRangeGenerator produces an Object (due to the limitations of the
    // traversal interface) which requires interpretation of that Object into Ranges.
    // Changes in the return object from the AccumuloRangeGenerator must also represent
    // a change in the AccumuloPredicateHandler.
    // Partial mock: getRanges() runs for real, its collaborators are stubbed.
    AccumuloPredicateHandler mockHandler = Mockito.mock(AccumuloPredicateHandler.class);
    ExprNodeDesc root = Mockito.mock(ExprNodeDesc.class);
    String hiveRowIdColumnName = "rid";
    Range r = new Range("a");
    Mockito.when(mockHandler.getRanges(conf, columnMapper)).thenCallRealMethod();
    Mockito.when(mockHandler.generateRanges(conf, columnMapper, hiveRowIdColumnName, root)).thenReturn(r);
    Mockito.when(mockHandler.getExpression(conf)).thenReturn(root);
    // A single Range from AccumuloRangeGenerator is wrapped in a singleton list
    Assert.assertEquals(Collections.singletonList(r), mockHandler.getRanges(conf, columnMapper));
  }

  /**
   * A list of several Ranges from generateRanges() must be passed through by getRanges()
   * as-is, preserving order.
   */
  @Test
  public void testManyRangesGeneratorOutput() throws SerDeException {
    // The AccumuloRangeGenerator produces an Object (due to the limitations of the
    // traversal interface) which requires interpretation of that Object into Ranges.
    // Changes in the return object from the AccumuloRangeGenerator must also represent
    // a change in the AccumuloPredicateHandler.
    // Partial mock: getRanges() runs for real, its collaborators are stubbed.
    AccumuloPredicateHandler mockHandler = Mockito.mock(AccumuloPredicateHandler.class);
    ExprNodeDesc root = Mockito.mock(ExprNodeDesc.class);
    String hiveRowIdColumnName = "rid";
    Range r1 = new Range("a"), r2 = new Range("z");
    Mockito.when(mockHandler.getRanges(conf, columnMapper)).thenCallRealMethod();
    Mockito.when(mockHandler.generateRanges(conf, columnMapper, hiveRowIdColumnName, root))
        .thenReturn(Arrays.asList(r1, r2));
    Mockito.when(mockHandler.getExpression(conf)).thenReturn(root);
    // Multiple Ranges from AccumuloRangeGenerator are passed through unchanged
    Assert.assertEquals(Arrays.asList(r1, r2), mockHandler.getRanges(conf, columnMapper));
  }
}
13,865