max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
413 | /* Copyright 2013-2021 MultiMC Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "../AccountTask.h"
#include <QString>
#include <QJsonObject>
#include <QTimer>
#include <qsslerror.h>
#include "../MinecraftAccount.h"
class QNetworkReply;
/**
* A Yggdrasil task is a task that performs an operation on a given mojang account.
*/
class Yggdrasil : public AccountTask
{
    Q_OBJECT
public:
    // 'parent' follows Qt ownership conventions; modernized literal 0 -> nullptr.
    explicit Yggdrasil(AccountData *data, QObject *parent = nullptr);
    virtual ~Yggdrasil() = default;

    /// Refresh the stored session for this account.
    void refresh();
    /// Authenticate this account with the given password.
    void login(QString password);

protected:
    void executeTask() override;

    /**
     * Processes the response received from the server.
     * If an error occurred, this should emit a failed signal.
     * If Yggdrasil gave an error response, the error state is set before failing.
     * Note: If the response from the server was blank, and the HTTP code was 200,
     * this function is called with an empty QJsonObject.
     */
    void processResponse(QJsonObject responseData);

    /**
     * Processes an error response received from the server.
     * The default implementation reads data from Yggdrasil's standard error
     * response format and sets it as this task's error state.
     */
    virtual void processError(QJsonObject responseData);

protected slots:
    void processReply();
    void refreshTimers(qint64, qint64);
    void heartbeat();
    void sslErrors(QList<QSslError>);
    void abortByTimeout();

public slots:
    virtual bool abort() override;

private:
    void sendRequest(QUrl endpoint, QByteArray content);

protected:
    QNetworkReply *m_netReply = nullptr;
    QTimer timeout_keeper;
    QTimer counter;
    int count = 0; // num msec since time reset
    const int timeout_max = 30000;
    const int time_step = 50;
};
| 770 |
670 | // OCHamcrest by <NAME>, http://qualitycoding.org/about/
// Copyright 2016 hamcrest.org. See LICENSE.txt
#import <Foundation/Foundation.h>
/// Category adding NSInvocation construction and invocation helpers used by
/// OCHamcrest's internal support code.
@interface NSInvocation (OCHamcrest)

/// Returns an invocation of @c selector targeting the given live @c target.
+ (NSInvocation *)och_invocationWithTarget:(id)target selector:(SEL)selector;

/// Returns an invocation of @c selector for an object of class @c aClass.
/// NOTE(review): whether/how an instance is created is not visible from this
/// header — see the implementation.
+ (NSInvocation *)och_invocationOnObjectOfType:(Class)aClass selector:(SEL)selector;

/// Invokes the receiver and returns its return value as an object.
/// NOTE(review): handling of non-object return types is defined in the .m.
- (id)och_invoke;

@end
| 132 |
7,746 | /*++
Copyright (c) 2015 Microsoft Corporation
Module Name:
qe_mbp.h
Abstract:
Model-based projection utilities
Author:
<NAME> (nbjorner) 2015-5-28
Revision History:
--*/
#pragma once
#include "ast/ast.h"
#include "util/params.h"
#include "model/model.h"
#include "math/simplex/model_based_opt.h"
namespace qe {
class mbproj {
    // Pimpl: all state and algorithms live in the implementation class.
    class impl;
    impl * m_impl;
public:
    mbproj(ast_manager& m, params_ref const& p = params_ref());
    ~mbproj();

    /// Update configuration parameters.
    void updt_params(params_ref const& p);

    /// Collect descriptions of the parameters supported by this module.
    static void get_param_descrs(param_descrs & r);

    /**
       \brief
       Apply model-based qe on constants provided as vector of variables.
       Return the updated formula and updated set of variables that were not eliminated.
    */
    void operator()(bool force_elim, app_ref_vector& vars, model& mdl, expr_ref_vector& fmls);

    /**
       \brief
       Solve as many variables as possible using "cheap" quantifier elimination.
    */
    void solve(model& model, app_ref_vector& vars, expr_ref_vector& lits);

    /**
       \brief
       Maximize objective t under current model for constraints in fmls.
    */
    opt::inf_eps maximize(expr_ref_vector const& fmls, model& mdl, app* t, expr_ref& ge, expr_ref& gt);

    /**
       \brief
       Apply spacer friendly MBP.
       Use parameters to control behavior.
       - reduce_all_selects (false)
       - dont_sub (false)
    */
    void spacer(app_ref_vector& vars, model& mdl, expr_ref& fml);
};
}
| 731 |
2,428 | <reponame>fuh/wechatpy
# -*- coding: utf-8 -*-
import json
import time
import logging
import requests
from wechatpy.client.base import BaseWeChatClient
from wechatpy.exceptions import WeChatClientException
from wechatpy.work.services import api
logger = logging.getLogger(__name__)
class WeChatServiceClient(BaseWeChatClient):
    """WeChat Work suite (third-party service) API client.

    Note: for third-party suite applications the usual ``access_token``
    request parameter is replaced by ``suite_access_token``.
    """

    API_BASE_URL = "https://qyapi.weixin.qq.com/cgi-bin/"

    auth = api.WeChatAuth()
    miniprogram = api.WeChatMiniProgram()

    def __init__(
        self,
        corp_id,
        suite_id,
        suite_secret,
        suite_ticket,
        access_token=None,
        session=None,
        timeout=None,
        auto_retry=True,
    ):
        self.corp_id = corp_id
        self.suite_id = suite_id
        self.suite_secret = suite_secret
        self.suite_ticket = suite_ticket
        super().__init__(corp_id, access_token, session, timeout, auto_retry)

    @property
    def access_token_key(self):
        # Namespaced per corp/suite so multiple clients can share one
        # session store without clobbering each other's cached tokens.
        return f"services_{self.corp_id}_{self.suite_id}_access_token"

    def _fetch_access_token(self, url, params):
        """POST ``params`` to ``url`` and cache the returned suite access token.

        Raises ``WeChatClientException`` on transport failure or on a
        non-zero ``errcode`` in the response body. Returns the parsed
        JSON response.
        """
        logger.info("Fetching access token")
        res = self._http.post(url=url, json=params)
        try:
            res.raise_for_status()
        except requests.RequestException as reqe:
            raise WeChatClientException(
                errcode=None,
                errmsg=None,
                client=self,
                request=reqe.request,
                response=reqe.response,
            )
        result = res.json()
        if "errcode" in result and result["errcode"] != 0:
            raise WeChatClientException(
                result["errcode"],
                result["errmsg"],
                client=self,
                request=res.request,
                response=res,
            )
        # The API may omit expires_in; fall back to 7200 seconds (2 hours).
        expires_in = result.get("expires_in", 7200)
        self.session.set(self.access_token_key, result["suite_access_token"], expires_in)
        self.expires_at = int(time.time()) + expires_in
        return result

    def _request(self, method, url_or_endpoint, **kwargs):
        """Send an API request, injecting ``suite_access_token`` and
        JSON-encoding dict ``data`` bodies as UTF-8."""
        if not url_or_endpoint.startswith(("http://", "https://")):
            api_base_url = kwargs.pop("api_base_url", self.API_BASE_URL)
            url = f"{api_base_url}{url_or_endpoint}"
        else:
            url = url_or_endpoint

        kwargs.setdefault("params", {})
        if isinstance(kwargs["params"], dict) and "suite_access_token" not in kwargs["params"]:
            kwargs["params"]["suite_access_token"] = self.access_token
        if isinstance(kwargs.get("data", ""), dict):
            # ensure_ascii=False keeps CJK text readable; encode to UTF-8
            # bytes explicitly so the wire encoding is unambiguous.
            body = json.dumps(kwargs["data"], ensure_ascii=False)
            kwargs["data"] = body.encode("utf-8")
        kwargs.setdefault("timeout", self.timeout)
        result_processor = kwargs.pop("result_processor", None)
        res = self._http.request(method=method, url=url, **kwargs)
        try:
            res.raise_for_status()
        except requests.RequestException as reqe:
            raise WeChatClientException(
                errcode=None,
                errmsg=None,
                client=self,
                request=reqe.request,
                response=reqe.response,
            )
        return self._handle_result(res, method, url, result_processor, **kwargs)

    def fetch_access_token(self):
        """Fetch a suite access token using the suite id/secret/ticket."""
        return self._fetch_access_token(
            url="https://qyapi.weixin.qq.com/cgi-bin/service/get_suite_token",
            params={"suite_id": self.suite_id, "suite_secret": self.suite_secret, "suite_ticket": self.suite_ticket},
        )
| 1,861 |
544 | # KidsCanCode - Game Development with Pygame video series
# Jumpy! (a platform game) - Part 3
# Video link: https://youtu.be/pN9pBx5ln40
# Gravity and Platforms
import pygame as pg
import random
from settings import *
from sprites import *
class Game:
    """Owns the pygame window, sprite groups and the main game loop."""

    def __init__(self):
        # initialize game window, etc
        pg.init()
        pg.mixer.init()
        self.screen = pg.display.set_mode((WIDTH, HEIGHT))
        pg.display.set_caption(TITLE)
        self.clock = pg.time.Clock()
        self.running = True

    def new(self):
        # start a new game: fresh sprite groups, the player and two platforms
        self.all_sprites = pg.sprite.Group()
        self.platforms = pg.sprite.Group()
        self.player = Player()
        self.all_sprites.add(self.player)
        # ground platform spanning the full window width
        p1 = Platform(0, HEIGHT - 40, WIDTH, 40)
        self.all_sprites.add(p1)
        self.platforms.add(p1)
        # small floating platform at 3/4 of the window height
        p2 = Platform(WIDTH / 2 - 50, HEIGHT * 3 / 4, 100, 20)
        self.all_sprites.add(p2)
        self.platforms.add(p2)
        self.run()

    def run(self):
        # Game Loop
        self.playing = True
        while self.playing:
            self.clock.tick(FPS)
            self.events()
            self.update()
            self.draw()

    def update(self):
        # Game Loop - Update
        self.all_sprites.update()
        # if the player touches a platform, snap to its top and stop falling
        hits = pg.sprite.spritecollide(self.player, self.platforms, False)
        if hits:
            self.player.pos.y = hits[0].rect.top
            self.player.vel.y = 0

    def events(self):
        # Game Loop - events
        for event in pg.event.get():
            # check for closing window
            if event.type == pg.QUIT:
                if self.playing:
                    self.playing = False
                self.running = False

    def draw(self):
        # Game Loop - draw
        self.screen.fill(BLACK)
        self.all_sprites.draw(self.screen)
        # *after* drawing everything, flip the display
        pg.display.flip()

    def show_start_screen(self):
        # game splash/start screen (not implemented in this part of the series)
        pass

    def show_go_screen(self):
        # game over/continue (not implemented in this part of the series)
        pass
# Create the game and run splash -> play -> game-over until the window closes.
g = Game()
g.show_start_screen()
while g.running:
    g.new()
    g.show_go_screen()
pg.quit()
| 1,052 |
10,225 | <filename>extensions/vault/model/src/main/java/io/quarkus/vault/runtime/client/dto/pki/VaultPKIGenerateCertificateResult.java
package io.quarkus.vault.runtime.client.dto.pki;
import io.quarkus.vault.runtime.client.dto.AbstractVaultDTO;
/**
 * Result DTO for the Vault PKI generate-certificate endpoint.
 * The data payload is typed as {@link VaultPKIGenerateCertificateData};
 * the second type parameter of {@code AbstractVaultDTO} is unused here
 * ({@code Object}).
 */
public class VaultPKIGenerateCertificateResult extends AbstractVaultDTO<VaultPKIGenerateCertificateData, Object> {
}
| 128 |
505 | package de.rieckpil.blog;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
/**
 * Entry point that boots the book store client as a Spring Boot application.
 */
@SpringBootApplication
public class BookStoreClientApplication {

    public static void main(String[] args) {
        SpringApplication.run(BookStoreClientApplication.class, args);
    }
}
| 91 |
725 | <gh_stars>100-1000
#pragma once
// Utils includes
#include <utils/Image.h>
// Hyperion includes
#include <hyperion/ImageProcessorFactory.h>
#include <hyperion/LedString.h>
#include <hyperion/ImageToLedsMap.h>
// Black border includes
#include <blackborder/BlackBorderProcessor.h>
///
/// The ImageProcessor translates an RGB-image to RGB-values for the leds. The processing is
/// performed in two steps. First the average color per led-region is computed. Second a
/// color-transform is applied based on a gamma-correction.
///
class ImageProcessor
{
public:
    ~ImageProcessor();

    ///
    /// Returns the number of attached leds
    ///
    unsigned getLedCount() const;

    ///
    /// Specifies the width and height of 'incoming' images. This will resize the buffer-image to
    /// match the given size.
    /// NB All earlier obtained references will be invalid.
    ///
    /// @param[in] width  The new width of the buffer-image
    /// @param[in] height The new height of the buffer-image
    ///
    void setSize(const unsigned width, const unsigned height);

    /// Enable or disable the black border detector
    /// NOTE(review): method name misspells "Black"; kept unchanged for API compatibility.
    void enableBalckBorderDetector(bool enable);

    ///
    /// Processes the image to a list of led colors. This will update the size of the buffer-image
    /// if required and call the image-to-leds mapping to determine the mean color per led.
    ///
    /// @param[in] image  The image to translate to led values
    ///
    /// @return The color value per led
    ///
    template <typename Pixel_T>
    std::vector<ColorRgb> process(const Image<Pixel_T>& image)
    {
        // Ensure that the buffer-image is the proper size
        setSize(image.width(), image.height());

        // Check black border detection
        verifyBorder(image);

        // Create a result vector and call the 'in place' function
        std::vector<ColorRgb> colors = _imageToLeds->getMeanLedColor(image);

        // return the computed colors
        return colors;
    }

    ///
    /// Determines the led colors of the image in the buffer.
    ///
    /// @param[in] image      The image to translate to led values
    /// @param[out] ledColors The color value per led
    ///
    template <typename Pixel_T>
    void process(const Image<Pixel_T>& image, std::vector<ColorRgb>& ledColors)
    {
        // Ensure that the buffer-image is the proper size
        setSize(image.width(), image.height());

        // Check black border detection
        verifyBorder(image);

        // Determine the mean-colors of each led (using the existing mapping)
        _imageToLeds->getMeanLedColor(image, ledColors);
    }

    ///
    /// Get the hscan and vscan parameters for a single led
    ///
    /// @param[in] led         Index of the led
    /// @param[out] hscanBegin begin of the hscan
    /// @param[out] hscanEnd   end of the hscan
    /// @param[out] vscanBegin begin of the vscan
    /// @param[out] vscanEnd   end of the vscan
    /// @return true if the parameters could be retrieved
    bool getScanParameters(size_t led, double & hscanBegin, double & hscanEnd, double & vscanBegin, double & vscanEnd) const;

private:
    /// Friend declaration of the factory for creating ImageProcessor's
    friend class ImageProcessorFactory;

    ///
    /// Constructs an image-processor for translating an image to led-color values based on the
    /// given led-string specification
    ///
    /// @param[in] ledString         The led-string specification
    /// @param[in] blackborderConfig Configuration for the black border detector
    ///
    ImageProcessor(const LedString &ledString, const Json::Value &blackborderConfig);

    ///
    /// Performs black-border detection (if enabled) on the given image and,
    /// when the detected border changed, rebuilds the image-to-leds mapping.
    ///
    /// @param[in] image  The image to perform black-border detection on
    ///
    template <typename Pixel_T>
    void verifyBorder(const Image<Pixel_T> & image)
    {
        if(_enableBlackBorderRemoval && _borderProcessor->process(image))
        {
            std::cout << "BORDER SWITCH REQUIRED!!" << std::endl;
            const hyperion::BlackBorder border = _borderProcessor->getCurrentBorder();

            // Clean up the old mapping
            delete _imageToLeds;

            if (border.unknown)
            {
                // Construct a new buffer and mapping
                _imageToLeds = new hyperion::ImageToLedsMap(image.width(), image.height(), 0, 0, _ledString.leds());
            }
            else
            {
                // Construct a new buffer and mapping
                _imageToLeds = new hyperion::ImageToLedsMap(image.width(), image.height(), border.horizontalSize, border.verticalSize, _ledString.leds());
            }
            std::cout << "CURRENT BORDER TYPE: unknown=" << border.unknown << " hor.size=" << border.horizontalSize << " vert.size=" << border.verticalSize << std::endl;
        }
    }

private:
    /// The Led-string specification
    const LedString _ledString;

    /// Flag that enables(true)/disables(false) the black border detector
    bool _enableBlackBorderRemoval;

    /// The processor for black border detection
    hyperion::BlackBorderProcessor * _borderProcessor;

    /// The mapping of image-pixels to leds
    hyperion::ImageToLedsMap* _imageToLeds;
};
| 1,525 |
575 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMEOS_COMPONENTS_WEB_APPLICATIONS_TEST_SANDBOXED_WEB_UI_TEST_BASE_H_
#define CHROMEOS_COMPONENTS_WEB_APPLICATIONS_TEST_SANDBOXED_WEB_UI_TEST_BASE_H_
#include <memory>
#include <string>
#include <vector>
#include "base/files/file_path.h"
#include "chrome/test/base/mojo_web_ui_browser_test.h"
// A base class that can be extended by SWA browser test to inject scripts.
class SandboxedWebUiAppTestBase : public MojoWebUIBrowserTest {
 public:
  // Initialize the test harness for the |host_url| web UI. Starts a content::
  // TestNavigationObserver watching for |sandboxed_url| and, when it loads,
  // automatically injects |scripts|, in order, into the sandboxed frame.
  SandboxedWebUiAppTestBase(const std::string& host_url,
                            const std::string& sandboxed_url,
                            const std::vector<base::FilePath>& scripts);
  ~SandboxedWebUiAppTestBase() override;

  // Not copyable or assignable.
  SandboxedWebUiAppTestBase(const SandboxedWebUiAppTestBase&) = delete;
  SandboxedWebUiAppTestBase& operator=(const SandboxedWebUiAppTestBase&) =
      delete;

  // Returns the contents of the JavaScript library used to help test the
  // sandboxed frame.
  static std::string LoadJsTestLibrary(const base::FilePath& script_path);

  // Returns the sandboxed app frame within the provided |web_ui|.
  static content::RenderFrameHost* GetAppFrame(content::WebContents* web_ui);

  // Runs |script| in the untrusted app frame of |web_ui|. This function assumes
  // the first <iframe> element in |web_ui| is the untrusted (sandboxed)
  // content.
  static content::EvalJsResult EvalJsInAppFrame(content::WebContents* web_ui,
                                                const std::string& script);

  // MojoWebUIBrowserTest:
  void SetUpOnMainThread() override;

 private:
  class TestCodeInjector;

  // Watches navigations and performs the script injection described in the
  // constructor comment.
  std::unique_ptr<TestCodeInjector> injector_;
  const std::string host_url_;
  const std::string sandboxed_url_;
  const std::vector<base::FilePath> scripts_;
};
#endif // CHROMEOS_COMPONENTS_WEB_APPLICATIONS_TEST_SANDBOXED_WEB_UI_TEST_BASE_H_
| 819 |
343 | /* $Id$ $Revision$ */
/* vim:set shiftwidth=4 ts=8: */
/*************************************************************************
* Copyright (c) 2011 AT&T Intellectual Property
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors: See CVS logs. Details at http://www.graphviz.org/
*************************************************************************/
#include "sfhdr.h"
/* Get the size of a stream.
**
** Written by <NAME>.
*/
/* Return the size of stream f, or -1 on failure.  As a side effect the
** stream's extent/position bookkeeping is refreshed from the underlying
** file where possible, and stale read buffers are invalidated.
*/
Sfoff_t sfsize(reg Sfio_t * f)
{
    Sfdisc_t *disc;
    reg int mode;
    Sfoff_t s;

    SFMTXSTART(f, (Sfoff_t) (-1));

    /* put the stream into a clean read/write mode before measuring */
    if ((mode = f->mode & SF_RDWR) != (int) f->mode
        && _sfmode(f, mode, 0) < 0)
        SFMTXRETURN(f, (Sfoff_t) (-1));

    /* string streams: the size is just the tracked extent */
    if (f->flags & SF_STRING) {
        SFSTRSIZE(f);
        SFMTXRETURN(f, f->extent);
    }

    SFLOCK(f, 0);

    s = f->here;
    if (f->extent >= 0) {
        if (f->flags & (SF_SHARE | SF_APPENDWR)) {
            /* shared/append streams: another party may have grown the file,
            ** so re-measure.  Prefer a discipline with a seek function when
            ** one is installed; otherwise fall back to fstat() if available.
            */
            for (disc = f->disc; disc; disc = disc->disc)
                if (disc->seekf)
                    break;
            if (!_sys_stat || disc) {
                Sfoff_t e;
                if ((e = SFSK(f, 0, SEEK_END, disc)) >= 0)
                    f->extent = e;
                /* restore the original position after seeking to the end */
                if (SFSK(f, f->here, SEEK_SET, disc) != f->here)
                    f->here = SFSK(f, (Sfoff_t) 0, SEEK_CUR, disc);
            }
#if _sys_stat
            else {
                Stat_t st;
                if (fstat(f->file, &st) < 0)
                    f->extent = -1;
                else if ((f->extent = st.st_size) < f->here)
                    f->here = SFSK(f, (Sfoff_t) 0, SEEK_CUR, disc);
            }
#endif
        }

        if ((f->flags & (SF_SHARE | SF_PUBLIC)) == (SF_SHARE | SF_PUBLIC))
            f->here = SFSK(f, (Sfoff_t) 0, SEEK_CUR, f->disc);
    }

    if (f->here != s && (f->mode & SF_READ)) { /* buffered data is known to be invalid */
#ifdef MAP_TYPE
        if ((f->bits & SF_MMAP) && f->data) {
            SFMUNMAP(f, f->data, f->endb - f->data);
            f->data = NIL(uchar *);
        }
#endif
        f->next = f->endb = f->endr = f->endw = f->data;
    }

    if (f->here < 0)
        f->extent = -1;
    else if (f->extent < f->here)
        f->extent = f->here;

    /* account for buffered-but-unflushed write data */
    if ((s = f->extent) >= 0) {
        if (f->flags & SF_APPENDWR)
            s += (f->next - f->data);
        else if (f->mode & SF_WRITE) {
            s = f->here + (f->next - f->data);
            if (s < f->extent)
                s = f->extent;
        }
    }

    SFOPEN(f, 0);
    SFMTXRETURN(f, s);
}
| 1,087 |
1,327 | <reponame>markdryan/oneDNN
/*
* Copyright (C) 2021 Intel Corporation
*
* SPDX-License-Identifier: MIT
*
* @file ze_loader.h
*/
#ifndef _ZE_LOADER_H
#define _ZE_LOADER_H
#if defined(__cplusplus)
#pragma once
#endif
#include "../ze_api.h"
#if defined(__cplusplus)
extern "C" {
#endif
/// Semantic version of a loader component.
typedef struct _zel_version {
    int major;
    int minor;
    int patch;
} zel_version_t;

//Ex component string "ze_tracing", "ze_validation", etc
#define ZEL_COMPONENT_STRING_SIZE 64

/// Name plus spec and library versions reported for one loader component.
typedef struct zel_component_version {
    char component_name[ZEL_COMPONENT_STRING_SIZE];
    ze_api_version_t spec_version;
    zel_version_t component_lib_version;
} zel_component_version_t;

/// Query the versions of the loader components.
/// Call with versions == NULL to receive the element count in *num_elems,
/// then again with an array of that size.
ZE_APIEXPORT ze_result_t ZE_APICALL
zelLoaderGetVersions(
    size_t *num_elems,                  //Pointer to num versions to get.
    zel_component_version_t *versions); //Pointer to array of versions. If set to NULL, num_elems is returned

/// Kinds of Level Zero handles that the loader can translate.
typedef enum _zel_handle_type_t {
    ZEL_HANDLE_DRIVER,
    ZEL_HANDLE_DEVICE,
    ZEL_HANDLE_CONTEXT,
    ZEL_HANDLE_COMMAND_QUEUE,
    ZEL_HANDLE_COMMAND_LIST,
    ZEL_HANDLE_FENCE,
    ZEL_HANDLE_EVENT_POOL,
    ZEL_HANDLE_EVENT,
    ZEL_HANDLE_IMAGE,
    ZEL_HANDLE_MODULE,
    ZEL_HANDLE_MODULE_BUILD_LOG,
    ZEL_HANDLE_KERNEL,
    ZEL_HANDLE_SAMPLER,
    ZEL_HANDLE_PHYSICAL_MEM
} zel_handle_type_t;

//Translates Loader Handles to Driver Handles if loader handle intercept is enabled.
//If handle intercept is not enabled handleOut is set to handleIn
ZE_APIEXPORT ze_result_t ZE_APICALL
zelLoaderTranslateHandle(
    zel_handle_type_t handleType, //Handle Type
    void *handleIn,               //Input: handle to translate from loader handle to driver handle
    void **handleOut);            //Output: Pointer to handleOut is set to driver handle if successful
#if defined(__cplusplus)
} // extern "C"
#endif
#endif //_ZE_LOADER_H | 803 |
707 | import argparse
import datetime
import json
import os
from PIL import Image
import pycococreatortools
def get_arguments():
    """Parse command-line options for the mask-to-COCO conversion script."""
    arg_parser = argparse.ArgumentParser(
        description="transform mask annotation to coco annotation"
    )
    # (flag, default, help) triples for the string-typed options.
    options = (
        ("--dataset", 'CIHP', "name of dataset (CIHP, MHPv2 or VIP)"),
        ("--json_save_dir", '../data/CIHP/annotations',
         "path to save coco-style annotation json file"),
        ("--test_img_dir", '../data/CIHP/Testing/Images', "test image path"),
    )
    for flag, default, help_text in options:
        arg_parser.add_argument(flag, type=str, default=default, help=help_text)
    return arg_parser.parse_args()
# Parse CLI options once at import time; `args` is used by main() below.
args = get_arguments()

# COCO "info" section for the generated annotation file.
INFO = {
    "description": args.dataset + "Dataset",
    "url": "",
    "version": "",
    "year": 2020,
    "contributor": "yunqiuxu",
    "date_created": datetime.datetime.utcnow().isoformat(' ')
}

# COCO "licenses" section (single placeholder license).
LICENSES = [
    {
        "id": 1,
        "name": "",
        "url": ""
    }
]

# Single-category setup: this dataset only annotates people.
CATEGORIES = [
    {
        'id': 1,
        'name': 'person',
        'supercategory': 'person',
    },
]
def main(args):
    """Build a minimal COCO-style annotation file (images only, no masks)
    for the test split and write it to ``<json_save_dir>/<dataset>.json``.

    :param args: parsed CLI namespace with ``test_img_dir``, ``json_save_dir``
        and ``dataset`` attributes.
    """
    coco_output = {
        "info": INFO,
        "licenses": LICENSES,
        "categories": CATEGORIES,
        "images": [],
        "annotations": []
    }

    # Sort for deterministic image ids across runs (os.listdir order is
    # filesystem-dependent).
    image_id = 1
    for image_name in sorted(os.listdir(args.test_img_dir)):
        image = Image.open(os.path.join(args.test_img_dir, image_name))
        image_info = pycococreatortools.create_image_info(
            image_id, image_name, image.size
        )
        coco_output["images"].append(image_info)
        image_id += 1

    # makedirs handles nested paths and avoids the check-then-create race
    # (os.mkdir fails when intermediate directories are missing).
    os.makedirs(args.json_save_dir, exist_ok=True)

    with open('{}/{}.json'.format(args.json_save_dir, args.dataset), 'w') as output_json_file:
        json.dump(coco_output, output_json_file)


if __name__ == "__main__":
    main(args)
| 854 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
package ifc.container;
import lib.MultiMethodTest;
import util.ValueComparer;
import com.sun.star.container.XIndexReplace;
import com.sun.star.container.XNameContainer;
import com.sun.star.lang.IllegalArgumentException;
import com.sun.star.lang.IndexOutOfBoundsException;
import com.sun.star.uno.UnoRuntime;
/**
* Testing <code>com.sun.star.container.XIndexReplace</code>
* interface methods :
* <ul>
* <li><code> replaceByIndex()</code></li>
* </ul>
* This test needs the following object relations :
* <ul>
* <li> <code>'INSTANCE1', ..., 'INSTANCEN'</code> : N relations
* which represents objects to be replaced with. See below
* for more information.</li>
* <li> <code>'XIndexReplaceINDEX'</code> : For internal test
* usage. Contains current thread number. </li>
* <li> Test environment variable <code>'THRCNT'</code> : number
* of interface threads running concurently. </li>
* <ul> <p>
* XIndexReplace needs n ObjectRelations "INSTANCEn" , where n = 1, ..., THRCNT.
* <p>
* When this interface tested by different threads, it must use different
* instances to replace - one for each thread.<p>
* That's why we use objRelation "XIndexReplaceINDEX" to store the number of
* last taken instance. If there is no such relation, it initialize with 1.
* <p>
* This ObjectRelations should be necessary to create an Object,
* which is can be replaced by index
* INSTANCEn are n Objectrelations so that every thread can isert it's own
* object. n depends on the variable THRCNT which and comes from API.INI
* <p>
* Why that:
* If you insert the same Object by replaceByIndex() several times you
* don't insert the Object several times. The first replaceByIndex() inserts
* the Object to the Container but all other replaceByIndex() changes
* the Index in the Continer because it's the same Object. <p>
* Test is multithread compilant. <p>
* @see com.sun.star.container.XIndexReplace
*/
public class _XIndexReplace extends MultiMethodTest {

    /** The object under test, set by the test framework. */
    public XIndexReplace oObj = null;

    /**
     * Primarily tries to replace elements in a proper way :
     * replaces the first, middle and the last element then
     * checks if elements were properly replaced.
     * Then wrong parameters are passed : invalid index and
     * null value for replacing, and test checks for proper
     * exceptions to be thrown. <p>
     * In different threads it replaces elements with different
     * objects.
     * Has <b>OK</b> status if in the first (correct) case
     * elements were successfully replaced (i.e. values got
     * after replacing must be equal to those replaced with),
     * and in the second case proper exceptions were thrown.
     */
    public void _replaceByIndex() {
        boolean result = true;
        Object old = null;
        Object oInstance = null;
        int Index = 0;

        // get for every thread its own Object to insert it; the counter is
        // kept in the "XIndexReplaceINDEX" object relation and incremented
        // per thread so each one picks a distinct INSTANCEn relation.
        log.println("get ObjRelation(\"XIndexReplaceINDEX\")");
        String sIndex = (String)tEnv.getObjRelation("XIndexReplaceINDEX");
        if (sIndex == null) {
            log.println("No XIndexReplaceINDEX - so set it to 1.");
            tEnv.addObjRelation("XIndexReplaceINDEX", Integer.toString(1));
            Index = 1;
        } else {
            Index = Integer.parseInt(sIndex);
            Index++;
            tEnv.addObjRelation("XIndexReplaceINDEX", Integer.toString(Index));
        }
        log.println("get ObjRelation(\"INSTANCE" + Index +"\")");
        oInstance = tEnv.getObjRelation("INSTANCE"+ Index);
        if (oInstance == null) {
            log.println("ObjRelation(\"INSTANCE" + Index +"\") Object n.a.");
        }

        // Case 1: proper replacement of element 0.
        log.println("testing replaceByIndex(0)...");
        try {
            log.println("Getting old object");
            old = oObj.getByIndex(0);
            oObj.replaceByIndex(0, oInstance);
            // NOTE(review): the first comparison result is immediately
            // overwritten by the second assignment; only the ValueComparer
            // check decides the outcome of this case.
            result = !(oObj.getByIndex(0)).equals(old);
            result = ! ValueComparer.equalValue(oObj,old);
        } catch (com.sun.star.lang.IndexOutOfBoundsException e) {
            e.printStackTrace(log) ;
            result = false;
        } catch (com.sun.star.lang.IllegalArgumentException e) {
            e.printStackTrace(log) ;
            result = false;
        } catch (com.sun.star.lang.WrappedTargetException e) {
            e.printStackTrace(log) ;
            result = false;
        }

        // Case 2: an invalid index must raise IndexOutOfBoundsException.
        log.println("replace with a wrong Object occurs Exceptions ...");
        try {
            oObj.replaceByIndex(999, oInstance);
            result = false;
            log.println("1. replaceByIndex(): Exception expected! - FAILED");

            // dump the container's element names to aid debugging
            XNameContainer xNC = (XNameContainer)
                UnoRuntime.queryInterface(XNameContainer.class, oObj) ;
            String[] names = xNC.getElementNames() ;
            log.println("Element names :") ;
            for (int i = 0; i<names.length; i++) {
                log.println(" '" + names[i] + "'") ;
            }
        } catch (IndexOutOfBoundsException e) {
            log.println("1. replaceByIndex(): Expected exception - OK");
            result &= true;
        } catch (com.sun.star.lang.IllegalArgumentException e) {
            result = false;
            log.println("1. replaceByIndex(): Unexpected exception! - " +
                e + " - FAILED");
        } catch (com.sun.star.lang.WrappedTargetException e) {
            result = false;
            log.println("1. replaceByIndex(): Unexpected exception! - " +
                e + " - FAILED");
        }

        // Case 3: a null replacement value must raise IllegalArgumentException.
        log.println("replace with a wrong Object occurs Exceptions ...");
        try {
            oObj.replaceByIndex(0, null);
            result = false;
            log.println("2. replaceByIndex(): Exception expected! - FAILED");

            // dump the container's element names to aid debugging
            XNameContainer xNC = (XNameContainer)
                UnoRuntime.queryInterface(XNameContainer.class, oObj) ;
            String[] names = xNC.getElementNames() ;
            log.println("Element names :") ;
            for (int i = 0; i<names.length; i++) {
                log.println(" '" + names[i] + "'") ;
            }
        } catch (IllegalArgumentException e) {
            log.println("2. replaceByIndex(): Expected exception - OK");
            result &= true;
        } catch (com.sun.star.lang.WrappedTargetException e) {
            result = false;
            log.println("2. replaceByIndex(): Unexpected exception! - " +
                e + " - FAILED");
        } catch (com.sun.star.lang.IndexOutOfBoundsException e) {
            result = false;
            log.println("2. replaceByIndex(): Unexpected exception! - " +
                e + " - FAILED");
        }

        // Restore the original element so later interface tests see the
        // container unchanged.
        log.println("replace with the old object");
        try {
            oObj.replaceByIndex(0, old);
        } catch (IllegalArgumentException e) {
            e.printStackTrace(log) ;
        } catch (com.sun.star.lang.WrappedTargetException e) {
            e.printStackTrace(log) ;
        } catch (com.sun.star.lang.IndexOutOfBoundsException e) {
            e.printStackTrace(log) ;
        }

        tRes.tested("replaceByIndex()", result);
    }
}
| 3,151 |
310 | <gh_stars>100-1000
{
"name": "Shake",
"description": "Discontinued image compositing software.",
"url": "https://en.wikipedia.org/wiki/Shake_(software)"
} | 57 |
1,459 | /** @file
*****************************************************************************
Declaration of interfaces for top-level SHA256 gadgets.
*****************************************************************************
* @author This file is part of libsnark, developed by SCIPR Lab
* and contributors (see AUTHORS).
* @copyright MIT license (see LICENSE file)
*****************************************************************************/
#ifndef SHA256_GADGET_HPP_
#define SHA256_GADGET_HPP_
#include <libsnark/common/data_structures/merkle_tree.hpp>
#include <libsnark/gadgetlib1/gadgets/basic_gadgets.hpp>
#include <libsnark/gadgetlib1/gadgets/hashes/hash_io.hpp>
#include <libsnark/gadgetlib1/gadgets/hashes/sha256/sha256_components.hpp>
namespace libsnark {
/**
* Gadget for the SHA256 compression function.
*/
template<typename FieldT>
class sha256_compression_function_gadget : public gadget<FieldT> {
public:
    // Per-round working variables a..h of the SHA256 state.
    std::vector<pb_linear_combination_array<FieldT> > round_a;
    std::vector<pb_linear_combination_array<FieldT> > round_b;
    std::vector<pb_linear_combination_array<FieldT> > round_c;
    std::vector<pb_linear_combination_array<FieldT> > round_d;
    std::vector<pb_linear_combination_array<FieldT> > round_e;
    std::vector<pb_linear_combination_array<FieldT> > round_f;
    std::vector<pb_linear_combination_array<FieldT> > round_g;
    std::vector<pb_linear_combination_array<FieldT> > round_h;

    // Packed words W produced by the message-schedule gadget.
    pb_variable_array<FieldT> packed_W;
    std::shared_ptr<sha256_message_schedule_gadget<FieldT> > message_schedule;
    std::vector<sha256_round_function_gadget<FieldT> > round_functions;

    // Output words before and after reduction (reduce_output performs it).
    pb_variable_array<FieldT> unreduced_output;
    pb_variable_array<FieldT> reduced_output;
    std::vector<lastbits_gadget<FieldT> > reduce_output;
public:
    // Public interface: previous chaining value, new message block, digest out.
    pb_linear_combination_array<FieldT> prev_output;
    pb_variable_array<FieldT> new_block;
    digest_variable<FieldT> output;

    sha256_compression_function_gadget(protoboard<FieldT> &pb,
                                       const pb_linear_combination_array<FieldT> &prev_output,
                                       const pb_variable_array<FieldT> &new_block,
                                       const digest_variable<FieldT> &output,
                                       const std::string &annotation_prefix);
    void generate_r1cs_constraints();
    void generate_r1cs_witness();
};
/**
* Gadget for the SHA256 compression function, viewed as a 2-to-1 hash
* function, and using the same initialization vector as in SHA256
* specification. Thus, any collision for
* sha256_two_to_one_hash_gadget trivially extends to a collision for
* full SHA256 (by appending the same padding).
*/
template<typename FieldT>
class sha256_two_to_one_hash_gadget : public gadget<FieldT> {
public:
    typedef libff::bit_vector hash_value_type;
    typedef merkle_authentication_path merkle_authentication_path_type;

    // The underlying compression-function gadget.
    std::shared_ptr<sha256_compression_function_gadget<FieldT> > f;

    // Hash two digests (left, right) into output.
    sha256_two_to_one_hash_gadget(protoboard<FieldT> &pb,
                                  const digest_variable<FieldT> &left,
                                  const digest_variable<FieldT> &right,
                                  const digest_variable<FieldT> &output,
                                  const std::string &annotation_prefix);

    // Hash a single input block of the given length into output.
    sha256_two_to_one_hash_gadget(protoboard<FieldT> &pb,
                                  const size_t block_length,
                                  const block_variable<FieldT> &input_block,
                                  const digest_variable<FieldT> &output,
                                  const std::string &annotation_prefix);

    void generate_r1cs_constraints(const bool ensure_output_bitness=true); // TODO: ignored for now
    void generate_r1cs_witness();

    static size_t get_block_len();
    static size_t get_digest_len();

    /// Native (out-of-circuit) evaluation of the hash on a bit vector.
    static libff::bit_vector get_hash(const libff::bit_vector &input);

    static size_t expected_constraints(const bool ensure_output_bitness=true); // TODO: ignored for now
};
} // libsnark
#include <libsnark/gadgetlib1/gadgets/hashes/sha256/sha256_gadget.tcc>
#endif // SHA256_GADGET_HPP_
| 1,752 |
348 | <filename>docs/data/leg-t2/067/06707491.json
{"nom":"Tieffenbach","circ":"7ème circonscription","dpt":"Bas-Rhin","inscrits":246,"abs":134,"votants":112,"blancs":4,"nuls":1,"exp":107,"res":[{"nuance":"LR","nom":"<NAME>","voix":85},{"nuance":"REM","nom":"<NAME>","voix":22}]} | 112 |
777 | <filename>bert-quantization/bert-pyt-quantization/data/TextSharding.py
# Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
from itertools import islice
import multiprocessing
import statistics
class Sharding:
    """Distributes articles from input text files into training/test shards.

    Input files contain one article per line. Articles are segmented into
    sentences with a caller-provided segmenter, then greedily packed into
    ``n_training_shards`` training and ``n_test_shards`` test shard files so
    that every shard ends up with roughly the same number of sentences.

    Fix applied: all ``is 0`` / ``is not 0`` identity comparisons against int
    literals were replaced with ``== 0`` / ``!= 0``. Identity comparison with
    literals only works by accident (CPython small-int caching) and raises a
    SyntaxWarning on Python 3.8+.
    """

    def __init__(self, input_files, output_name_prefix, n_training_shards, n_test_shards, fraction_test_set):
        assert len(input_files) > 0, 'The input file list must contain at least one file.'
        assert n_training_shards > 0, 'There must be at least one output shard.'
        assert n_test_shards > 0, 'There must be at least one output shard.'

        self.n_training_shards = n_training_shards
        self.n_test_shards = n_test_shards
        self.fraction_test_set = fraction_test_set

        self.input_files = input_files

        self.output_name_prefix = output_name_prefix
        self.output_training_identifier = '_training'
        self.output_test_identifier = '_test'
        self.output_file_extension = '.txt'

        self.articles = {}  # key: integer identifier, value: list of articles
        self.sentences = {}  # key: integer identifier, value: list of sentences
        self.output_training_files = {}  # key: filename, value: list of articles to go into file
        self.output_test_files = {}  # key: filename, value: list of articles to go into file

        self.init_output_files()

    # Remember, the input files contain one article per line (the whitespace
    # check is to skip extraneous blank lines)
    def load_articles(self):
        """Reads every input file, storing one article per non-blank line."""
        print('Start: Loading Articles')

        global_article_count = 0
        for input_file in self.input_files:
            print('input file:', input_file)
            with open(input_file, mode='r', newline='\n') as f:
                for i, line in enumerate(f):
                    if line.strip():
                        self.articles[global_article_count] = line.rstrip()
                        global_article_count += 1

        print('End: Loading Articles: There are', len(self.articles), 'articles.')

    def segment_articles_into_sentences(self, segmenter):
        """Splits every loaded article into sentences via ``segmenter``.

        ``segmenter`` must provide ``segment_string(article) -> list[str]``.
        Articles are lazily loaded if ``load_articles`` was not called yet.
        """
        print('Start: Sentence Segmentation')
        if len(self.articles) == 0:  # was 'is 0' — identity vs. equality bug
            self.load_articles()

        assert len(self.articles) != 0, 'Please check that input files are present and contain data.'

        # TODO: WIP: multiprocessing (create independent ranges and spawn processes)
        use_multiprocessing = 'serial'

        def chunks(data, size=len(self.articles)):
            # Yields successive dicts of at most `size` items from `data`.
            it = iter(data)
            for i in range(0, len(data), size):
                yield {k: data[k] for k in islice(it, size)}

        if use_multiprocessing == 'manager':
            manager = multiprocessing.Manager()
            return_dict = manager.dict()
            jobs = []
            n_processes = 7  # in addition to the main process, total = n_proc+1

            def work(articles, return_dict):
                sentences = {}
                for i, article in enumerate(articles):
                    sentences[i] = segmenter.segment_string(articles[article])

                    if i % 5000 == 0:
                        print('Segmenting article', i)

                return_dict.update(sentences)

            for item in chunks(self.articles, len(self.articles)):
                p = multiprocessing.Process(target=work, args=(item, return_dict))

                # Busy wait
                # NOTE(review): jobs are never removed inside this loop, so the
                # wait can never make progress once full — dead code while
                # use_multiprocessing is hard-coded to 'serial'.
                while len(jobs) >= n_processes:
                    pass

                jobs.append(p)
                p.start()

            for proc in jobs:
                proc.join()

        elif use_multiprocessing == 'queue':
            work_queue = multiprocessing.Queue()
            jobs = []

            for item in chunks(self.articles, len(self.articles)):
                pass

        else:  # serial option
            for i, article in enumerate(self.articles):
                self.sentences[i] = segmenter.segment_string(self.articles[article])

                if i % 5000 == 0:
                    print('Segmenting article', i)

        print('End: Sentence Segmentation')

    def init_output_files(self):
        """Creates one empty bookkeeping entry per training/test shard file."""
        print('Start: Init Output Files')
        assert len(self.output_training_files) == 0, 'Internal storage self.output_files already contains data. This function is intended to be used by the constructor only.'
        assert len(self.output_test_files) == 0, 'Internal storage self.output_files already contains data. This function is intended to be used by the constructor only.'

        for i in range(self.n_training_shards):
            name = self.output_name_prefix + self.output_training_identifier + '_' + str(i) + self.output_file_extension
            self.output_training_files[name] = []

        for i in range(self.n_test_shards):
            name = self.output_name_prefix + self.output_test_identifier + '_' + str(i) + self.output_file_extension
            self.output_test_files[name] = []

        print('End: Init Output Files')

    def get_sentences_per_shard(self, shard):
        """Returns the total sentence count over the article ids in ``shard``."""
        result = 0
        for article_id in shard:
            result += len(self.sentences[article_id])

        return result

    def distribute_articles_over_shards(self):
        """Greedily assigns articles to shards, balancing sentences per shard.

        First pass gives every shard its single largest remaining article;
        subsequent passes fill each shard toward its nominal sentence budget,
        bumping the budget when no progress is made for a few rounds.
        """
        print('Start: Distribute Articles Over Shards')

        assert len(self.articles) >= self.n_training_shards + self.n_test_shards, 'There are fewer articles than shards. Please add more data or reduce the number of shards requested.'

        # Create dictionary with - key: sentence count per article, value: article id number
        sentence_counts = defaultdict(lambda: [])

        max_sentences = 0
        total_sentences = 0

        for article_id in self.sentences:
            current_length = len(self.sentences[article_id])
            sentence_counts[current_length].append(article_id)
            max_sentences = max(max_sentences, current_length)
            total_sentences += current_length

        n_sentences_assigned_to_training = int((1 - self.fraction_test_set) * total_sentences)
        nominal_sentences_per_training_shard = n_sentences_assigned_to_training // self.n_training_shards
        nominal_sentences_per_test_shard = (total_sentences - n_sentences_assigned_to_training) // self.n_test_shards

        consumed_article_set = set({})
        unused_article_set = set(self.articles.keys())

        # Make first pass and add one article worth of lines per file
        for file in self.output_training_files:
            current_article_id = sentence_counts[max_sentences][-1]
            sentence_counts[max_sentences].pop(-1)
            self.output_training_files[file].append(current_article_id)
            consumed_article_set.add(current_article_id)
            unused_article_set.remove(current_article_id)

            # Maintain the max sentence count
            while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0:
                max_sentences -= 1

            if len(self.sentences[current_article_id]) > nominal_sentences_per_training_shard:
                nominal_sentences_per_training_shard = len(self.sentences[current_article_id])
                print('Warning: A single article contains more than the nominal number of sentences per training shard.')

        for file in self.output_test_files:
            current_article_id = sentence_counts[max_sentences][-1]
            sentence_counts[max_sentences].pop(-1)
            self.output_test_files[file].append(current_article_id)
            consumed_article_set.add(current_article_id)
            unused_article_set.remove(current_article_id)

            # Maintain the max sentence count
            while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0:
                max_sentences -= 1

            if len(self.sentences[current_article_id]) > nominal_sentences_per_test_shard:
                nominal_sentences_per_test_shard = len(self.sentences[current_article_id])
                print('Warning: A single article contains more than the nominal number of sentences per test shard.')

        training_counts = []
        test_counts = []

        for shard in self.output_training_files:
            training_counts.append(self.get_sentences_per_shard(self.output_training_files[shard]))

        for shard in self.output_test_files:
            test_counts.append(self.get_sentences_per_shard(self.output_test_files[shard]))

        training_median = statistics.median(training_counts)
        test_median = statistics.median(test_counts)

        # Make subsequent passes over files to find articles to add without going over limit
        history_remaining = []
        n_history_remaining = 4

        while len(consumed_article_set) < len(self.articles):
            for fidx, file in enumerate(self.output_training_files):
                nominal_next_article_size = min(nominal_sentences_per_training_shard - training_counts[fidx], max_sentences)

                # Maintain the max sentence count
                while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0:
                    max_sentences -= 1

                while len(sentence_counts[nominal_next_article_size]) == 0 and nominal_next_article_size > 0:
                    nominal_next_article_size -= 1

                if nominal_next_article_size not in sentence_counts or nominal_next_article_size == 0 or training_counts[fidx] > training_median:
                    continue  # skip adding to this file, will come back later if no file can accept unused articles

                current_article_id = sentence_counts[nominal_next_article_size][-1]
                sentence_counts[nominal_next_article_size].pop(-1)

                self.output_training_files[file].append(current_article_id)
                consumed_article_set.add(current_article_id)
                unused_article_set.remove(current_article_id)

            for fidx, file in enumerate(self.output_test_files):
                nominal_next_article_size = min(nominal_sentences_per_test_shard - test_counts[fidx], max_sentences)

                # Maintain the max sentence count
                while len(sentence_counts[max_sentences]) == 0 and max_sentences > 0:
                    max_sentences -= 1

                while len(sentence_counts[nominal_next_article_size]) == 0 and nominal_next_article_size > 0:
                    nominal_next_article_size -= 1

                if nominal_next_article_size not in sentence_counts or nominal_next_article_size == 0 or test_counts[fidx] > test_median:
                    continue  # skip adding to this file, will come back later if no file can accept unused articles

                current_article_id = sentence_counts[nominal_next_article_size][-1]
                sentence_counts[nominal_next_article_size].pop(-1)

                self.output_test_files[file].append(current_article_id)
                consumed_article_set.add(current_article_id)
                unused_article_set.remove(current_article_id)

            # If unable to place articles a few times, bump up nominal sizes by fraction until articles get placed
            if len(history_remaining) == n_history_remaining:
                history_remaining.pop(0)
            history_remaining.append(len(unused_article_set))

            history_same = True
            for i in range(1, len(history_remaining)):
                history_same = history_same and (history_remaining[i-1] == history_remaining[i])

            if history_same:
                nominal_sentences_per_training_shard += 1
                # nominal_sentences_per_test_shard += 1

            training_counts = []
            test_counts = []
            for shard in self.output_training_files:
                training_counts.append(self.get_sentences_per_shard(self.output_training_files[shard]))
            for shard in self.output_test_files:
                test_counts.append(self.get_sentences_per_shard(self.output_test_files[shard]))

            training_median = statistics.median(training_counts)
            test_median = statistics.median(test_counts)

            print('Distributing data over shards:', len(unused_article_set), 'articles remaining.')

        if len(unused_article_set) != 0:
            print('Warning: Some articles did not make it into output files.')

        for shard in self.output_training_files:
            print('Training shard:', self.get_sentences_per_shard(self.output_training_files[shard]))

        for shard in self.output_test_files:
            print('Test shard:', self.get_sentences_per_shard(self.output_test_files[shard]))

        print('End: Distribute Articles Over Shards')

    def write_shards_to_disk(self):
        """Writes every training and test shard to its output file."""
        print('Start: Write Shards to Disk')
        for shard in self.output_training_files:
            self.write_single_shard(shard, self.output_training_files[shard])

        for shard in self.output_test_files:
            self.write_single_shard(shard, self.output_test_files[shard])

        print('End: Write Shards to Disk')

    def write_single_shard(self, shard_name, shard):
        """Writes one shard file: each article's sentences, one per line,
        with a blank line separating consecutive articles."""
        with open(shard_name, mode='w', newline='\n') as f:
            for article_id in shard:
                for line in self.sentences[article_id]:
                    f.write(line + '\n')
                f.write('\n')  # Line break between articles
import nltk
nltk.download('punkt')
class NLTKSegmenter:
    """Sentence segmenter backed by nltk's punkt tokenizer."""

    # Fix: was misspelled 'def __init(self)', which defined an ordinary
    # (name-mangled) method instead of the constructor.
    def __init__(self):
        pass

    def segment_string(self, article):
        """Splits ``article`` into a list of sentence strings."""
        return nltk.tokenize.sent_tokenize(article)
| 5,976 |
384 | <filename>iOS/samples/SignInHelpers/include/SampleAccountActionFailureReason.h<gh_stars>100-1000
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
#pragma once
#import <Foundation/Foundation.h>
/// @brief Reason a sample-account sign-in or sign-out action failed (or
/// SampleAccountActionNoFailure when it succeeded).
typedef NS_ENUM(NSInteger, SampleAccountActionFailureReason)
{
    SampleAccountActionNoFailure,                                ///< The action completed successfully.
    SampleAccountActionFailureReasonGeneric,                     ///< Unspecified failure.
    SampleAccountActionFailureReasonAlreadySignedIn,             ///< Sign-in requested while already signed in.
    SampleAccountActionFailureReasonAlreadySignedOut,            ///< Sign-out requested while already signed out.
    SampleAccountActionFailureReasonUserCancelled,               ///< The user cancelled the flow.
    SampleAccountActionFailureReasonFailToRetrieveAuthCode,      ///< Could not obtain an authorization code.
    SampleAccountActionFailureReasonFailToRetrieveRefreshToken,  ///< Could not obtain a refresh token.
    SampleAccountActionFailureReasonSigninSignOutInProgress,     ///< Another sign-in/out action is already running.
    SampleAccountActionFailureReasonUnknown,                     ///< Failure of unknown origin.
    SampleAccountActionFailureReasonInvalidAccountId,            ///< The supplied account id was invalid.
    SampleAccountActionFailureReasonAccessTokenTemporaryError,   ///< Transient access-token error; retry may succeed.
    SampleAccountActionFailureReasonAccessTokenPermanentError,   ///< Permanent access-token error.
    SampleAccountActionFailureReasonADAL,                        ///< Failure reported by the ADAL library.
};
| 286 |
7,220 | <filename>trax/models/research/predict_terraformer.py<gh_stars>1000+
# coding=utf-8
# Copyright 2021 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=line-too-long
# pylint: disable=unused-import
# pylint: disable=g-import-not-at-top
# pylint: disable=g-bad-import-order
# pylint: disable=reimported
# pylint: disable=g-too-many-blank-lines
# pylint: disable=g-wrong-blank-lines
# pylint: disable=bad-whitespace
# pylint: disable=missing-function-docstring
# pylint: disable=g-inconsistent-quotes
# pylint: disable=redefined-outer-name
"""bash.
pip install git+git://github.com/google/trax.git@$master
mkdir /tmp/Terraformer
cd /tmp/Terraformer
download the following into /tmp/Terraformer:
https://storage.googleapis.com/trax-ml/vocabs/en_16k.subword
https://storage.cloud.google.com/trax-ml/terraformer/med/config.gin
https://storage.cloud.google.com/trax-ml/terraformer/med/model_200000.opt_slots0.npy.gz
https://storage.cloud.google.com/trax-ml/terraformer/med/model_200000.pkl.gz
https://storage.cloud.google.com/trax-ml/terraformer/med/model_200000.weights.npy.gz
"""
import sys
import time
import os
import random
import time
import numpy as np
import trax
from trax import layers as tl
from trax import fastmath
from trax.fastmath import numpy as jnp
from trax.supervised import training
from trax.layers.assert_shape import assert_shape
import copy
import functools
import gc
import os
import time
from jax import test_util # pylint: disable=unused-import
from jax.config import config
import numpy as np
import psutil
from tensorflow.compat.v2 import test
from trax import fastmath
from trax import layers as tl
from trax import models
from trax import shapes
from trax.supervised import decoding
import gin
# from colabtools import adhoc_import
import json
import gc
import jax
import numpy as np
import os
import time
import gin
import tensorflow_datasets as tfds
# from colabtools import adhoc_import
import functools
from trax.data import tf_inputs
import tensorflow_datasets as tfds
from t5.data import preprocessors as t5_processors
import t5.data
from trax import data
from trax import layers as tl
from trax import models
from trax import optimizers
from trax.data import inputs
from trax.supervised import lr_schedules
from trax.supervised import trainer_lib
from trax.rl import serialization_utils
from trax.rl import space_serializer
import math
from trax.fastmath import numpy as numpy_math
import trax
import numpy as np
from trax import fastmath
from trax.fastmath import numpy as jnp
from trax.layers import base
from trax.layers import combinators as cb
from trax.layers import core
from trax.layers import initializers as init
from trax.layers.assert_shape import assert_shape
from trax.layers.base import Fn
from trax.layers.research import sparsity
import functools
from trax import layers as tl
from trax.fastmath import numpy as jnp
from trax.models.reformer import reformer
from trax.models.research import configurable_transformer as ct
from trax.models.research import transformer2 as t2
#####
# Monkeypatch every alias of PositionalEncoding so all instantiations use
# d_feature=64 — presumably required to match the Terraformer checkpoint's
# embedding width (TODO confirm against the checkpoint config).
og_PositionalEncoding = tl.PositionalEncoding
trax.layers.attention.PositionalEncoding = functools.partial(og_PositionalEncoding, d_feature=64)
trax.layers.PositionalEncoding = functools.partial(og_PositionalEncoding, d_feature=64)
tl.PositionalEncoding = functools.partial(og_PositionalEncoding, d_feature=64)
#####

import gin
gin.enter_interactive_mode()  # allow re-binding gin configurables in this script
def model_configure(*args, **kwargs):
    """Registers a configurable with gin under the 'trax.models' module name.

    Forwards all arguments to gin.external_configurable, overriding any
    caller-supplied 'module' keyword.
    """
    forwarded = dict(kwargs)
    forwarded['module'] = 'trax.models'
    return gin.external_configurable(*args, **forwarded)
####
# Checkpoint, weights, optimizer-slot, and gin-config files — downloaded to
# /tmp/Terraformer per the module docstring — plus the subword vocabulary
# used for (de)tokenization.
xm2a_main = '/tmp/Terraformer/model_200000.pkl.gz'
xm2a_weights = '/tmp/Terraformer/model_200000.weights.npy.gz'
xm2a_opt_slots = '/tmp/Terraformer/model_200000.opt_slots0.npy.gz'
xm2a_config = '/tmp/Terraformer/config.gin'
VOCAB_FILE = 'en_16k.subword'
VOCAB_DIR = '/tmp/Terraformer'
####
# Load the model's gin config, patch attention for long-context inference,
# and strip config lines that cannot be applied in this environment.
with open(xm2a_config) as f:  # 'with' closes the handle even if reading fails
    gin_config = list(f)

# Patch DotProductCausalAttention directly instead of appending
# 'DotProductCausalAttention.max_inference_length = 16384' to the gin config.
og_DotProductCausalAttention = trax.layers.attention.DotProductCausalAttention
trax.layers.attention.DotProductCausalAttention = functools.partial(
    og_DotProductCausalAttention, max_inference_length=16384,
)

# Drop config lines referencing unavailable internals ('mira') and any
# tokenization settings ('okenize' matches both 'tokenize' and 'Tokenize').
gin_config = [l for l in gin_config if 'mira' not in l]
gin_config = [l for l in gin_config if 'okenize' not in l]  # tokenize
gin_config = ''.join(gin_config)
gin.parse_config(gin_config)
gin.operative_config_str().split('\n')  # NOTE(review): result unused — notebook residue
print(gin_config)
####
def model(mode):
    """Builds a ConfigurableTerraformer; hyperparameters come from the parsed gin config."""
    return models.ConfigurableTerraformer(mode=mode)
# ####
# Pad the (inputs, targets, weights) streams to a fixed 15K-token length with
# zeros so the model sees a constant input shape.
padding_fun = trax.data.PadToLength(len_map={0: 15*1024, 1: 15*1024, 2: 15*1024},
                                    pad_value = {0: 0, 1: 0, 2:0})
# padding_fun = lambda x: x
# padding_fun = trax.data.PadToLength(len_map={0: 128, 1: 128, 2:128}, pad_value={0: 0, 1: 0, 2: 0}, multiple=True)
####
# Materialize the first three examples of the arxiv test split to decode from.
dataset = tfds.summarization.scientific_papers.ScientificPapers()
valid = tfds.load(name='scientific_papers/arxiv:1.1.1')['test']
index = 0
xarts = []
for x in valid:
    xarts.append(x)
    index += 1
    if index == 3:
        break
model_file = xm2a_main
# Input signatures needed only for mode='predict' (see comment below).
shape11 = trax.shapes.ShapeDtype((1, 1), dtype=numpy_math.int32)
shape1l = trax.shapes.ShapeDtype((1, 15*1024), dtype=numpy_math.int32)

with trax.fastmath.use_backend(trax.fastmath.Backend.JAX):
    model = model(mode='eval')  # NOTE: rebinds 'model' from factory function to instance
    model.init_from_file(model_file, weights_only=True)
    # in mode='predict' use input_signature=(shape1l, shape11)
    old_state = model.state  # saved so decoding can restart from a clean state

# Decode the first article
xart = xarts[2]['article']  # NOTE(review): index 2 is the third fetched example, despite 'first' above
question = xart.numpy().decode()
# print(question[:512])
tokenized = next(padding_fun(trax.data.tokenize([question,], vocab_file=VOCAB_FILE, vocab_dir=VOCAB_DIR, n_reserved_ids=100)))
def detokenize(x):
    """Converts token ids back to text using the shared subword vocabulary."""
    return trax.data.detokenize(x, vocab_file=VOCAB_FILE, vocab_dir=VOCAB_DIR,
                                n_reserved_ids=100)
# Greedily (temperature=0.0) decode up to max_length tokens from the padded
# article, printing each token as it is produced, then the combined text.
with trax.fastmath.use_backend(trax.fastmath.Backend.JAX):
    model.state = old_state  # restore the pristine state captured after init
    counter, tokens, max_length = 0, [], 30
    for token in decoding.autoregressive_sample_stream(
        model, tokenized[None, :15*1024], batch_size=1, temperature=0.0,
        eval_mode=True, eval_min_length=1024):
        print(f'Token {counter}: "{detokenize(token)}" {token}')
        tokens.append(token[:, None])
        counter += 1
        if counter > max_length:
            break
    tokens = np.concatenate(tokens, axis=1)
    print(tokens)
    print(detokenize(tokens[0]))
828 | /*
* Copyright (C) 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.teleport.v2.utils;
import com.google.api.services.bigquery.model.TableRow;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.text.SimpleDateFormat;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import org.apache.beam.sdk.io.jdbc.JdbcIO;
/** Common code for Teleport DataplexJdbcIngestion. */
public class JdbcConverters {

  /** Factory method for {@link ResultSetToTableRow}. */
  public static JdbcIO.RowMapper<TableRow> getResultSetToTableRow() {
    return new ResultSetToTableRow();
  }

  /**
   * {@link JdbcIO.RowMapper} implementation to convert Jdbc ResultSet rows to UTF-8 encoded JSONs.
   */
  private static class ResultSetToTableRow implements JdbcIO.RowMapper<TableRow> {

    static SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd");

    // Fix: use 'HH' (hour-of-day 0-23) instead of 'hh' (clock-hour 1-12).
    // With 'hh' and no AM/PM marker, every afternoon time was emitted
    // ambiguously (e.g. 13:05 rendered as "01:05").
    static DateTimeFormatter datetimeFormatter =
        DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS");

    // NOTE(review): SimpleDateFormat interprets 'SSSSSS' as a zero-padded
    // millisecond count, not microseconds — confirm downstream expectations.
    // SimpleDateFormat is also not thread-safe; do not share this mapper
    // across threads.
    static SimpleDateFormat timestampFormatter =
        new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSSXXX");

    /**
     * Converts one JDBC row into a {@link TableRow}: temporal columns are
     * formatted as BigQuery-compatible strings, everything else (including
     * SQL NULLs) is passed through unchanged.
     */
    @Override
    public TableRow mapRow(ResultSet resultSet) throws Exception {
      ResultSetMetaData metaData = resultSet.getMetaData();
      TableRow outputTableRow = new TableRow();

      for (int i = 1; i <= metaData.getColumnCount(); i++) {
        if (resultSet.getObject(i) == null) {
          outputTableRow.set(metaData.getColumnName(i), resultSet.getObject(i));
          continue;
        }

        /*
         * DATE: EPOCH MILLISECONDS -> yyyy-MM-dd
         * DATETIME: EPOCH MILLISECONDS -> yyyy-MM-dd HH:mm:ss.SSSSSS
         * TIMESTAMP: EPOCH MILLISECONDS -> yyyy-MM-dd HH:mm:ss.SSSSSSXXX
         *
         * MySQL drivers have ColumnTypeName in all caps and postgres in small case
         */
        switch (metaData.getColumnTypeName(i).toLowerCase()) {
          case "date":
            outputTableRow.set(
                metaData.getColumnName(i), dateFormatter.format(resultSet.getObject(i)));
            break;
          case "datetime":
            outputTableRow.set(
                metaData.getColumnName(i),
                datetimeFormatter.format((TemporalAccessor) resultSet.getObject(i)));
            break;
          case "timestamp":
            outputTableRow.set(
                metaData.getColumnName(i), timestampFormatter.format(resultSet.getObject(i)));
            break;
          default:
            outputTableRow.set(metaData.getColumnName(i), resultSet.getObject(i));
        }
      }
      return outputTableRow;
    }
  }
}
| 1,252 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/media/web_content_decryption_module_session_impl.h"
#include <memory>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/check_op.h"
#include "base/metrics/histogram_functions.h"
#include "base/notreached.h"
#include "base/numerics/safe_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "media/base/cdm_key_information.h"
#include "media/base/cdm_promise.h"
#include "media/base/content_decryption_module.h"
#include "media/base/key_system_names.h"
#include "media/base/key_systems.h"
#include "media/base/limits.h"
#include "media/cdm/cenc_utils.h"
#include "media/cdm/json_web_key.h"
#include "third_party/blink/public/platform/web_data.h"
#include "third_party/blink/public/platform/web_encrypted_media_key_information.h"
#include "third_party/blink/public/platform/web_string.h"
#include "third_party/blink/public/platform/web_url.h"
#include "third_party/blink/public/platform/web_vector.h"
#include "third_party/blink/public/web/modules/media/webmediaplayer_util.h"
#include "third_party/blink/renderer/platform/media/cdm_result_promise.h"
#include "third_party/blink/renderer/platform/media/cdm_result_promise_helper.h"
#include "third_party/blink/renderer/platform/media/cdm_session_adapter.h"
namespace blink {
namespace {
// UMA name suffixes: each is appended to the key-system-specific prefix when
// reporting the result of the corresponding CDM operation.
const char kCloseSessionUMAName[] = "CloseSession";
const char kGenerateRequestUMAName[] = "GenerateRequest";
const char kLoadSessionUMAName[] = "LoadSession";
const char kRemoveSessionUMAName[] = "RemoveSession";
const char kUpdateSessionUMAName[] = "UpdateSession";
const char kKeyStatusSystemCodeUMAName[] = "KeyStatusSystemCode";
// Maps the Blink EME session type onto the media-layer equivalent. kUnknown
// (or any unexpected value) indicates a caller bug and falls through to
// NOTREACHED(), returning kTemporary as a safe default.
media::CdmSessionType ConvertSessionType(
    WebEncryptedMediaSessionType session_type) {
  if (session_type == WebEncryptedMediaSessionType::kTemporary)
    return media::CdmSessionType::kTemporary;
  if (session_type == WebEncryptedMediaSessionType::kPersistentLicense)
    return media::CdmSessionType::kPersistentLicense;

  NOTREACHED();
  return media::CdmSessionType::kTemporary;
}
// Validates and sanitizes |init_data| (of |init_data_type|) into
// |sanitized_init_data| so that malformed or oversized data never reaches the
// CDM. On failure, fills |error_message| and returns false.
bool SanitizeInitData(media::EmeInitDataType init_data_type,
                      const unsigned char* init_data,
                      size_t init_data_length,
                      std::vector<uint8_t>* sanitized_init_data,
                      std::string* error_message) {
  DCHECK_GT(init_data_length, 0u);
  if (init_data_length > media::limits::kMaxInitDataLength) {
    error_message->assign("Initialization data too long.");
    return false;
  }

  switch (init_data_type) {
    case media::EmeInitDataType::WEBM:
      // |init_data| for WebM is a single key.
      if (init_data_length > media::limits::kMaxKeyIdLength) {
        error_message->assign("Initialization data for WebM is too long.");
        return false;
      }
      sanitized_init_data->assign(init_data, init_data + init_data_length);
      return true;

    case media::EmeInitDataType::CENC:
      sanitized_init_data->assign(init_data, init_data + init_data_length);
      // 'cenc' init data must be a well-formed sequence of PSSH boxes.
      if (!media::ValidatePsshInput(*sanitized_init_data)) {
        error_message->assign("Initialization data for CENC is incorrect.");
        return false;
      }
      return true;

    case media::EmeInitDataType::KEYIDS: {
      // Extract the keys and then rebuild the message. This ensures that any
      // extra data in the provided JSON is dropped.
      std::string init_data_string(init_data, init_data + init_data_length);
      media::KeyIdList key_ids;
      if (!media::ExtractKeyIdsFromKeyIdsInitData(init_data_string, &key_ids,
                                                  error_message))
        return false;

      for (const auto& key_id : key_ids) {
        if (key_id.size() < media::limits::kMinKeyIdLength ||
            key_id.size() > media::limits::kMaxKeyIdLength) {
          error_message->assign("Incorrect key size.");
          return false;
        }
      }

      media::CreateKeyIdsInitData(key_ids, sanitized_init_data);
      return true;
    }

    case media::EmeInitDataType::UNKNOWN:
      break;
  }

  NOTREACHED();
  error_message->assign("Initialization data type is not supported.");
  return false;
}
// Copies |session_id| into |sanitized_session_id| after verifying it is
// ASCII, within the length limit, and fully printable. Note the out-param is
// written before the later checks, so it may contain data even when this
// returns false; callers must rely on the return value only.
bool SanitizeSessionId(const WebString& session_id,
                       std::string* sanitized_session_id) {
  // The user agent should thoroughly validate the sessionId value before
  // passing it to the CDM. At a minimum, this should include checking that
  // the length and value (e.g. alphanumeric) are reasonable.
  if (!session_id.ContainsOnlyASCII())
    return false;

  sanitized_session_id->assign(session_id.Ascii());
  if (sanitized_session_id->length() > media::limits::kMaxSessionIdLength)
    return false;

  // Check that |sanitized_session_id| only contains printable characters for
  // easier logging. Note that checking alphanumeric is too strict because there
  // are key systems using Base64 session IDs (which may include spaces). See
  // https://crbug.com/902828.
  for (const char c : *sanitized_session_id) {
    if (!base::IsAsciiPrintable(c))
      return false;
  }

  return true;
}
// Validates |response| (the license/update message supplied by the page) and
// writes a sanitized copy to |sanitized_response|. Clear Key responses are
// fully re-generated from the parsed JWK set; other key systems currently
// receive only a length check (see TODO below).
bool SanitizeResponse(const std::string& key_system,
                      const uint8_t* response,
                      size_t response_length,
                      std::vector<uint8_t>* sanitized_response) {
  // The user agent should thoroughly validate the response before passing it
  // to the CDM. This may include verifying values are within reasonable limits,
  // stripping irrelevant data or fields, pre-parsing it, sanitizing it,
  // and/or generating a fully sanitized version. The user agent should check
  // that the length and values of fields are reasonable. Unknown fields should
  // be rejected or removed.
  if (response_length > media::limits::kMaxSessionResponseLength)
    return false;

  if (media::IsClearKey(key_system) || media::IsExternalClearKey(key_system)) {
    std::string key_string(response, response + response_length);
    media::KeyIdAndKeyPairs keys;
    auto session_type = media::CdmSessionType::kTemporary;
    if (!ExtractKeysFromJWKSet(key_string, &keys, &session_type))
      return false;

    // Must contain at least one key.
    if (keys.empty())
      return false;

    for (const auto& key_pair : keys) {
      if (key_pair.first.size() < media::limits::kMinKeyIdLength ||
          key_pair.first.size() > media::limits::kMaxKeyIdLength) {
        return false;
      }
    }

    // Rebuild the JWK set from the parsed keys so extra JSON fields are
    // dropped rather than forwarded to the CDM.
    std::string sanitized_data = GenerateJWKSet(keys, session_type);
    sanitized_response->assign(sanitized_data.begin(), sanitized_data.end());
    return true;
  }

  // TODO(jrummell): Verify responses for Widevine.
  sanitized_response->assign(response, response + response_length);
  return true;
}
} // namespace
// |adapter| is the shared connection to the CDM; the session type is
// converted to the media-layer enum once here and fixed for this object's
// lifetime.
WebContentDecryptionModuleSessionImpl::WebContentDecryptionModuleSessionImpl(
    const scoped_refptr<CdmSessionAdapter>& adapter,
    WebEncryptedMediaSessionType session_type)
    : adapter_(adapter),
      session_type_(ConvertSessionType(session_type)),
      has_close_been_called_(false),
      is_closed_(false) {}
// Unregisters from the adapter and, per the EME spec (see below), closes the
// underlying CDM session if the page never did.
WebContentDecryptionModuleSessionImpl::
    ~WebContentDecryptionModuleSessionImpl() {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  if (!session_id_.empty()) {
    adapter_->UnregisterSession(session_id_);

    // From http://w3c.github.io/encrypted-media/#mediakeysession-interface
    // "If a MediaKeySession object is not closed when it becomes inaccessible
    // to the page, the CDM shall close the key session associated with the
    // object."
    //
    // This object is destroyed when the corresponding blink object is no
    // longer needed (which may be due to it becoming inaccessible to the
    // page), so if the session is not closed and CloseSession() has not yet
    // been called, call CloseSession() now. Since this object is being
    // destroyed, there is no need for the promise to do anything as this
    // session will be gone.
    if (!is_closed_ && !has_close_been_called_) {
      adapter_->CloseSession(session_id_,
                             std::make_unique<media::DoNothingCdmPromise<>>());
    }
  }
}
// Registers the Blink-side client that receives session events. Not owned.
void WebContentDecryptionModuleSessionImpl::SetClientInterface(Client* client) {
  client_ = client;
}
// Returns the sanitized session ID, or the empty string if the session has
// not been initialized/loaded yet.
WebString WebContentDecryptionModuleSessionImpl::SessionId() const {
  return WebString::FromUTF8(session_id_);
}
// Implements MediaKeySession.generateRequest()
// (https://w3c.github.io/encrypted-media/#generateRequest): validates and
// sanitizes |init_data|, then asks the CDM to create a new session and emit a
// license request. |result| is resolved or rejected asynchronously via the
// promise machinery.
void WebContentDecryptionModuleSessionImpl::InitializeNewSession(
    media::EmeInitDataType eme_init_data_type,
    const unsigned char* init_data,
    size_t init_data_length,
    WebContentDecryptionModuleResult result) {
  DCHECK(init_data);
  DCHECK(session_id_.empty());
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);

  // From https://w3c.github.io/encrypted-media/#generateRequest.
  // 6. If the Key System implementation represented by this object's cdm
  //    implementation value does not support initDataType as an Initialization
  //    Data Type, return a promise rejected with a NotSupportedError.
  //    String comparison is case-sensitive.
  if (!IsSupportedKeySystemWithInitDataType(adapter_->GetKeySystem(),
                                            eme_init_data_type)) {
    std::string message =
        "The initialization data type is not supported by the key system.";
    result.CompleteWithError(
        kWebContentDecryptionModuleExceptionNotSupportedError, 0,
        WebString::FromUTF8(message));
    return;
  }

  // 10.1 If the init data is not valid for initDataType, reject promise with
  //      a newly created TypeError.
  // 10.2 Let sanitized init data be a validated and sanitized version of init
  //      data. The user agent must thoroughly validate the Initialization Data
  //      before passing it to the CDM. This includes verifying that the length
  //      and values of fields are reasonable, verifying that values are within
  //      reasonable limits, and stripping irrelevant, unsupported, or unknown
  //      data or fields. It is recommended that user agents pre-parse,
  //      sanitize, and/or generate a fully sanitized version of the
  //      Initialization Data. If the Initialization Data format specified by
  //      initDataType supports multiple entries, the user agent should remove
  //      entries that are not needed by the CDM. The user agent must not
  //      re-order entries within the Initialization Data.
  // 10.3 If the preceding step failed, reject promise with a newly created
  //      TypeError.
  std::vector<uint8_t> sanitized_init_data;
  std::string message;
  if (!SanitizeInitData(eme_init_data_type, init_data, init_data_length,
                        &sanitized_init_data, &message)) {
    result.CompleteWithError(kWebContentDecryptionModuleExceptionTypeError, 0,
                             WebString::FromUTF8(message));
    return;
  }

  // 10.4 If sanitized init data is empty, reject promise with a
  //      NotSupportedError.
  if (sanitized_init_data.empty()) {
    result.CompleteWithError(
        kWebContentDecryptionModuleExceptionNotSupportedError, 0,
        "No initialization data provided.");
    return;
  }

  // 10.5 Let session id be the empty string.
  //      (Done in constructor.)
  // 10.6 Let message be null.
  // 10.7 Let message type be null.
  //      (Done by CDM.)
  // 10.8 Let cdm be the CDM instance represented by this object's cdm
  //      instance value.
  // 10.9 Use the cdm to execute the following steps:
  adapter_->InitializeNewSession(
      eme_init_data_type, sanitized_init_data, session_type_,
      std::unique_ptr<media::NewSessionCdmPromise>(
          new NewSessionCdmResultPromise(
              result, adapter_->GetKeySystemUMAPrefix(),
              kGenerateRequestUMAName,
              base::BindOnce(
                  &WebContentDecryptionModuleSessionImpl::OnSessionInitialized,
                  weak_ptr_factory_.GetWeakPtr()),
              {SessionInitStatus::NEW_SESSION})));
}
// Implements MediaKeySession.load(): sanitizes |session_id| and asks the CDM
// to load the persisted session. Only valid for persistent-license sessions
// (enforced by the DCHECK on |session_type_|). SESSION_NOT_FOUND is an
// accepted resolution status for the promise.
void WebContentDecryptionModuleSessionImpl::Load(
    const WebString& session_id,
    WebContentDecryptionModuleResult result) {
  DCHECK(!session_id.IsEmpty());
  DCHECK(session_id_.empty());
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  DCHECK(session_type_ == media::CdmSessionType::kPersistentLicense);
  // From https://w3c.github.io/encrypted-media/#load.
  // 8.1 Let sanitized session ID be a validated and/or sanitized version of
  //     sessionId. The user agent should thoroughly validate the sessionId
  //     value before passing it to the CDM. At a minimum, this should include
  //     checking that the length and value (e.g. alphanumeric) are reasonable.
  // 8.2 If the preceding step failed, or if sanitized session ID is empty,
  //     reject promise with a newly created TypeError.
  std::string sanitized_session_id;
  if (!SanitizeSessionId(session_id, &sanitized_session_id)) {
    result.CompleteWithError(kWebContentDecryptionModuleExceptionTypeError, 0,
                             "Invalid session ID.");
    return;
  }
  adapter_->LoadSession(
      session_type_, sanitized_session_id,
      std::unique_ptr<media::NewSessionCdmPromise>(
          new NewSessionCdmResultPromise(
              result, adapter_->GetKeySystemUMAPrefix(), kLoadSessionUMAName,
              base::BindOnce(
                  &WebContentDecryptionModuleSessionImpl::OnSessionInitialized,
                  weak_ptr_factory_.GetWeakPtr()),
              {SessionInitStatus::NEW_SESSION,
               SessionInitStatus::SESSION_NOT_FOUND})));
}
// Implements MediaKeySession.update(): sanitizes the license/response blob
// per the key system and forwards it to the CDM for the current session.
// Requires a session ID, i.e. the session must already be initialized.
void WebContentDecryptionModuleSessionImpl::Update(
    const uint8_t* response,
    size_t response_length,
    WebContentDecryptionModuleResult result) {
  DCHECK(response);
  DCHECK(!session_id_.empty());
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // From https://w3c.github.io/encrypted-media/#update.
  // 6.1 Let sanitized response be a validated and/or sanitized version of
  //     response copy. The user agent should thoroughly validate the response
  //     before passing it to the CDM. This may include verifying values are
  //     within reasonable limits, stripping irrelevant data or fields,
  //     pre-parsing it, sanitizing it, and/or generating a fully sanitized
  //     version. The user agent should check that the length and values of
  //     fields are reasonable. Unknown fields should be rejected or removed.
  // 6.2 If the preceding step failed, or if sanitized response is empty,
  //     reject promise with a newly created TypeError.
  std::vector<uint8_t> sanitized_response;
  if (!SanitizeResponse(adapter_->GetKeySystem(), response, response_length,
                        &sanitized_response)) {
    result.CompleteWithError(kWebContentDecryptionModuleExceptionTypeError, 0,
                             "Invalid response.");
    return;
  }
  adapter_->UpdateSession(
      session_id_, sanitized_response,
      std::make_unique<CdmResultPromise<>>(
          result, adapter_->GetKeySystemUMAPrefix(), kUpdateSessionUMAName));
}
// Implements MediaKeySession.close(). Resolves immediately if the session is
// already known to be closed; otherwise records that close() was requested
// (consulted by the destructor) and forwards to the CDM.
void WebContentDecryptionModuleSessionImpl::Close(
    WebContentDecryptionModuleResult result) {
  DCHECK(!session_id_.empty());
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // close() shouldn't be called if the session is already closed. Since the
  // operation is asynchronous, there is a window where close() was called
  // just before the closed event arrives. The CDM should handle the case where
  // close() is called after it has already closed the session. However, if
  // we can tell the session is now closed, simply resolve the promise.
  if (is_closed_) {
    result.Complete();
    return;
  }
  has_close_been_called_ = true;
  adapter_->CloseSession(
      session_id_,
      std::make_unique<CdmResultPromise<>>(
          result, adapter_->GetKeySystemUMAPrefix(), kCloseSessionUMAName));
}
// Implements MediaKeySession.remove(): asks the CDM to remove the session's
// stored data. The promise reports success or failure back to blink.
void WebContentDecryptionModuleSessionImpl::Remove(
    WebContentDecryptionModuleResult result) {
  DCHECK(!session_id_.empty());
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  adapter_->RemoveSession(
      session_id_,
      std::make_unique<CdmResultPromise<>>(
          result, adapter_->GetKeySystemUMAPrefix(), kRemoveSessionUMAName));
}
// Called by the CDM when it has a message (e.g. a license request) for the
// application; forwards the raw bytes to the blink client.
void WebContentDecryptionModuleSessionImpl::OnSessionMessage(
    media::CdmMessageType message_type,
    const std::vector<uint8_t>& message) {
  DCHECK(client_) << "Client not set before message event";
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  client_->OnSessionMessage(message_type, message.data(), message.size());
}
// Called by the CDM when the set of keys (or their statuses) for this session
// changes. Converts each media key entry into a blink
// WebEncryptedMediaKeyInformation, records a UMA sample for every key's
// system code, then notifies the blink client.
void WebContentDecryptionModuleSessionImpl::OnSessionKeysChange(
    bool has_additional_usable_key,
    media::CdmKeysInfo keys_info) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  WebVector<WebEncryptedMediaKeyInformation> keys(keys_info.size());
  for (size_t i = 0; i < keys_info.size(); ++i) {
    auto& key_info = keys_info[i];
    // Use data() rather than &key_id[0]: indexing element 0 of an empty
    // vector is undefined behavior, whereas data() is always well-defined
    // (and size() is 0 in that case, so WebData reads nothing).
    keys[i].SetId(
        WebData(reinterpret_cast<const char*>(key_info->key_id.data()),
                key_info->key_id.size()));
    keys[i].SetStatus(ConvertCdmKeyStatus(key_info->status));
    keys[i].SetSystemCode(key_info->system_code);
    base::UmaHistogramSparse(
        adapter_->GetKeySystemUMAPrefix() + kKeyStatusSystemCodeUMAName,
        key_info->system_code);
  }
  // Now send the event to blink.
  client_->OnSessionKeysChange(keys, has_additional_usable_key);
}
// Called by the CDM when the session's expiration time changes. A null
// base::Time is reported to blink as NaN (meaning "no expiration").
void WebContentDecryptionModuleSessionImpl::OnSessionExpirationUpdate(
    base::Time new_expiry_time) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // The check works around an issue in base::Time that converts null base::Time
  // to |1601-01-01 00:00:00 UTC| in ToJsTime(). See http://crbug.com/679079
  client_->OnSessionExpirationUpdate(
      new_expiry_time.is_null() ? std::numeric_limits<double>::quiet_NaN()
                                : new_expiry_time.ToJsTime());
}
// Called by the CDM when the session is closed. Idempotent: latches
// |is_closed_| so blink receives at most one closed event.
void WebContentDecryptionModuleSessionImpl::OnSessionClosed(
    media::CdmSessionClosedReason reason) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // Only send one closed event to blink.
  if (is_closed_)
    return;
  is_closed_ = true;
  client_->OnSessionClosed(reason);
}
// Completion callback for InitializeNewSession()/Load(). Records the
// CDM-assigned |session_id| (at most once) and registers this session with
// the adapter; |status| reports whether the session is new, a duplicate, or
// (for load) not found.
void WebContentDecryptionModuleSessionImpl::OnSessionInitialized(
    const std::string& session_id,
    SessionInitStatus* status) {
  DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
  // CDM will return NULL if the session to be loaded can't be found.
  if (session_id.empty()) {
    *status = SessionInitStatus::SESSION_NOT_FOUND;
    return;
  }
  DCHECK(session_id_.empty()) << "Session ID may not be changed once set.";
  session_id_ = session_id;
  *status =
      adapter_->RegisterSession(session_id_, weak_ptr_factory_.GetWeakPtr())
          ? SessionInitStatus::NEW_SESSION
          : SessionInitStatus::SESSION_ALREADY_EXISTS;
}
} // namespace blink
| 6,875 |
303 | <gh_stars>100-1000
# NOTE(review): this appears to be a parser/formatter test fixture exercising
# generator expressions (assignment; sole call argument without extra parens;
# tuple unpacking in the comprehension). `l` and `f` look intentionally
# undefined -- confirm this is fixture data before altering it.
x = (a for a in l)
f(a for a in l)
f(a + b for a, b in f())
| 42 |
777 | <filename>third_party/WebKit/Source/core/editing/VisiblePosition.cpp
/*
* Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights
* reserved.
* Portions Copyright (c) 2011 Motorola Mobility, Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "core/editing/VisiblePosition.h"
#include "bindings/core/v8/ExceptionState.h"
#include "core/HTMLNames.h"
#include "core/dom/Document.h"
#include "core/dom/Range.h"
#include "core/dom/Text.h"
#include "core/editing/EditingUtilities.h"
#include "core/editing/TextAffinity.h"
#include "core/editing/VisibleUnits.h"
#include "core/html/HTMLElement.h"
#include "platform/geometry/FloatQuad.h"
#include "wtf/text/CString.h"
#include <ostream> // NOLINT
namespace blink {
using namespace HTMLNames;
// Creates a null VisiblePosition. In DCHECK builds the cached DOM-tree and
// style version stamps start at zero.
template <typename Strategy>
VisiblePositionTemplate<Strategy>::VisiblePositionTemplate()
#if DCHECK_IS_ON()
    : m_domTreeVersion(0),
      m_styleVersion(0)
#endif
{
}
// Wraps |positionWithAffinity| without canonicalization (use create() for
// that). In DCHECK builds, snapshots the document's DOM-tree and style
// versions so isValid() can later detect mutation.
template <typename Strategy>
VisiblePositionTemplate<Strategy>::VisiblePositionTemplate(
    const PositionWithAffinityTemplate<Strategy>& positionWithAffinity)
    : m_positionWithAffinity(positionWithAffinity)
#if DCHECK_IS_ON()
      ,
      m_domTreeVersion(positionWithAffinity.document()->domTreeVersion()),
      m_styleVersion(positionWithAffinity.document()->styleVersion())
#endif
{
}
// Canonicalizes |positionWithAffinity| into a VisiblePosition. Requires a
// clean, up-to-date layout tree (enforced by the DCHECK and the disallow-
// transition scope). Upstream affinity is preserved only when the upstream
// and downstream candidates land on different lines, i.e. at a line wrap;
// otherwise the result gets downstream affinity.
template <typename Strategy>
VisiblePositionTemplate<Strategy> VisiblePositionTemplate<Strategy>::create(
    const PositionWithAffinityTemplate<Strategy>& positionWithAffinity) {
  if (positionWithAffinity.isNull())
    return VisiblePositionTemplate<Strategy>();
  DCHECK(positionWithAffinity.isConnected()) << positionWithAffinity;
  Document& document = *positionWithAffinity.document();
  DCHECK(!document.needsLayoutTreeUpdate());
  DocumentLifecycle::DisallowTransitionScope disallowTransition(
      document.lifecycle());
  const PositionTemplate<Strategy> deepPosition =
      canonicalPositionOf(positionWithAffinity.position());
  if (deepPosition.isNull())
    return VisiblePositionTemplate<Strategy>();
  const PositionWithAffinityTemplate<Strategy> downstreamPosition(deepPosition);
  if (positionWithAffinity.affinity() == TextAffinity::Downstream)
    return VisiblePositionTemplate<Strategy>(downstreamPosition);
  // When not at a line wrap, make sure to end up with
  // |TextAffinity::Downstream| affinity.
  const PositionWithAffinityTemplate<Strategy> upstreamPosition(
      deepPosition, TextAffinity::Upstream);
  if (inSameLine(downstreamPosition, upstreamPosition))
    return VisiblePositionTemplate<Strategy>(downstreamPosition);
  return VisiblePositionTemplate<Strategy>(upstreamPosition);
}
// Canonicalized visible position immediately after |node|.
template <typename Strategy>
VisiblePositionTemplate<Strategy> VisiblePositionTemplate<Strategy>::afterNode(
    Node* node) {
  return create(PositionWithAffinityTemplate<Strategy>(
      PositionTemplate<Strategy>::afterNode(node)));
}
// Canonicalized visible position immediately before |node|.
template <typename Strategy>
VisiblePositionTemplate<Strategy> VisiblePositionTemplate<Strategy>::beforeNode(
    Node* node) {
  return create(PositionWithAffinityTemplate<Strategy>(
      PositionTemplate<Strategy>::beforeNode(node)));
}
// Canonicalized visible position at the start of |node|'s contents.
template <typename Strategy>
VisiblePositionTemplate<Strategy>
VisiblePositionTemplate<Strategy>::firstPositionInNode(Node* node) {
  return create(PositionWithAffinityTemplate<Strategy>(
      PositionTemplate<Strategy>::firstPositionInNode(node)));
}
// Canonicalized visible position in |node|'s parent, just after |node|.
template <typename Strategy>
VisiblePositionTemplate<Strategy>
VisiblePositionTemplate<Strategy>::inParentAfterNode(const Node& node) {
  return create(PositionWithAffinityTemplate<Strategy>(
      PositionTemplate<Strategy>::inParentAfterNode(node)));
}
// Canonicalized visible position in |node|'s parent, just before |node|.
template <typename Strategy>
VisiblePositionTemplate<Strategy>
VisiblePositionTemplate<Strategy>::inParentBeforeNode(const Node& node) {
  return create(PositionWithAffinityTemplate<Strategy>(
      PositionTemplate<Strategy>::inParentBeforeNode(node)));
}
// Canonicalized visible position at the end of |node|'s contents.
template <typename Strategy>
VisiblePositionTemplate<Strategy>
VisiblePositionTemplate<Strategy>::lastPositionInNode(Node* node) {
  return create(PositionWithAffinityTemplate<Strategy>(
      PositionTemplate<Strategy>::lastPositionInNode(node)));
}
// Free-function factories: build a VisiblePosition (DOM tree) or
// VisiblePositionInFlatTree (flat tree) from a position plus affinity,
// canonicalizing via the corresponding create() overload.
VisiblePosition createVisiblePosition(const Position& position,
                                      TextAffinity affinity) {
  return VisiblePosition::create(PositionWithAffinity(position, affinity));
}
VisiblePosition createVisiblePosition(
    const PositionWithAffinity& positionWithAffinity) {
  return VisiblePosition::create(positionWithAffinity);
}
VisiblePositionInFlatTree createVisiblePosition(
    const PositionInFlatTree& position,
    TextAffinity affinity) {
  return VisiblePositionInFlatTree::create(
      PositionInFlatTreeWithAffinity(position, affinity));
}
VisiblePositionInFlatTree createVisiblePosition(
    const PositionInFlatTreeWithAffinity& positionWithAffinity) {
  return VisiblePositionInFlatTree::create(positionWithAffinity);
}
#ifndef NDEBUG
// Debug-only: dumps the tree for this position's deep equivalent.
template <typename Strategy>
void VisiblePositionTemplate<Strategy>::showTreeForThis() const {
  deepEquivalent().showTreeForThis();
}
#endif
// DCHECK-build staleness check: returns false if the document's DOM tree or
// style has changed since this VisiblePosition was created, or if a layout
// tree update is pending. Null positions are always valid; in non-DCHECK
// builds this always returns true.
template <typename Strategy>
bool VisiblePositionTemplate<Strategy>::isValid() const {
#if DCHECK_IS_ON()
  if (isNull())
    return true;
  Document& document = *m_positionWithAffinity.document();
  return m_domTreeVersion == document.domTreeVersion() &&
         m_styleVersion == document.styleVersion() &&
         !document.needsLayoutTreeUpdate();
#else
  return true;
#endif
}
template class CORE_TEMPLATE_EXPORT VisiblePositionTemplate<EditingStrategy>;
template class CORE_TEMPLATE_EXPORT
VisiblePositionTemplate<EditingInFlatTreeStrategy>;
// Debug stream output in the form "<deep-equivalent>/<affinity>".
std::ostream& operator<<(std::ostream& ostream,
                         const VisiblePosition& position) {
  return ostream << position.deepEquivalent() << '/' << position.affinity();
}
std::ostream& operator<<(std::ostream& ostream,
                         const VisiblePositionInFlatTree& position) {
  return ostream << position.deepEquivalent() << '/' << position.affinity();
}
} // namespace blink
#ifndef NDEBUG
// Debug helper: dumps the tree for |vpos|; logs a diagnostic when the
// pointer is null.
void showTree(const blink::VisiblePosition* vpos) {
  if (!vpos) {
    DVLOG(0) << "Cannot showTree for (nil) VisiblePosition.";
    return;
  }
  vpos->showTreeForThis();
}
// Debug helper: reference overload, forwards to showTreeForThis().
void showTree(const blink::VisiblePosition& vpos) {
  vpos.showTreeForThis();
}
#endif
| 2,409 |
2,571 | <reponame>Exusial/jittor<gh_stars>1000+
# ***************************************************************
# Copyright (c) 2021 Jittor. All Rights Reserved.
# Maintainers: <NAME> <<EMAIL>>.
# This file is subject to the terms and conditions defined in
# file 'LICENSE.txt', which is part of this source code package.
# ***************************************************************
import unittest
import jittor as jt
import os
@unittest.skipIf(not jt.compile_extern.use_mkl, "Not use mkl, Skip")
class TestMklTestOp(unittest.TestCase):
    # Smoke test for the MKL extern build: the mkl_test op is expected to
    # produce the sentinel value 123. Skipped when jittor was built without MKL.
    def test(self):
        assert jt.mkl_ops.mkl_test().data==123
if __name__ == "__main__":
    unittest.main()
| 215 |
2,496 | /*
* Copyright (C) 2017 Ignite Realtime Foundation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.jivesoftware.openfire.pubsub;
import org.jivesoftware.openfire.XMPPServer;
/**
 * A PubSubService manager that is specific to the implementation of
 * XEP-0060: Publish-Subscribe.
 *
 * @author <NAME>, <EMAIL>
 */
public class PubSubInfo extends PubSubServiceInfo
{
    /** Wraps the server-wide PubSub module in the PubSubServiceInfo API. */
    public PubSubInfo()
    {
        super( XMPPServer.getInstance().getPubSubModule() );
    }
}
| 302 |
5,169 | <reponame>Gantios/Specs<gh_stars>1000+
{
"name": "AAInfographics",
"version": "5.0.0",
"summary": "📈📊📱📺💻An elegant and friendly data visualization chart framework for iOS, iPadOS and macOS. Extremely powerful ,supports line, spline, area, areaspline, column, bar, pie, scatter, angular gauges, arearange, areasplinerange, columnrange, bubble, box plot, error bars, funnel, waterfall and polar chart types.极其精美而又强大的跨平台数据可视化图表框架,支持柱状图、条形图、折线图、曲线图、折线填充图、曲线填充图、气泡图、扇形图、环形图、散点图、雷达图、混合图等各种类型的多达几十种的信息图图表,完全满足工作所需.",
"description": "## Features\n\n* 🎂 **Environment friendly**. Support `iOS `、`iPadOS` and `macOS`. Totally support `Swift` language, and there are more types version such as `Objective-C` language version [AAChartKit](https://github.com/AAChartModel/AAChartKit) 、 `Java` language version [AAChartCore](https://github.com/AAChartModel/AAChartCore) 、`Kotlin` language version [AAInfographics](https://github.com/AAChartModel/AAChartCore-Kotlin) . To get more details you can see the [source code links list](https://github.com/AAChartModel/AAChartKit-Swift#source-code).\n\n* 🚀 **Powerful and easy to use**. It supports `column chart`, `bar chart`, `area chart`, `areaspline chart`, `line chart`, `spline chart`, `radar chart`, `polar chart`, `pie chart`, `bubble chart`, `pyramid chart`, `funnel chart`, `columnrange chart`, `arearange chart`, `mixed chart` and other graphics. Support for more chart types is planned.\n\n* 📝 **Modern Declarative Syntax**. Unlike previous imperative programming techniques, drawing any custom chart in AAChartKit, you don't need to care about the inner implementation details which is annoying && boring. *Describe what you want, you will get what you described*.\n\n* 🎮 **Interactive and animated**. The charts `animation` effect is exquisite, delicate, smooth and beautiful.\n\n* ⛓ **Chain programming**. Supports `chain programming syntax` like *Masonry* .\n\n* 🦋 **Minimalist**. `AAChartView + AAChartModel = Chart`. The AAChartKit follows a minimalist formula: Chart view + Chart model = The chart you want, just like the powerful and beautiful charts framework [AAChartKit](https://github.com/AAChartModel/AAChartKit).\n\n* 🖱 **Interaction event callback**. 
Support for monitoring [user click events and single finger move over events](https://github.com/AAChartModel/AAChartKit-Swift#support-user-click-events-and-move-over-events), which can be used to achieve double charts linked-working and even multiple charts linkage, as well as other more complex custom user interaction effects.",
"homepage": "https://github.com/AAChartModel/AAChartKit-Swift",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "https://github.com/AAChartModel/AAChartKit-Swift.git",
"tag": "5.0.0"
},
"source_files": [
"AAInfographics",
"AAInfographics/**/*.{swift}"
],
"resources": "AAInfographics/AAJSFiles.bundle",
"requires_arc": true,
"swift_versions": "5.0",
"swift_version": "5.0"
}
| 1,138 |
24,206 | <reponame>chenxian01/easyexcel
package com.alibaba.excel.analysis.v07.handlers;
import com.alibaba.excel.context.xlsx.XlsxReadContext;
/**
 * Cell Value Handler.
 *
 * <p>Base class for tag handlers that collect a cell's text content: each
 * chunk of SAX character data is appended to the current sheet holder's
 * temporary data buffer (SAX may deliver a single element's text in several
 * {@code characters} callbacks, hence append rather than assign).
 *
 * @author jipengfei
 */
public abstract class AbstractCellValueTagHandler extends AbstractXlsxTagHandler {
    @Override
    public void characters(XlsxReadContext xlsxReadContext, char[] ch, int start, int length) {
        xlsxReadContext.xlsxReadSheetHolder().getTempData().append(ch, start, length);
    }
}
| 172 |
25,151 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.grid.config;
import org.openqa.selenium.internal.Require;
import java.util.Objects;
public class Role implements Comparable<Role> {
private final String roleName;
public Role(String roleName) {
this.roleName = Require.nonNull("Role name", roleName);
}
public static Role of(String name) {
return new Role(Require.nonNull("Role name", name));
}
public String getRoleName() {
return roleName;
}
@Override
public String toString() {
return roleName;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof Role)) {
return false;
}
Role that = (Role) o;
return Objects.equals(this.roleName, that.roleName);
}
@Override
public int hashCode() {
return Objects.hash(roleName);
}
@Override
public int compareTo(Role o) {
return getRoleName().compareTo(o.getRoleName());
}
}
| 506 |
521 | /*
Copyright (C) 2007-2012 <NAME>. All Rights Reserved.
Portions Copyright (C) 2012 SN Systems Ltd. All rights reserved.
This program is free software; you can redistribute it and/or modify it
under the terms of version 2.1 of the GNU Lesser General Public License
published by the Free Software Foundation.
This program is distributed in the hope that it would be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
Further, this software is distributed without any warranty that it is
free of the rightful claim of any third person regarding infringement
or the like. Any license provided herein, whether implied or
otherwise, applies only to this software file. Patent licenses, if
any, provided herein do not apply to combinations of this program with
other software, or any other product whatsoever.
You should have received a copy of the GNU Lesser General Public License along
with this program; if not, write the Free Software Foundation, Inc., 51
Franklin Street - Fifth Floor, Boston MA 02110-1301, USA.
*/
/* The address of the Free Software Foundation is
Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#ifndef DWARF_RELOC_386_H
#define DWARF_RELOC_386_H
/* Include the definitions only in the case of Windows */
#ifdef _WIN32
/* Relocation types for i386 architecture */
#define R_386_NONE 0
#define R_386_32 1
#define R_386_PC32 2
#define R_386_GOT32 3
#define R_386_PLT32 4
#define R_386_COPY 5
#define R_386_GLOB_DAT 6
#define R_386_JMP_SLOT 7
#define R_386_RELATIVE 8
#define R_386_GOTOFF 9
#define R_386_GOTPC 10
#define R_386_32PLT 11
#define R_386_TLS_TPOFF 14
#define R_386_TLS_IE 15
#define R_386_TLS_GOTIE 16
#define R_386_TLS_LE 17
#define R_386_TLS_LDM 19
#define R_386_16 20
#define R_386_PC16 21
#define R_386_8 22
#define R_386_PC8 23
#define R_386_TLS_GD_32 24
#define R_386_TLS_GD_PUSH 25
#define R_386_TLS_GD_CALL 26
#define R_386_TLS_GD_POP 27
#define R_386_TLS_LDM_32 28
#define R_386_TLS_LDM_PUSH 29
#define R_386_TLS_LDM_CALL 30
#define R_386_TLS_LDM_POP 31
#define R_386_TLS_LDO_32 32
#define R_386_TLS_IE_32 33
#define R_386_TLS_LE_32 34
#define R_386_TLS_DTPMOD32 35
#define R_386_TLS_DTPOFF32 36
#define R_386_TLS_TPOFF32 37
#define R_386_SIZE32 38
#define R_386_TLS_GOTDESC 39
#define R_386_TLS_DESC_CALL 40
#define R_386_TLS_DESC 41
#define R_386_IRELATIVE 42
#define R_386_NUM 43
/* Keep this the last entry.
   NOTE(review): an x86-64 constant defined in the i386 section looks
   misplaced -- confirm against the upstream libdwarf header. */
#define R_X86_64_NUM 39
#endif /* _WIN32 */
/* Relocation type names for i386.
   NOTE(review): entries do not line up with the numeric relocation codes
   above -- values 12-13 and 18 have no rows, so from "R_386_TLS_TPOFF"
   onward array index != relocation number (e.g. "R_386_16" is value 20 but
   sits at index 17). Do not index this table directly by relocation type
   without confirming how callers use it. */
static const char *reloc_type_names_386[] = {
    "R_386_NONE",
    "R_386_32",
    "R_386_PC32",
    "R_386_GOT32",
    "R_386_PLT32",
    "R_386_COPY", /* 5 */
    "R_386_GLOB_DAT",
    "R_386_JMP_SLOT",
    "R_386_RELATIVE",
    "R_386_GOTOFF",
    "R_386_GOTPC", /* 10 */
    "R_386_32PLT",
    "R_386_TLS_TPOFF",
    "R_386_TLS_IE",
    "R_386_TLS_GOTIE",
    "R_386_TLS_LE",
    "R_386_TLS_LDM",
    "R_386_16", /* 20 */
    "R_386_PC16",
    "R_386_8",
    "R_386_PC8",
    "R_386_TLS_GD_32",
    "R_386_TLS_GD_PUSH", /* 25 */
    "R_386_TLS_GD_CALL",
    "R_386_TLS_GD_POP",
    "R_386_TLS_LDM_32",
    "R_386_TLS_LDM_PUSH",
    "R_386_TLS_LDM_CALL", /* 30 */
    "R_386_TLS_LDM_POP",
    "R_386_TLS_LDO_32",
    "R_386_TLS_IE_32",
    "R_386_TLS_LE_32",
    "R_386_TLS_DTPMOD32", /* 35 */
    "R_386_TLS_DTPOFF32",
    "R_386_TLS_TPOFF32",
    "R_386_SIZE32",
    "R_386_TLS_GOTDESC",
    "R_386_TLS_DESC_CALL", /* 40 */
    "R_386_TLS_DESC",
    "R_386_IRELATIVE", /* 42 */
};
#endif /* DWARF_RELOC_386_H */
| 1,672 |
1,968 | //////////////////////////////////////////////////////////////////////////////
//
// This file is part of the Corona game engine.
// For overview and more information on licensing please refer to README.md
// Home page: https://github.com/coronalabs/corona
// Contact: <EMAIL>
//
//////////////////////////////////////////////////////////////////////////////
package com.ansca.corona.notifications;
/**
* Indicates the type of notification such as "local" or "remote".
* Provides string IDs matching types used by the native side of Corona.
* <p>
* Instances of this class are immutable.
* <p>
* You cannot create instances of this class.
* Instead, you use the pre-allocated objects from this class' static methods and fields.
*/
public class NotificationType {
	/** The unique string ID for this notification type. This string is never localized. */
	private String fInvariantName;

	/**
	 * Creates a new notification type object with the given string ID.
	 * @param invariantName Unique string ID matching a notification type from the native side of Corona.
	 */
	private NotificationType(String invariantName) {
		fInvariantName = invariantName;
	}

	/** Indicates a local/scheduled notification. */
	public static final NotificationType LOCAL = new NotificationType("local");

	/** Indicates a remote/push notification. */
	public static final NotificationType REMOTE = new NotificationType("remote");

	/** Indicates a setup notification for registering a remote notification. */
	public static final NotificationType REMOTE_REGISTRATION = new NotificationType("remoteRegistration");

	/**
	 * Gets the unique string ID matching a notification type from the native side of Corona.
	 * @return Returns this notification type's unqiue string ID.
	 */
	public String toInvariantString() {
		return fInvariantName;
	}

	/**
	 * Gets a notification type matching the given invariant string ID.
	 * The comparison is case-sensitive and scans this class's pre-allocated
	 * constants via reflection, so only LOCAL, REMOTE and REMOTE_REGISTRATION
	 * can be returned.
	 * @param value Unique string ID matching a notification type from the native side of Corona.
	 * @return Returns a notification type object matching the given invariant string.
	 *         <p>
	 *         Returns null if the given string ID is unknown.
	 */
	public static NotificationType fromInvariantString(String value) {
		// Return a pre-allocated object matching the given string ID via reflection.
		try {
			for (java.lang.reflect.Field field : NotificationType.class.getDeclaredFields()) {
				if (field.getType().equals(NotificationType.class)) {
					NotificationType notificationType = (NotificationType)field.get(null);
					if (notificationType.fInvariantName.equals(value)) {
						return notificationType;
					}
				}
			}
		}
		// Reflection failures (unexpected) fall through to the unknown case.
		catch (Exception ex) { }

		// The given string ID is unknown.
		return null;
	}
}
| 742 |
1,392 | from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, List, NamedTuple, Optional
from .const import (
FILL_MISSING_ADDRESS,
MERCHANT_CODE,
NP_TEST_URL,
NP_URL,
SHIPPING_COMPANY,
SKU_AS_NAME,
SP_CODE,
TERMINAL_ID,
USE_SANDBOX,
)
# Result payload plus any error codes returned by an NP API call.
NPResponse = NamedTuple(
    "NPResponse",
    [("result", dict), ("error_codes", List[str])],
)
def error_np_response(error_message: str) -> NPResponse:
    """Build an NPResponse with no payload and a single error message."""
    return NPResponse(result={}, error_codes=[error_message])
@dataclass
class ApiConfig:
    """Connection parameters and behavior flags for the NP payment API."""

    url: str
    # NOTE(review): flag/field semantics below are inferred from their names
    # and the plugin constants they are populated from -- confirm against the
    # gateway plugin documentation before relying on them.
    fill_missing_address: bool
    merchant_code: str
    terminal_id: str
    sp_code: str
    shipping_company: str
    sku_as_name: bool
class PaymentStatus(str, Enum):
    """String-valued transaction status codes used by the NP API."""

    SUCCESS = "00"
    PENDING = "10"
    FAILED = "20"
    FOR_REREGISTRATION = "RE"
@dataclass
class PaymentResult:
    """Outcome of a payment operation: status plus optional reference,
    raw API response, and accumulated error messages."""

    status: PaymentStatus
    psp_reference: Optional[str] = None
    raw_response: Dict[str, str] = field(default_factory=dict)
    errors: List[str] = field(default_factory=list)
def error_payment_result(error_message: str) -> PaymentResult:
    """Return a FAILED PaymentResult carrying a single error message."""
    return errors_payment_result([error_message])
def errors_payment_result(errors: List[str]) -> PaymentResult:
    """Return a FAILED PaymentResult carrying the given error messages."""
    failed = PaymentResult(status=PaymentStatus.FAILED, errors=errors)
    return failed
def get_api_config(connection_params: dict) -> ApiConfig:
    """Translate raw plugin connection params into an ApiConfig.

    The sandbox flag selects between the test and production endpoints;
    all remaining fields are copied through from the plugin's settings.
    """
    use_sandbox = connection_params[USE_SANDBOX]
    return ApiConfig(
        url=(NP_TEST_URL if use_sandbox else NP_URL),
        fill_missing_address=connection_params[FILL_MISSING_ADDRESS],
        merchant_code=connection_params[MERCHANT_CODE],
        sp_code=connection_params[SP_CODE],
        terminal_id=connection_params[TERMINAL_ID],
        shipping_company=connection_params[SHIPPING_COMPANY],
        sku_as_name=connection_params[SKU_AS_NAME],
    )
| 722 |
432 | <filename>test/debug/crc32.c<gh_stars>100-1000
/*
* cc crc32.c /usr/src/sys/libkern/{crc32.c,icrc32.c} -o ~/bin/crc32
*/
#include <sys/types.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
uint32_t iscsi_crc32(const void *buf, size_t size);
uint32_t iscsi_crc32_ext(const void *buf, size_t size, uint32_t ocrc);
uint32_t crc32(const void *buf, size_t size);
uint32_t crc32_ext(const void *buf, size_t size, uint32_t ocrc);
#undef ISCSI
/*
 * Reads stdin in 16 KiB chunks and prints the running CRC32 of the whole
 * stream. Compile-time switch: define ISCSI to use the kernel's iSCSI
 * CRC32 routine instead of the classic crc32()/crc32_ext() pair.
 */
int
main(int ac, char **av)
{
	char buf[16384];
	int n;
#ifdef ISCSI
	u_int32_t crc1 = iscsi_crc32(NULL, 0);
#else
	u_int32_t crc2 = crc32(NULL, 0);
#endif
	while ((n = read(0, buf, sizeof(buf))) > 0) {
#ifdef ISCSI
		crc1 = iscsi_crc32_ext(buf, n, crc1);
#else
		crc2 = crc32_ext(buf, n, crc2);
#endif
	}
#ifdef ISCSI
	printf("iscsi_crc32 %08x\n", crc1);
#else
	printf("crc32 %08x\n", crc2);
#endif
	return(0);
}
| 459 |
627 | /* Copyright 2019 The Chromium OS Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*
* TI INA3221 Current/Power monitor driver.
*/
#ifndef __CROS_EC_INA3221_H
#define __CROS_EC_INA3221_H
#define INA3221_REG_CONFIG 0x00
#define INA3221_REG_MASK 0x0F
/*
* Common bits are:
* Reset
* average = 1
* conversion time = 1.1 ms
* mode = shunt and bus, continuous.
*/
#define INA3221_CONFIG_BASE 0x8127
/* Bus voltage: lower 3 bits clear, LSB = 8 mV */
#define INA3221_BUS_MV(reg) (reg)
/* Shunt voltage: lower 3 bits clear, LSB = 40 uV */
#define INA3221_SHUNT_UV(reg) ((reg) * (40/8))
/* The INA3221 monitors three independent shunt/bus channel pairs. */
enum ina3221_channel {
	INA3221_CHAN_1 = 0,
	INA3221_CHAN_2 = 1,
	INA3221_CHAN_3 = 2,
	INA3221_CHAN_COUNT = 3
};
/* Registers for each channel */
enum ina3221_register {
	INA3221_SHUNT_VOLT = 0,
	INA3221_BUS_VOLT = 1,
	INA3221_CRITICAL = 2,
	INA3221_WARNING = 3,
	INA3221_MAX_REG = 4
};
/* Configuration table - defined in board file. */
struct ina3221_t {
	int port; /* I2C port index */
	uint8_t address; /* I2C address */
	const char *name[INA3221_CHAN_COUNT]; /* Channel names */
};
/* External config in board file */
extern const struct ina3221_t ina3221[];
extern const unsigned int ina3221_count;
| 549 |
303 | <gh_stars>100-1000
{"id":6362,"line-1":"<NAME>","line-2":"Mauritania","attribution":"©2015 CNES / Astrium, Cnes/Spot Image, DigitalGlobe, Landsat","url":"https://www.google.com/maps/@21.489721,-11.562430,14z/data=!3m1!1e3"} | 95 |
5,865 | <filename>api/api-shared-v9/src/main/java/com/thoughtworks/go/apiv9/admin/shared/representers/materials/SvnMaterialRepresenter.java
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.apiv9.admin.shared.representers.materials;
import com.thoughtworks.go.api.base.OutputWriter;
import com.thoughtworks.go.api.representers.JsonReader;
import com.thoughtworks.go.apiv9.admin.shared.representers.stages.ConfigHelperOptions;
import com.thoughtworks.go.config.materials.svn.SvnMaterialConfig;
/**
 * JSON representer for SVN material configuration.  Adds the SVN-specific
 * {@code check_externals} flag on top of the common SCM material fields
 * handled by {@code ScmMaterialRepresenter}.
 */
public class SvnMaterialRepresenter extends ScmMaterialRepresenter<SvnMaterialConfig> {

    @Override
    public void toJSON(OutputWriter jsonWriter, SvnMaterialConfig svnMaterialConfig) {
        // Common SCM fields first, then the SVN-only flag.
        super.toJSON(jsonWriter, svnMaterialConfig);
        jsonWriter.add("check_externals", svnMaterialConfig.isCheckExternals());
    }

    @Override
    public SvnMaterialConfig fromJSON(JsonReader jsonReader, ConfigHelperOptions options) {
        SvnMaterialConfig svnMaterialConfig = new SvnMaterialConfig();
        super.fromJSON(jsonReader, svnMaterialConfig, options);
        // Both fields are optional in the incoming JSON.
        jsonReader.readStringIfPresent("url", svnMaterialConfig::setUrl);
        jsonReader.optBoolean("check_externals").ifPresent(svnMaterialConfig::setCheckExternals);
        return svnMaterialConfig;
    }
}
| 584 |
2,206 | /* ******************************************************************************
*
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// Created by raver119 on 09.01.17.
//
#ifndef LIBND4J_LOGGER_H
#define LIBND4J_LOGGER_H
#include <stdio.h>
#include <stdlib.h>
#include <system/Environment.h>
#include <system/op_boilerplate.h>
#include <cstdarg>
#include <vector>
/*
 * Host-side build: the logging macros forward to sd::Logger and are gated
 * on the global Environment debug/verbose flags.
 * Device (__CUDA_ARCH__) build: most macros compile to nothing, since
 * vararg printf-style logging is not available there.
 */
#ifndef __CUDA_ARCH__
/* Emitted only when BOTH debug and verbose are enabled. */
#define sd_debug(FORMAT, ...) \
  if (sd::Environment::getInstance().isDebug() && sd::Environment::getInstance().isVerbose()) \
    sd::Logger::info(FORMAT, __VA_ARGS__);
#define sd_logger(FORMAT, ...) \
  if (sd::Environment::getInstance().isDebug() && sd::Environment::getInstance().isVerbose()) \
    sd::Logger::info(FORMAT, __VA_ARGS__);
/* Emitted when verbose alone is enabled. */
#define sd_verbose(FORMAT, ...) \
  if (sd::Environment::getInstance().isVerbose()) sd::Logger::info(FORMAT, __VA_ARGS__);
/* Unconditional output. */
#define sd_printf(FORMAT, ...) sd::Logger::info(FORMAT, __VA_ARGS__);
#define sd_printv(FORMAT, VECTOR) sd::Logger::printv(FORMAT, VECTOR);
#else
#define sd_debug(FORMAT, A, ...)
#define sd_logger(FORMAT, A, ...)
#define sd_verbose(FORMAT, ...)
#define sd_printf(FORMAT, ...) sd::Logger::info(FORMAT, __VA_ARGS__);
#define sd_printv(FORMAT, VECTOR)
#endif
namespace sd {

/* Static printf-style logging facade used by the macros above. */
class SD_LIB_EXPORT Logger {
 public:
  static SD_HOST void info(const char *format, ...);
  static SD_HOST void printv(const char *format, const std::vector<int> &vec);
  static SD_HOST void printv(const char *format, const std::vector<sd::LongType> &vec);
  /* Logs |msg| and returns |code| so callers can log-and-return in one step. */
  static SD_HOST_DEVICE Status logStatusMsg(Status code, const char *msg);
  static SD_HOST_DEVICE Status logKernelFailureMsg(const char *msg = nullptr);
};
}  // namespace sd
#endif  // LIBND4J_LOGGER_H
| 908 |
360 | package indi.mybatis.flying.pojo;
import java.util.Date;
import org.apache.ibatis.type.JdbcType;
import com.alibaba.fastjson.annotation.JSONField;
import indi.mybatis.flying.annotations.FieldMapperAnnotation;
import indi.mybatis.flying.annotations.TableMapperAnnotation;
import indi.mybatis.flying.handler.ByteArrayHandler;
import indi.mybatis.flying.handlers.AdvancedStringTypeHandler;
import indi.mybatis.flying.model.MyCryptKeyAddition;
/**
 * Individual performance-appraisal result (maps to table t_emp_score).
 */
@TableMapperAnnotation(tableName = "t_emp_score")
public class EmpScore {
    /**
     * Appraisal result id (primary key).
     */
    @FieldMapperAnnotation(dbFieldName = "id", jdbcType = JdbcType.BIGINT, isUniqueKey = true)
    private Long id;
    @FieldMapperAnnotation(dbFieldName = "ou", jdbcType = JdbcType.VARCHAR)
    private String ou;
    /**
     * Department name.
     */
    @FieldMapperAnnotation(dbFieldName = "dept_name", jdbcType = JdbcType.VARCHAR)
    private String deptName;
    /**
     * Employee code.
     */
    @FieldMapperAnnotation(dbFieldName = "staff_id", jdbcType = JdbcType.VARCHAR, customTypeHandler = AdvancedStringTypeHandler.class)
    private String staffId;
    /**
     * Employee name.
     */
    @FieldMapperAnnotation(dbFieldName = "staff_name", jdbcType = JdbcType.VARCHAR)
    private String staffName;
    /**
     * Appraisal year.
     */
    @FieldMapperAnnotation(dbFieldName = "year", jdbcType = JdbcType.VARCHAR)
    private String year;
    /**
     * Appraisal quarter (0: annual appraisal, 1~4: quarterly appraisal).
     */
    @FieldMapperAnnotation(dbFieldName = "season", jdbcType = JdbcType.VARCHAR)
    private String season;
    /**
     * Appraisal score.
     */
    @FieldMapperAnnotation(dbFieldName = "score", jdbcType = JdbcType.DOUBLE)
    private Double score;
    /**
     * Appraisal score coefficient.
     */
    @FieldMapperAnnotation(dbFieldName = "score_coefficient", jdbcType = JdbcType.DOUBLE)
    private Double scoreCoefficient;
    /**
     * Remark.
     */
    @FieldMapperAnnotation(dbFieldName = "remark", jdbcType = JdbcType.VARCHAR)
    private String remark;
    /**
     * Result type (0: comprehensive performance, 1: project performance).
     */
    @FieldMapperAnnotation(dbFieldName = "score_type", jdbcType = JdbcType.VARCHAR)
    private String scoreType;
    /**
     * Employee category (0: regular employee, 1: project manager, 2: head of a
     * branch standing body, 3: team leader, 4: business architect, 5: sub-center
     * manager in annual appraisal, 6: deputy chief architect, 7: BP,
     * 8: inspection team).
     */
    @FieldMapperAnnotation(dbFieldName = "emp_type", jdbcType = JdbcType.VARCHAR)
    private String empType;
    /**
     * Employee post name.
     */
    @FieldMapperAnnotation(dbFieldName = "post_name", jdbcType = JdbcType.VARCHAR)
    private String postName;
    /**
     * Indicator state (0: not entered, 1: pending review, 2: review rejected,
     * 3: pending evaluation, 4: pending confirmation, 5: reconsideration,
     * 6: evaluation done / pending in-project rating, 7: pending in-department
     * rating, 10: appraisal complete).
     */
    @FieldMapperAnnotation(dbFieldName = "state", jdbcType = JdbcType.VARCHAR)
    private String state;
    /**
     * Appraisal grade (A: 1.25, B: 1.1, C: 1.00, D: 0.8).
     */
    @FieldMapperAnnotation(dbFieldName = "rank", jdbcType = JdbcType.VARCHAR)
    private String rank;
    /**
     * Flag (0: comprehensive/project appraisal result, 1: in-department result).
     */
    @FieldMapperAnnotation(dbFieldName = "tag", jdbcType = JdbcType.VARCHAR)
    private String tag;
    @FieldMapperAnnotation(dbFieldName = "proj_id", jdbcType = JdbcType.VARCHAR)
    private String projId;
    @FieldMapperAnnotation(dbFieldName = "proj_name", jdbcType = JdbcType.VARCHAR)
    private String projName;
    /**
     * Employee quarterly working hours.
     */
    @FieldMapperAnnotation(dbFieldName = "hours", jdbcType = JdbcType.DOUBLE)
    private Double hours;
    /**
     * Id of the person adjusting the normal-distribution result.
     */
    @FieldMapperAnnotation(dbFieldName = "checker_id", jdbcType = JdbcType.VARCHAR)
    private String checkerId;
    /**
     * Name of the person adjusting the normal-distribution result.
     */
    @FieldMapperAnnotation(dbFieldName = "checker_name", jdbcType = JdbcType.VARCHAR)
    private String checkerName;
    @JSONField(format = "yyyy-MM-dd HH:mm:ss")
    @FieldMapperAnnotation(dbFieldName = "create_time", jdbcType = JdbcType.TIMESTAMP)
    private Date createTime;
    @FieldMapperAnnotation(dbFieldName = "cont_degree", jdbcType = JdbcType.DOUBLE)
    private Double contDegree;
    @FieldMapperAnnotation(dbFieldName = "adjust_reason", jdbcType = JdbcType.VARCHAR)
    private String adjustReason;
    /**
     * Reason the review was rejected.
     */
    @FieldMapperAnnotation(dbFieldName = "unpass_reason", jdbcType = JdbcType.VARCHAR)
    private String unpassReason;
    @JSONField(format = "yyyy-MM-dd HH:mm:ss")
    @FieldMapperAnnotation(dbFieldName = "update_time", jdbcType = JdbcType.TIMESTAMP)
    private Date updateTime;
    // Encrypted column; the crypt key is derived from the listed columns plus
    // the MyCryptKeyAddition hook.
    @FieldMapperAnnotation(dbFieldName = "secret2", jdbcType = JdbcType.VARCHAR, customTypeHandler = ByteArrayHandler.class, cryptKeyColumn = {
            "staff_id", "year", "staff_name", "staff_id" }, cryptKeyAdditional = MyCryptKeyAddition.class)
    private String secret2;
    // One-to-many association; excluded from JSON to avoid cycles.
    @JSONField(serialize = false)
    private java.util.Collection<Account3> account3;

    public Date getUpdateTime() {
        return updateTime;
    }

    /**
     * @param newUpdateTime
     */
    public void setUpdateTime(Date newUpdateTime) {
        updateTime = newUpdateTime;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getOu() {
        return ou;
    }

    public void setOu(String ou) {
        this.ou = ou;
    }

    public String getDeptName() {
        return deptName;
    }

    public void setDeptName(String deptName) {
        this.deptName = deptName;
    }

    public String getStaffId() {
        return staffId;
    }

    public void setStaffId(String staffId) {
        this.staffId = staffId;
    }

    public String getStaffName() {
        return staffName;
    }

    public void setStaffName(String staffName) {
        this.staffName = staffName;
    }

    public String getYear() {
        return year;
    }

    public void setYear(String year) {
        this.year = year;
    }

    public String getSeason() {
        return season;
    }

    public void setSeason(String season) {
        this.season = season;
    }

    public Double getScore() {
        return score;
    }

    public void setScore(Double score) {
        this.score = score;
    }

    public Double getScoreCoefficient() {
        return scoreCoefficient;
    }

    public void setScoreCoefficient(Double scoreCoefficient) {
        this.scoreCoefficient = scoreCoefficient;
    }

    public String getRemark() {
        return remark;
    }

    public void setRemark(String remark) {
        this.remark = remark;
    }

    public String getScoreType() {
        return scoreType;
    }

    public void setScoreType(String scoreType) {
        this.scoreType = scoreType;
    }

    public String getEmpType() {
        return empType;
    }

    public void setEmpType(String empType) {
        this.empType = empType;
    }

    public String getPostName() {
        return postName;
    }

    public void setPostName(String postName) {
        this.postName = postName;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public String getRank() {
        return rank;
    }

    public void setRank(String rank) {
        this.rank = rank;
    }

    public String getTag() {
        return tag;
    }

    public void setTag(String tag) {
        this.tag = tag;
    }

    public String getProjId() {
        return projId;
    }

    public void setProjId(String projId) {
        this.projId = projId;
    }

    public String getProjName() {
        return projName;
    }

    public void setProjName(String projName) {
        this.projName = projName;
    }

    public Double getHours() {
        return hours;
    }

    public void setHours(Double hours) {
        this.hours = hours;
    }

    public String getCheckerId() {
        return checkerId;
    }

    public void setCheckerId(String checkerId) {
        this.checkerId = checkerId;
    }

    public String getCheckerName() {
        return checkerName;
    }

    public void setCheckerName(String checkerName) {
        this.checkerName = checkerName;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Double getContDegree() {
        return contDegree;
    }

    public void setContDegree(Double contDegree) {
        this.contDegree = contDegree;
    }

    public String getAdjustReason() {
        return adjustReason;
    }

    public void setAdjustReason(String adjustReason) {
        this.adjustReason = adjustReason;
    }

    public String getUnpassReason() {
        return unpassReason;
    }

    public void setUnpassReason(String unpassReason) {
        this.unpassReason = unpassReason;
    }

    public String getSecret2() {
        return secret2;
    }

    public void setSecret2(String secret2) {
        this.secret2 = secret2;
    }

    // Lazily creates the backing collection on first access.
    public java.util.Collection<Account3> getAccount3() {
        if (account3 == null) {
            account3 = new java.util.LinkedHashSet<Account3>();
        }
        return account3;
    }

    public java.util.Iterator<Account3> getIteratorAccount3() {
        if (account3 == null) {
            account3 = new java.util.LinkedHashSet<Account3>();
        }
        return account3.iterator();
    }

    // Replaces the whole collection, re-wiring the back-reference of each element.
    public void setAccount3(java.util.Collection<Account3> newAccount) {
        removeAllAccount3();
        for (java.util.Iterator<Account3> iter = newAccount.iterator(); iter.hasNext();) {
            addAccount3((Account3) iter.next());
        }
    }

    // Adds one element and maintains the bidirectional association
    // (the element's EmpScore back-reference is set to this instance).
    public void addAccount3(Account3 newAccount) {
        if (newAccount == null) {
            return;
        }
        if (this.account3 == null) {
            this.account3 = new java.util.LinkedHashSet<Account3>();
        }
        if (!this.account3.contains(newAccount)) {
            this.account3.add(newAccount);
            newAccount.setEmpScore(this);
        } else {
            // An equal element is already present: swap it for the new
            // instance unless it is the very same object.
            for (Account3 temp : this.account3) {
                if (newAccount.equals(temp)) {
                    if (temp != newAccount) {
                        removeAccount3(temp);
                        this.account3.add(newAccount);
                        newAccount.setEmpScore(this);
                    }
                    break;
                }
            }
        }
    }

    // Removes one element and clears its back-reference (also on any equal
    // but distinct instance that is actually stored).
    public void removeAccount3(Account3 oldAccount) {
        if (oldAccount == null) {
            return;
        }
        if (this.account3 != null) {
            if (this.account3.contains(oldAccount)) {
                for (Account3 temp : this.account3) {
                    if (oldAccount.equals(temp)) {
                        if (temp != oldAccount) {
                            temp.setEmpScore((EmpScore) null);
                        }
                        break;
                    }
                }
                this.account3.remove(oldAccount);
                oldAccount.setEmpScore((EmpScore) null);
            }
        }
    }

    // Detaches every element, clearing each back-reference.
    public void removeAllAccount3() {
        if (account3 != null) {
            Account3 oldAccount;
            for (java.util.Iterator<Account3> iter = getIteratorAccount3(); iter.hasNext();) {
                oldAccount = (Account3) iter.next();
                iter.remove();
                oldAccount.setEmpScore((EmpScore) null);
            }
            account3.clear();
        }
    }
}
1,756 | package com.libmailcore;
/** Operation to render an IMAP message. */
public class IMAPMessageRenderingOperation extends IMAPOperation {
    /** Result of the rendering operation, produced by the native implementation. */
    public native String result();
}
| 46 |
1,144 | // SPDX-License-Identifier: GPL-2.0+
/*
* Copyright (c) 2011 The Chromium OS Authors.
*/
#include <common.h>
#include <command.h>
#include <mapmem.h>
#include <trace.h>
#include <asm/io.h>
/*
 * get_args() - decode the optional <addr> <size> arguments of a trace
 * sub-command.
 *
 * With fewer than four arguments, the buffer description is taken from the
 * 'profbase'/'profsize'/'profoffset' environment variables (hex), so a
 * second dump continues where the previous one stopped.  With explicit
 * arguments the buffer is mapped from argv[2]/argv[3] and the offset
 * starts at 0.
 *
 * Returns 0 on success, -1 if no sub-command argument was given.
 */
static int get_args(int argc, char * const argv[], char **buff,
		    size_t *buff_ptr, size_t *buff_size)
{
	if (argc < 2)
		return -1;
	if (argc < 4) {
		*buff_size = env_get_ulong("profsize", 16, 0);
		*buff = map_sysmem(env_get_ulong("profbase", 16, 0),
				   *buff_size);
		*buff_ptr = env_get_ulong("profoffset", 16, 0);
	} else {
		*buff_size = simple_strtoul(argv[3], NULL, 16);
		*buff = map_sysmem(simple_strtoul(argv[2], NULL, 16),
				   *buff_size);
		*buff_ptr = 0;
	}	/* removed a stray ';' that followed this block */
	return 0;
}
/*
 * create_func_list() - dump the list of traced functions into the buffer.
 *
 * The dump is appended at 'buff_ptr' within the buffer described by
 * get_args(); base/size/new offset are written back to the environment so
 * a subsequent dump continues after this one.  Truncation is reported but
 * the (partial) dump is still recorded.
 */
static int create_func_list(int argc, char * const argv[])
{
	size_t buff_size, avail, buff_ptr, used;
	unsigned int needed;
	char *buff;
	int err;

	if (get_args(argc, argv, &buff, &buff_ptr, &buff_size))
		return -1;
	avail = buff_size - buff_ptr;
	err = trace_list_functions(buff + buff_ptr, avail, &needed);
	if (err)
		printf("Error: truncated (%#x bytes needed)\n", needed);
	/* Never claim more than what actually fit. */
	used = min(avail, (size_t)needed);
	printf("Function trace dumped to %08lx, size %#zx\n",
	       (ulong)map_to_sysmem(buff + buff_ptr), used);
	env_set_hex("profbase", map_to_sysmem(buff));
	env_set_hex("profsize", buff_size);
	env_set_hex("profoffset", buff_ptr + used);
	return 0;
}
/*
 * create_call_list() - dump the function-call trace into the buffer.
 *
 * Same buffer/environment handling as create_func_list(), but dumps the
 * recorded call trace instead of the function list.
 */
static int create_call_list(int argc, char * const argv[])
{
	size_t buff_size, avail, buff_ptr, used;
	unsigned int needed;
	char *buff;
	int err;

	if (get_args(argc, argv, &buff, &buff_ptr, &buff_size))
		return -1;
	avail = buff_size - buff_ptr;
	err = trace_list_calls(buff + buff_ptr, avail, &needed);
	if (err)
		printf("Error: truncated (%#x bytes needed)\n", needed);
	/* Never claim more than what actually fit. */
	used = min(avail, (size_t)needed);
	printf("Call list dumped to %08lx, size %#zx\n",
	       (ulong)map_to_sysmem(buff + buff_ptr), used);
	env_set_hex("profbase", map_to_sysmem(buff));
	env_set_hex("profsize", buff_size);
	env_set_hex("profoffset", buff_ptr + used);
	return 0;
}
/*
 * do_trace() - handler for the 'trace' shell command.
 *
 * Sub-commands are dispatched on their first letter only:
 * p(ause), r(esume), s(tats), f(unclist), c(alls).
 */
int do_trace(cmd_tbl_t *cmdtp, int flag, int argc, char * const argv[])
{
	const char *cmd = argc < 2 ? NULL : argv[1];

	if (!cmd)
		return cmd_usage(cmdtp);
	switch (*cmd) {
	case 'p':	/* pause */
		trace_set_enabled(0);
		break;
	case 'c':	/* calls */
		if (create_call_list(argc, argv))
			return cmd_usage(cmdtp);
		break;
	case 'r':	/* resume */
		trace_set_enabled(1);
		break;
	case 'f':	/* funclist */
		if (create_func_list(argc, argv))
			return cmd_usage(cmdtp);
		break;
	case 's':	/* stats */
		trace_print_stats();
		break;
	default:
		return CMD_RET_USAGE;
	}
	return 0;
}
/* Register the 'trace' command: at most 4 args, repeatable. */
U_BOOT_CMD(
	trace, 4, 1, do_trace,
	"trace utility commands",
	"stats - display tracing statistics\n"
	"trace pause - pause tracing\n"
	"trace resume - resume tracing\n"
	"trace funclist [<addr> <size>] - dump function list into buffer\n"
	"trace calls [<addr> <size>] "
	"- dump function call trace into buffer"
);
| 1,302 |
3,200 | <filename>mindspore/lite/src/delegate/npu/op/strided_slice_npu.cc
/**
* Copyright 2020-2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "src/delegate/npu/op/strided_slice_npu.h"
#include "src/delegate/npu/npu_converter_utils.h"
namespace mindspore {
// Checks whether this StridedSlice can run on the NPU and decides whether a
// Cast must be inserted around it.  Returns RET_NOT_SUPPORT when an ONNX
// axes input requests a non-contiguous axis set, RET_OK otherwise.
int StridedSliceNPUOp::IsSupport(const schema::Primitive *primitive, const std::vector<mindspore::MSTensor> &in_tensors,
                                 const std::vector<mindspore::MSTensor> &out_tensors) {
  // Only onnx StridedSlice has 5 in_tensors, of which the 4th input is axes and the 5th input is strides.
  if (in_tensors.size() == ONNX_INPUT_SIZE) {
    vector<int> axes;
    size_t size = in_tensors[STRIDE_INDEX].Shape()[0];
    axes.resize(size);
    MS_ASSERT(in_tensors[STRIDE_INDEX].Data());
    memcpy(axes.data(), in_tensors[STRIDE_INDEX].Data().get(), sizeof(int) * size);
    // The NPU op cannot express arbitrary axes, so axes[i] must equal i.
    for (size_t i = 0; i < axes.size(); ++i) {
      if (static_cast<int>(i) != axes[i]) {
        MS_LOG(WARNING) << "Does not support setting axis, so the axis must be continuous.";
        return RET_NOT_SUPPORT;
      }
    }
  }
  auto input_x = in_tensors.at(0);
  // BUGFIX: the original condition used '||', which is true for EVERY data
  // type (a value cannot equal both fp32 and fp16 at once), so a Cast was
  // inserted even for float inputs.  A cast is only needed when the input is
  // neither fp32 nor fp16.
  if (input_x.DataType() != DataType::kNumberTypeFloat32 && input_x.DataType() != DataType::kNumberTypeFloat16) {
    need_cast_ = true;
    MS_LOG(INFO) << "StridedSlice does not support input datatype other than FLOAT. Cast op will be inserted.";
  }
  return RET_OK;
}
// Creates the underlying hiai::op::StridedSlice operator and caches the five
// mask attributes from the flatbuffer primitive.  Returns RET_ERROR on
// allocation failure or when the primitive carries no StridedSlice value.
int StridedSliceNPUOp::Init(const schema::Primitive *primitive, const std::vector<mindspore::MSTensor> &in_tensors,
                            const std::vector<mindspore::MSTensor> &out_tensors) {
  strided_slice_ = new (std::nothrow) hiai::op::StridedSlice(name_);
  if (strided_slice_ == nullptr) {
    MS_LOG(ERROR) << "New stridedSlice npu operator for op " << name_ << " failed.";
    return RET_ERROR;
  }
  auto strided_slice_prim = primitive->value_as_StridedSlice();
  if (strided_slice_prim == nullptr) {
    MS_LOG(ERROR) << "Get null primitive value for op ." << name_;
    return RET_ERROR;
  }
  // Masks are applied to the operator later, in SetNPUInputs().
  begins_mask_ = strided_slice_prim->begin_mask();
  ends_mask_ = strided_slice_prim->end_mask();
  ellipsis_mask_ = strided_slice_prim->ellipsis_mask();
  new_axis_mask_ = strided_slice_prim->new_axis_mask();
  shrink_axis_mask_ = strided_slice_prim->shrink_axis_mask();
  return RET_OK;
}
// Wires the graph inputs (x, begin, end, strides) and the cached mask
// attributes into the HiAI operator.  When need_cast_ was set by IsSupport(),
// the x input is routed through an inserted Cast pair instead.
int StridedSliceNPUOp::SetNPUInputs(const std::vector<mindspore::MSTensor> &in_tensors,
                                    const std::vector<mindspore::MSTensor> &out_tensors,
                                    const std::vector<ge::Operator *> &npu_inputs) {
  strided_slice_->set_attr_begin_mask(begins_mask_);
  strided_slice_->set_attr_ellipsis_mask(ellipsis_mask_);
  strided_slice_->set_attr_end_mask(ends_mask_);
  strided_slice_->set_attr_shrink_axis_mask(shrink_axis_mask_);
  strided_slice_->set_attr_new_axis_mask(new_axis_mask_);
  // StridedSliceV2 supports setting axes, but it will cause an endless loop.
  if (need_cast_) {
    auto ret = SetCast(npu_inputs[0], strided_slice_, in_tensors[0], out_tensors[0]);
    if (ret != RET_OK) {
      MS_LOG(ERROR) << "Insert Cast operator for op " << name_ << " failed.";
      return ret;
    }
  } else {
    strided_slice_->set_input_x(*npu_inputs[0]);
  }
  strided_slice_->set_input_begin(*npu_inputs[BEGIN_INDEX]);
  strided_slice_->set_input_end(*npu_inputs[END_INDEX]);
  // The strides position of onnx is the 5th, and the others are the 4th.
  if (npu_inputs.size() == ONNX_INPUT_SIZE) {
    strided_slice_->set_input_strides(*npu_inputs[ONNX_STRIDE_INDEX]);
  } else {
    strided_slice_->set_input_strides(*npu_inputs[STRIDE_INDEX]);
  }
  return RET_OK;
}
// Returns the operator that carries this op's output: the trailing CastT when
// a cast pair was inserted, otherwise the StridedSlice itself.
ge::Operator *StridedSliceNPUOp::GetNPUOp() { return need_cast_ ? this->out_cast_ : this->strided_slice_; }
// Permutes the begin/end/strides tensors and all mask bits from NHWC order to
// NCHW order in place, since the NPU graph runs in NCHW layout.
int StridedSliceNPUOp::HandleAxis() {
  if (inputs_.size() < MIN_INPUT_SIZE) {
    MS_LOG(ERROR) << "StridedSlice in tensors size < " << MIN_INPUT_SIZE;
    return RET_ERROR;
  }
  auto begin_tensor = inputs_.at(BEGIN_INDEX);
  int *begin = reinterpret_cast<int *>(begin_tensor.MutableData());
  MS_ASSERT(begin);
  AssistDataNHWC2NCHW(begin, 1);
  auto end_tensor = inputs_.at(END_INDEX);
  int *end = reinterpret_cast<int *>(end_tensor.MutableData());
  MS_ASSERT(end);
  AssistDataNHWC2NCHW(end, 1);
  // ONNX models carry strides one slot later (4th input is axes).
  auto stride_tensor = inputs_.at(STRIDE_INDEX);
  if (inputs_.size() == ONNX_INPUT_SIZE) {
    stride_tensor = inputs_.at(ONNX_STRIDE_INDEX);
  }
  int *stride = reinterpret_cast<int *>(stride_tensor.MutableData());
  MS_ASSERT(stride);
  AssistDataNHWC2NCHW(stride, 1);
  // Masks address axes by bit position, so they must be permuted too.
  begins_mask_ = MaskDataNHWC2NCHW(begins_mask_);
  ends_mask_ = MaskDataNHWC2NCHW(ends_mask_);
  ellipsis_mask_ = MaskDataNHWC2NCHW(ellipsis_mask_);
  shrink_axis_mask_ = MaskDataNHWC2NCHW(shrink_axis_mask_);
  new_axis_mask_ = MaskDataNHWC2NCHW(new_axis_mask_);
  return RET_OK;
}
// Inserts a Cast pair around the slice: input -> CastT(to fp32) ->
// strided_slice_ -> CastT(back to the output dtype).  GetNPUOp() then exposes
// out_cast_ as this op's output operator.
int StridedSliceNPUOp::SetCast(const ge::Operator *input, const ge::Operator *cur_op,
                               const mindspore::MSTensor in_tensor, const mindspore::MSTensor out_tensor) {
  in_cast_ = new (std::nothrow) hiai::op::CastT(name_ + "_in_cast");
  out_cast_ = new (std::nothrow) hiai::op::CastT(name_ + "_out_cast");
  if (in_cast_ == nullptr || out_cast_ == nullptr) {
    MS_LOG(ERROR) << "New activation npu operator for op " << name_ << " failed.";
    return RET_ERROR;
  }
  in_cast_->set_input_x(*input);
  in_cast_->set_attr_src_dtype(ConverterToNPUDataType(static_cast<DataType>(in_tensor.DataType())));
  in_cast_->set_attr_dst_dtype(ge::DT_FLOAT);
  strided_slice_->set_input_x(*in_cast_);
  out_cast_->set_input_x(*cur_op);
  out_cast_->set_attr_src_dtype(ge::DT_FLOAT);
  out_cast_->set_attr_dst_dtype(ConverterToNPUDataType(static_cast<DataType>(out_tensor.DataType())));
  return RET_OK;
}
// Releases the HiAI operators owned by this op (slice plus the optional
// cast pair); pointers are nulled defensively after deletion.
StridedSliceNPUOp::~StridedSliceNPUOp() {
  if (strided_slice_ != nullptr) {
    delete strided_slice_;
    strided_slice_ = nullptr;
  }
  if (in_cast_ != nullptr) {
    delete in_cast_;
    in_cast_ = nullptr;
  }
  if (out_cast_ != nullptr) {
    delete out_cast_;
    out_cast_ = nullptr;
  }
}
} // namespace mindspore
| 2,744 |
729 | """
@author: lileilei
@file: ddd.py
@time: 2018/4/13 13:24
"""
'''
字典的合并
'''
def hebingDict(dict_list: list) -> dict:
    """Merge a sequence of dict-literal strings into a single dict.

    Each item of ``dict_list`` is a string containing a Python dict
    literal, e.g. ``"{'a': 1}"``.  Items that fail to parse are reported
    and skipped.  Keys from later items override earlier ones.

    :param dict_list: iterable of strings, each holding a dict literal
                      (the original, incorrect ``dict`` annotation is fixed)
    :return: the merged dictionary
    """
    import ast

    merged = {}
    for item in dict_list:
        try:
            # ast.literal_eval only evaluates literal expressions; the
            # original eval() would execute arbitrary code from the input.
            merged.update(ast.literal_eval(item))
        except Exception as e:
            # Best-effort semantics preserved: report and keep going.
            print(e)
    return merged
| 154 |
777 | <filename>chrome/browser/ui/website_settings/mock_permission_prompt.cc
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/website_settings/mock_permission_prompt.h"
#include "base/bind.h"
#include "base/run_loop.h"
#include "chrome/browser/permissions/permission_request_manager.h"
#include "chrome/browser/ui/website_settings/mock_permission_prompt_factory.h"
// Ensure the factory stops tracking this prompt when it goes away.
MockPermissionPrompt::~MockPermissionPrompt() {
  Hide();
}
// Records the show on the factory (show count and pending-request counts,
// taken from the request manager) and lets the factory apply its configured
// auto-response; then marks the prompt visible.
void MockPermissionPrompt::Show(const std::vector<PermissionRequest*>& requests,
                                const std::vector<bool>& accept_state) {
  factory_->ShowView(this);
  factory_->show_count_++;
  factory_->requests_count_ = manager_->requests_.size();
  factory_->total_requests_count_ += manager_->requests_.size();
  factory_->UpdateResponseType();
  is_visible_ = true;
}
// Whether the request list may change while the prompt is showing;
// controlled by |can_update_ui_| (true by default, see constructor).
bool MockPermissionPrompt::CanAcceptRequestUpdate() {
  return can_update_ui_;
}
// Hides the prompt; the factory is notified only if the prompt was actually
// visible and the factory pointer is still set.
void MockPermissionPrompt::Hide() {
  if (is_visible_ && factory_)
    factory_->HideView(this);
  is_visible_ = false;
}
// Visibility as last set by Show()/Hide().
bool MockPermissionPrompt::IsVisible() {
  return is_visible_;
}
// No-op: a mock prompt has no on-screen anchor to reposition.
void MockPermissionPrompt::UpdateAnchorPosition() {}
// Intentionally unreachable: a test asking the mock for a native window is
// using it where real UI is required.
gfx::NativeWindow MockPermissionPrompt::GetNativeWindow() {
  // This class should only be used when the UI is not necessary.
  NOTREACHED();
  return nullptr;
}
// Starts hidden, with request updates allowed by default.
MockPermissionPrompt::MockPermissionPrompt(MockPermissionPromptFactory* factory,
                                           PermissionRequestManager* manager)
    : factory_(factory),
      manager_(manager),
      can_update_ui_(true),
      is_visible_(false) {}
| 624 |
441 | package org.basex.query.expr;
import static org.basex.query.QueryText.*;
import org.basex.query.*;
import org.basex.query.util.index.*;
import org.basex.query.util.list.*;
import org.basex.query.value.item.*;
import org.basex.query.var.*;
import org.basex.util.*;
import org.basex.util.hash.*;
/**
 * Or expression: logical disjunction over its operand expressions.
 *
 * @author BaseX Team 2005-21, BSD License
 * @author <NAME>
 */
public final class Or extends Logical {
  /**
   * Constructor.
   * @param info input info
   * @param exprs expressions
   */
  public Or(final InputInfo info, final Expr... exprs) {
    super(info, exprs);
  }

  @Override
  public Expr optimize(final CompileContext cc) throws QueryException {
    // merge nested Or operands before running the shared logical optimization
    flatten(cc);
    return optimize(cc, true);
  }

  @Override
  public Item item(final QueryContext qc, final InputInfo ii) throws QueryException {
    // short-circuit evaluation: true as soon as one operand's EBV is true
    for(final Expr expr : exprs) {
      if(expr.ebv(qc, info).bool(info)) return Bln.TRUE;
    }
    return Bln.FALSE;
  }

  @Override
  public Or copy(final CompileContext cc, final IntObjMap<Var> vm) {
    return copyType(new Or(info, copyAll(cc, vm, exprs)));
  }

  @Override
  public boolean indexAccessible(final IndexInfo ii) throws QueryException {
    IndexCosts costs = IndexCosts.ZERO;
    final ExprList list = new ExprList(exprs.length);
    for(final Expr expr : exprs) {
      // check if expression can be rewritten, and if access is not sequential
      if(!expr.indexAccessible(ii)) return false;
      // skip expressions without results
      if(ii.costs.results() == 0) continue;
      costs = IndexCosts.add(costs, ii.costs);
      list.add(ii.expr);
    }
    // use summarized costs for estimation
    ii.costs = costs;
    // no expressions means no costs: expression will later be pre-evaluated
    ii.expr = list.size() == 1 ? list.get(0) : new Union(info, list.finish());
    return true;
  }

  @Override
  public boolean equals(final Object obj) {
    return this == obj || obj instanceof Or && super.equals(obj);
  }

  @Override
  public void toString(final QueryString qs) {
    qs.tokens(exprs, ' ' + OR + ' ', true);
  }
}
| 755 |
1,144 | /******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2006 ComPiere, Inc. All Rights Reserved. *
* This program is free software; you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program; if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via <EMAIL> or http://www.compiere.org/license.html *
*****************************************************************************/
package de.metas.banking.process;
/*
* #%L
* de.metas.banking.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.sql.Timestamp;
import java.util.List;
import de.metas.common.util.time.SystemTime;
import org.adempiere.banking.model.I_C_Invoice;
import org.compiere.util.TimeUtil;
import de.metas.banking.service.IBankingBL;
import de.metas.process.JavaProcess;
import de.metas.util.Services;
/**
 * Process that creates invoices for all due recurrent payments via
 * {@link IBankingBL#createInvoicesForRecurrentPayments(String)}.
 *
 * @author ts
 *
 */
public class C_RecurrentPaymentCreateInvoice extends JavaProcess
{
	/**
	 * Prepare - e.g., get Parameters.
	 */
	@Override
	protected void prepare()
	{
		// nothing to do
	}

	/**
	 * Perform process.
	 *
	 * @return Message (clear text)
	 * @throws Exception
	 *             if not successful
	 */
	@Override
	protected String doIt() throws Exception
	{
		// start time is captured only to report the elapsed duration below
		final Timestamp startTime = SystemTime.asTimestamp();
		final IBankingBL bankingBL = Services.get(IBankingBL.class);
		final List<I_C_Invoice> invoices = bankingBL.createInvoicesForRecurrentPayments(get_TrxName());
		addLog("Created " + invoices.size() + " invoices in " + TimeUtil.formatElapsed(startTime));
		return "@Success@";
	}
}	// C_RecurrentPaymentCreateInvoice
| 997 |
678 | /* pt::rde::critcl - critcl - layer 1 declarations
* (c) PARAM functions
*/
#ifndef _P_INT_H
#define _P_INT_H 1
#include <p.h> /* Public decls */
#include <param.h> /* PARAM architectural state */
#include <util.h> /* Tracing support */

/* Node in a singly linked list associating an interned Tcl string object
 * with its numeric id. */
typedef struct RDE_STRING {
	struct RDE_STRING* next; /* next node, NULL terminates the list */
	Tcl_Obj* self;           /* the interned Tcl object */
	int id;                  /* numeric index assigned by interning */
} RDE_STRING;

typedef struct RDE_STATE_ {
	RDE_PARAM p;             /* the PARAM machine this state drives */
	Tcl_Command c;           /* Tcl command token for this instance */
	struct RDE_STRING* sfirst; /* head of the interned-object list */

	Tcl_HashTable str; /* Table to intern strings, i.e. convert them into
			    * unique numerical indices for the PARAM instructions.
			    */
	/* And the counter mapping from ids to strings, this is handed to the
	 * PARAM for use.
	 */
	long int maxnum; /* NOTE -- */
	long int numstr; /* This is, essentially, an RDE_STACK (char* elements) */
	char** string;   /* Convert over to that instead of replicating the code */

#ifdef RDE_TRACE
	int icount; /* Instruction counter, when tracing */
#endif
} RDE_STATE_;

/* Interns |literal| in |p|, returning its stable numeric id. */
long int param_intern (RDE_STATE p, const char* literal);

#endif /* _P_INT_H */
/*
* Local Variables:
* mode: c
* c-basic-offset: 4
* fill-column: 78
* End:
*/
| 464 |
2,605 | class Solution {
vector<vector<int>> edges;
vector<int> score;
int longest(int v) {
if(score[v] > 0) {
return score[v];
}
score[v] = 1;
for(int b : edges[v]) { // O(M) = O(N*L)
score[v] = max(score[v], longest(b) + 1);
}
return score[v];
}
public: // O(N*L^2)
int longestStrChain(vector<string>& words) {
const int n = words.size();
edges.clear();
edges.resize(n);
score.clear();
score.resize(n);
unordered_map<string, int> his_position;
for(int i = 0; i < n; i++) { // O(N*L)
his_position[words[i]] = i;
}
for(int i = 0; i < n; i++) {
string s = words[i];
for(int j = 0; j < (int) s.length(); j++) {
string maybe = s.substr(0, j) + s.substr(j+1); // O(N*L^2)
auto it = his_position.find(maybe);
if(it == his_position.end()) {
continue;
}
edges[it->second].push_back(i);
}
}
int answer = 0;
for(int i = 0; i < n; i++) {
answer = max(answer, longest(i));
// cout << longest(i) << endl;
}
return answer;
}
};
| 694 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.datafactory.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
/** Avro write settings. */
// NOTE(review): this class is AutoRest-generated (see file header).
// Behavioral changes belong in the service specification / code generator,
// not in hand edits here; comments below only explain the generated shape.
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type")
@JsonTypeName("AvroWriteSettings")
@Fluent
public final class AvroWriteSettings extends FormatWriteSettings {
    // Generated logger instance; unused in this class but emitted for
    // parity with the other generated models.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(AvroWriteSettings.class);
    /*
     * Top level record name in write result, which is required in AVRO spec.
     */
    @JsonProperty(value = "recordName")
    private String recordName;
    /*
     * Record namespace in the write result.
     */
    @JsonProperty(value = "recordNamespace")
    private String recordNamespace;
    /*
     * Limit the written file's row count to be smaller than or equal to the
     * specified count. Type: integer (or Expression with resultType integer).
     */
    // Object (not Integer) because the value may also be an ARM expression.
    @JsonProperty(value = "maxRowsPerFile")
    private Object maxRowsPerFile;
    /*
     * Specifies the file name pattern
     * <fileNamePrefix>_<fileIndex>.<fileExtension> when copy from non-file
     * based store without partitionOptions. Type: string (or Expression with
     * resultType string).
     */
    // Object (not String) because the value may also be an ARM expression.
    @JsonProperty(value = "fileNamePrefix")
    private Object fileNamePrefix;
    /**
     * Get the recordName property: Top level record name in write result, which is required in AVRO spec.
     *
     * @return the recordName value.
     */
    public String recordName() {
        return this.recordName;
    }
    /**
     * Set the recordName property: Top level record name in write result, which is required in AVRO spec.
     *
     * @param recordName the recordName value to set.
     * @return the AvroWriteSettings object itself.
     */
    public AvroWriteSettings withRecordName(String recordName) {
        this.recordName = recordName;
        return this;
    }
    /**
     * Get the recordNamespace property: Record namespace in the write result.
     *
     * @return the recordNamespace value.
     */
    public String recordNamespace() {
        return this.recordNamespace;
    }
    /**
     * Set the recordNamespace property: Record namespace in the write result.
     *
     * @param recordNamespace the recordNamespace value to set.
     * @return the AvroWriteSettings object itself.
     */
    public AvroWriteSettings withRecordNamespace(String recordNamespace) {
        this.recordNamespace = recordNamespace;
        return this;
    }
    /**
     * Get the maxRowsPerFile property: Limit the written file's row count to be smaller than or equal to the specified
     * count. Type: integer (or Expression with resultType integer).
     *
     * @return the maxRowsPerFile value.
     */
    public Object maxRowsPerFile() {
        return this.maxRowsPerFile;
    }
    /**
     * Set the maxRowsPerFile property: Limit the written file's row count to be smaller than or equal to the specified
     * count. Type: integer (or Expression with resultType integer).
     *
     * @param maxRowsPerFile the maxRowsPerFile value to set.
     * @return the AvroWriteSettings object itself.
     */
    public AvroWriteSettings withMaxRowsPerFile(Object maxRowsPerFile) {
        this.maxRowsPerFile = maxRowsPerFile;
        return this;
    }
    /**
     * Get the fileNamePrefix property: Specifies the file name pattern
     * &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when copy from non-file based store without
     * partitionOptions. Type: string (or Expression with resultType string).
     *
     * @return the fileNamePrefix value.
     */
    public Object fileNamePrefix() {
        return this.fileNamePrefix;
    }
    /**
     * Set the fileNamePrefix property: Specifies the file name pattern
     * &lt;fileNamePrefix&gt;_&lt;fileIndex&gt;.&lt;fileExtension&gt; when copy from non-file based store without
     * partitionOptions. Type: string (or Expression with resultType string).
     *
     * @param fileNamePrefix the fileNamePrefix value to set.
     * @return the AvroWriteSettings object itself.
     */
    public AvroWriteSettings withFileNamePrefix(Object fileNamePrefix) {
        this.fileNamePrefix = fileNamePrefix;
        return this;
    }
    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    @Override
    public void validate() {
        // Only delegates to the parent type; this subtype adds no checks.
        super.validate();
    }
}
| 1,701 |
918 | // This is a generated file! Please edit source .ksy file and use
// kaitai-struct-compiler to rebuild
#include "kaitai/quicktime_mov.h"
namespace veles {
namespace kaitai {
namespace quicktime_mov {
// Root type: a QuickTime/MP4 container is a flat sequence of atoms parsed
// until end of stream.  (Generated by kaitai-struct-compiler — edit the
// .ksy source, not this file.)
quicktime_mov_t::quicktime_mov_t(kaitai::kstream* p_io,
                                 kaitai::kstruct* p_parent,
                                 quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  // The root type roots itself (p_root is ignored); nested quicktime_mov_t
  // bodies created in atom_t therefore re-root their own subtree.
  m__root = this;
  m__io->popName();
  veles_obj = m__io->startChunk("quicktime_mov");
  m_atoms = new std::vector<atom_t*>();
  while (!m__io->is_eof()) {
    m__io->pushName("atoms");
    m_atoms->push_back(new atom_t(m__io, this, m__root));
    m__io->popName();
  }
  m__io->endChunk();
}
// Owns the parsed atoms: free each element, then the vector itself.
quicktime_mov_t::~quicktime_mov_t() {
  for (std::vector<atom_t*>::iterator it = m_atoms->begin();
       it != m_atoms->end(); ++it) {
    delete *it;
  }
  delete m_atoms;
}
// 'mvhd' (movie header) atom payload: movie-wide timing and display
// parameters.  All multi-byte integers are read big-endian.
quicktime_mov_t::mvhd_body_t::mvhd_body_t(kaitai::kstream* p_io,
                                          quicktime_mov_t::atom_t* p_parent,
                                          quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  m__root = p_root;
  m__io->popName();
  veles_obj = m__io->startChunk("mvhd_body");
  m__io->pushName("version");
  m_version = m__io->read_u1();
  m__io->popName();
  m__io->pushName("flags");
  m_flags = m__io->read_bytes(3);
  m__io->popName();
  m__io->pushName("creation_time");
  m_creation_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("modification_time");
  m_modification_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("time_scale");
  m_time_scale = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("duration");
  m_duration = m__io->read_u4be();
  m__io->popName();
  // Fixed-point sub-parses: 16.16 playback rate, 8.8 volume.
  m__io->pushName("preferred_rate");
  m_preferred_rate = new fixed32_t(m__io, this, m__root);
  m__io->popName();
  m__io->pushName("preferred_volume");
  m_preferred_volume = new fixed16_t(m__io, this, m__root);
  m__io->popName();
  m__io->pushName("reserved1");
  m_reserved1 = m__io->read_bytes(10);
  m__io->popName();
  // 36-byte transformation matrix, kept as raw bytes.
  m__io->pushName("matrix");
  m_matrix = m__io->read_bytes(36);
  m__io->popName();
  m__io->pushName("preview_time");
  m_preview_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("preview_duration");
  m_preview_duration = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("poster_time");
  m_poster_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("selection_time");
  m_selection_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("selection_duration");
  m_selection_duration = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("current_time");
  m_current_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("next_track_id");
  m_next_track_id = m__io->read_u4be();
  m__io->popName();
  m__io->endChunk();
}
// Only the two heap-allocated fixed-point sub-parses need freeing.
quicktime_mov_t::mvhd_body_t::~mvhd_body_t() {
  delete m_preferred_rate;
  delete m_preferred_volume;
}
// 'ftyp' atom payload: major brand, 4-byte minor version, then compatible
// brands repeated until the end of the atom's sub-stream.
quicktime_mov_t::ftyp_body_t::ftyp_body_t(kaitai::kstream* p_io,
                                          quicktime_mov_t::atom_t* p_parent,
                                          quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  m__root = p_root;
  m__io->popName();
  veles_obj = m__io->startChunk("ftyp_body");
  m__io->pushName("major_brand");
  m_major_brand = static_cast<quicktime_mov_t::brand_t>(m__io->read_u4be());
  m__io->popName();
  m__io->pushName("minor_version");
  m_minor_version = m__io->read_bytes(4);
  m__io->popName();
  m_compatible_brands = new std::vector<brand_t>();
  while (!m__io->is_eof()) {
    m__io->pushName("compatible_brands");
    m_compatible_brands->push_back(
        static_cast<quicktime_mov_t::brand_t>(m__io->read_u4be()));
    m__io->popName();
  }
  m__io->endChunk();
}
quicktime_mov_t::ftyp_body_t::~ftyp_body_t() { delete m_compatible_brands; }
// 16.16 fixed-point value: signed 16-bit integer part followed by an
// unsigned 16-bit fractional part (big-endian).
quicktime_mov_t::fixed32_t::fixed32_t(kaitai::kstream* p_io,
                                      kaitai::kstruct* p_parent,
                                      quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  m__root = p_root;
  m__io->popName();
  veles_obj = m__io->startChunk("fixed32");
  m__io->pushName("int_part");
  m_int_part = m__io->read_s2be();
  m__io->popName();
  m__io->pushName("frac_part");
  m_frac_part = m__io->read_u2be();
  m__io->popName();
  m__io->endChunk();
}
quicktime_mov_t::fixed32_t::~fixed32_t() {}
// 8.8 fixed-point value: signed 8-bit integer part followed by an
// unsigned 8-bit fractional part.
quicktime_mov_t::fixed16_t::fixed16_t(kaitai::kstream* p_io,
                                      quicktime_mov_t::mvhd_body_t* p_parent,
                                      quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  m__root = p_root;
  m__io->popName();
  veles_obj = m__io->startChunk("fixed16");
  m__io->pushName("int_part");
  m_int_part = m__io->read_s1();
  m__io->popName();
  m__io->pushName("frac_part");
  m_frac_part = m__io->read_u1();
  m__io->popName();
  m__io->endChunk();
}
quicktime_mov_t::fixed16_t::~fixed16_t() {}
// One atom: 32-bit size + FourCC type, an optional 64-bit extended size
// (signalled by len32 == 1), then a type-dispatched body.  Container atoms
// (moov/trak/mdia/minf/stbl/dinf/moof/traf) recurse into a nested
// quicktime_mov_t parse over a sub-stream of exactly len() payload bytes.
quicktime_mov_t::atom_t::atom_t(kaitai::kstream* p_io,
                                quicktime_mov_t* p_parent,
                                quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  m__root = p_root;
  m__io->popName();
  veles_obj = m__io->startChunk("atom");
  f_len = false;  // lazily-computed len() has not been cached yet
  m__io->pushName("len32");
  m_len32 = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("atom_type");
  m_atom_type = static_cast<quicktime_mov_t::atom_type_t>(m__io->read_u4be());
  m__io->popName();
  n_len64 = true;
  // A 32-bit size of exactly 1 means the real size is in the 64-bit field.
  if (len32() == 1) {
    n_len64 = false;
    m__io->pushName("len64");
    m_len64 = m__io->read_u8be();
    m__io->popName();
  }
  // Each case: read len() raw payload bytes, wrap them in a sub-stream,
  // parse the body from that sub-stream.  The "m__skip_me_body" + 3
  // expression is a generator artifact — pointer arithmetic that skips the
  // "m__" prefix of the literal, producing the chunk name "skip_me_body".
  switch (atom_type()) {
    case ATOM_TYPE_STBL:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_MOOF:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_MVHD:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new mvhd_body_t(m__io__skip_me_body, this, m__root);
      m__io->popName();
      break;
    case ATOM_TYPE_MINF:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_TRAK:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_TRAF:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_MDIA:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_FTYP:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new ftyp_body_t(m__io__skip_me_body, this, m__root);
      m__io->popName();
      break;
    case ATOM_TYPE_MOOV:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    case ATOM_TYPE_TKHD:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new tkhd_body_t(m__io__skip_me_body, this, m__root);
      m__io->popName();
      break;
    case ATOM_TYPE_DINF:
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      m__io->pushName("m__skip_me_body" + 3);
      m__io__skip_me_body = new kaitai::kstream(
          m__io->blob(), m__io->pos() - m__skip_me_body.size(), veles_obj,
          m__io->pos(), m__io->error());
      m__io->popName();
      m__io->pushName("body");
      m_body = new quicktime_mov_t(m__io__skip_me_body);
      m__io->popName();
      break;
    default:
      // Unrecognised atom type: consume the payload without interpreting it.
      m__io->pushName("_skip_me_body");
      m__skip_me_body = m__io->read_bytes(len());
      m__io->popName();
      break;
  }
  m__io->endChunk();
}
// NOTE(review): the generated destructor is empty, so m_body and
// m__io__skip_me_body allocated above appear never to be freed — apparent
// leak in generator output; a fix belongs in kaitai-struct-compiler/.ksy,
// not in this file.
quicktime_mov_t::atom_t::~atom_t() {}
// Payload length in bytes, excluding the size/type header; cached after
// the first computation.  len32 == 0: atom extends to the end of the
// stream (total size minus the 8-byte header).  len32 == 1: the 64-bit
// len64 field holds the size (16-byte header).  Otherwise len32 itself is
// the size (8-byte header).
// NOTE(review): return type is int32_t, so a 64-bit len64 value would be
// truncated for atoms >= 2 GiB — generator limitation, confirm upstream.
int32_t quicktime_mov_t::atom_t::len() {
  if (f_len) {
    return m_len;
  }
  m__io->pushName("len");
  m__io->pushName("len");
  m_len = (len32() == 0)
              ? ((_io()->size() - 8))
              : ((len32() == 1) ? ((len64() - 16)) : ((len32() - 8)));
  m__io->popName();
  f_len = true;
  m__io->popName();
  return m_len;
}
// 'tkhd' (track header) atom payload: per-track timing, layering, volume
// and display geometry.  width/height are 16.16 fixed-point sub-parses.
quicktime_mov_t::tkhd_body_t::tkhd_body_t(kaitai::kstream* p_io,
                                          quicktime_mov_t::atom_t* p_parent,
                                          quicktime_mov_t* p_root)
    : kaitai::kstruct(p_io) {
  m__io->pushName("_parent");
  m__parent = p_parent;
  m__io->popName();
  m__io->pushName("_root");
  m__root = p_root;
  m__io->popName();
  veles_obj = m__io->startChunk("tkhd_body");
  m__io->pushName("version");
  m_version = m__io->read_u1();
  m__io->popName();
  m__io->pushName("flags");
  m_flags = m__io->read_bytes(3);
  m__io->popName();
  m__io->pushName("creation_time");
  m_creation_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("modification_time");
  m_modification_time = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("track_id");
  m_track_id = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("reserved1");
  m_reserved1 = m__io->read_bytes(4);
  m__io->popName();
  m__io->pushName("duration");
  m_duration = m__io->read_u4be();
  m__io->popName();
  m__io->pushName("reserved2");
  m_reserved2 = m__io->read_bytes(8);
  m__io->popName();
  m__io->pushName("layer");
  m_layer = m__io->read_u2be();
  m__io->popName();
  m__io->pushName("alternative_group");
  m_alternative_group = m__io->read_u2be();
  m__io->popName();
  m__io->pushName("volume");
  m_volume = m__io->read_u2be();
  m__io->popName();
  m__io->pushName("reserved3");
  m_reserved3 = m__io->read_u2be();
  m__io->popName();
  // 36-byte transformation matrix, kept as raw bytes.
  m__io->pushName("matrix");
  m_matrix = m__io->read_bytes(36);
  m__io->popName();
  m__io->pushName("width");
  m_width = new fixed32_t(m__io, this, m__root);
  m__io->popName();
  m__io->pushName("height");
  m_height = new fixed32_t(m__io, this, m__root);
  m__io->popName();
  m__io->endChunk();
}
// Frees the two fixed-point sub-parses allocated in the constructor.
quicktime_mov_t::tkhd_body_t::~tkhd_body_t() {
  delete m_width;
  delete m_height;
}
} // namespace quicktime_mov
} // namespace kaitai
} // namespace veles
| 7,120 |
678 | <gh_stars>100-1000
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/iWorkImport.framework/iWorkImport
*/
#import <iWorkImport/iWorkImport-Structs.h>
#import <iWorkImport/XXUnknownSuperclass.h>
// Factory that selects the processor used to produce Quick Look output for
// an iWork document.  Header recovered with class-dump-z; the trailing hex
// comments are the original implementation offsets — do not hand-edit the
// declarations themselves.
__attribute__((visibility("hidden")))
@interface GQPProcessorFactory : XXUnknownSuperclass {
}
// "create" naming follows the Core Foundation Create Rule: the caller owns
// the returned CFStringRef and must CFRelease it.
+ (CFStringRef)createUtiForDocument:(CFStringRef)document; // 0x41131
// Maps a document UTI to an application identifier code (int).
+ (int)applicationForDocumentUti:(CFStringRef)documentUti; // 0x411cd
// "retained" naming: caller is responsible for releasing the returned
// processor object (pre-ARC manual ownership convention).
+ (id)retainedProcessorForDocument:(CFStringRef)document uti:(CFStringRef)uti outputType:(int)type outputPath:(CFStringRef)path previewRequest:(QLPreviewRequestRef)request progressiveHelper:(id)helper; // 0x41469
+ (id)retainedProcessorForZipArchive:(id)zipArchive uti:(CFStringRef)uti outputType:(int)type outputPath:(CFStringRef)path previewRequest:(QLPreviewRequestRef)request progressiveHelper:(id)helper; // 0x41345
@end
| 294 |
454 | <gh_stars>100-1000
package io.vertx.up.uca.jooq.cache;
import io.vertx.core.Future;
import io.vertx.tp.plugin.cache.hit.CMessage;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import java.util.List;
/**
* @author <a href="http://www.origin-x.cn">Lang</a>
* Empty aspect for placeholder here
*/
@Aspect
@SuppressWarnings("all")
public class AsideUpsert extends L1AsideWriting {
    /*
    @Before(value = "initialization(io.vertx.up.uca.jooq.UxJooq.new(..)) && args(clazz,dao)", argNames = "clazz,dao")
    public void init(final Class<?> clazz, final VertxDAO dao) {
        super.initialize(clazz, dao);
    }
    */
    /*
     * Overload routing map — every variant on the right funnels into the
     * advised method on the left:
     *
     * upsert(id, T)
     *      <-- upsert(id, JsonObject)
     *      <-- upsert(id, JsonObject, pojo)
     *      <-- upsertJ(id, T)
     *      <-- upsertJ(id, JsonObject)
     *      <-- upsertJ(id, JsonObject, pojo)
     *
     * upsertAsync(id, T)
     *      <-- upsertAsync(id, JsonObject)
     *      <-- upsertAsync(id, JsonObject, pojo)
     *      <-- upsertJAsync(id, T)
     *      <-- upsertJAsync(id, JsonObject)
     *      <-- upsertJAsync(id, JsonObject, pojo)
     *
     * upsert(criteria, T)
     *      <-- upsert(criteria, JsonObject)
     *      <-- upsertJ(criteria, T)
     *      <-- upsertJ(criteria, JsonObject)
     *
     * upsert(criteria, T, pojo)
     *      <-- upsert(criteria, JsonObject, pojo)
     *      <-- upsertJ(criteria, T, pojo)
     *      <-- upsertJ(criteria, JsonObject, pojo)
     *
     * upsertAsync(criteria, T)
     *      <-- upsertAsync(criteria, JsonObject)
     *      <-- upsertJAsync(criteria, T)
     *      <-- upsertJAsync(criteria, JsonObject)
     *
     * upsertAsync(criteria, T, pojo)
     *      <-- upsertAsync(criteria, JsonObject, pojo)
     *      <-- upsertJAsync(criteria, T, pojo)
     *      <-- upsertJAsync(criteria, JsonObject, pojo)
     *
     * upsert(criteria, list, finder)
     *      <-- upsert(criteria, JsonArray, finder)
     *      <-- upsertJ(criteria, list, finder)
     *      <-- upsertJ(criteria, JsonArray, finder)
     *
     * upsert(criteria, list, finder, pojo)
     *      <-- upsert(criteria, JsonArray, finder, pojo)
     *      <-- upsertJ(criteria, list, finder, pojo)
     *      <-- upsertJ(criteria, JsonArray, finder, pojo)
     *
     * upsertAsync(criteria, list, finder)
     *      <-- upsertAsync(criteria, JsonArray, finder)
     *      <-- upsertJAsync(criteria, list, finder)
     *      <-- upsertJAsync(criteria, JsonArray, finder)
     *
     * upsertAsync(criteria, list, finder, pojo)
     *      <-- upsertAsync(criteria, JsonArray, finder, pojo)
     *      <-- upsertJAsync(criteria, list, finder, pojo)
     *      <-- upsertJAsync(criteria, JsonArray, finder, pojo)
     */

    // upsert(id, T) -> T
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsert(Object,T))")
    public <T> T upsert(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesT(point), point);
    }

    // upsertAsync(id, T) -> Future<T>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsertAsync(Object,T))")
    public <T> Future<T> upsertAsync(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesT(point), point);
    }

    // upsert(JsonObject criteria, T) -> T
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsert(io.vertx.core.json.JsonObject, T))")
    public <T> T upsertByCond(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesCond(point), point);
    }

    // upsertAsync(JsonObject criteria, T) -> Future<T>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsertAsync(io.vertx.core.json.JsonObject, T))")
    public <T> Future<T> upsertByCondAsync(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesCond(point), point);
    }

    // upsert(JsonObject criteria, T, String pojo) -> T
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsert(io.vertx.core.json.JsonObject, T, String))")
    public <T> T upsertByPojo(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesPojo(point, 0), point);
    }

    // upsertAsync(JsonObject criteria, T, String pojo) -> Future<T>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsertAsync(io.vertx.core.json.JsonObject, T, String))")
    public <T> Future<T> upsertByPojoAsync(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesPojo(point, 0), point);
    }

    // upsert(JsonObject criteria, List, BiPredicate finder) -> List<T>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsert(io.vertx.core.json.JsonObject, java.util.List, java.util.function.BiPredicate))")
    public <T> List<T> upsertList(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesCond(point), point);
    }

    // upsertAsync(JsonObject criteria, List, BiPredicate finder) -> Future<List<T>>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsertAsync(io.vertx.core.json.JsonObject, java.util.List, java.util.function.BiPredicate))")
    public <T> Future<List<T>> upsertListAsync(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesCond(point), point);
    }

    // upsert(JsonObject criteria, List, BiPredicate finder, String pojo) -> List<T>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsert(io.vertx.core.json.JsonObject, java.util.List, java.util.function.BiPredicate, String))")
    public <T> List<T> upsertListFn(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesPojo(point, 0), point);
    }

    // upsertAsync(JsonObject criteria, List, BiPredicate finder, String pojo) -> Future<List<T>>
    @Around(value = "execution(* io.vertx.up.uca.jooq.UxJooq.upsertAsync(io.vertx.core.json.JsonObject, java.util.List, java.util.function.BiPredicate, String))")
    public <T> Future<List<T>> upsertListFnAsync(final ProceedingJoinPoint point) throws Throwable {
        return this.writeAsync(this.messagesPojo(point, 0), point);
    }
}
| 3,329 |
432 | package com.thefinestartist.ytpa.sample;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.TextView;
import com.afollestad.materialdialogs.MaterialDialog;
import com.crashlytics.android.Crashlytics;
import com.google.android.youtube.player.YouTubePlayer;
import com.squareup.picasso.Picasso;
import com.thefinestartist.ytpa.YouTubePlayerActivity;
import com.thefinestartist.ytpa.enums.Orientation;
import com.thefinestartist.ytpa.enums.Quality;
import com.thefinestartist.ytpa.utils.YouTubeThumbnail;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.Bind;
import io.fabric.sdk.android.Fabric;
/**
 * Sample launcher screen for {@link YouTubePlayerActivity}: shows the video
 * thumbnail and lets the user choose the player style, screen orientation,
 * volume-UI visibility and close animation before starting playback.
 */
public class MainActivity extends AppCompatActivity {
    @Bind(R.id.toolbar)
    Toolbar toolbar;
    @Bind(R.id.thumbnail)
    ImageView thumbnail;
    @Bind(R.id.play_bt)
    ImageButton play;
    @Bind(R.id.player_style_bt)
    View playerStyleBt;
    @Bind(R.id.player_style_tv)
    TextView playerStyleTv;
    @Bind(R.id.screen_orientation_bt)
    View screenOrientationBt;
    @Bind(R.id.screen_orientation_tv)
    TextView screenOrientationTv;
    @Bind(R.id.volume_bt)
    View volumeBt;
    @Bind(R.id.volume_tv)
    TextView volumeTv;
    @Bind(R.id.animation_bt)
    View animationBt;
    @Bind(R.id.animation_tv)
    TextView animationTv;

    // Playback options forwarded to YouTubePlayerActivity as Intent extras.
    YouTubePlayer.PlayerStyle playerStyle;
    Orientation orientation;
    boolean showAudioUi;
    boolean showFadeAnim;

    // True once the post-playback ad has been scheduled, so onDestroy does
    // not schedule a second one.
    private boolean advertised = false;

    // FIX: was a mutable "private static String" — the id never changes.
    private static final String VIDEO_ID = "iS1g8G_njx8";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Fabric.with(this, new Crashlytics());
        setContentView(R.layout.activity_main);
        ButterKnife.bind(this);
        setSupportActionBar(toolbar);

        // Defaults shown before the user picks anything.
        playerStyle = YouTubePlayer.PlayerStyle.DEFAULT;
        orientation = Orientation.AUTO;
        showAudioUi = true;
        showFadeAnim = true;

        Picasso.with(this)
                .load(YouTubeThumbnail.getUrlFromVideoId(VIDEO_ID, Quality.HIGH))
                .fit()
                .centerCrop()
                .into(thumbnail);

        play.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Launch the player with the currently selected options.
                Intent intent = new Intent(MainActivity.this, YouTubePlayerActivity.class);
                intent.putExtra(YouTubePlayerActivity.EXTRA_VIDEO_ID, VIDEO_ID);
                intent.putExtra(YouTubePlayerActivity.EXTRA_PLAYER_STYLE, playerStyle);
                intent.putExtra(YouTubePlayerActivity.EXTRA_ORIENTATION, orientation);
                intent.putExtra(YouTubePlayerActivity.EXTRA_SHOW_AUDIO_UI, showAudioUi);
                intent.putExtra(YouTubePlayerActivity.EXTRA_HANDLE_ERROR, true);
                if (showFadeAnim) {
                    intent.putExtra(YouTubePlayerActivity.EXTRA_ANIM_ENTER, R.anim.fade_in);
                    intent.putExtra(YouTubePlayerActivity.EXTRA_ANIM_EXIT, R.anim.fade_out);
                } else {
                    intent.putExtra(YouTubePlayerActivity.EXTRA_ANIM_ENTER, R.anim.modal_close_enter);
                    intent.putExtra(YouTubePlayerActivity.EXTRA_ANIM_EXIT, R.anim.modal_close_exit);
                }
                // intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                startActivityForResult(intent, 1);
            }
        });
        playerStyleBt.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                new MaterialDialog.Builder(MainActivity.this)
                        .title(getString(R.string.player_style))
                        .items(getPlayerStyleNames())
                        .itemsCallbackSingleChoice(playerStyle.ordinal(), new MaterialDialog.ListCallbackSingleChoice() {
                            @Override
                            public boolean onSelection(MaterialDialog materialDialog, View view, int which, CharSequence charSequence) {
                                playerStyle = YouTubePlayer.PlayerStyle.values()[which];
                                playerStyleTv.setText(playerStyle.name());
                                return true;
                            }
                        })
                        .positiveText(getString(R.string.choose))
                        .show();
            }
        });
        screenOrientationBt.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                new MaterialDialog.Builder(MainActivity.this)
                        .title(getString(R.string.screen_orientation))
                        .items(getScreenOrientationNames())
                        .itemsCallbackSingleChoice(orientation.ordinal(), new MaterialDialog.ListCallbackSingleChoice() {
                            @Override
                            public boolean onSelection(MaterialDialog materialDialog, View view, int which, CharSequence charSequence) {
                                orientation = Orientation.values()[which];
                                screenOrientationTv.setText(orientation.name());
                                return true;
                            }
                        })
                        .positiveText(getString(R.string.choose))
                        .show();
            }
        });
        volumeBt.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                new MaterialDialog.Builder(MainActivity.this)
                        .title(getString(R.string.volume_ui_control))
                        .items(new String[]{getString(R.string.show), getString(R.string.dont_show)})
                        .itemsCallbackSingleChoice(showAudioUi ? 0 : 1, new MaterialDialog.ListCallbackSingleChoice() {
                            @Override
                            public boolean onSelection(MaterialDialog materialDialog, View view, int which, CharSequence charSequence) {
                                showAudioUi = which == 0;
                                volumeTv.setText(showAudioUi ? getString(R.string.show) : getString(R.string.dont_show));
                                return true;
                            }
                        })
                        .positiveText(getString(R.string.choose))
                        .show();
            }
        });
        animationBt.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                new MaterialDialog.Builder(MainActivity.this)
                        .title(getString(R.string.animation_on_close))
                        .items(new String[]{getString(R.string.fade), getString(R.string.modal)})
                        .itemsCallbackSingleChoice(showFadeAnim ? 0 : 1, new MaterialDialog.ListCallbackSingleChoice() {
                            @Override
                            public boolean onSelection(MaterialDialog materialDialog, View view, int which, CharSequence charSequence) {
                                showFadeAnim = which == 0;
                                animationTv.setText(showFadeAnim ? getString(R.string.fade) : getString(R.string.modal));
                                return true;
                            }
                        })
                        .positiveText(getString(R.string.choose))
                        .show();
            }
        });
    }

    /** Returns the display names of all {@link Orientation} values. */
    private String[] getScreenOrientationNames() {
        Orientation[] states = Orientation.values();
        String[] names = new String[states.length];
        for (int i = 0; i < states.length; i++)
            names[i] = states[i].name();
        return names;
    }

    /** Returns the display names of all {@link YouTubePlayer.PlayerStyle} values. */
    private String[] getPlayerStyleNames() {
        YouTubePlayer.PlayerStyle[] states = YouTubePlayer.PlayerStyle.values();
        String[] names = new String[states.length];
        for (int i = 0; i < states.length; i++)
            names[i] = states[i].name();
        return names;
    }

    /**
     * Schedules the interstitial ad pop-up after the given delay.
     * NOTE(review): the anonymous Runnable holds an implicit reference to
     * this Activity until it fires, which can outlive onDestroy — consider
     * a weak reference if leak reports show up.
     */
    private void scheduleAdPopUp(final long delayMillis) {
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                AdHelper.popUpAd(MainActivity.this);
            }
        }, delayMillis);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // FIX: call through to super so fragment results (dispatched by
        // FragmentActivity) are not silently dropped.
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == 1) {
            advertised = true;
            scheduleAdPopUp(1000 * 5);
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // If playback never returned a result, still show the ad later.
        if (!advertised)
            scheduleAdPopUp(1000 * 10);
    }
}
| 4,465 |
9,953 | <reponame>mathieui/twisted<gh_stars>1000+
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web.resource}.
"""
from twisted.trial.unittest import TestCase
from twisted.python.compat import _PY3
from twisted.web.error import UnsupportedMethod
from twisted.web.resource import (
NOT_FOUND, FORBIDDEN, Resource, ErrorPage, NoResource, ForbiddenResource,
getChildForRequest)
from twisted.web.http_headers import Headers
from twisted.web.test.requesthelper import DummyRequest
class ErrorPageTests(TestCase):
    """
    Tests for L{ErrorPage}, L{NoResource}, and L{ForbiddenResource}.
    """
    errorPage = ErrorPage
    noResource = NoResource
    forbiddenResource = ForbiddenResource

    def test_getChild(self):
        """
        L{ErrorPage.getChild} returns the page itself for every child path,
        so the same error renders no matter how deep the requested URL is.
        """
        errorResource = self.errorPage(321, "foo", "bar")
        self.assertIdentical(
            errorResource.getChild(b"name", object()), errorResource)

    def _pageRenderingTest(self, page, code, brief, detail):
        """
        Render C{page} against a dummy request and verify the produced HTML,
        the response code set on the request, and the content-type header.
        """
        request = DummyRequest([b''])
        template = (
            u"\n"
            u"<html>\n"
            u"  <head><title>%s - %s</title></head>\n"
            u"  <body>\n"
            u"    <h1>%s</h1>\n"
            u"    <p>%s</p>\n"
            u"  </body>\n"
            u"</html>\n")
        expectedMarkup = (template % (code, brief, brief, detail)).encode('utf-8')
        self.assertEqual(page.render(request), expectedMarkup)
        self.assertEqual(request.responseCode, code)
        self.assertEqual(
            request.responseHeaders,
            Headers({b'content-type': [b'text/html; charset=utf-8']}))

    def test_errorPageRendering(self):
        """
        L{ErrorPage.render} returns HTML describing the error configured at
        construction time, and copies the configured response code onto the
        request.
        """
        responseCode = 321
        briefText = "brief description text"
        detailText = "much longer text might go here"
        self._pageRenderingTest(
            self.errorPage(responseCode, briefText, detailText),
            responseCode, briefText, detailText)

    def test_noResourceRendering(self):
        """
        L{NoResource} renders with the HTTP I{NOT FOUND} response code.
        """
        detailText = "long message"
        self._pageRenderingTest(
            self.noResource(detailText), NOT_FOUND, "No Such Resource",
            detailText)

    def test_forbiddenResourceRendering(self):
        """
        L{ForbiddenResource} renders with the HTTP I{FORBIDDEN} response code.
        """
        detailText = "longer message"
        self._pageRenderingTest(
            self.forbiddenResource(detailText), FORBIDDEN,
            "Forbidden Resource", detailText)
class DynamicChild(Resource):
    """
    A L{Resource} to be created on the fly by L{DynamicChildren}.
    """
    def __init__(self, path, request):
        """
        @param path: The path segment for which this child was created.
        @param request: The request which prompted creation of this child.
        """
        Resource.__init__(self)
        # Record the construction arguments so tests can inspect them.
        self.path = path
        self.request = request
class DynamicChildren(Resource):
    """
    A L{Resource} with dynamic children.
    """
    def getChild(self, path, request):
        # Invoked for any path with no static child registered; a fresh
        # DynamicChild is constructed for every lookup.
        return DynamicChild(path, request)
class BytesReturnedRenderable(Resource):
    """
    A L{Resource} with minimal capabilities to render a response.
    """
    def __init__(self, response):
        """
        @param response: A C{bytes} object giving the value to return from
            C{render_GET}.
        """
        Resource.__init__(self)
        self._response = response
    def render_GET(self, request):
        """
        Render a response to a I{GET} request by returning a short byte string
        to be written by the server.
        """
        # The fixed bytes supplied at construction time.
        return self._response
class ImplicitAllowedMethods(Resource):
    """
    A L{Resource} which implicitly defines its allowed methods by defining
    renderers to handle them.

    No C{allowedMethods} attribute is set; the supported methods are implied
    solely by the presence of the C{render_}-prefixed methods below.
    """
    def render_GET(self, request):
        # Intentionally empty: only the method's existence matters to tests.
        pass
    def render_PUT(self, request):
        # Intentionally empty: only the method's existence matters to tests.
        pass
class ResourceTests(TestCase):
    """
    Tests for L{Resource}.
    """
    def test_staticChildren(self):
        """
        L{Resource.putChild} adds a I{static} child to the resource. That child
        is returned from any call to L{Resource.getChildWithDefault} for the
        child's path.
        """
        parent = Resource()
        fooChild = Resource()
        barChild = Resource()
        parent.putChild(b"foo", fooChild)
        parent.putChild(b"bar", barChild)
        retrieved = parent.getChildWithDefault(b"foo", DummyRequest([]))
        self.assertIdentical(fooChild, retrieved)

    def test_dynamicChildren(self):
        """
        L{Resource.getChildWithDefault} delegates to L{Resource.getChild} when
        the requested path is not associated with any static child.
        """
        segment = b"foo"
        fakeRequest = DummyRequest([])
        produced = DynamicChildren().getChildWithDefault(segment, fakeRequest)
        self.assertIsInstance(produced, DynamicChild)
        self.assertEqual(produced.path, segment)
        self.assertIdentical(produced.request, fakeRequest)

    def test_staticChildPathType(self):
        """
        Test that passing the wrong type to putChild results in a warning,
        and a failure in Python 3
        """
        parent = Resource()
        textChild = Resource()
        noneChild = Resource()
        parent.putChild(u"foo", textChild)
        emitted = self.flushWarnings([self.test_staticChildPathType])
        self.assertEqual(len(emitted), 1)
        self.assertIn("Path segment must be bytes",
                      emitted[0]['message'])
        if _PY3:
            # We expect an error here because u"foo" != b"foo" on Py3k
            self.assertIsInstance(
                parent.getChildWithDefault(b"foo", DummyRequest([])),
                ErrorPage)
        parent.putChild(None, noneChild)
        emitted = self.flushWarnings([self.test_staticChildPathType])
        self.assertEqual(len(emitted), 1)
        self.assertIn("Path segment must be bytes",
                      emitted[0]['message'])

    def test_defaultHEAD(self):
        """
        When not otherwise overridden, L{Resource.render} treats a I{HEAD}
        request as if it were a I{GET} request.
        """
        body = b"insert response here"
        headRequest = DummyRequest([])
        headRequest.method = b'HEAD'
        renderable = BytesReturnedRenderable(body)
        self.assertEqual(body, renderable.render(headRequest))

    def test_explicitAllowedMethods(self):
        """
        The L{UnsupportedMethod} raised by L{Resource.render} for an unsupported
        request method has a C{allowedMethods} attribute set to the value of the
        C{allowedMethods} attribute of the L{Resource}, if it has one.
        """
        declared = [b'GET', b'HEAD', b'PUT']
        target = Resource()
        target.allowedMethods = declared
        badRequest = DummyRequest([])
        badRequest.method = b'FICTIONAL'
        raised = self.assertRaises(UnsupportedMethod, target.render, badRequest)
        self.assertEqual(set(declared), set(raised.allowedMethods))

    def test_implicitAllowedMethods(self):
        """
        The L{UnsupportedMethod} raised by L{Resource.render} for an unsupported
        request method has a C{allowedMethods} attribute set to a list of the
        methods supported by the L{Resource}, as determined by the
        I{render_}-prefixed methods which it defines, if C{allowedMethods} is
        not explicitly defined by the L{Resource}.
        """
        implied = set([b'GET', b'HEAD', b'PUT'])
        target = ImplicitAllowedMethods()
        badRequest = DummyRequest([])
        badRequest.method = b'FICTIONAL'
        raised = self.assertRaises(UnsupportedMethod, target.render, badRequest)
        self.assertEqual(implied, set(raised.allowedMethods))
class GetChildForRequestTests(TestCase):
    """
    Tests for L{getChildForRequest}.
    """
    def test_exhaustedPostPath(self):
        """
        L{getChildForRequest} returns whatever resource has been reached by the
        time the request's C{postpath} is empty.
        """
        emptyRequest = DummyRequest([])
        root = Resource()
        self.assertIdentical(root, getChildForRequest(root, emptyRequest))

    def test_leafResource(self):
        """
        L{getChildForRequest} returns the first resource it encounters with a
        C{isLeaf} attribute set to C{True}.
        """
        twoSegmentRequest = DummyRequest([b"foo", b"bar"])
        leaf = Resource()
        leaf.isLeaf = True
        self.assertIdentical(
            leaf, getChildForRequest(leaf, twoSegmentRequest))

    def test_postPathToPrePath(self):
        """
        As path segments from the request are traversed, they are taken from
        C{postpath} and put into C{prepath}.
        """
        traversalRequest = DummyRequest([b"foo", b"bar"])
        root = Resource()
        leaf = Resource()
        leaf.isLeaf = True
        root.putChild(b"foo", leaf)
        self.assertIdentical(leaf, getChildForRequest(root, traversalRequest))
        self.assertEqual(traversalRequest.prepath, [b"foo"])
        self.assertEqual(traversalRequest.postpath, [b"bar"])
| 3,830 |
1,510 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.rpc.control;
import com.google.protobuf.MessageLite;
import io.netty.channel.ChannelFuture;
import io.netty.channel.socket.SocketChannel;
import io.netty.util.concurrent.GenericFutureListener;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.BitControl.BitControlHandshake;
import org.apache.drill.exec.proto.BitControl.RpcType;
import org.apache.drill.exec.rpc.BasicServer;
import org.apache.drill.exec.rpc.OutOfMemoryHandler;
import org.apache.drill.exec.rpc.ProtobufLengthDecoder;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.security.ServerAuthenticationHandler;
/**
 * RPC server for the bit-to-bit control channel. Accepts connections from
 * other Drillbits, validates the control handshake, optionally wraps the
 * message handler with SASL authentication, and registers each accepted
 * connection with the {@link ConnectionManagerRegistry}.
 */
public class ControlServer extends BasicServer<RpcType, ControlConnection>{
//  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ControlServer.class);
  private final ControlConnectionConfig config;
  private final ConnectionManagerRegistry connectionRegistry;
  // NOTE(review): a single volatile field shared by all channels. getCloseHandler()
  // overwrites it for each new connection, so concurrent connection setup could
  // make getHandshakeHandler() observe another connection's proxy — confirm that
  // connection setup is effectively serialized before relying on this.
  private volatile ProxyCloseHandler proxyCloseHandler;
  public ControlServer(ControlConnectionConfig config, ConnectionManagerRegistry connectionRegistry) {
    super(ControlRpcConfig.getMapping(config.getBootstrapContext().getConfig(),
        config.getBootstrapContext().getExecutor()),
        config.getAllocator().getAsByteBufAllocator(),
        config.getBootstrapContext().getBitLoopGroup());
    this.config = config;
    this.connectionRegistry = connectionRegistry;
  }
  @Override
  public MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
    // Delegate to the shared lookup of prototype response messages per rpc type.
    return DefaultInstanceHandler.getResponseDefaultInstance(rpcType);
  }
  @Override
  protected GenericFutureListener<ChannelFuture> getCloseHandler(SocketChannel ch, ControlConnection connection) {
    // Wrap the default close handler in a mutable proxy so the handshake can
    // later swap in the connection-manager-aware close handler.
    this.proxyCloseHandler = new ProxyCloseHandler(super.getCloseHandler(ch, connection));
    return proxyCloseHandler;
  }
  @Override
  protected ControlConnection initRemoteConnection(SocketChannel channel) {
    super.initRemoteConnection(channel);
    // If an auth mechanism is configured, interpose the SASL handler in front
    // of the regular control message handler.
    final ControlConnection controlConnection = new ControlConnection(channel, "control server", config,
        config.getAuthMechanismToUse() == null
            ? config.getMessageHandler()
            : new ServerAuthenticationHandler<>(config.getMessageHandler(),
                RpcType.SASL_MESSAGE_VALUE, RpcType.SASL_MESSAGE),
        this);
    // Increase the connection count here since at this point it means that we already have the TCP connection.
    // Later when connection fails for any reason then we will decrease the counter based on Netty's connection close
    // handler.
    controlConnection.incConnectionCounter();
    return controlConnection;
  }
  @Override
  protected ServerHandshakeHandler<BitControlHandshake> getHandshakeHandler(final ControlConnection connection) {
    return new ServerHandshakeHandler<BitControlHandshake>(RpcType.HANDSHAKE, BitControlHandshake.PARSER) {
      @Override
      public MessageLite getHandshakeResponse(BitControlHandshake inbound) throws Exception {
//        logger.debug("Handling handshake from other bit. {}", inbound);
        // Reject peers speaking a different control RPC protocol version.
        if (inbound.getRpcVersion() != ControlRpcConfig.RPC_VERSION) {
          throw new RpcException(String.format("Invalid rpc version.  Expected %d, actual %d.",
              inbound.getRpcVersion(), ControlRpcConfig.RPC_VERSION));
        }
        // The peer must identify itself with a routable endpoint (non-empty
        // address and a positive control port) so we can register it.
        if (!inbound.hasEndpoint() ||
            inbound.getEndpoint().getAddress().isEmpty() ||
            inbound.getEndpoint().getControlPort() < 1) {
          throw new RpcException(String.format("RPC didn't provide valid counter endpoint information.  Received %s.",
              inbound.getEndpoint()));
        }
        connection.setEndpoint(inbound.getEndpoint());
        // add the
        ControlConnectionManager manager = connectionRegistry.getConnectionManager(inbound.getEndpoint());
        // update the close handler.
        proxyCloseHandler.setHandler(manager.getCloseHandlerCreator().getHandler(connection,
            proxyCloseHandler.getHandler()));
        // add to the connection manager.
        manager.addExternalConnection(connection);
        // Respond with our protocol version and, if configured, the list of
        // supported authentication mechanisms.
        final BitControlHandshake.Builder builder = BitControlHandshake.newBuilder();
        builder.setRpcVersion(ControlRpcConfig.RPC_VERSION);
        if (config.getAuthMechanismToUse() != null) {
          builder.addAllAuthenticationMechanisms(config.getAuthProvider().getAllFactoryNames());
        }
        return builder.build();
      }
    };
  }
  @Override
  protected ProtobufLengthDecoder getDecoder(BufferAllocator allocator, OutOfMemoryHandler outOfMemoryHandler) {
    return new ControlProtobufLengthDecoder(allocator, outOfMemoryHandler);
  }
  /**
   * Close listener that forwards to a replaceable delegate. Installed with the
   * server's default close handler and later re-pointed (during the handshake)
   * at the connection manager's close handler.
   */
  private class ProxyCloseHandler implements GenericFutureListener<ChannelFuture> {
    private volatile GenericFutureListener<ChannelFuture> handler;
    public ProxyCloseHandler(GenericFutureListener<ChannelFuture> handler) {
      super();
      this.handler = handler;
    }
    public GenericFutureListener<ChannelFuture> getHandler() {
      return handler;
    }
    public void setHandler(GenericFutureListener<ChannelFuture> handler) {
      this.handler = handler;
    }
    @Override
    public void operationComplete(ChannelFuture future) throws Exception {
      // Forward to whichever delegate is current at close time.
      handler.operationComplete(future);
    }
  }
}
| 1,910 |
882 | package water.api;
import java.util.Properties;
import water.NanoHTTPD;
import water.util.RString;
/**
*
* @author peta
*/
/**
 * Request handler that renders the HTTP 404 (Not Found) page. The error text
 * is supplied through the JSON argument {@code error} and the response status
 * is forced to 404.
 */
public class HTTP404 extends Request {
  /** Error description argument; defaults when the caller supplies none. */
  private transient final Str _error = new Str(ERROR,"Unknown error");

  public HTTP404() {
    _requestHelp = "Displays the HTTP 404 page with error specified in JSON"
            + " argument error.";
    _error._requestHelp = "Error description for the 404. Generally the URL not found.";
  }

  @Override public Response serve() {
    return Response.error(_error.value());
  }

  @Override protected String serveJava() {
    return _error.value();
  }

  @Override public water.NanoHTTPD.Response serve(NanoHTTPD server, Properties parms, RequestType type) {
    // Let the base class build the response, then override the status code.
    water.NanoHTTPD.Response resp = super.serve(server, parms, type);
    resp.status = NanoHTTPD.HTTP_NOTFOUND;
    return resp;
  }

  /** HTML fragment for the error body; %ERROR is substituted at render time. */
  private static final String _html =
      "<h3>HTTP 404 - Not Found</h3>"
      + "<div class='alert alert-error'>%ERROR</div>"
      ;

  @Override protected String build(Response response) {
    StringBuilder page = new StringBuilder();
    page.append("<div class='container'>")
        .append("<div class='row-fluid'>")
        .append("<div class='span12'>");
    page.append(buildResponseHeader(response));
    RString body = new RString(_html);
    body.replace("ERROR", response.error());
    page.append(body.toString());
    page.append("</div></div></div>");
    return page.toString();
  }
}
| 521 |
501 | <filename>pyscf/cc/qcisd_slow.py
#!/usr/bin/env python
# Copyright 2014-2021 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <NAME> <<EMAIL>>
#
'''
Restricted QCISD implementation
The 4-index integrals are saved on disk entirely (without using any symmetry).
Note MO integrals are treated in chemist's notation
Ref:
'''
import numpy
import numpy as np
from pyscf import lib
from pyscf.lib import logger
from pyscf.cc import rccsd_slow as rccsd
from pyscf.cc import rintermediates as imd
from pyscf import __config__
BLKMIN = getattr(__config__, 'cc_ccsd_blkmin', 4)
MEMORYMIN = getattr(__config__, 'cc_ccsd_memorymin', 2000)
def kernel(mycc, eris=None, t1=None, t2=None, max_cycle=50, tol=1e-8,
           tolnormt=1e-6, verbose=None):
    '''Same as ccsd.kernel with strings modified to correct the method name

    Iteratively solves the QCISD amplitude equations.

    Args:
        mycc : QCISD object supplying ao2mo/get_init_guess/update_amps/
            energy/run_diis and the DIIS configuration attributes.
        eris : MO-integral container from ``mycc.ao2mo`` (built if None).
        t1, t2 : initial amplitudes; any one left as None is filled in from
            ``mycc.get_init_guess``.
        max_cycle : maximum number of iterations.
        tol : convergence threshold on the correlation energy.
        tolnormt : convergence threshold on |delta(t1,t2)|.

    Returns:
        (converged, e_corr, t1, t2)
    '''
    log = logger.new_logger(mycc, verbose)
    if eris is None:
        eris = mycc.ao2mo(mycc.mo_coeff)
    # Fill in whichever amplitudes the caller did not supply.
    if t1 is None and t2 is None:
        t1, t2 = mycc.get_init_guess(eris)
    elif t2 is None:
        t2 = mycc.get_init_guess(eris)[1]
    elif t1 is None:
        # Bug fix: previously t1 was left as None when only t2 was supplied,
        # which crashed later in update_amps (t1.shape).
        t1 = mycc.get_init_guess(eris)[0]
    cput1 = cput0 = (logger.process_clock(), logger.perf_counter())
    eold = 0
    eccsd = mycc.energy(t1, t2, eris)
    log.info('Init E_corr(QCISD) = %.15g', eccsd)
    # Reuse a caller-provided DIIS object, or build one when DIIS is enabled.
    if isinstance(mycc.diis, lib.diis.DIIS):
        adiis = mycc.diis
    elif mycc.diis:
        adiis = lib.diis.DIIS(mycc, mycc.diis_file, incore=mycc.incore_complete)
        adiis.space = mycc.diis_space
    else:
        adiis = None
    conv = False
    for istep in range(max_cycle):
        t1new, t2new = mycc.update_amps(t1, t2, eris)
        # |t_new - t_old| measures amplitude convergence.
        tmpvec = mycc.amplitudes_to_vector(t1new, t2new)
        tmpvec -= mycc.amplitudes_to_vector(t1, t2)
        normt = numpy.linalg.norm(tmpvec)
        tmpvec = None
        if mycc.iterative_damping < 1.0:
            # Mix old and new amplitudes to damp oscillations.
            alpha = mycc.iterative_damping
            t1new = (1-alpha) * t1 + alpha * t1new
            t2new *= alpha
            t2new += (1-alpha) * t2
        t1, t2 = t1new, t2new
        t1new = t2new = None
        t1, t2 = mycc.run_diis(t1, t2, istep, normt, eccsd-eold, adiis)
        eold, eccsd = eccsd, mycc.energy(t1, t2, eris)
        log.info('cycle = %d  E_corr(QCISD) = %.15g  dE = %.9g  norm(t1,t2) = %.6g',
                 istep+1, eccsd, eccsd - eold, normt)
        cput1 = log.timer('QCISD iter', *cput1)
        # Converged when both the energy change and the amplitude change are small.
        if abs(eccsd-eold) < tol and normt < tolnormt:
            conv = True
            break
    log.timer('QCISD', *cput0)
    return conv, eccsd, t1, t2
def update_amps(cc, t1, t2, eris):
    """One step of the QCISD amplitude equations: returns (t1new, t2new).

    The CCSD intermediates from ``rintermediates`` are reused, but the T1
    dependence is removed where QCISD drops the quadratic T1 terms — note the
    ``0*t1`` arguments passed to cc_Foo/cc_Fvv and the W intermediates below.
    """
    # Ref: Hirata et al., J. Chem. Phys. 120, 2581 (2004) Eqs.(35)-(36)
    nocc, nvir = t1.shape
    fock = eris.fock
    fov = fock[:nocc,nocc:].copy()
    foo = fock[:nocc,:nocc].copy()
    fvv = fock[nocc:,nocc:].copy()
    Foo = imd.cc_Foo(0*t1,t2,eris)
    Fvv = imd.cc_Fvv(0*t1,t2,eris)
    Fov = imd.cc_Fov(t1,t2,eris)
    # Remove the orbital-energy diagonal; it is reintroduced through the
    # eia/eijab denominators at the end.
    Foo -= np.diag(np.diag(foo))
    Fvv -= np.diag(np.diag(fvv))
    # T1 equation
    t1new = np.asarray(fov).conj().copy()
    t1new  +=   lib.einsum('ac,ic->ia', Fvv, t1)
    t1new  +=  -lib.einsum('ki,ka->ia', Foo, t1)
    t1new  += 2*lib.einsum('kc,kica->ia', Fov, t2)
    t1new  +=  -lib.einsum('kc,ikca->ia', Fov, t2)
    t1new  += 2*lib.einsum('kcai,kc->ia', eris.ovvo, t1)
    t1new  +=  -lib.einsum('kiac,kc->ia', eris.oovv, t1)
    eris_ovvv = np.asarray(eris.ovvv)
    t1new += 2*lib.einsum('kdac,ikcd->ia', eris_ovvv, t2)
    t1new +=  -lib.einsum('kcad,ikcd->ia', eris_ovvv, t2)
    t1new +=-2*lib.einsum('kilc,klac->ia', eris.ooov, t2)
    t1new +=   lib.einsum('likc,klac->ia', eris.ooov, t2)
    # T2 equation
    t2new = np.asarray(eris.ovov).conj().transpose(0,2,1,3).copy()
    Loo = imd.Loo(0*t1, t2, eris)
    Lvv = imd.Lvv(0*t1, t2, eris)
    Loo -= np.diag(np.diag(foo))
    Lvv -= np.diag(np.diag(fvv))
    Woooo = imd.cc_Woooo(0*t1, t2, eris)
    Wvoov = imd.cc_Wvoov(0*t1, t2, eris)
    Wvovo = imd.cc_Wvovo(0*t1, t2, eris)
    Wvvvv = imd.cc_Wvvvv(0*t1, t2, eris)
    t2new += lib.einsum('klij,klab->ijab', Woooo, t2)
    t2new += lib.einsum('abcd,ijcd->ijab', Wvvvv, t2)
    tmp = lib.einsum('ac,ijcb->ijab', Lvv, t2)
    # Each contribution is symmetrized over (i<->j, a<->b).
    t2new += (tmp + tmp.transpose(1,0,3,2))
    tmp = lib.einsum('ki,kjab->ijab', Loo, t2)
    t2new -= (tmp + tmp.transpose(1,0,3,2))
    tmp  = 2*lib.einsum('akic,kjcb->ijab', Wvoov, t2)
    tmp -=   lib.einsum('akci,kjcb->ijab', Wvovo, t2)
    t2new += (tmp + tmp.transpose(1,0,3,2))
    tmp = lib.einsum('akic,kjbc->ijab', Wvoov, t2)
    t2new -= (tmp + tmp.transpose(1,0,3,2))
    tmp = lib.einsum('bkci,kjac->ijab', Wvovo, t2)
    t2new -= (tmp + tmp.transpose(1,0,3,2))
    tmp2 = np.asarray(eris.ovvv).conj().transpose(1,3,0,2)
    tmp = lib.einsum('abic,jc->ijab', tmp2, t1)
    t2new += (tmp + tmp.transpose(1,0,3,2))
    tmp2 = np.asarray(eris.ooov).transpose(3,1,2,0).conj()
    tmp = lib.einsum('akij,kb->ijab', tmp2, t1)
    t2new -= (tmp + tmp.transpose(1,0,3,2))
    # Divide by the orbital-energy denominators to produce the new amplitudes.
    mo_e = eris.fock.diagonal().real
    eia = mo_e[:nocc,None] - mo_e[None,nocc:]
    eijab = lib.direct_sum('ia,jb->ijab',eia,eia)
    t1new /= eia
    t2new /= eijab
    return t1new, t2new
class QCISD(rccsd.RCCSD):
    '''restricted QCISD
    '''

    def kernel(self, t1=None, t2=None, eris=None):
        """Alias for :meth:`qcisd`."""
        return self.qcisd(t1, t2, eris)

    def qcisd(self, t1=None, t2=None, eris=None):
        """Solve the QCISD equations; returns (e_corr, t1, t2)."""
        assert self.mo_coeff is not None
        assert self.mo_occ is not None
        if self.verbose >= logger.WARN:
            self.check_sanity()
        self.dump_flags()
        if eris is None:
            eris = self.ao2mo(self.mo_coeff)
        self.e_hf = getattr(eris, 'e_hf', None)
        if self.e_hf is None:
            self.e_hf = self._scf.e_tot
        results = kernel(self, eris, t1, t2, max_cycle=self.max_cycle,
                         tol=self.conv_tol, tolnormt=self.conv_tol_normt,
                         verbose=self.verbose)
        self.converged, self.e_corr, self.t1, self.t2 = results
        self._finalize()
        return self.e_corr, self.t1, self.t2

    def energy(self, t1=None, t2=None, eris=None):
        """QCISD correlation energy: the CCSD expression with T1 zeroed."""
        return rccsd.energy(self, t1*0, t2, eris)

    update_amps = update_amps

    def qcisd_t(self, t1=None, t2=None, eris=None):
        """Perturbative triples correction, QCISD(T)."""
        from pyscf.cc import qcisd_t_slow as qcisd_t
        if t1 is None: t1 = self.t1
        if t2 is None: t2 = self.t2
        if eris is None: eris = self.ao2mo(self.mo_coeff)
        return qcisd_t.kernel(self, eris, t1, t2, self.verbose)

    def density_fit(self, auxbasis=None, with_df=None):
        raise NotImplementedError
if __name__ == '__main__':
    # Smoke test: QCISD and QCISD(T) on methane, comparing against reference
    # total energies (the printed differences should be ~0).
    from pyscf import gto, scf
    mol = gto.Mole()
    mol.atom = """C  0.000  0.000  0.000
                  H  0.637  0.637  0.637
                  H -0.637 -0.637  0.637
                  H -0.637  0.637 -0.637
                  H  0.637 -0.637 -0.637"""
    mol.basis = 'cc-pvdz'
    mol.verbose = 7
    mol.spin = 0
    mol.build()
    mf = scf.RHF(mol).run(conv_tol=1e-14)
    # Freeze the carbon 1s core orbital.
    mycc = QCISD(mf, frozen=1)
    ecc, t1, t2 = mycc.kernel()
    print(mycc.e_tot - -40.383989)
    et = mycc.qcisd_t()
    print(mycc.e_tot+et - -40.387679)
| 3,987 |
450 | /*-------------------------------------------------------------------------
*
* pg_amop.h
* definition of the system "amop" relation (pg_amop)
* along with the relation's initial contents.
*
* The amop table identifies the operators associated with each index opclass.
*
* The primary key for this table is <amopclaid, amopsubtype, amopstrategy>.
* amopsubtype is equal to zero for an opclass's "default" operators
* (which normally are those that accept the opclass's opcintype on both
* left and right sides). Some index AMs allow nondefault operators to
* exist for a single strategy --- for example, in the btree AM nondefault
* operators can have right-hand input data types different from opcintype,
* and their amopsubtype is equal to the right-hand input data type.
*
* We also keep a unique index on <amopclaid, amopopr>, so that we can
* use a syscache to quickly answer questions of the form "is this operator
* in this opclass?". This implies that the same operator cannot be listed
* for multiple subtypes or strategy numbers of a single opclass.
*
*
* Portions Copyright (c) 1996-2008, PostgreSQL Global Development Group
* Portions Copyright (c) 1994, Regents of the University of California
*
* $PostgreSQL: pgsql/src/include/catalog/pg_amop.h,v 1.75 2006/10/04 00:30:07 momjian Exp $
*
* NOTES
* the genbki.sh script reads this file and generates .bki
* information from the DATA() statements.
*
*-------------------------------------------------------------------------
*/
#ifndef PG_AMOP_H
#define PG_AMOP_H
#include "catalog/genbki.h"
/* TIDYCAT_BEGINFAKEDEF
CREATE TABLE pg_amop
with (camelcase=AccessMethodOperator, oid=false, relid=2602)
(
amopclaid oid,
amopsubtype oid,
amopstrategy smallint,
amopreqcheck boolean,
amopopr oid
);
create unique index on pg_amop(amopclaid, amopsubtype, amopstrategy) with (indexid=2653, CamelCase=AccessMethodStrategy, syscacheid=AMOPSTRATEGY, syscache_nbuckets=64);
create unique index on pg_amop(amopopr, amopclaid) with (indexid=2654, CamelCase=AccessMethodOperator, syscacheid=AMOPOPID, syscache_nbuckets=64);
alter table pg_amop add fk amopclaid on pg_opclass(oid);
alter table pg_amop add fk amopsubtype on pg_type(oid);
alter table pg_amop add fk amopopr on pg_operator(oid);
TIDYCAT_ENDFAKEDEF
*/
/* ----------------
* pg_amop definition. cpp turns this into
* typedef struct FormData_pg_amop
* ----------------
*/
#define AccessMethodOperatorRelationId 2602
/* On-disk row layout of pg_amop; parsed by genbki.sh, so the field list
 * must stay in catalog column order. */
CATALOG(pg_amop,2602) BKI_WITHOUT_OIDS
{
	Oid			amopclaid;		/* the index opclass this entry is for */
	Oid			amopsubtype;	/* operator subtype, or zero if default */
	int2		amopstrategy;	/* operator strategy number */
	bool		amopreqcheck;	/* index hit must be rechecked */
	Oid			amopopr;		/* the operator's pg_operator OID */
} FormData_pg_amop;
/* ----------------
 *		Form_pg_amop corresponds to a pointer to a tuple with
 *		the format of pg_amop relation.
 * ----------------
 */
typedef FormData_pg_amop *Form_pg_amop;
/* ----------------
* compiler constants for pg_amop
* ----------------
*/
#define Natts_pg_amop 5
#define Anum_pg_amop_amopclaid 1
#define Anum_pg_amop_amopsubtype 2
#define Anum_pg_amop_amopstrategy 3
#define Anum_pg_amop_amopreqcheck 4
#define Anum_pg_amop_amopopr 5
/* ----------------
* initial contents of pg_amop
* ----------------
*/
/*
* btree int2_ops
*/
DATA(insert ( 1976 0 1 f 95 ));
DATA(insert ( 1976 0 2 f 522 ));
DATA(insert ( 1976 0 3 f 94 ));
DATA(insert ( 1976 0 4 f 524 ));
DATA(insert ( 1976 0 5 f 520 ));
/* crosstype operators int24 */
DATA(insert ( 1976 23 1 f 534 ));
DATA(insert ( 1976 23 2 f 540 ));
DATA(insert ( 1976 23 3 f 532 ));
DATA(insert ( 1976 23 4 f 542 ));
DATA(insert ( 1976 23 5 f 536 ));
/* crosstype operators int28 */
DATA(insert ( 1976 20 1 f 1864 ));
DATA(insert ( 1976 20 2 f 1866 ));
DATA(insert ( 1976 20 3 f 1862 ));
DATA(insert ( 1976 20 4 f 1867 ));
DATA(insert ( 1976 20 5 f 1865 ));
/*
* btree int4_ops
*/
DATA(insert ( 1978 0 1 f 97 ));
DATA(insert ( 1978 0 2 f 523 ));
DATA(insert ( 1978 0 3 f 96 ));
DATA(insert ( 1978 0 4 f 525 ));
DATA(insert ( 1978 0 5 f 521 ));
/* crosstype operators int42 */
DATA(insert ( 1978 21 1 f 535 ));
DATA(insert ( 1978 21 2 f 541 ));
DATA(insert ( 1978 21 3 f 533 ));
DATA(insert ( 1978 21 4 f 543 ));
DATA(insert ( 1978 21 5 f 537 ));
/* crosstype operators int48 */
DATA(insert ( 1978 20 1 f 37 ));
DATA(insert ( 1978 20 2 f 80 ));
DATA(insert ( 1978 20 3 f 15 ));
DATA(insert ( 1978 20 4 f 82 ));
DATA(insert ( 1978 20 5 f 76 ));
/*
* btree int8_ops
*/
DATA(insert ( 1980 0 1 f 412 ));
DATA(insert ( 1980 0 2 f 414 ));
DATA(insert ( 1980 0 3 f 410 ));
DATA(insert ( 1980 0 4 f 415 ));
DATA(insert ( 1980 0 5 f 413 ));
/* crosstype operators int82 */
DATA(insert ( 1980 21 1 f 1870 ));
DATA(insert ( 1980 21 2 f 1872 ));
DATA(insert ( 1980 21 3 f 1868 ));
DATA(insert ( 1980 21 4 f 1873 ));
DATA(insert ( 1980 21 5 f 1871 ));
/* crosstype operators int84 */
DATA(insert ( 1980 23 1 f 418 ));
DATA(insert ( 1980 23 2 f 420 ));
DATA(insert ( 1980 23 3 f 416 ));
DATA(insert ( 1980 23 4 f 430 ));
DATA(insert ( 1980 23 5 f 419 ));
/*
* btree oid_ops
*/
DATA(insert ( 1989 0 1 f 609 ));
DATA(insert ( 1989 0 2 f 611 ));
DATA(insert ( 1989 0 3 f 607 ));
DATA(insert ( 1989 0 4 f 612 ));
DATA(insert ( 1989 0 5 f 610 ));
/*
* btree tid_ops
*/
DATA(insert ( 2789 0 1 f 2799 ));
DATA(insert ( 2789 0 2 f 2801 ));
DATA(insert ( 2789 0 3 f 387 ));
DATA(insert ( 2789 0 4 f 2802 ));
DATA(insert ( 2789 0 5 f 2800 ));
/*
* btree oidvector_ops
*/
DATA(insert ( 1991 0 1 f 645 ));
DATA(insert ( 1991 0 2 f 647 ));
DATA(insert ( 1991 0 3 f 649 ));
DATA(insert ( 1991 0 4 f 648 ));
DATA(insert ( 1991 0 5 f 646 ));
/*
* btree float4_ops
*/
DATA(insert ( 1970 0 1 f 622 ));
DATA(insert ( 1970 0 2 f 624 ));
DATA(insert ( 1970 0 3 f 620 ));
DATA(insert ( 1970 0 4 f 625 ));
DATA(insert ( 1970 0 5 f 623 ));
/* crosstype operators float48 */
DATA(insert ( 1970 701 1 f 1122 ));
DATA(insert ( 1970 701 2 f 1124 ));
DATA(insert ( 1970 701 3 f 1120 ));
DATA(insert ( 1970 701 4 f 1125 ));
DATA(insert ( 1970 701 5 f 1123 ));
/*
* btree float8_ops
*/
DATA(insert ( 1972 0 1 f 672 ));
DATA(insert ( 1972 0 2 f 673 ));
DATA(insert ( 1972 0 3 f 670 ));
DATA(insert ( 1972 0 4 f 675 ));
DATA(insert ( 1972 0 5 f 674 ));
/* crosstype operators float84 */
DATA(insert ( 1972 700 1 f 1132 ));
DATA(insert ( 1972 700 2 f 1134 ));
DATA(insert ( 1972 700 3 f 1130 ));
DATA(insert ( 1972 700 4 f 1135 ));
DATA(insert ( 1972 700 5 f 1133 ));
/*
* btree char_ops
*/
DATA(insert ( 429 0 1 f 631 ));
DATA(insert ( 429 0 2 f 632 ));
DATA(insert ( 429 0 3 f 92 ));
DATA(insert ( 429 0 4 f 634 ));
DATA(insert ( 429 0 5 f 633 ));
/*
* btree name_ops
*/
DATA(insert ( 1986 0 1 f 660 ));
DATA(insert ( 1986 0 2 f 661 ));
DATA(insert ( 1986 0 3 f 93 ));
DATA(insert ( 1986 0 4 f 663 ));
DATA(insert ( 1986 0 5 f 662 ));
/*
* btree text_ops
*/
DATA(insert ( 1994 0 1 f 664 ));
DATA(insert ( 1994 0 2 f 665 ));
DATA(insert ( 1994 0 3 f 98 ));
DATA(insert ( 1994 0 4 f 667 ));
DATA(insert ( 1994 0 5 f 666 ));
/*
* btree bpchar_ops
*/
DATA(insert ( 426 0 1 f 1058 ));
DATA(insert ( 426 0 2 f 1059 ));
DATA(insert ( 426 0 3 f 1054 ));
DATA(insert ( 426 0 4 f 1061 ));
DATA(insert ( 426 0 5 f 1060 ));
/*
* btree varchar_ops (same operators as text_ops)
*/
DATA(insert ( 2003 0 1 f 664 ));
DATA(insert ( 2003 0 2 f 665 ));
DATA(insert ( 2003 0 3 f 98 ));
DATA(insert ( 2003 0 4 f 667 ));
DATA(insert ( 2003 0 5 f 666 ));
/*
* btree bytea_ops
*/
DATA(insert ( 428 0 1 f 1957 ));
DATA(insert ( 428 0 2 f 1958 ));
DATA(insert ( 428 0 3 f 1955 ));
DATA(insert ( 428 0 4 f 1960 ));
DATA(insert ( 428 0 5 f 1959 ));
/*
* btree abstime_ops
*/
DATA(insert ( 421 0 1 f 562 ));
DATA(insert ( 421 0 2 f 564 ));
DATA(insert ( 421 0 3 f 560 ));
DATA(insert ( 421 0 4 f 565 ));
DATA(insert ( 421 0 5 f 563 ));
/*
* btree date_ops
*/
DATA(insert ( 434 0 1 f 1095 ));
DATA(insert ( 434 0 2 f 1096 ));
DATA(insert ( 434 0 3 f 1093 ));
DATA(insert ( 434 0 4 f 1098 ));
DATA(insert ( 434 0 5 f 1097 ));
/* crosstype operators vs timestamp */
DATA(insert ( 434 1114 1 f 2345 ));
DATA(insert ( 434 1114 2 f 2346 ));
DATA(insert ( 434 1114 3 f 2347 ));
DATA(insert ( 434 1114 4 f 2348 ));
DATA(insert ( 434 1114 5 f 2349 ));
/* crosstype operators vs timestamptz */
DATA(insert ( 434 1184 1 f 2358 ));
DATA(insert ( 434 1184 2 f 2359 ));
DATA(insert ( 434 1184 3 f 2360 ));
DATA(insert ( 434 1184 4 f 2361 ));
DATA(insert ( 434 1184 5 f 2362 ));
/*
* btree time_ops
*/
DATA(insert ( 1996 0 1 f 1110 ));
DATA(insert ( 1996 0 2 f 1111 ));
DATA(insert ( 1996 0 3 f 1108 ));
DATA(insert ( 1996 0 4 f 1113 ));
DATA(insert ( 1996 0 5 f 1112 ));
/*
* btree timetz_ops
*/
DATA(insert ( 2000 0 1 f 1552 ));
DATA(insert ( 2000 0 2 f 1553 ));
DATA(insert ( 2000 0 3 f 1550 ));
DATA(insert ( 2000 0 4 f 1555 ));
DATA(insert ( 2000 0 5 f 1554 ));
/*
* btree timestamp_ops
*/
DATA(insert ( 2039 0 1 f 2062 ));
DATA(insert ( 2039 0 2 f 2063 ));
DATA(insert ( 2039 0 3 f 2060 ));
DATA(insert ( 2039 0 4 f 2065 ));
DATA(insert ( 2039 0 5 f 2064 ));
/* crosstype operators vs date */
DATA(insert ( 2039 1082 1 f 2371 ));
DATA(insert ( 2039 1082 2 f 2372 ));
DATA(insert ( 2039 1082 3 f 2373 ));
DATA(insert ( 2039 1082 4 f 2374 ));
DATA(insert ( 2039 1082 5 f 2375 ));
/* crosstype operators vs timestamptz */
DATA(insert ( 2039 1184 1 f 2534 ));
DATA(insert ( 2039 1184 2 f 2535 ));
DATA(insert ( 2039 1184 3 f 2536 ));
DATA(insert ( 2039 1184 4 f 2537 ));
DATA(insert ( 2039 1184 5 f 2538 ));
/*
* btree timestamptz_ops
*/
DATA(insert ( 1998 0 1 f 1322 ));
DATA(insert ( 1998 0 2 f 1323 ));
DATA(insert ( 1998 0 3 f 1320 ));
DATA(insert ( 1998 0 4 f 1325 ));
DATA(insert ( 1998 0 5 f 1324 ));
/* crosstype operators vs date */
DATA(insert ( 1998 1082 1 f 2384 ));
DATA(insert ( 1998 1082 2 f 2385 ));
DATA(insert ( 1998 1082 3 f 2386 ));
DATA(insert ( 1998 1082 4 f 2387 ));
DATA(insert ( 1998 1082 5 f 2388 ));
/* crosstype operators vs timestamp */
DATA(insert ( 1998 1114 1 f 2540 ));
DATA(insert ( 1998 1114 2 f 2541 ));
DATA(insert ( 1998 1114 3 f 2542 ));
DATA(insert ( 1998 1114 4 f 2543 ));
DATA(insert ( 1998 1114 5 f 2544 ));
/*
* btree interval_ops
*/
DATA(insert ( 1982 0 1 f 1332 ));
DATA(insert ( 1982 0 2 f 1333 ));
DATA(insert ( 1982 0 3 f 1330 ));
DATA(insert ( 1982 0 4 f 1335 ));
DATA(insert ( 1982 0 5 f 1334 ));
/*
* btree macaddr
*/
DATA(insert ( 1984 0 1 f 1222 ));
DATA(insert ( 1984 0 2 f 1223 ));
DATA(insert ( 1984 0 3 f 1220 ));
DATA(insert ( 1984 0 4 f 1225 ));
DATA(insert ( 1984 0 5 f 1224 ));
/*
* btree inet
*/
DATA(insert ( 1974 0 1 f 1203 ));
DATA(insert ( 1974 0 2 f 1204 ));
DATA(insert ( 1974 0 3 f 1201 ));
DATA(insert ( 1974 0 4 f 1206 ));
DATA(insert ( 1974 0 5 f 1205 ));
/*
* btree cidr
*/
DATA(insert ( 432 0 1 f 1203 ));
DATA(insert ( 432 0 2 f 1204 ));
DATA(insert ( 432 0 3 f 1201 ));
DATA(insert ( 432 0 4 f 1206 ));
DATA(insert ( 432 0 5 f 1205 ));
/*
* btree numeric
*/
DATA(insert ( 1988 0 1 f 1754 ));
DATA(insert ( 1988 0 2 f 1755 ));
DATA(insert ( 1988 0 3 f 1752 ));
DATA(insert ( 1988 0 4 f 1757 ));
DATA(insert ( 1988 0 5 f 1756 ));
/*
* btree bool
*/
DATA(insert ( 424 0 1 f 58 ));
DATA(insert ( 424 0 2 f 1694 ));
DATA(insert ( 424 0 3 f 91 ));
DATA(insert ( 424 0 4 f 1695 ));
DATA(insert ( 424 0 5 f 59 ));
/*
* btree bit
*/
DATA(insert ( 423 0 1 f 1786 ));
DATA(insert ( 423 0 2 f 1788 ));
DATA(insert ( 423 0 3 f 1784 ));
DATA(insert ( 423 0 4 f 1789 ));
DATA(insert ( 423 0 5 f 1787 ));
/*
* btree varbit
*/
DATA(insert ( 2002 0 1 f 1806 ));
DATA(insert ( 2002 0 2 f 1808 ));
DATA(insert ( 2002 0 3 f 1804 ));
DATA(insert ( 2002 0 4 f 1809 ));
DATA(insert ( 2002 0 5 f 1807 ));
/*
* btree text pattern
*/
DATA(insert ( 2095 0 1 f 2314 ));
DATA(insert ( 2095 0 2 f 2315 ));
DATA(insert ( 2095 0 3 f 2316 ));
DATA(insert ( 2095 0 4 f 2317 ));
DATA(insert ( 2095 0 5 f 2318 ));
/*
* btree varchar pattern (same operators as text)
*/
DATA(insert ( 2096 0 1 f 2314 ));
DATA(insert ( 2096 0 2 f 2315 ));
DATA(insert ( 2096 0 3 f 2316 ));
DATA(insert ( 2096 0 4 f 2317 ));
DATA(insert ( 2096 0 5 f 2318 ));
/*
* btree bpchar pattern
*/
DATA(insert ( 2097 0 1 f 2326 ));
DATA(insert ( 2097 0 2 f 2327 ));
DATA(insert ( 2097 0 3 f 2328 ));
DATA(insert ( 2097 0 4 f 2329 ));
DATA(insert ( 2097 0 5 f 2330 ));
/*
* btree name pattern
*/
DATA(insert ( 2098 0 1 f 2332 ));
DATA(insert ( 2098 0 2 f 2333 ));
DATA(insert ( 2098 0 3 f 2334 ));
DATA(insert ( 2098 0 4 f 2335 ));
DATA(insert ( 2098 0 5 f 2336 ));
/*
* btree money_ops
*/
DATA(insert ( 2099 0 1 f 902 ));
DATA(insert ( 2099 0 2 f 904 ));
DATA(insert ( 2099 0 3 f 900 ));
DATA(insert ( 2099 0 4 f 905 ));
DATA(insert ( 2099 0 5 f 903 ));
/*
* btree reltime_ops
*/
DATA(insert ( 2233 0 1 f 568 ));
DATA(insert ( 2233 0 2 f 570 ));
DATA(insert ( 2233 0 3 f 566 ));
DATA(insert ( 2233 0 4 f 571 ));
DATA(insert ( 2233 0 5 f 569 ));
/*
* btree tinterval_ops
*/
DATA(insert ( 2234 0 1 f 813 ));
DATA(insert ( 2234 0 2 f 815 ));
DATA(insert ( 2234 0 3 f 811 ));
DATA(insert ( 2234 0 4 f 816 ));
DATA(insert ( 2234 0 5 f 814 ));
/*
* btree array_ops
*/
DATA(insert ( 397 0 1 f 1072 ));
DATA(insert ( 397 0 2 f 1074 ));
DATA(insert ( 397 0 3 f 1070 ));
DATA(insert ( 397 0 4 f 1075 ));
DATA(insert ( 397 0 5 f 1073 ));
/*
* hash index _ops
*/
/* bpchar_ops */
DATA(insert ( 427 0 1 f 1054 ));
/* char_ops */
DATA(insert ( 431 0 1 f 92 ));
/* cidr_ops */
DATA(insert ( 433 0 1 f 1201 ));
/* date_ops */
DATA(insert ( 435 0 1 f 1093 ));
/* float4_ops */
DATA(insert ( 1971 0 1 f 620 ));
/* float8_ops */
DATA(insert ( 1973 0 1 f 670 ));
/* inet_ops */
DATA(insert ( 1975 0 1 f 1201 ));
/* int2_ops */
DATA(insert ( 1977 0 1 f 94 ));
/* int4_ops */
DATA(insert ( 1979 0 1 f 96 ));
/* int8_ops */
DATA(insert ( 1981 0 1 f 410 ));
/* interval_ops */
DATA(insert ( 1983 0 1 f 1330 ));
/* macaddr_ops */
DATA(insert ( 1985 0 1 f 1220 ));
/* name_ops */
DATA(insert ( 1987 0 1 f 93 ));
/* oid_ops */
DATA(insert ( 1990 0 1 f 607 ));
/* oidvector_ops */
DATA(insert ( 1992 0 1 f 649 ));
/* text_ops */
DATA(insert ( 1995 0 1 f 98 ));
/* time_ops */
DATA(insert ( 1997 0 1 f 1108 ));
/* timestamptz_ops */
DATA(insert ( 1999 0 1 f 1320 ));
/* timetz_ops */
DATA(insert ( 2001 0 1 f 1550 ));
/* varchar_ops */
DATA(insert ( 2004 0 1 f 98 ));
/* timestamp_ops */
DATA(insert ( 2040 0 1 f 2060 ));
/* bool_ops */
DATA(insert ( 2222 0 1 f 91 ));
/* bytea_ops */
DATA(insert ( 2223 0 1 f 1955 ));
/* int2vector_ops */
DATA(insert ( 2224 0 1 f 386 ));
/* xid_ops */
DATA(insert ( 2225 0 1 f 352 ));
/* cid_ops */
DATA(insert ( 2226 0 1 f 385 ));
/* abstime_ops */
DATA(insert ( 2227 0 1 f 560 ));
/* reltime_ops */
DATA(insert ( 2228 0 1 f 566 ));
/* text_pattern_ops */
DATA(insert ( 2229 0 1 f 2316 ));
/* varchar_pattern_ops */
DATA(insert ( 2230 0 1 f 2316 ));
/* bpchar_pattern_ops */
DATA(insert ( 2231 0 1 f 2328 ));
/* name_pattern_ops */
DATA(insert ( 2232 0 1 f 2334 ));
/* aclitem_ops */
DATA(insert ( 2235 0 1 f 974 ));
/* numeric_ops */
DATA(insert ( 7676 0 1 f 1752 ));
/*
* gist box_ops
*/
DATA(insert ( 2593 0 1 f 493 ));
DATA(insert ( 2593 0 2 f 494 ));
DATA(insert ( 2593 0 3 f 500 ));
DATA(insert ( 2593 0 4 f 495 ));
DATA(insert ( 2593 0 5 f 496 ));
DATA(insert ( 2593 0 6 f 499 ));
DATA(insert ( 2593 0 7 f 498 ));
DATA(insert ( 2593 0 8 f 497 ));
DATA(insert ( 2593 0 9 f 2571 ));
DATA(insert ( 2593 0 10 f 2570 ));
DATA(insert ( 2593 0 11 f 2573 ));
DATA(insert ( 2593 0 12 f 2572 ));
DATA(insert ( 2593 0 13 f 2863 ));
DATA(insert ( 2593 0 14 f 2862 ));
/*
* gist poly_ops (supports polygons)
*/
DATA(insert ( 2594 0 1 t 485 ));
DATA(insert ( 2594 0 2 t 486 ));
DATA(insert ( 2594 0 3 t 492 ));
DATA(insert ( 2594 0 4 t 487 ));
DATA(insert ( 2594 0 5 t 488 ));
DATA(insert ( 2594 0 6 t 491 ));
DATA(insert ( 2594 0 7 t 490 ));
DATA(insert ( 2594 0 8 t 489 ));
DATA(insert ( 2594 0 9 t 2575 ));
DATA(insert ( 2594 0 10 t 2574 ));
DATA(insert ( 2594 0 11 t 2577 ));
DATA(insert ( 2594 0 12 t 2576 ));
DATA(insert ( 2594 0 13 t 2861 ));
DATA(insert ( 2594 0 14 t 2860 ));
/*
* gist circle_ops
*/
DATA(insert ( 2595 0 1 t 1506 ));
DATA(insert ( 2595 0 2 t 1507 ));
DATA(insert ( 2595 0 3 t 1513 ));
DATA(insert ( 2595 0 4 t 1508 ));
DATA(insert ( 2595 0 5 t 1509 ));
DATA(insert ( 2595 0 6 t 1512 ));
DATA(insert ( 2595 0 7 t 1511 ));
DATA(insert ( 2595 0 8 t 1510 ));
DATA(insert ( 2595 0 9 t 2589 ));
DATA(insert ( 2595 0 10 t 1515 ));
DATA(insert ( 2595 0 11 t 1514 ));
DATA(insert ( 2595 0 12 t 2590 ));
DATA(insert ( 2595 0 13 t 2865 ));
DATA(insert ( 2595 0 14 t 2864 ));
/*
* gin _int4_ops
*/
DATA(insert ( 2745 0 1 f 2750 ));
DATA(insert ( 2745 0 2 f 2751 ));
DATA(insert ( 2745 0 3 t 2752 ));
DATA(insert ( 2745 0 4 t 1070 ));
/*
* gin _text_ops
*/
DATA(insert ( 2746 0 1 f 2750 ));
DATA(insert ( 2746 0 2 f 2751 ));
DATA(insert ( 2746 0 3 t 2752 ));
DATA(insert ( 2746 0 4 t 1070 ));
/*
* gin _abstime_ops
*/
DATA(insert ( 2753 0 1 f 2750 ));
DATA(insert ( 2753 0 2 f 2751 ));
DATA(insert ( 2753 0 3 t 2752 ));
DATA(insert ( 2753 0 4 t 1070 ));
/*
* gin _bit_ops
*/
DATA(insert ( 2754 0 1 f 2750 ));
DATA(insert ( 2754 0 2 f 2751 ));
DATA(insert ( 2754 0 3 t 2752 ));
DATA(insert ( 2754 0 4 t 1070 ));
/*
* gin _bool_ops
*/
DATA(insert ( 2755 0 1 f 2750 ));
DATA(insert ( 2755 0 2 f 2751 ));
DATA(insert ( 2755 0 3 t 2752 ));
DATA(insert ( 2755 0 4 t 1070 ));
/*
* gin _bpchar_ops
*/
DATA(insert ( 2756 0 1 f 2750 ));
DATA(insert ( 2756 0 2 f 2751 ));
DATA(insert ( 2756 0 3 t 2752 ));
DATA(insert ( 2756 0 4 t 1070 ));
/*
* gin _bytea_ops
*/
DATA(insert ( 2757 0 1 f 2750 ));
DATA(insert ( 2757 0 2 f 2751 ));
DATA(insert ( 2757 0 3 t 2752 ));
DATA(insert ( 2757 0 4 t 1070 ));
/*
* gin _char_ops
*/
DATA(insert ( 2758 0 1 f 2750 ));
DATA(insert ( 2758 0 2 f 2751 ));
DATA(insert ( 2758 0 3 t 2752 ));
DATA(insert ( 2758 0 4 t 1070 ));
/*
* gin _cidr_ops
*/
DATA(insert ( 2759 0 1 f 2750 ));
DATA(insert ( 2759 0 2 f 2751 ));
DATA(insert ( 2759 0 3 t 2752 ));
DATA(insert ( 2759 0 4 t 1070 ));
/*
* gin _date_ops
*/
DATA(insert ( 2760 0 1 f 2750 ));
DATA(insert ( 2760 0 2 f 2751 ));
DATA(insert ( 2760 0 3 t 2752 ));
DATA(insert ( 2760 0 4 t 1070 ));
/*
* gin _float4_ops
*/
DATA(insert ( 2761 0 1 f 2750 ));
DATA(insert ( 2761 0 2 f 2751 ));
DATA(insert ( 2761 0 3 t 2752 ));
DATA(insert ( 2761 0 4 t 1070 ));
/*
* gin _float8_ops
*/
DATA(insert ( 2762 0 1 f 2750 ));
DATA(insert ( 2762 0 2 f 2751 ));
DATA(insert ( 2762 0 3 t 2752 ));
DATA(insert ( 2762 0 4 t 1070 ));
/*
* gin _inet_ops
*/
DATA(insert ( 2763 0 1 f 2750 ));
DATA(insert ( 2763 0 2 f 2751 ));
DATA(insert ( 2763 0 3 t 2752 ));
DATA(insert ( 2763 0 4 t 1070 ));
/*
* gin _int2_ops
*/
DATA(insert ( 2764 0 1 f 2750 ));
DATA(insert ( 2764 0 2 f 2751 ));
DATA(insert ( 2764 0 3 t 2752 ));
DATA(insert ( 2764 0 4 t 1070 ));
/*
* gin _int8_ops
*/
DATA(insert ( 2765 0 1 f 2750 ));
DATA(insert ( 2765 0 2 f 2751 ));
DATA(insert ( 2765 0 3 t 2752 ));
DATA(insert ( 2765 0 4 t 1070 ));
/*
* gin _interval_ops
*/
DATA(insert ( 2766 0 1 f 2750 ));
DATA(insert ( 2766 0 2 f 2751 ));
DATA(insert ( 2766 0 3 t 2752 ));
DATA(insert ( 2766 0 4 t 1070 ));
/*
* gin _macaddr_ops
*/
DATA(insert ( 2767 0 1 f 2750 ));
DATA(insert ( 2767 0 2 f 2751 ));
DATA(insert ( 2767 0 3 t 2752 ));
DATA(insert ( 2767 0 4 t 1070 ));
/*
* gin _name_ops
*/
DATA(insert ( 2768 0 1 f 2750 ));
DATA(insert ( 2768 0 2 f 2751 ));
DATA(insert ( 2768 0 3 t 2752 ));
DATA(insert ( 2768 0 4 t 1070 ));
/*
* gin _numeric_ops
*/
DATA(insert ( 2769 0 1 f 2750 ));
DATA(insert ( 2769 0 2 f 2751 ));
DATA(insert ( 2769 0 3 t 2752 ));
DATA(insert ( 2769 0 4 t 1070 ));
/*
* gin _oid_ops
*/
DATA(insert ( 2770 0 1 f 2750 ));
DATA(insert ( 2770 0 2 f 2751 ));
DATA(insert ( 2770 0 3 t 2752 ));
DATA(insert ( 2770 0 4 t 1070 ));
/*
* gin _oidvector_ops
*/
DATA(insert ( 2771 0 1 f 2750 ));
DATA(insert ( 2771 0 2 f 2751 ));
DATA(insert ( 2771 0 3 t 2752 ));
DATA(insert ( 2771 0 4 t 1070 ));
/*
* gin _time_ops
*/
DATA(insert ( 2772 0 1 f 2750 ));
DATA(insert ( 2772 0 2 f 2751 ));
DATA(insert ( 2772 0 3 t 2752 ));
DATA(insert ( 2772 0 4 t 1070 ));
/*
* gin _timestamptz_ops
*/
DATA(insert ( 2773 0 1 f 2750 ));
DATA(insert ( 2773 0 2 f 2751 ));
DATA(insert ( 2773 0 3 t 2752 ));
DATA(insert ( 2773 0 4 t 1070 ));
/*
* gin _timetz_ops
*/
DATA(insert ( 2774 0 1 f 2750 ));
DATA(insert ( 2774 0 2 f 2751 ));
DATA(insert ( 2774 0 3 t 2752 ));
DATA(insert ( 2774 0 4 t 1070 ));
/*
* gin _varbit_ops
*/
DATA(insert ( 2775 0 1 f 2750 ));
DATA(insert ( 2775 0 2 f 2751 ));
DATA(insert ( 2775 0 3 t 2752 ));
DATA(insert ( 2775 0 4 t 1070 ));
/*
* gin _varchar_ops
*/
DATA(insert ( 2776 0 1 f 2750 ));
DATA(insert ( 2776 0 2 f 2751 ));
DATA(insert ( 2776 0 3 t 2752 ));
DATA(insert ( 2776 0 4 t 1070 ));
/*
* gin _timestamp_ops
*/
DATA(insert ( 2777 0 1 f 2750 ));
DATA(insert ( 2777 0 2 f 2751 ));
DATA(insert ( 2777 0 3 t 2752 ));
DATA(insert ( 2777 0 4 t 1070 ));
/*
* gin _money_ops
*/
DATA(insert ( 2778 0 1 f 2750 ));
DATA(insert ( 2778 0 2 f 2751 ));
DATA(insert ( 2778 0 3 t 2752 ));
DATA(insert ( 2778 0 4 t 1070 ));
/*
* gin _reltime_ops
*/
DATA(insert ( 2779 0 1 f 2750 ));
DATA(insert ( 2779 0 2 f 2751 ));
DATA(insert ( 2779 0 3 t 2752 ));
DATA(insert ( 2779 0 4 t 1070 ));
/*
* gin _tinterval_ops
*/
DATA(insert ( 2780 0 1 f 2750 ));
DATA(insert ( 2780 0 2 f 2751 ));
DATA(insert ( 2780 0 3 t 2752 ));
DATA(insert ( 2780 0 4 t 1070 ));
/*
* the operators for the on-disk bitmap index.
*/
/*
* on-disk bitmap index abstime
*/
DATA(insert ( 3014 0 1 f 562 ));
DATA(insert ( 3014 0 2 f 564 ));
DATA(insert ( 3014 0 3 f 560 ));
DATA(insert ( 3014 0 4 f 565 ));
DATA(insert ( 3014 0 5 f 563 ));
/*
* on-disk bitmap index array
*/
DATA(insert ( 3015 0 1 f 1072 ));
DATA(insert ( 3015 0 2 f 1074 ));
DATA(insert ( 3015 0 3 f 1070 ));
DATA(insert ( 3015 0 4 f 1075 ));
DATA(insert ( 3015 0 5 f 1073 ));
/*
* on-disk bitmap index bit
*/
DATA(insert ( 3016 0 1 f 1786 ));
DATA(insert ( 3016 0 2 f 1788 ));
DATA(insert ( 3016 0 3 f 1784 ));
DATA(insert ( 3016 0 4 f 1789 ));
DATA(insert ( 3016 0 5 f 1787 ));
/*
* on-disk bitmap index bool
*/
DATA(insert ( 3017 0 1 f 58 ));
DATA(insert ( 3017 0 2 f 1694 ));
DATA(insert ( 3017 0 3 f 91 ));
DATA(insert ( 3017 0 4 f 1695 ));
DATA(insert ( 3017 0 5 f 59 ));
/*
* on-disk bitmap index bpchar
*/
DATA(insert ( 3018 0 1 f 1058 ));
DATA(insert ( 3018 0 2 f 1059 ));
DATA(insert ( 3018 0 3 f 1054 ));
DATA(insert ( 3018 0 4 f 1061 ));
DATA(insert ( 3018 0 5 f 1060 ));
/*
* on-disk bitmap index bytea
*/
DATA(insert ( 3019 0 1 f 1957 ));
DATA(insert ( 3019 0 2 f 1958 ));
DATA(insert ( 3019 0 3 f 1955 ));
DATA(insert ( 3019 0 4 f 1960 ));
DATA(insert ( 3019 0 5 f 1959 ));
/*
* on-disk bitmap index char
*/
DATA(insert ( 3020 0 1 f 631 ));
DATA(insert ( 3020 0 2 f 632 ));
DATA(insert ( 3020 0 3 f 92 ));
DATA(insert ( 3020 0 4 f 634 ));
DATA(insert ( 3020 0 5 f 633 ));
/*
* on-disk bitmap index cidr
*/
DATA(insert ( 3021 0 1 f 1203 ));
DATA(insert ( 3021 0 2 f 1204 ));
DATA(insert ( 3021 0 3 f 1201 ));
DATA(insert ( 3021 0 4 f 1206 ));
DATA(insert ( 3021 0 5 f 1205 ));
/*
* on-disk bitmap index date
*/
DATA(insert ( 3022 0 1 f 1095 ));
DATA(insert ( 3022 0 2 f 1096 ));
DATA(insert ( 3022 0 3 f 1093 ));
DATA(insert ( 3022 0 4 f 1098 ));
DATA(insert ( 3022 0 5 f 1097 ));
/*
* date-timestamp
*/
DATA(insert ( 3022 1114 1 f 2345 ));
DATA(insert ( 3022 1114 2 f 2346 ));
DATA(insert ( 3022 1114 3 f 2347 ));
DATA(insert ( 3022 1114 4 f 2348 ));
DATA(insert ( 3022 1114 5 f 2349 ));
/*
* date-timestamptz
*/
DATA(insert ( 3022 1184 1 f 2358 ));
DATA(insert ( 3022 1184 2 f 2359 ));
DATA(insert ( 3022 1184 3 f 2360 ));
DATA(insert ( 3022 1184 4 f 2361 ));
DATA(insert ( 3022 1184 5 f 2362 ));
/*
* float4
*/
DATA(insert ( 3023 0 1 f 622 ));
DATA(insert ( 3023 0 2 f 624 ));
DATA(insert ( 3023 0 3 f 620 ));
DATA(insert ( 3023 0 4 f 625 ));
DATA(insert ( 3023 0 5 f 623 ));
/*
* float48
*/
DATA(insert ( 3023 701 1 f 1122 ));
DATA(insert ( 3023 701 2 f 1124 ));
DATA(insert ( 3023 701 3 f 1120 ));
DATA(insert ( 3023 701 4 f 1125 ));
DATA(insert ( 3023 701 5 f 1123 ));
/*
* float8
*/
DATA(insert ( 3024 0 1 f 672 ));
DATA(insert ( 3024 0 2 f 673 ));
DATA(insert ( 3024 0 3 f 670 ));
DATA(insert ( 3024 0 4 f 675 ));
DATA(insert ( 3024 0 5 f 674 ));
/*
* float84
*/
DATA(insert ( 3024 700 1 f 1132 ));
DATA(insert ( 3024 700 2 f 1134 ));
DATA(insert ( 3024 700 3 f 1130 ));
DATA(insert ( 3024 700 4 f 1135 ));
DATA(insert ( 3024 700 5 f 1133 ));
/*
* inet
*/
DATA(insert ( 3025 0 1 f 1203 ));
DATA(insert ( 3025 0 2 f 1204 ));
DATA(insert ( 3025 0 3 f 1201 ));
DATA(insert ( 3025 0 4 f 1206 ));
DATA(insert ( 3025 0 5 f 1205 ));
/*
* int2
*/
DATA(insert ( 3026 0 1 f 95 ));
DATA(insert ( 3026 0 2 f 522 ));
DATA(insert ( 3026 0 3 f 94 ));
DATA(insert ( 3026 0 4 f 524 ));
DATA(insert ( 3026 0 5 f 520 ));
/*
* int24
*/
DATA(insert ( 3026 23 1 f 534 ));
DATA(insert ( 3026 23 2 f 540 ));
DATA(insert ( 3026 23 3 f 532 ));
DATA(insert ( 3026 23 4 f 542 ));
DATA(insert ( 3026 23 5 f 536 ));
/*
* int28
*/
DATA(insert ( 3026 20 1 f 1864 ));
DATA(insert ( 3026 20 2 f 1866 ));
DATA(insert ( 3026 20 3 f 1862 ));
DATA(insert ( 3026 20 4 f 1867 ));
DATA(insert ( 3026 20 5 f 1865 ));
/*
* int4
*/
DATA(insert ( 3027 0 1 f 97 ));
DATA(insert ( 3027 0 2 f 523 ));
DATA(insert ( 3027 0 3 f 96 ));
DATA(insert ( 3027 0 4 f 525 ));
DATA(insert ( 3027 0 5 f 521 ));
/*
* int42
*/
DATA(insert ( 3027 21 1 f 535 ));
DATA(insert ( 3027 21 2 f 541 ));
DATA(insert ( 3027 21 3 f 533 ));
DATA(insert ( 3027 21 4 f 543 ));
DATA(insert ( 3027 21 5 f 537 ));
/*
* int48
*/
DATA(insert ( 3027 20 1 f 37 ));
DATA(insert ( 3027 20 2 f 80 ));
DATA(insert ( 3027 20 3 f 15 ));
DATA(insert ( 3027 20 4 f 82 ));
DATA(insert ( 3027 20 5 f 76 ));
/*
* int8
*/
DATA(insert ( 3028 0 1 f 412 ));
DATA(insert ( 3028 0 2 f 414 ));
DATA(insert ( 3028 0 3 f 410 ));
DATA(insert ( 3028 0 4 f 415 ));
DATA(insert ( 3028 0 5 f 413 ));
/*
* int82
*/
DATA(insert ( 3028 21 1 f 1870 ));
DATA(insert ( 3028 21 2 f 1872 ));
DATA(insert ( 3028 21 3 f 1868 ));
DATA(insert ( 3028 21 4 f 1873 ));
DATA(insert ( 3028 21 5 f 1871 ));
/*
* int84
*/
DATA(insert ( 3028 23 1 f 418 ));
DATA(insert ( 3028 23 2 f 420 ));
DATA(insert ( 3028 23 3 f 416 ));
DATA(insert ( 3028 23 4 f 430 ));
DATA(insert ( 3028 23 5 f 419 ));
/*
* interval
*/
DATA(insert ( 3029 0 1 f 1332 ));
DATA(insert ( 3029 0 2 f 1333 ));
DATA(insert ( 3029 0 3 f 1330 ));
DATA(insert ( 3029 0 4 f 1335 ));
DATA(insert ( 3029 0 5 f 1334 ));
/*
* macaddr
*/
DATA(insert ( 3030 0 1 f 1222 ));
DATA(insert ( 3030 0 2 f 1223 ));
DATA(insert ( 3030 0 3 f 1220 ));
DATA(insert ( 3030 0 4 f 1225 ));
DATA(insert ( 3030 0 5 f 1224 ));
/*
* name
*/
DATA(insert ( 3031 0 1 f 660 ));
DATA(insert ( 3031 0 2 f 661 ));
DATA(insert ( 3031 0 3 f 93 ));
DATA(insert ( 3031 0 4 f 663 ));
DATA(insert ( 3031 0 5 f 662 ));
/*
* numeric
*/
DATA(insert ( 3032 0 1 f 1754 ));
DATA(insert ( 3032 0 2 f 1755 ));
DATA(insert ( 3032 0 3 f 1752 ));
DATA(insert ( 3032 0 4 f 1757 ));
DATA(insert ( 3032 0 5 f 1756 ));
/*
* oid
*/
DATA(insert ( 3033 0 1 f 609 ));
DATA(insert ( 3033 0 2 f 611 ));
DATA(insert ( 3033 0 3 f 607 ));
DATA(insert ( 3033 0 4 f 612 ));
DATA(insert ( 3033 0 5 f 610 ));
/*
* oidvector
*/
DATA(insert ( 3034 0 1 f 645 ));
DATA(insert ( 3034 0 2 f 647 ));
DATA(insert ( 3034 0 3 f 649 ));
DATA(insert ( 3034 0 4 f 648 ));
DATA(insert ( 3034 0 5 f 646 ));
/*
* text
*/
DATA(insert ( 3035 0 1 f 664 ));
DATA(insert ( 3035 0 2 f 665 ));
DATA(insert ( 3035 0 3 f 98 ));
DATA(insert ( 3035 0 4 f 667 ));
DATA(insert ( 3035 0 5 f 666 ));
/*
* time
*/
DATA(insert ( 3036 0 1 f 1110 ));
DATA(insert ( 3036 0 2 f 1111 ));
DATA(insert ( 3036 0 3 f 1108 ));
DATA(insert ( 3036 0 4 f 1113 ));
DATA(insert ( 3036 0 5 f 1112 ));
/*
* timestamptz
*/
DATA(insert ( 3037 0 1 f 1322 ));
DATA(insert ( 3037 0 2 f 1323 ));
DATA(insert ( 3037 0 3 f 1320 ));
DATA(insert ( 3037 0 4 f 1325 ));
DATA(insert ( 3037 0 5 f 1324 ));
/*
* timestamptz-date
*/
DATA(insert ( 3037 1082 1 f 2384 ));
DATA(insert ( 3037 1082 2 f 2385 ));
DATA(insert ( 3037 1082 3 f 2386 ));
DATA(insert ( 3037 1082 4 f 2387 ));
DATA(insert ( 3037 1082 5 f 2388 ));
/*
* timestamptz-timestamp
*/
DATA(insert ( 3037 1114 1 f 2540 ));
DATA(insert ( 3037 1114 2 f 2541 ));
DATA(insert ( 3037 1114 3 f 2542 ));
DATA(insert ( 3037 1114 4 f 2543 ));
DATA(insert ( 3037 1114 5 f 2544 ));
/*
* timetz
*/
DATA(insert ( 3038 0 1 f 1552 ));
DATA(insert ( 3038 0 2 f 1553 ));
DATA(insert ( 3038 0 3 f 1550 ));
DATA(insert ( 3038 0 4 f 1555 ));
DATA(insert ( 3038 0 5 f 1554 ));
/*
* varbit
*/
DATA(insert ( 3039 0 1 f 1806 ));
DATA(insert ( 3039 0 2 f 1808 ));
DATA(insert ( 3039 0 3 f 1804 ));
DATA(insert ( 3039 0 4 f 1809 ));
DATA(insert ( 3039 0 5 f 1807 ));
/*
* varchar
*/
DATA(insert ( 3040 0 1 f 664 ));
DATA(insert ( 3040 0 2 f 665 ));
DATA(insert ( 3040 0 3 f 98 ));
DATA(insert ( 3040 0 4 f 667 ));
DATA(insert ( 3040 0 5 f 666 ));
/*
* timestamp
*/
DATA(insert ( 3041 0 1 f 2062 ));
DATA(insert ( 3041 0 2 f 2063 ));
DATA(insert ( 3041 0 3 f 2060 ));
DATA(insert ( 3041 0 4 f 2065 ));
DATA(insert ( 3041 0 5 f 2064 ));
/*
* timestamp-date
*/
DATA(insert ( 3041 1082 1 f 2371 ));
DATA(insert ( 3041 1082 2 f 2372 ));
DATA(insert ( 3041 1082 3 f 2373 ));
DATA(insert ( 3041 1082 4 f 2374 ));
DATA(insert ( 3041 1082 5 f 2375 ));
/*
* timestamp-timestamptz
*/
DATA(insert ( 3041 1184 1 f 2534 ));
DATA(insert ( 3041 1184 2 f 2535 ));
DATA(insert ( 3041 1184 3 f 2536 ));
DATA(insert ( 3041 1184 4 f 2537 ));
DATA(insert ( 3041 1184 5 f 2538 ));
/*
* text pattern
*/
DATA(insert ( 3042 0 1 f 2314 ));
DATA(insert ( 3042 0 2 f 2315 ));
DATA(insert ( 3042 0 3 f 2316 ));
DATA(insert ( 3042 0 4 f 2317 ));
DATA(insert ( 3042 0 5 f 2318 ));
/*
* varchar pattern
*/
DATA(insert ( 3043 0 1 f 2314 ));
DATA(insert ( 3043 0 2 f 2315 ));
DATA(insert ( 3043 0 3 f 2316 ));
DATA(insert ( 3043 0 4 f 2317 ));
DATA(insert ( 3043 0 5 f 2318 ));
/*
* bpchar pattern
*/
DATA(insert ( 3044 0 1 f 2326 ));
DATA(insert ( 3044 0 2 f 2327 ));
DATA(insert ( 3044 0 3 f 2328 ));
DATA(insert ( 3044 0 4 f 2329 ));
DATA(insert ( 3044 0 5 f 2330 ));
/*
* name pattern
*/
DATA(insert ( 3045 0 1 f 2332 ));
DATA(insert ( 3045 0 2 f 2333 ));
DATA(insert ( 3045 0 3 f 2334 ));
DATA(insert ( 3045 0 4 f 2335 ));
DATA(insert ( 3045 0 5 f 2336 ));
/*
* money
*/
DATA(insert ( 3046 0 1 f 902 ));
DATA(insert ( 3046 0 2 f 904 ));
DATA(insert ( 3046 0 3 f 900 ));
DATA(insert ( 3046 0 4 f 905 ));
DATA(insert ( 3046 0 5 f 903 ));
/*
* reltime
*/
DATA(insert ( 3047 0 1 f 568 ));
DATA(insert ( 3047 0 2 f 570 ));
DATA(insert ( 3047 0 3 f 566 ));
DATA(insert ( 3047 0 4 f 571 ));
DATA(insert ( 3047 0 5 f 569 ));
/*
* tinterval
*/
DATA(insert ( 3048 0 1 f 813 ));
DATA(insert ( 3048 0 2 f 815 ));
DATA(insert ( 3048 0 3 f 811 ));
DATA(insert ( 3048 0 4 f 816 ));
DATA(insert ( 3048 0 5 f 814 ));
/*
* gpxlogloc
*/
DATA(insert ( 2904 0 1 f 3327 ));
DATA(insert ( 2904 0 2 f 3329 ));
DATA(insert ( 2904 0 3 f 3325 ));
DATA(insert ( 2904 0 4 f 3330 ));
DATA(insert ( 2904 0 5 f 3328 ));
/*
* btree jsonb_ops
*/
DATA(insert ( 4033 0 1 f 3242 ));
DATA(insert ( 4033 0 2 f 3244 ));
DATA(insert ( 4033 0 3 f 5515 ));
DATA(insert ( 4033 0 4 f 3245 ));
DATA(insert ( 4033 0 5 f 3243 ));
/*
* hash jsonb ops
*/
DATA(insert ( 4034 0 1 f 5515 ));
/*
* GIN jsonb ops
*/
DATA(insert ( 4036 0 7 f 3246 ));
DATA(insert ( 4036 25 9 f 3247));
DATA(insert ( 4036 1009 10 f 3248 ));
DATA(insert ( 4036 1009 11 f 3249 ));
/*
* GIN jsonb hash ops
*/
DATA(insert ( 4037 0 7 f 3246 ));
#endif /* PG_AMOP_H */
| 14,040 |
/* $Source: bitbucket.org:berkeleylab/gasnet.git/other/gasnet_arch_arm.h $
* Description: GASNet ARM-specific Helpers (Internal code, not for client use)
* Copyright 2018, The Regents of the University of California
* Terms of use are as specified in license.txt
*
* This code is used by gasnet_asm.h, configure and cross_configure_helper.
*/
#if !defined(_IN_GASNETEX_H) && !defined(_IN_GASNET_TOOLS_H) && !defined(GASNETI_IN_CONFIGURE)
#error This file is not meant to be included directly- clients should include gasnetex.h or gasnet_tools.h
#endif
#ifndef __arm__
#error Architecture-specific header included on wrong architecture.
#endif
#ifndef _GASNET_ARCH_ARM_H
#define _GASNET_ARCH_ARM_H
#if defined(__thumb__) && !defined(__thumb2__)
/* "GASNet does not support ARM Thumb1 mode" */
#define GASNETI_ARM_ASMCALL(_tmp, _offset) "choke me"
#elif defined(__ARM_ARCH_2__)
/* "GASNet does not support ARM versions earlier than ARMv3" */
#define GASNETI_ARM_ASMCALL(_tmp, _offset) "choke me"
#elif defined(__ARM_ARCH_3__) || defined(__ARM_ARCH_4__) || defined(__ARM_ARCH_4T__)
#define GASNETI_ARM_ASMCALL(_tmp, _offset) \
" mov " #_tmp ", #0xffff0fff @ _tmp = base addr \n" \
" mov lr, pc @ lr = return addr \n" \
" sub pc, " #_tmp ", #" #_offset " @ call _tmp - _offset \n"
#else
#define GASNETI_ARM_ASMCALL(_tmp, _offset) \
" mov " #_tmp ", #0xffff0fff @ _tmp = base addr \n" \
" sub " #_tmp ", " #_tmp ", #" #_offset " @ _tmp -= _offset \n" \
" blx " #_tmp " @ call _tmp \n"
#endif
/* ARCH-specific configure probes below */
#if GASNETI_IN_CONFIGURE
#if CHECK_ARM_CMPXCHG
/* Compare-and-swap via the Linux ARM kernel-helper ("kuser") page.
 * Atomically performs: if (*v == oldval) *v = newval; by calling the
 * kernel cmpxchg helper located 0x3f below 0xffff0fff (the call sequence
 * is generated by GASNETI_ARM_ASMCALL above).
 * Returns nonzero iff the swap was performed — gasneti_arm_cmpxchg_check
 * below relies on exactly that success/failure mapping.
 */
int cmp_swap(volatile unsigned int *v, int oldval, int newval) {
  /* Operands are pinned to the registers the kernel helper expects
   * (r0 = oldval, r1 = newval, r2 = pointer). oldval is additionally
   * kept in r4 so it survives the call: r3, ip and lr are clobbered
   * (see the clobber list below). */
  register unsigned int result asm("r0");
  register unsigned int _newval asm("r1") = newval;
  register unsigned int _v asm("r2") = (unsigned long)v;
  register unsigned int _oldval asm("r4") = oldval;
  /* Transient failure is possible if interrupted.
   * Since we can't distinguish the cause of the failure,
   * we must retry as long as the failure looks "improper"
   * which is defined as (!swapped && (v->ctr == oldval))
   */
  __asm__ __volatile__ (
      "0: mov r0, r4 @ r0 = oldval \n"
      GASNETI_ARM_ASMCALL(r3, 0x3f)
#ifdef __thumb2__
      " ite cc @ THUMB2: If(cc)-Then-Else \n"
#endif
      " ldrcc ip, [r2, #0] @ if (!swapped) ip=v->ctr \n"
      " eorcs ip, r4, #1 @ else ip=oldval^1 \n"
      " teq r4, ip @ if (ip == oldval) \n"
      " beq 0b @ then retry \n"
      "1: "
      : "=&r" (result)
      : "r" (_oldval), "r" (_v), "r" (_newval)
      : "r3", "ip", "lr", "cc", "memory" );
  /* Helper leaves 0 in r0 on success, so invert to get "swapped?". */
  return !result;
}
/* Configure-time probe for the kernel cmpxchg helper.
 * Exercises cmp_swap() once with a mismatching and once with a matching
 * expected value. Returns 0 if both behave as a correct compare-and-swap,
 * nonzero otherwise (run in a child process by the configure harness).
 */
int gasneti_arm_cmpxchg_check(void) {
  /* Child */
  volatile unsigned int X = 4321;
  /* Expect FAIL and X unchanged */
  if (cmp_swap(&X, 0, 1234) || (X != 4321)) return 1;
  /* Expect SUCCESS and X changed */
  if (!cmp_swap(&X, 4321, 1234) || (X != 1234)) return 1;
  return 0;
}
#endif
#if CHECK_ARM_MEMBAR
#define arm_membar() \
__asm__ __volatile__ ( \
GASNETI_ARM_ASMCALL(r0, 0x5f) \
: : : "r0", "lr", "cc", "memory" )
/* Configure-time probe for the kernel memory-barrier helper.
 * Validates the helper-version word at 0xffff0ffc before invoking the
 * barrier (helper slot #3), since we cannot observe the barrier's effect
 * directly. Returns 0 if the helper appears present and callable,
 * nonzero otherwise.
 */
int gasneti_arm_membar_check(void) {
  /* First check the interface version (number of helpers) */
  unsigned int kernel_helper_version = *(unsigned int *)0xffff0ffcUL;
  /* Max possible is 128 32-byte helper "slots".
   * We check this because prior to 2.6.12, the same location
   * held the thread-specific pointer! */
  if (kernel_helper_version > 128) return 1;
  /* memory barrier occupies slot #3 */
  if (kernel_helper_version < 3) return 1;
  /* Can't test any side effect, but at least check for crash */
  arm_membar();
  return 0;
}
#endif
#endif
#endif // _GASNET_ARCH_ARM_H
| 1,715 |
import grpc
import product_info_pb2
import product_info_pb2_grpc
import time;
def run():
    """Exercise the ProductInfo gRPC service end to end.

    Connects to a local server on port 50051, registers one product, then
    reads it back using the ID returned by the add call. Responses are
    printed to stdout.

    Raises:
        grpc.RpcError: if the server is unreachable or rejects a call.
    """
    # Use the channel as a context manager so the underlying connection is
    # closed on exit -- the original version leaked the channel.
    with grpc.insecure_channel('localhost:50051') as channel:
        # Stub = generated client-side proxy for the ProductInfo service.
        stub = product_info_pb2_grpc.ProductInfoStub(channel)
        response = stub.addProduct(product_info_pb2.Product(
            name="Apple iPhone 11",
            description="Meet Apple iPhone 11. All-new dual-camera "
                        "system with Ultra Wide and Night mode.",
            price=699.0))
        print("add product: response", response)
        # Fetch the product just added, by its server-assigned ID.
        productInfo = stub.getProduct(
            product_info_pb2.ProductID(value=response.value))
        print("get product: response", productInfo)


run()
from typing import Dict, List, Optional, Union
import numpy as np
import logging
from dataclasses import dataclass
from .model_base import BaseSurrogateModel
from ..datatypes.common import FantasizedPendingEvaluation, Candidate
from ..datatypes.hp_ranges_cs import HyperparameterRanges_CS
from ..datatypes.tuning_job_state import TuningJobState
from ..gpautograd.gp_regression import GaussianProcessRegression
from ..gpautograd.gpr_mcmc import GPRegressionMCMC
from ..gpautograd.posterior_state import GaussProcPosteriorState
from ..utils.debug_log import DebugLogPrinter
from ..utils.simple_profiler import SimpleProfiler
from ..tuning_algorithms.base_classes import DEFAULT_METRIC
logger = logging.getLogger(__name__)
GPModel = Union[GaussianProcessRegression, GPRegressionMCMC]
@dataclass
class InternalCandidateEvaluations:
    # Internal (X, y) training set for the GP, together with the
    # normalization constants applied to y (see
    # get_internal_candidate_evaluations below).
    X: np.ndarray  # feature matrix, one encoded candidate per row
    y: np.ndarray  # targets; one column per fantasy sample if pending evals exist
    mean: float  # mean subtracted from raw targets (0.0 if not normalized)
    std: float  # std the targets were divided by (1.0 if not normalized)
# Note: If state.pending_evaluations is not empty, it must contain entries
# of type FantasizedPendingEvaluation, which contain the fantasy samples. This
# is the case only for internal states, the member GPMXNetModel.state has
# PendingEvaluation entries without the fantasy samples.
def get_internal_candidate_evaluations(
        state: TuningJobState, active_metric: str, normalize_targets: bool,
        num_fantasize_samples: int) -> InternalCandidateEvaluations:
    """Builds the internal (X, y) training set for the GP from `state`.

    Encodes every labeled candidate into a feature row and stacks the
    corresponding `active_metric` values into a target column. If
    `normalize_targets`, targets are standardized (the applied mean/std are
    returned so predictions can be de-normalized). If `state` carries
    pending evaluations, each must be a FantasizedPendingEvaluation with
    `num_fantasize_samples` fantasy values: the observed targets are
    broadcast across that many columns and the fantasy rows are appended.

    :param state: Tuning job state with labeled (and possibly pending) evals
    :param active_metric: Name of the metric to extract
    :param normalize_targets: Standardize the target values?
    :param num_fantasize_samples: Required number of fantasies per pending eval
    :return: InternalCandidateEvaluations(X, y, mean, std)
    """
    feature_rows = [
        state.hp_ranges.to_ndarray(labeled.candidate)
        for labeled in state.candidate_evaluations]
    target_values = [
        labeled.metrics[active_metric]
        for labeled in state.candidate_evaluations]
    features = np.vstack(feature_rows)
    targets = np.vstack(target_values).reshape((-1, 1))
    # Normalization constants (identity if normalize_targets is False).
    # Note: fantasy values in state.pending_evaluations were sampled from a
    # model fit to normalized targets, so they are already normalized.
    target_mean, target_std = 0.0, 1.0
    if normalize_targets:
        target_std = max(np.std(targets).item(), 1e-15)
        target_mean = np.mean(targets).item()
        targets = (targets - target_mean) / target_std
    if state.pending_evaluations:
        # targets becomes a matrix: observed values broadcast over columns,
        # one fantasy row appended per pending evaluation
        fantasy_rows = []
        fantasy_features = []
        for pending in state.pending_evaluations:
            assert isinstance(pending, FantasizedPendingEvaluation), \
                "state.pending_evaluations has to contain FantasizedPendingEvaluation"
            fantasy = pending.fantasies[active_metric]
            assert fantasy.size == num_fantasize_samples, \
                "All state.pending_evaluations entries must have length {}".format(
                    num_fantasize_samples)
            fantasy_rows.append(fantasy.reshape((1, -1)))
            fantasy_features.append(
                state.hp_ranges.to_ndarray(pending.candidate))
        targets = np.vstack(
            [targets * np.ones((1, num_fantasize_samples))] + fantasy_rows)
        features = np.vstack([features] + fantasy_features)
    return InternalCandidateEvaluations(
        features, targets, target_mean, target_std)
class GaussProcSurrogateModel(BaseSurrogateModel):
    def __init__(
            self, state: TuningJobState, active_metric: str, random_seed: int,
            gpmodel: GPModel, fit_parameters: bool, num_fantasy_samples: int,
            normalize_targets: bool = True,
            profiler: SimpleProfiler = None,
            debug_log: Optional[DebugLogPrinter] = None,
            debug_fantasy_values = None):
        """
        Given a TuningJobState state, the corresponding posterior state is
        computed here, based on which predictions are supported.
        Note: state is immutable. It must contain labeled examples.
        Parameters of the GP model in gpmodel are optimized iff fit_parameters
        is true. This requires state to contain labeled examples.
        We support pending evaluations via fantasizing. Note that state does
        not contain the fantasy values, but just the pending configs. Fantasy
        values are sampled here (see _compute_posterior and
        _draw_fantasy_values).

        :param state: TuningJobState
        :param active_metric: name of the metric to optimize.
        :param random_seed: Used only if GP model is created here
        :param gpmodel: GaussianProcessRegression model or GPRegressionMCMC model
        :param fit_parameters: Optimize parameters of gpmodel? Otherwise, these
            parameters are not changed
        :param num_fantasy_samples: Number of fantasy samples drawn per pending
            evaluation (must be positive)
        :param normalize_targets: Normalize target values in
            state.candidate_evaluations?
        :param profiler: If given, passed on to the posterior computation
        :param debug_log: If given, used to record debug information
        :param debug_fantasy_values: For testing only: if given, these fantasy
            values are used instead of sampling new ones
        """
        super().__init__(state, active_metric, random_seed, debug_log)
        assert num_fantasy_samples > 0
        self._gpmodel = gpmodel
        self.num_fantasy_samples = num_fantasy_samples
        self.normalize_targets = normalize_targets
        self.active_metric = active_metric
        # Compute posterior (including fitting (optional) and dealing with
        # pending evaluations)
        # If state.pending_evaluations is not empty, fantasy samples are drawn
        # here, but they are not maintained in the state (which is immutable),
        # and not in the posterior state either.
        # Instead, fantasy samples can be accessed via self.fantasy_samples.
        self.fantasy_samples = None
        # DEBUG: Allows for comparison testing. If given, these fantasy values are
        # used in _draw_fantasy_values (instead of sampling them)
        self._debug_fantasy_values = debug_fantasy_values # DEBUG
        self._compute_posterior(fit_parameters, profiler)
def predict(self, inputs: np.ndarray) -> List[Dict[str, np.ndarray]]:
"""
Note: Different to GPyOpt, means and stddevs are de-normalized here.
"""
predictions_list_denormalized = []
for posterior_mean, posterior_variance in self._gpmodel.predict(
inputs):
assert posterior_mean.shape[0] == inputs.shape[0], \
(posterior_mean.shape, inputs.shape)
assert posterior_variance.shape == (inputs.shape[0],), \
(posterior_variance.shape, inputs.shape)
if self.state.pending_evaluations:
# If there are pending candidates with fantasy values,
# posterior_mean must be a matrix
assert posterior_mean.ndim == 2 and \
posterior_mean.shape[1] == self.num_fantasy_samples, \
(posterior_mean.shape, self.num_fantasy_samples)
mean_denorm = posterior_mean * self.std + self.mean
std_denorm = np.sqrt(posterior_variance) * self.std
predictions_list_denormalized.append(
{'mean': mean_denorm, 'std': std_denorm})
return predictions_list_denormalized
def backward_gradient(
self, input: np.ndarray,
head_gradients: List[Dict[str, np.ndarray]]) -> List[np.ndarray]:
poster_states = self.posterior_states()
assert poster_states is not None, \
"Cannot run backward_gradient without a posterior state"
assert len(poster_states) == len(head_gradients), \
"len(posterior_states) = {} != {} = len(head_gradients)".format(
len(poster_states), len(head_gradients))
return [
poster_state.backward_gradient(
input, head_gradient, self.mean, self.std)
for poster_state, head_gradient in zip(poster_states, head_gradients)
]
    @property
    def gpmodel(self) -> GPModel:
        """The underlying GP model object passed at construction."""
        return self._gpmodel
    def does_mcmc(self):
        """Returns True iff the underlying model is a GPRegressionMCMC."""
        return isinstance(self._gpmodel, GPRegressionMCMC)
    def posterior_states(self) -> Optional[List[GaussProcPosteriorState]]:
        """Posterior states held by the underlying GP model, or None if
        none have been computed yet."""
        return self._gpmodel.states
def get_params(self):
"""
Note: Once MCMC is supported, this method will have to be refactored.
Note: If self.state still has no labeled data, the parameters returned
are the initial ones, where an update would start from.
:return: Hyperparameter dictionary
"""
if not self.does_mcmc():
return self._gpmodel.get_params()
else:
return dict()
    def set_params(self, param_dict):
        """Sets hyperparameters of the underlying GP model from `param_dict`."""
        self._gpmodel.set_params(param_dict)
    def _compute_posterior(
            self, fit_parameters: bool, profiler: SimpleProfiler):
        """
        Completes __init__, by computing the posterior. If fit_parameters, this
        includes optimizing the surrogate model parameters.

        If self.state.pending_evaluations is not empty, we proceed as follows:
        - Compute posterior for state without pending evals (parameters are
          fitted on labeled data only)
        - Draw fantasy values for pending evals
        - Recompute posterior (without fitting) on labeled plus fantasy data

        :param fit_parameters: Optimize the surrogate model parameters?
        :param profiler: Optional profiler, passed only to the first
            posterior computation
        """
        if self._debug_log is not None:
            self._debug_log.set_state(self.state)
        # Compute posterior for state without pending evals
        no_pending_state = self.state
        if self.state.pending_evaluations:
            # self.state is immutable, so build a copy without pending evals
            no_pending_state = TuningJobState(
                hp_ranges=self.state.hp_ranges,
                candidate_evaluations=self.state.candidate_evaluations,
                failed_candidates=self.state.failed_candidates,
                pending_evaluations=[])
        self._posterior_for_state(no_pending_state, fit_parameters, profiler)
        if self.state.pending_evaluations:
            # Sample fantasy values for pending evals
            pending_configs = [
                x.candidate for x in self.state.pending_evaluations]
            new_pending = self._draw_fantasy_values(pending_configs)
            # Compute posterior for state with pending evals
            # Note: profiler is not passed here, this would overwrite the
            # results from the first call
            with_pending_state = TuningJobState(
                hp_ranges=self.state.hp_ranges,
                candidate_evaluations=self.state.candidate_evaluations,
                failed_candidates=self.state.failed_candidates,
                pending_evaluations=new_pending)
            self._posterior_for_state(
                with_pending_state, fit_parameters=False, profiler=None)
            # Note: At this point, the fantasy values are dropped, they are not
            # needed anymore. They've just been sampled for the posterior
            # computation. We still maintain them in self.fantasy_samples,
            # which is mainly used for testing
            self.fantasy_samples = new_pending
    def _posterior_for_state(
            self, state: TuningJobState, fit_parameters: bool,
            profiler: Optional[SimpleProfiler]):
        """
        Computes posterior for state.
        If fit_parameters and state.pending_evaluations is empty, we first
        optimize the model parameters.
        If state.pending_evaluations are given, these must be
        FantasizedPendingEvaluations, i.e. the fantasy values must have been
        sampled.

        Side effects: sets self.mean and self.std to the target
        normalization constants, and updates the underlying GP model.

        :param state: State to compute the posterior for (must contain
            labeled datapoints)
        :param fit_parameters: Optimize model parameters first? Ignored if
            state has pending evaluations
        :param profiler: Optional profiler, passed to the GP model
        """
        assert state.candidate_evaluations, \
            "Cannot compute posterior: state has no labeled datapoints"
        internal_candidate_evaluations = get_internal_candidate_evaluations(
            state, self.active_metric, self.normalize_targets,
            self.num_fantasy_samples)
        X_all = internal_candidate_evaluations.X
        Y_all = internal_candidate_evaluations.y
        assert X_all.shape[0] == Y_all.shape[0]
        self.mean = internal_candidate_evaluations.mean
        self.std = internal_candidate_evaluations.std
        # Never fit parameters when fantasy values are in play
        fit_parameters = fit_parameters and (not state.pending_evaluations)
        if not fit_parameters:
            logger.log(15, "Recomputing GP state")
            self._gpmodel.recompute_states(X_all, Y_all, profiler=profiler)
        else:
            logger.log(15, f"Fitting GP model for {self.active_metric}")
            self._gpmodel.fit(X_all, Y_all, profiler=profiler)
        if self._debug_log is not None:
            self._debug_log.set_gp_params(self.get_params())
            if not state.pending_evaluations:
                deb_msg = "[GaussProcSurrogateModel._posterior_for_state]\n"
                deb_msg += ("- self.mean = {}\n".format(self.mean))
                deb_msg += ("- self.std = {}".format(self.std))
                logger.info(deb_msg)
                self._debug_log.set_targets(internal_candidate_evaluations.y)
            else:
                # The fantasy rows are the last num_pending rows of y
                num_pending = len(state.pending_evaluations)
                fantasies = internal_candidate_evaluations.y[-num_pending:, :]
                self._debug_log.set_fantasies(fantasies)
    def _draw_fantasy_values(self, candidates: List[Candidate]) \
            -> List[FantasizedPendingEvaluation]:
        """
        Draws fantasy target values for the pending ``candidates``.

        Note: The fantasy values need not be de-normalized, because they are
        only used internally here (e.g., get_internal_candidate_evaluations).
        Note: A complication is that if the sampling methods of _gpmodel
        are called when there are no pending candidates (with fantasies) yet,
        they do return a single sample (instead of num_fantasy_samples). This
        is because GaussianProcessRegression knows about num_fantasy_samples
        only due to the form of the posterior state (bad design!).
        In this case, we draw num_fantasy_samples i.i.d.

        :param candidates: Candidates to fantasize target values for
        :return: One FantasizedPendingEvaluation per candidate; empty list
            when ``candidates`` is empty
        """
        if candidates:
            if self._debug_fantasy_values is not None:
                # DEBUG: Use provided fantasy values instead of sampling
                assert len(self._debug_fantasy_values) == len(candidates)
                logger.info("DEBUG: Use given fantasy values, rather than resampling them")
                return self._debug_fantasy_values
            logger.debug("Fantasizing target values for candidates:\n{}"
                .format(candidates))
            X_new = self.state.hp_ranges.to_ndarray_matrix(candidates)
            # Special case (see header comment): If the current posterior state
            # does not contain pending candidates (no fantasies), we sample
            # num_fantasy_samples times i.i.d.
            num_samples = 1 if self._gpmodel.multiple_targets() \
                else self.num_fantasy_samples
            # We need joint sampling for >1 new candidates
            num_candidates = len(candidates)
            sample_func = self._gpmodel.sample_joint if num_candidates > 1 else \
                self._gpmodel.sample_marginals
            # Reshaped so that each row corresponds to one candidate
            Y_new = sample_func(X_new, num_samples=num_samples).reshape(
                (num_candidates, -1))
            return [
                FantasizedPendingEvaluation(
                    candidate, {self.active_metric: y_new.reshape((1, -1))})
                for candidate, y_new in zip(candidates, Y_new)
            ]
        else:
            return []
def current_best_filter_candidates(self, candidates):
hp_ranges = self.state.hp_ranges
if isinstance(hp_ranges, HyperparameterRanges_CS):
candidates = hp_ranges.filter_for_last_pos_value(candidates)
assert candidates, \
"state.hp_ranges does not contain any candidates " + \
"(labeled or pending) with resource attribute " + \
"'{}' = {}".format(
hp_ranges.name_last_pos, hp_ranges.value_for_last_pos)
return candidates
# Convenience type allowing for multi-output HPO. This is used for methods that work both in the standard case
# of a single output model and in the multi-output case (e.g., see GPModelPendingCandidateStateTransformer)
GaussProcSurrogateOutputModel = Union[GaussProcSurrogateModel, Dict[str, GaussProcSurrogateModel]]
@dataclass
class GPModelArgs:
    """Constructor arguments bundle for a GP surrogate model."""
    num_fantasy_samples: int  # number of fantasy samples drawn for pending evaluations
    random_seed: int  # seed for the model's random number generator
    active_metric: str = DEFAULT_METRIC  # name of the metric the surrogate models
    normalize_targets: bool = True  # whether target values are normalized (mean/std kept on the model)
# Single-output or multi-output (dict keyed by metric name) variants:
GPModelArgsOutput = Union[GPModelArgs, Dict[str, GPModelArgs]]
GPOutputModel = Union[GPModel, Dict[str, GPModel]]
852 | <filename>DPGAnalysis/Skims/python/cosmicTPSkim_cff.py
import FWCore.ParameterSet.Config as cms
from TrackPropagation.SteppingHelixPropagator.SteppingHelixPropagatorAny_cfi import *
# Filters requiring at least one reconstructed track from each of the three
# cosmic tracking algorithms (cosmic track finder, CTF, road search).
cosmictrackfinderP5TkCntFilter = cms.EDFilter("TrackCountFilter",
                                              src = cms.InputTag('cosmictrackfinderP5'),
                                              minNumber = cms.uint32(1)
                                              )
ctfWithMaterialTracksP5TkCntFilter = cms.EDFilter("TrackCountFilter",
                                                  src = cms.InputTag('ctfWithMaterialTracksP5'),
                                                  minNumber = cms.uint32(1)
                                                  )
rsWithMaterialTracksP5TkCntFilter = cms.EDFilter("TrackCountFilter",
                                                 src = cms.InputTag('rsWithMaterialTracksP5'),
                                                 minNumber = cms.uint32(1)
                                                 )
# Pointing filter: keeps events whose standalone muon points to the cylinder
# defined by radius/maxZ (semantics per HLTMuonPointingFilter).
cosmicMuonsBarrelOnlyTkFilter = cms.EDFilter("HLTMuonPointingFilter",
                                             SALabel = cms.InputTag("cosmicMuons"),
                                             PropagatorName = cms.string("SteppingHelixPropagatorAny"),
                                             radius = cms.double(90.0),
                                             maxZ = cms.double(130.0),
                                             )
# Clones for the other muon collections.
cosmicMuonsEndCapsOnlyTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("cosmicMuonsEndCapsOnly"))
cosmicMuonsTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("cosmicMuons"))
cosmicMuons1LegTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("cosmicMuons1Leg"))
globalCosmicMuonsBarrelOnlyTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("globalCosmicMuons"))
# NOTE(review): this clone reuses "globalCosmicMuons"; confirm whether
# "globalCosmicMuonsEndCapsOnly" was intended here.
globalCosmicMuonsEndCapsOnlyTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("globalCosmicMuons"))
globalCosmicMuonsTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("globalCosmicMuons"))
globalCosmicMuons1LegTkFilter = cosmicMuonsBarrelOnlyTkFilter.clone(SALabel = cms.InputTag("globalCosmicMuons1Leg"))
# One single-module sequence per filter, for use in skim paths.
cosmicMuonsBarrelOnlyTkSequence = cms.Sequence(cosmicMuonsBarrelOnlyTkFilter)
cosmicMuonsEndCapsOnlyTkSequence = cms.Sequence(cosmicMuonsEndCapsOnlyTkFilter)
cosmicMuonsTkSequence = cms.Sequence(cosmicMuonsTkFilter)
cosmicMuons1LegTkSequence = cms.Sequence(cosmicMuons1LegTkFilter)
globalCosmicMuonsBarrelOnlyTkSequence = cms.Sequence(globalCosmicMuonsBarrelOnlyTkFilter)
globalCosmicMuonsEndCapsOnlyTkSequence = cms.Sequence(globalCosmicMuonsEndCapsOnlyTkFilter)
globalCosmicMuonsTkSequence = cms.Sequence(globalCosmicMuonsTkFilter)
globalCosmicMuons1LegTkSequence = cms.Sequence(globalCosmicMuons1LegTkFilter)
cosmictrackfinderP5TkCntSequence = cms.Sequence(cosmictrackfinderP5TkCntFilter)
ctfWithMaterialTracksP5TkCntSequence = cms.Sequence(ctfWithMaterialTracksP5TkCntFilter)
rsWithMaterialTracksP5TkCntSequence = cms.Sequence(rsWithMaterialTracksP5TkCntFilter)
1,581 | <reponame>qaseleniumtesting01/GithubScannerCocoapods
//
// ADKCollectionViewDynamicSizeCell.h
// AppDevKit
//
// Created by <NAME> on 6/8/15.
// Copyright © 2015, Yahoo Inc.
// Licensed under the terms of the BSD License.
// Please see the LICENSE file in the project root for terms.
//
#import <UIKit/UIKit.h>
/// Base collection view cell for dynamically sized cells. Declares no public
/// API of its own here — presumably paired with AppDevKit's dynamic
/// cell-sizing helpers (judging by the class name; confirm in the .m file).
@interface ADKCollectionViewDynamicSizeCell : UICollectionViewCell
@end
| 133 |
348 | {"nom":"Montagny-Sainte-Félicité","circ":"4ème circonscription","dpt":"Oise","inscrits":305,"abs":152,"votants":153,"blancs":7,"nuls":1,"exp":145,"res":[{"nuance":"REM","nom":"<NAME>","voix":89},{"nuance":"LR","nom":"<NAME>","voix":56}]} | 97 |
443 | <filename>tests/changes/api/test_node_details.py<gh_stars>100-1000
from changes.testutils import APITestCase
class NodeDetailsTest(APITestCase):
    """Exercises the node details API endpoint."""

    def test_simple(self):
        # Create a node fixture and fetch its detail resource.
        created = self.create_node()
        url = '/api/0/nodes/{0}/'.format(created.id.hex)
        response = self.client.get(url)
        assert response.status_code == 200
        payload = self.unserialize(response)
        assert payload['id'] == created.id.hex
| 177 |
4,403 | package cn.hutool.poi.ofd;
import cn.hutool.core.io.IORuntimeException;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.io.file.PathUtil;
import org.ofdrw.font.Font;
import org.ofdrw.layout.OFDDoc;
import org.ofdrw.layout.edit.Annotation;
import org.ofdrw.layout.element.Div;
import org.ofdrw.layout.element.Img;
import org.ofdrw.layout.element.Paragraph;
import org.ofdrw.reader.OFDReader;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
import java.nio.file.Path;
/**
* OFD文件生成器
*
* @author looly
* @since 5.5.3
*/
public class OfdWriter implements Serializable, Closeable {
	private static final long serialVersionUID = 1L;

	/** Underlying OFDRW document being written. */
	private final OFDDoc doc;

	/**
	 * Constructor.
	 *
	 * @param file file to generate
	 */
	public OfdWriter(File file) {
		this(file.toPath());
	}

	/**
	 * Constructor. If the file already exists it is opened for editing,
	 * otherwise a new document is created.
	 *
	 * @param file file to generate
	 */
	public OfdWriter(Path file) {
		try {
			if(PathUtil.exists(file, true)){
				this.doc = new OFDDoc(new OFDReader(file), file);
			} else{
				this.doc = new OFDDoc(file);
			}
		} catch (IOException e) {
			throw new IORuntimeException(e);
		}
	}

	/**
	 * Constructor.
	 *
	 * @param out stream to write the document to
	 */
	public OfdWriter(OutputStream out) {
		this.doc = new OFDDoc(out);
	}

	/**
	 * Appends text content as a single paragraph.
	 *
	 * @param font  font, may be {@code null} to keep the paragraph default
	 * @param texts text fragments to append to the paragraph
	 * @return this
	 */
	public OfdWriter addText(Font font, String... texts) {
		final Paragraph paragraph = new Paragraph();
		if (null != font) {
			paragraph.setDefaultFont(font);
		}
		for (String text : texts) {
			paragraph.add(text);
		}
		return add(paragraph);
	}

	/**
	 * Appends a picture.
	 *
	 * @param picFile picture file
	 * @param width   width
	 * @param height  height
	 * @return this
	 */
	public OfdWriter addPicture(File picFile, int width, int height) {
		return addPicture(picFile.toPath(), width, height);
	}

	/**
	 * Appends a picture.
	 *
	 * @param picFile picture file
	 * @param width   width
	 * @param height  height
	 * @return this
	 */
	public OfdWriter addPicture(Path picFile, int width, int height) {
		final Img img;
		try {
			img = new Img(width, height, picFile);
		} catch (IOException e) {
			throw new IORuntimeException(e);
		}
		return add(img);
	}

	/**
	 * Appends a layout node; may be a paragraph, canvas, image or fill.
	 *
	 * @param div node to append
	 * @return this
	 */
	public OfdWriter add(Div div) {
		this.doc.add(div);
		return this;
	}

	/**
	 * Adds an annotation, e.g. a watermark.
	 *
	 * @param page       page number
	 * @param annotation annotation node; may be a paragraph, canvas, image or fill
	 * @return this
	 */
	public OfdWriter add(int page, Annotation annotation) {
		try {
			this.doc.addAnnotation(page, annotation);
		} catch (IOException e) {
			throw new IORuntimeException(e);
		}
		return this;
	}

	/** Closes the underlying document, quietly swallowing close errors. */
	@Override
	public void close() {
		IoUtil.close(this.doc);
	}
}
| 1,277 |
364 | package rsc.publisher;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.*;
import org.openjdk.jmh.infra.Blackhole;
import org.reactivestreams.Publisher;
import rsc.util.PerfSubscriber;
/**
* Example benchmark. Run from command line as
* <br>
* gradle jmh -Pjmh='PublisherRedoPerf'
*/
@BenchmarkMode(Mode.Throughput)
@Warmup(iterations = 5)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@OutputTimeUnit(TimeUnit.SECONDS)
@Fork(value = 1)
@State(Scope.Thread)
public class PublisherRedoPerf {
    // Each entry is "<source length>,<repeat or retry count>".
    @Param({"1,1", "1,1000", "1,1000000", "1000,1", "1000,1000", "1000000,1"})
    public String params;
    public int len;     // parsed source length
    public int repeat;  // parsed repeat/retry count
    Publisher<Integer> sourceRepeating;  // repeat(n) over a plain array source
    Publisher<Integer> sourceRetrying;   // retry(n) over a source that ends in error
    Publisher<Integer> redoRepeating;    // repeatWhen(identity) capped by take()
    Publisher<Integer> redoRetrying;     // retryWhen(identity) capped by take()
    Publisher<Integer> baseline;         // pre-expanded array: no redo machinery at all
    @Setup
    public void setup() {
        String[] ps = params.split(",");
        len = Integer.parseInt(ps[0]);
        repeat = Integer.parseInt(ps[1]);
        Integer[] values = new Integer[len];
        Arrays.fill(values, 777);
        Px<Integer> source = new PublisherArray<>(values);
        // Emits `len` values, then terminates with an error — exercises retry.
        Px<Integer> error = source.concatWith(new PublisherError<Integer>(new RuntimeException()));
        Integer[] values2 = new Integer[len * repeat];
        Arrays.fill(values2, 777);
        baseline = new PublisherArray<>(values2);
        sourceRepeating = source.repeat(repeat);
        sourceRetrying = error.retry(repeat);
        redoRepeating = source.repeatWhen(v -> v).take(len * repeat);
        redoRetrying = error.retryWhen(v -> v).take(len * repeat);
    }
    @Benchmark
    public void baseline(Blackhole bh) {
        baseline.subscribe(new PerfSubscriber(bh));
    }
    @Benchmark
    public void repeatCounted(Blackhole bh) {
        sourceRepeating.subscribe(new PerfSubscriber(bh));
    }
    @Benchmark
    public void retryCounted(Blackhole bh) {
        sourceRetrying.subscribe(new PerfSubscriber(bh));
    }
    @Benchmark
    public void repeatWhen(Blackhole bh) {
        redoRepeating.subscribe(new PerfSubscriber(bh));
    }
    @Benchmark
    public void retryWhen(Blackhole bh) {
        redoRetrying.subscribe(new PerfSubscriber(bh));
    }
}
| 1,046 |
400 | <gh_stars>100-1000
/*
*
* Copyright (c) 2013 - 2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xipki.qa.ocsp;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.handler.codec.http.DefaultFullHttpRequest;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpVersion;
import org.bouncycastle.asn1.*;
import org.bouncycastle.asn1.ocsp.CertID;
import org.bouncycastle.asn1.ocsp.OCSPObjectIdentifiers;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.Extensions;
import org.bouncycastle.cert.ocsp.CertificateID;
import org.bouncycastle.cert.ocsp.OCSPException;
import org.bouncycastle.cert.ocsp.OCSPReqBuilder;
import org.xipki.ocsp.client.OcspRequestorException;
import org.xipki.ocsp.client.RequestOptions;
import org.xipki.qa.BenchmarkHttpClient;
import org.xipki.qa.BenchmarkHttpClient.HttpClientException;
import org.xipki.qa.BenchmarkHttpClient.ResponseHandler;
import org.xipki.security.HashAlgo;
import org.xipki.security.ObjectIdentifiers;
import org.xipki.security.SignAlgo;
import org.xipki.security.X509Cert;
import org.xipki.util.Base64;
import org.xipki.util.StringUtil;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import static org.xipki.util.Args.notNull;
/**
* OCSP benchmark requestor.
*
* @author <NAME>
* @since 2.2.0
*/
class OcspBenchRequestor {

  // Requests whose URL-encoded form fits within this length may be sent via
  // HTTP GET; longer requests fall back to POST (see ask()).
  public static final int MAX_LEN_GET = 190;

  private final Extension[] extnType = new Extension[0];

  private final SecureRandom random = new SecureRandom();

  // Cache of encoded single-serial requests, only used when nonces are off.
  private final ConcurrentHashMap<BigInteger, byte[]> requests = new ConcurrentHashMap<>();

  private HashAlgo issuerhashAlg;

  private ASN1OctetString issuerNameHash;

  private ASN1OctetString issuerKeyHash;

  private Extension[] extensions;

  private RequestOptions requestOptions;

  private String responderRawPathPost;

  private String responderRawPathGet;

  private BenchmarkHttpClient httpClient;

  /**
   * Precomputes the issuer name/key hashes, the optional preferred-signature-
   * algorithms extension, the GET/POST paths, and starts the HTTP client.
   */
  public void init(ResponseHandler responseHandler, String responderUrl, X509Cert issuerCert,
      RequestOptions requestOptions, int queueSize)
      throws OcspRequestorException, IOException, URISyntaxException {
    notNull(issuerCert, "issuerCert");
    notNull(responseHandler, "responseHandler");
    this.requestOptions = notNull(requestOptions, "requestOptions");

    this.issuerhashAlg = requestOptions.getHashAlgorithm();
    this.issuerNameHash = new DEROctetString(
        issuerhashAlg.hash(issuerCert.getSubject().getEncoded()));
    this.issuerKeyHash = new DEROctetString(
        issuerhashAlg.hash(issuerCert.getSubjectPublicKeyInfo().getPublicKeyData().getOctets()));

    List<SignAlgo> prefSigAlgs = requestOptions.getPreferredSignatureAlgorithms();
    if (prefSigAlgs == null || prefSigAlgs.size() == 0) {
      this.extensions = null;
    } else {
      ASN1EncodableVector vec = new ASN1EncodableVector();
      for (SignAlgo algId : prefSigAlgs) {
        ASN1Sequence prefSigAlgObj = new DERSequence(algId.getAlgorithmIdentifier());
        vec.add(prefSigAlgObj);
      }

      ASN1Sequence extnValue = new DERSequence(vec);
      Extension extn;
      try {
        extn = new Extension(ObjectIdentifiers.Extn.id_pkix_ocsp_prefSigAlgs, false,
            new DEROctetString(extnValue));
      } catch (IOException ex) {
        throw new OcspRequestorException(ex.getMessage(), ex);
      }

      this.extensions = new Extension[]{extn};
    }

    URI uri = new URI(responderUrl);
    this.responderRawPathPost = uri.getRawPath();
    // GET requests append the Base64 request to the path, so it must end in /.
    if (this.responderRawPathPost.endsWith("/")) {
      this.responderRawPathGet = this.responderRawPathPost;
    } else {
      this.responderRawPathGet = this.responderRawPathPost + "/";
    }

    int port = uri.getPort();
    if (port == -1) {
      final String scheme = uri.getScheme();
      if ("http".equalsIgnoreCase(scheme)) {
        port = 80;
      } else if ("https".equalsIgnoreCase(scheme)) {
        port = 443;
      } else {
        throw new OcspRequestorException("unknown scheme " + scheme);
      }
    }

    this.httpClient = new BenchmarkHttpClient(uri.getHost(), port, null,
        responseHandler, queueSize);
    this.httpClient.start();
  } // method init

  public void shutdown() {
    httpClient.shutdown();
  }

  /**
   * Builds and sends one OCSP request for the given serial numbers, using
   * HTTP GET for small requests (when enabled) and POST otherwise.
   */
  public void ask(BigInteger[] serialNumbers)
      throws OcspRequestorException, HttpClientException {
    byte[] ocspReq = buildRequest(serialNumbers);
    int size = ocspReq.length;

    FullHttpRequest request;

    if (size <= MAX_LEN_GET && requestOptions.isUseHttpGetForRequest()) {
      String b64Request = Base64.encodeToString(ocspReq);
      String urlEncodedReq;
      try {
        urlEncodedReq = URLEncoder.encode(b64Request, "UTF-8");
      } catch (UnsupportedEncodingException ex) {
        throw new OcspRequestorException(ex.getMessage());
      }
      String newRawpath = StringUtil.concat(responderRawPathGet, urlEncodedReq);

      request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
          HttpMethod.GET, newRawpath);
    } else {
      ByteBuf content = Unpooled.wrappedBuffer(ocspReq);
      request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1,
          HttpMethod.POST, responderRawPathPost, content);
      request.headers().addInt("Content-Length", content.readableBytes());
    }
    request.headers().add("Content-Type", "application/ocsp-request");

    httpClient.send(request);
  } // method ask

  /**
   * Encodes an OCSP request for the serial numbers; single-serial requests
   * without a nonce are cached and reused across calls.
   */
  private byte[] buildRequest(BigInteger[] serialNumbers)
      throws OcspRequestorException {
    // Requests with a nonce are unique per call and must not be cached.
    boolean canCache = (serialNumbers.length == 1) && !requestOptions.isUseNonce();
    if (canCache) {
      byte[] request = requests.get(serialNumbers[0]);
      if (request != null) {
        return request;
      }
    }

    OCSPReqBuilder reqBuilder = new OCSPReqBuilder();

    if (requestOptions.isUseNonce() || extensions != null) {
      List<Extension> extns = new ArrayList<>(2);
      if (requestOptions.isUseNonce()) {
        Extension extn = new Extension(OCSPObjectIdentifiers.id_pkix_ocsp_nonce, false,
            new DEROctetString(nextNonce(requestOptions.getNonceLen())));
        extns.add(extn);
      }

      if (extensions != null) {
        extns.addAll(Arrays.asList(extensions));
      }
      reqBuilder.setRequestExtensions(new Extensions(extns.toArray(extnType)));
    }

    try {
      for (BigInteger serialNumber : serialNumbers) {
        CertID certId = new CertID(issuerhashAlg.getAlgorithmIdentifier(),
            issuerNameHash, issuerKeyHash, new ASN1Integer(serialNumber));
        reqBuilder.addRequest(new CertificateID(certId));
      }

      byte[] request = reqBuilder.build().getEncoded();
      if (canCache) {
        requests.put(serialNumbers[0], request);
      }
      return request;
    } catch (OCSPException | IOException ex) {
      throw new OcspRequestorException(ex.getMessage(), ex);
    }
  } // method buildRequest

  /** Draws a fresh random nonce of the requested length. */
  private byte[] nextNonce(int nonceLen) {
    byte[] nonce = new byte[nonceLen];
    random.nextBytes(nonce);
    return nonce;
  }

}
| 2,923 |
7,272 | <reponame>saurabhkurve/DSA-Bootcamp-Java
package com.kunal.strings;
import java.util.ArrayList;
public class SubSeq {
    public static void main(String[] args) {
        // subseqAscii("", "abc");
        System.out.println(subseqAsciiRet("", "abc"));
    }

    /**
     * Prints every subsequence of {@code unprocessed}, each prefixed by the
     * characters already chosen in {@code processed}.
     */
    static void subseq(String processed, String unprocessed) {
        if (unprocessed.isEmpty()) {
            System.out.println(processed);
            return;
        }
        char head = unprocessed.charAt(0);
        String tail = unprocessed.substring(1);
        subseq(processed + head, tail); // include the current character
        subseq(processed, tail);        // exclude the current character
    }

    /**
     * Collects every subsequence of {@code unprocessed} (prefixed by
     * {@code processed}) into a list; include-branch results come first.
     */
    static ArrayList<String> subseqRet(String processed, String unprocessed) {
        if (unprocessed.isEmpty()) {
            ArrayList<String> single = new ArrayList<>();
            single.add(processed);
            return single;
        }
        char head = unprocessed.charAt(0);
        String tail = unprocessed.substring(1);
        ArrayList<String> collected = subseqRet(processed + head, tail);
        collected.addAll(subseqRet(processed, tail));
        return collected;
    }

    /**
     * Prints subsequences where each character has three choices: keep it,
     * drop it, or keep it as its ASCII code.
     */
    static void subseqAscii(String processed, String unprocessed) {
        if (unprocessed.isEmpty()) {
            System.out.println(processed);
            return;
        }
        char head = unprocessed.charAt(0);
        String tail = unprocessed.substring(1);
        subseqAscii(processed + head, tail);
        subseqAscii(processed, tail);
        subseqAscii(processed + (int) head, tail); // appended as ASCII value
    }

    /**
     * Same three-way expansion as {@link #subseqAscii(String, String)},
     * collected into a list: include, then exclude, then ASCII branch.
     */
    static ArrayList<String> subseqAsciiRet(String processed, String unprocessed) {
        if (unprocessed.isEmpty()) {
            ArrayList<String> single = new ArrayList<>();
            single.add(processed);
            return single;
        }
        char head = unprocessed.charAt(0);
        String tail = unprocessed.substring(1);
        ArrayList<String> collected = subseqAsciiRet(processed + head, tail);
        collected.addAll(subseqAsciiRet(processed, tail));
        collected.addAll(subseqAsciiRet(processed + (int) head, tail));
        return collected;
    }
}
| 913 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-pmj4-hxrq-f9wh",
"modified": "2022-01-09T00:00:24Z",
"published": "2022-01-04T00:00:50Z",
"aliases": [
"CVE-2021-24991"
],
"details": "The WooCommerce PDF Invoices & Packing Slips WordPress plugin before 2.10.5 does not escape the tab and section parameters before outputting it back in an attribute, leading to a Reflected Cross-Site Scripting in the admin dashboard",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-24991"
},
{
"type": "WEB",
"url": "https://wpscan.com/vulnerability/88e706df-ae03-4665-94a3-db226e1f31a9"
}
],
"database_specific": {
"cwe_ids": [
"CWE-79"
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 380 |
1,372 | <reponame>Russ76/mrpt
/* +------------------------------------------------------------------------+
| Mobile Robot Programming Toolkit (MRPT) |
| https://www.mrpt.org/ |
| |
| Copyright (c) 2005-2021, Individual contributors, see AUTHORS file |
| See: https://www.mrpt.org/Authors - All rights reserved. |
| Released under BSD License. See: https://www.mrpt.org/License |
+------------------------------------------------------------------------+ */
#pragma once
#include <mrpt/maps/CMultiMetricMap.h>
#include <mrpt/maps/COccupancyGridMap2D.h>
#include <nav_msgs/OccupancyGrid.h>
#include <cstdint>
#include <string>
namespace mrpt::ros1bridge
{
/** \addtogroup mrpt_ros1bridge_grp
* @{ */
/** @name Maps, Occupancy Grid Maps: ROS <-> MRPT
* @{ */
/** Methods to convert between ROS msgs and MRPT objects for map datatypes.
* @brief the map class is implemented as singeleton use map::instance
* ()->fromROS ...
*/
class MapHdl
{
   private:
#ifdef OCCUPANCY_GRIDMAP_CELL_SIZE_8BITS
	// Lookup table for cell value conversion, indexed by (cell - INT8_MIN),
	// i.e. index range [0, 0xFF] inclusive.
	int lut_cellmrpt2ros[0x100];
#else
	// Lookup table for cell value conversion, indexed by (cell - INT16_MIN),
	// i.e. index range [0, 0xFFFF] inclusive, hence 0x10000 entries. The
	// previous size of 0xFFFF made cellMrpt2Ros(INT16_MAX) read one element
	// past the end of the array.
	int lut_cellmrpt2ros[0x10000];
#endif
	int lut_cellros2mrpt[101];	// lookup table for cell value conversion

	MapHdl();
	MapHdl(const MapHdl&);
	~MapHdl() = default;

   public:
	/**
	 * @return the singleton instance
	 * @brief creates an instance with some lookup tables to speed up the
	 * conversions
	 */
	static MapHdl* instance();

#ifdef OCCUPANCY_GRIDMAP_CELL_SIZE_8BITS
	int8_t cellMrpt2Ros(int8_t i)
	{
		return lut_cellmrpt2ros[static_cast<int>(i) - INT8_MIN];
	}
#else
	int16_t cellMrpt2Ros(int16_t i)
	{
		return lut_cellmrpt2ros[static_cast<int>(i) - INT16_MIN];
	}
#endif
	int8_t cellRos2Mrpt(int8_t i)
	{
		if (i < 0)
		{
			// unobserved cells: no log-odds information
			return 0;
		}
		ASSERT_LE_(i, 100);
		return lut_cellros2mrpt[i];
	}

	/**
	 * loads an MRPT map
	 * @return true on success.
	 * @param _metric_map target multi-metric map to fill
	 * @param _config_file configuration source
	 * @param _map_file default: map.simplemap
	 * @param _section_name default: metricMap
	 * @param _debug default: false
	 */
	static bool loadMap(
		mrpt::maps::CMultiMetricMap& _metric_map,
		const mrpt::config::CConfigFileBase& _config_file,
		const std::string& _map_file = "map.simplemap",
		const std::string& _section_name = "metricMap", bool _debug = false);
};
/**
 * converts a ROS msg to an MRPT object
 * @return true on successful conversion, false on any error.
 * @param src source ROS occupancy grid
 * @param des destination MRPT grid map
 */
bool fromROS(
	const nav_msgs::OccupancyGrid& src, mrpt::maps::COccupancyGridMap2D& des);

/**
 * converts an MRPT object to a ROS msg and updates the msg header
 * @return true on successful conversion, false on any error.
 * @param src source MRPT grid map
 * @param header header copied into the resulting message
 */
bool toROS(
	const mrpt::maps::COccupancyGridMap2D& src, nav_msgs::OccupancyGrid& msg,
	const std_msgs::Header& header);

/**
 * converts an MRPT object to a ROS msg
 * @return true on successful conversion, false on any error.
 */
bool toROS(
	const mrpt::maps::COccupancyGridMap2D& src, nav_msgs::OccupancyGrid& msg);

/** @}
 * @}
 */
| 1,338 |
1,338 | /*
* Copyright (c) 2000-2008, <NAME> <<EMAIL>>,
* Copyright (c) 2000-2008, <NAME> <<EMAIL>>,
* All Rights Reserved. Distributed under the terms of the MIT license.
*/
/*! This class listens to a PlaybackManager
The hooks are called by PlaybackManager after it executed a command,
to keep every listener informed. FrameDropped() is something the nodes
	can call and it is passed on to the controllers, so that they can respond
by displaying some kind of warning. */
#ifndef PLAYBACK_LISTENER_H
#define PLAYBACK_LISTENER_H
#include <Rect.h>
#include <SupportDefs.h>
class PlaybackListener {
 public:
	PlaybackListener();
	virtual ~PlaybackListener();

	// Notification hooks invoked by PlaybackManager after it executed a
	// command, keeping every listener informed (see file comment above).
	virtual void PlayModeChanged(int32 mode);
	virtual void LoopModeChanged(int32 mode);
	virtual void LoopingEnabledChanged(bool enabled);
	virtual void VideoBoundsChanged(BRect bounds);
	virtual void FramesPerSecondChanged(float fps);
	virtual void SpeedChanged(float speed);
	virtual void CurrentFrameChanged(double frame);
	// Called when a node drops a frame, so controllers can show a warning.
	virtual void FrameDropped();
};
#endif // PLAYBACK_LISTENER_H
| 360 |
19,628 | package com.didichuxing.doraemonkit.widget.tableview.intface;
import android.view.MotionEvent;
import android.view.View;
public interface ITouch {

    /**
     * Decides whether to ask parent views not to intercept the given touch
     * event; used to resolve gesture conflicts.
     *
     * @param view  the view receiving the event
     * @param event the touch event
     */
    void onDisallowInterceptEvent(View view, MotionEvent event);

    /**
     * Handles a touch event.
     *
     * @param event the touch event
     * @return whether the event was handled
     */
    boolean handlerTouchEvent(MotionEvent event);
}
| 212 |
575 | <filename>chrome/browser/extensions/media_router_extension_access_logger_impl.h
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_EXTENSIONS_MEDIA_ROUTER_EXTENSION_ACCESS_LOGGER_IMPL_H_
#define CHROME_BROWSER_EXTENSIONS_MEDIA_ROUTER_EXTENSION_ACCESS_LOGGER_IMPL_H_
#include "extensions/browser/media_router_extension_access_logger.h"
namespace content {
class BrowserContext;
}
namespace url {
class Origin;
}
namespace extensions {
// Concrete implementation of MediaRouterExtensionAccessLogger (behavior
// defined in the corresponding .cc file).
class MediaRouterExtensionAccessLoggerImpl
    : public MediaRouterExtensionAccessLogger {
 public:
  ~MediaRouterExtensionAccessLoggerImpl() override;

  // MediaRouterExtensionAccessLogger:
  void LogMediaRouterComponentExtensionUse(
      const url::Origin& origin,
      content::BrowserContext* context) const override;
};
} // namespace extensions
#endif // CHROME_BROWSER_EXTENSIONS_MEDIA_ROUTER_EXTENSION_ACCESS_LOGGER_IMPL_H_
| 333 |
1,935 | <reponame>parmance/HIP
/*
Copyright (c) 2021 Advanced Micro Devices, Inc. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/**
Functional test for Memset3D and Memset3DAsync
*/
#include <hip_test_common.hh>
/**
* Basic Functional test of hipMemset3D
*/
TEST_CASE("Unit_hipMemset3D_BasicFunctional") {
  constexpr int memsetval = 0x22;  // byte value expected in every element
  constexpr size_t numH = 256;     // rows
  constexpr size_t numW = 256;     // columns
  constexpr size_t depth = 10;     // slices
  size_t width = numW * sizeof(char);  // row width in bytes
  size_t sizeElements = width * numH * depth;
  size_t elements = numW * numH * depth;
  char *A_h;
  hipExtent extent = make_hipExtent(width, numH, depth);
  hipPitchedPtr devPitchedPtr;

  // Allocate a pitched 3D device buffer, and a host buffer pre-filled with a
  // different value so the effect of the memset is observable.
  HIP_CHECK(hipMalloc3D(&devPitchedPtr, extent));
  A_h = reinterpret_cast<char *>(malloc(sizeElements));
  REQUIRE(A_h != nullptr);
  for (size_t i = 0; i < elements; i++) {
    A_h[i] = 1;
  }
  HIP_CHECK(hipMemset3D(devPitchedPtr, memsetval, extent));
  // Copy the device buffer back to the host and verify every element.
  hipMemcpy3DParms myparms{};
  myparms.srcPos = make_hipPos(0, 0, 0);
  myparms.dstPos = make_hipPos(0, 0, 0);
  myparms.dstPtr = make_hipPitchedPtr(A_h, width , numW, numH);
  myparms.srcPtr = devPitchedPtr;
  myparms.extent = extent;
#if HT_NVIDIA
  myparms.kind = hipMemcpyKindToCudaMemcpyKind(hipMemcpyDeviceToHost);
#else
  myparms.kind = hipMemcpyDeviceToHost;
#endif
  HIP_CHECK(hipMemcpy3D(&myparms));
  for (size_t i = 0; i < elements; i++) {
    if (A_h[i] != memsetval) {
      INFO("Memset3D mismatch at index:" << i << " computed:"
                                 << A_h[i] << " memsetval:" << memsetval);
      REQUIRE(false);
    }
  }
  HIP_CHECK(hipFree(devPitchedPtr.ptr));
  free(A_h);
}
/**
* Basic Functional test of hipMemset3DAsync
*/
TEST_CASE("Unit_hipMemset3DAsync_BasicFunctional") {
  // Same scenario as Unit_hipMemset3D_BasicFunctional, but the memset runs
  // asynchronously on a dedicated stream and is synchronized before readback.
  constexpr int memsetval = 0x22;  // byte value expected in every element
  constexpr size_t numH = 256;     // rows
  constexpr size_t numW = 256;     // columns
  constexpr size_t depth = 10;     // slices
  size_t width = numW * sizeof(char);  // row width in bytes
  size_t sizeElements = width * numH * depth;
  size_t elements = numW * numH * depth;
  hipExtent extent = make_hipExtent(width, numH, depth);
  hipPitchedPtr devPitchedPtr;
  char *A_h;

  HIP_CHECK(hipMalloc3D(&devPitchedPtr, extent));
  A_h = reinterpret_cast<char *>(malloc(sizeElements));
  REQUIRE(A_h != nullptr);
  for (size_t i = 0; i < elements; i++) {
    A_h[i] = 1;
  }
  hipStream_t stream;
  HIP_CHECK(hipStreamCreate(&stream));
  HIP_CHECK(hipMemset3DAsync(devPitchedPtr, memsetval, extent, stream));
  // Ensure the async memset has completed before copying back.
  HIP_CHECK(hipStreamSynchronize(stream));
  hipMemcpy3DParms myparms{};
  myparms.srcPos = make_hipPos(0, 0, 0);
  myparms.dstPos = make_hipPos(0, 0, 0);
  myparms.dstPtr = make_hipPitchedPtr(A_h, width , numW, numH);
  myparms.srcPtr = devPitchedPtr;
  myparms.extent = extent;
#if HT_NVIDIA
  myparms.kind = hipMemcpyKindToCudaMemcpyKind(hipMemcpyDeviceToHost);
#else
  myparms.kind = hipMemcpyDeviceToHost;
#endif
  HIP_CHECK(hipMemcpy3D(&myparms));
  for (size_t i = 0; i < elements; i++) {
    if (A_h[i] != memsetval) {
      INFO("Memset3DAsync mismatch at index:" << i << " computed:"
                                 << A_h[i] << " memsetval:" << memsetval);
      REQUIRE(false);
    }
  }
  HIP_CHECK(hipFree(devPitchedPtr.ptr));
  free(A_h);
}
| 1,622 |
538 | <gh_stars>100-1000
#pragma once
#include "Scanner.h"
#include "BytePattern.h"
enum HeartBeatSnesVersion: uint8_t; // see HeartBeatSnesFormat.h
// Scanner locating HeartBeat SNES sequence data in .spc dumps, either
// directly in emulated ARAM or in ROM images.
class HeartBeatSnesScanner:
    public VGMScanner {
 public:
  HeartBeatSnesScanner(void) {
    USE_EXTENSION(L"spc");
  }
  virtual ~HeartBeatSnesScanner(void) {
  }

  virtual void Scan(RawFile *file, void *info = 0);
  void SearchForHeartBeatSnesFromARAM(RawFile *file);
  void SearchForHeartBeatSnesFromROM(RawFile *file);

 private:
  // Byte patterns matched against the sound driver code to locate the song
  // list, DIR/SRCN handling, and the sequence header address.
  static BytePattern ptnReadSongList;
  static BytePattern ptnSetDIR;
  static BytePattern ptnLoadSRCN;
  static BytePattern ptnSaveSeqHeaderAddress;
};
| 229 |
435 | <gh_stars>100-1000
package com.github.airk.triggertest;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.github.airk.trigger.Trigger;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Stack;
/**
* Created by kevin on 15/4/23.
*/
/**
 * Base activity driving reflection-based on-device tests: subclasses mark
 * methods with @Test (plus optional @SetUp/@TearDown), which are discovered
 * and executed one by one on a background HandlerThread while progress is
 * shown in the UI.
 */
public abstract class TestBaseActivity extends AppCompatActivity {
    TextView textView;        // shows the currently running test name
    ProgressBar progressBar;  // overall progress across all tests
    Trigger trigger;
    Stack<Method> testMethod; // pending @Test methods, popped one per run
    Method setUp;             // optional @SetUp method, invoked before each test
    Method tearDown;          // optional @TearDown method, invoked after each test
    TestRunner runner;
    int total;                // number of discovered @Test methods
    int remain = 0;           // number of tests started so far

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test);
        textView = (TextView) findViewById(R.id.text);
        progressBar = (ProgressBar) findViewById(R.id.progress);
        textView.setText("Begin");
        trigger = Trigger.getInstance(this);
        runner = new TestRunner(this);
        runner.start();
        // NOTE(review): runner.runTest() relies on runner.handler, which is
        // only assigned in onLooperPrepared(); the 500 ms delay below papers
        // over that race — confirm it cannot fire earlier on slow devices.
        findOutTestMethod();
        runTestMethod();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        runner.quit();
    }

    // Collects @Test methods (and the optional @SetUp/@TearDown hooks)
    // declared on the concrete subclass via reflection.
    private void findOutTestMethod() {
        testMethod = new Stack<>();
        Method[] methods = this.getClass().getDeclaredMethods();
        for (Method m : methods) {
            if (m.getAnnotation(Test.class) != null) {
                testMethod.push(m);
            } else if (m.getAnnotation(SetUp.class) != null) {
                setUp = m;
            } else if (m.getAnnotation(TearDown.class) != null) {
                tearDown = m;
            }
        }
        total = testMethod.size();
        Log.d("TEST", total + " method to be tested...");
    }

    // Pops and schedules one test per handler message until the stack is
    // empty; a final runTest(null) signals completion.
    private void runTestMethod() {
        final int MSG = 1;
        final Handler handler = new Handler() {
            @Override
            public void handleMessage(Message msg) {
                if (!testMethod.empty()) {
                    runner.runTest(testMethod.pop());
                    sendEmptyMessage(MSG);
                } else {
                    runner.runTest(null);
                }
            }
        };
        handler.sendEmptyMessageDelayed(MSG, 500);
    }

    // Shows a toast from any thread by posting to the main looper.
    void makeToast(final CharSequence cs) {
        Handler handler = new Handler(Looper.getMainLooper());
        handler.post(new Runnable() {
            @Override
            public void run() {
                Toast.makeText(TestBaseActivity.this, cs, Toast.LENGTH_SHORT).show();
            }
        });
    }

    /**
     * Background thread that actually invokes the test methods;
     * runTest(null) marks the end of the run and finishes the activity.
     */
    static class TestRunner extends HandlerThread {
        final TestBaseActivity owner;
        Handler handler;

        public TestRunner(TestBaseActivity owner) {
            super("TestRunner");
            this.owner = owner;
        }

        @Override
        protected void onLooperPrepared() {
            super.onLooperPrepared();
            handler = new Handler(getLooper());
        }

        public void runTest(final Method m) {
            if (m == null) {
                // All tests done: show "DONE", then toast and finish.
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        owner.textView.post(new Runnable() {
                            @Override
                            public void run() {
                                owner.textView.setAlpha(0f);
                                owner.textView.setText("DONE");
                                owner.textView.animate().alpha(1f).setDuration(380);
                                owner.progressBar.setVisibility(View.INVISIBLE);
                            }
                        });
                    }
                });
                handler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        owner.makeToast("All test succeed.");
                        owner.finish();
                    }
                }, 500);
            } else {
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            owner.remain++;
                            // Update the test name and progress on the UI thread.
                            owner.textView.post(new Runnable() {
                                @Override
                                public void run() {
                                    owner.textView.setText(m.getName());
                                    float f = (float) owner.remain / owner.total;
                                    owner.progressBar.setProgress((int) (100 * f));
                                }
                            });
                            if (owner.setUp != null) {
                                owner.setUp.invoke(owner, m.getName());
                            }
                            m.invoke(owner);
                            if (owner.tearDown != null) {
                                owner.tearDown.invoke(owner);
                            }
                        } catch (IllegalAccessException e) {
                            e.printStackTrace();
                        } catch (InvocationTargetException e) {
                            // A test threw: report it and abort the run loudly.
                            e.printStackTrace();
                            owner.makeToast("Test failed: " + m.getName());
                            throw new RuntimeException("Test failed: " + m.getName());
                        }
                    }
                });
            }
        }
    }
}
| 3,118 |
30,023 | {
"config": {
"abort": {
"cannot_connect": "No se pudo conectar con el controlador con url {base_url}"
},
"step": {
"user": {
"data": {
"exclude": "Identificadores de dispositivos Vera a excluir de Home Assistant",
"lights": "Identificadores de interruptores Vera que deben ser tratados como luces en Home Assistant",
"vera_controller_url": "URL del controlador"
},
"data_description": {
"vera_controller_url": "Deber\u00eda verse as\u00ed: http://192.168.1.161:3480"
}
}
}
},
"options": {
"step": {
"init": {
"data": {
"exclude": "Identificadores de dispositivos Vera a excluir de Home Assistant",
"lights": "Identificadores de interruptores Vera que deben ser tratados como luces en Home Assistant"
},
"description": "Consulte la documentaci\u00f3n de Vera para obtener detalles sobre los par\u00e1metros opcionales: https://www.home-assistant.io/integrations/vera/. Nota: Cualquier cambio aqu\u00ed necesitar\u00e1 un reinicio del servidor de Home Assistant. Para borrar valores, introduce un espacio.",
"title": "Opciones del controlador Vera"
}
}
}
} | 721 |
14,668 | // Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import java.util.Collection;
import java.util.List;
/**
* An interface extending {@code List<String>} that also provides access to the items of the list as
* UTF8-encoded ByteString or byte[] objects. This is used by the protocol buffer implementation to
* support lazily converting bytes parsed over the wire to String objects until needed and also
* increases the efficiency of serialization if the String was never requested as the ByteString or
* byte[] is already cached. The ByteString methods are used in immutable API only and byte[]
* methods used in mutable API only for they use different representations for string/bytes fields.
*
* @author <EMAIL> (<NAME>)
*/
public interface LazyStringList extends ProtocolStringList {
  /**
   * Returns the element at the specified position in this list as a ByteString.
   *
   * @param index index of the element to return
   * @return the element at the specified position in this list
   * @throws IndexOutOfBoundsException if the index is out of range ({@code index < 0 || index >=
   *     size()})
   */
  ByteString getByteString(int index);
  /**
   * Returns the element at the specified position in this list as an Object that will either be a
   * String or a ByteString.
   *
   * @param index index of the element to return
   * @return the element at the specified position in this list
   * @throws IndexOutOfBoundsException if the index is out of range ({@code index < 0 || index >=
   *     size()})
   */
  Object getRaw(int index);
  /**
   * Returns the element at the specified position in this list as byte[].
   *
   * @param index index of the element to return
   * @return the element at the specified position in this list
   * @throws IndexOutOfBoundsException if the index is out of range ({@code index < 0 || index >=
   *     size()})
   */
  byte[] getByteArray(int index);
  /**
   * Appends the specified element to the end of this list (optional operation).
   *
   * @param element element to be appended to this list
   * @throws UnsupportedOperationException if the {@code add} operation is not supported by this
   *     list
   */
  void add(ByteString element);
  /**
   * Appends the specified element to the end of this list (optional operation).
   *
   * @param element element to be appended to this list
   * @throws UnsupportedOperationException if the {@code add} operation is not supported by this
   *     list
   */
  void add(byte[] element);
  /**
   * Replaces the element at the specified position in this list with the specified element
   * (optional operation).
   *
   * @param index index of the element to replace
   * @param element the element to be stored at the specified position
   * @throws UnsupportedOperationException if the {@code set} operation is not supported by this
   *     list; IndexOutOfBoundsException if the index is out of range ({@code index < 0 || index >=
   *     size()})
   */
  void set(int index, ByteString element);
  /**
   * Replaces the element at the specified position in this list with the specified element
   * (optional operation).
   *
   * @param index index of the element to replace
   * @param element the element to be stored at the specified position
   * @throws UnsupportedOperationException if the {@code set} operation is not supported by this
   *     list; IndexOutOfBoundsException if the index is out of range ({@code index < 0 || index >=
   *     size()})
   */
  void set(int index, byte[] element);
  /**
   * Appends all elements in the specified ByteString collection to the end of this list.
   *
   * @param c collection whose elements are to be added to this list
   * @return true if this list changed as a result of the call
   * @throws UnsupportedOperationException if the {@code addAllByteString} operation is not
   *     supported by this list
   */
  boolean addAllByteString(Collection<? extends ByteString> c);
  /**
   * Appends all elements in the specified byte[] collection to the end of this list.
   *
   * @param c collection whose elements are to be added to this list
   * @return true if this list changed as a result of the call
   * @throws UnsupportedOperationException if the {@code addAllByteArray} operation is not
   *     supported by this list
   */
  boolean addAllByteArray(Collection<byte[]> c);
  /**
   * Returns an unmodifiable List of the underlying elements, each of which is either a {@code
   * String} or its equivalent UTF-8 encoded {@code ByteString} or byte[]. It is an error for the
   * caller to modify the returned List, and attempting to do so will result in an {@link
   * UnsupportedOperationException}.
   */
  List<?> getUnderlyingElements();
  /**
   * Merges all elements from another LazyStringList into this one. This method differs from {@link
   * #addAll(Collection)} in that underlying byte arrays are copied instead of reference shared.
   * Immutable API doesn't need to use this method as byte[] is not used there at all.
   */
  void mergeFrom(LazyStringList other);
  /**
   * Returns a mutable view of this list. Changes to the view will be made into the original list.
   * This method is used in mutable API only.
   */
  List<byte[]> asByteArrayList();
  /** Returns an unmodifiable view of the list. */
  LazyStringList getUnmodifiableView();
}
| 1,961 |
746 | package org.protege.editor.owl.model.entity;
import org.protege.editor.owl.ui.action.SelectedOWLEntityAction;
import org.semanticweb.owlapi.model.OWLEntity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.awt.*;
import java.io.IOException;
/**
* <NAME>
* Stanford Center for Biomedical Informatics Research
* 8 Sep 2017
*/
public class ShowEntityIriInWebBroswerAction extends SelectedOWLEntityAction {

    private static final Logger logger = LoggerFactory.getLogger(ShowEntityIriInWebBroswerAction.class);

    public ShowEntityIriInWebBroswerAction() {
        putValue(NAME, "Show in Web browser");
    }

    /**
     * Opens the IRI of the selected entity in the system Web browser.
     *
     * @param selectedEntity the entity whose IRI should be shown.
     */
    @Override
    protected void actionPerformed(OWLEntity selectedEntity) {
        // Desktop.getDesktop() throws UnsupportedOperationException on headless
        // or unsupported platforms, so guard before calling browse().
        if (!Desktop.isDesktopSupported() || !Desktop.getDesktop().isSupported(Desktop.Action.BROWSE)) {
            logger.warn("Opening a Web browser is not supported on this platform");
            return;
        }
        try {
            Desktop.getDesktop().browse(selectedEntity.getIRI().toURI());
        } catch (IOException e) {
            logger.error("An error occurred while attempting to open the selected entity IRI in a Web browser", e);
        }
    }

    @Override
    protected void disposeAction() throws Exception {
    }
}
| 373 |
575 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_WEB_VIEW_INTERNAL_PASSWORDS_WEB_VIEW_PASSWORD_MANAGER_LOG_ROUTER_FACTORY_H_
#define IOS_WEB_VIEW_INTERNAL_PASSWORDS_WEB_VIEW_PASSWORD_MANAGER_LOG_ROUTER_FACTORY_H_
#include "base/macros.h"
#include "base/no_destructor.h"
#include "components/keyed_service/ios/browser_state_keyed_service_factory.h"
namespace autofill {
class LogRouter;
}
namespace ios_web_view {
class WebViewBrowserState;
// Singleton factory that owns all password manager LogRouter instances and
// associates them with WebViewBrowserState.
// NOTE(review): the previous comment said "owns all PasswordStores", which
// looks like a copy/paste from another factory; this factory builds LogRouters.
class WebViewPasswordManagerLogRouterFactory
    : public BrowserStateKeyedServiceFactory {
 public:
  // Returns the LogRouter for |browser_state|, creating it on first use.
  static autofill::LogRouter* GetForBrowserState(
      WebViewBrowserState* browser_state);
  static WebViewPasswordManagerLogRouterFactory* GetInstance();

 private:
  friend class base::NoDestructor<WebViewPasswordManagerLogRouterFactory>;
  WebViewPasswordManagerLogRouterFactory();
  ~WebViewPasswordManagerLogRouterFactory() override;
  // BrowserStateKeyedServiceFactory:
  std::unique_ptr<KeyedService> BuildServiceInstanceFor(
      web::BrowserState* context) const override;
  DISALLOW_COPY_AND_ASSIGN(WebViewPasswordManagerLogRouterFactory);
};
#endif // IOS_WEB_VIEW_INTERNAL_PASSWORDS_WEB_VIEW_PASSWORD_MANAGER_LOG_ROUTER_FACTORY_H_
| 490 |
1,103 | <gh_stars>1000+
# -*- coding: utf-8 -*-
import datetime
import os
import zipfile
from fooltrader.contract.files_contract import get_code_from_path
from fooltrader.settings import FOOLTRADER_STORE_PATH, STOCK_END_CODE, STOCK_START_CODE
def zip_dir(src_dir=FOOLTRADER_STORE_PATH, start_code=STOCK_START_CODE, end_code=STOCK_END_CODE, dst_dir=None,
            zip_file_name=None, include_tick=False, just_tick=False):
    """Archive ``src_dir`` into a zip file, optionally filtering by stock code
    range and by tick data.

    :param src_dir: root directory to walk and archive
    :param start_code: lowest stock code (inclusive) to include
    :param end_code: highest stock code (inclusive) to include
    :param dst_dir: directory for the archive; defaults to the parent of src_dir
    :param zip_file_name: archive file name; defaults to "data-<today>.zip"
    :param include_tick: when False, paths containing 'tick' are skipped
    :param just_tick: when True, ONLY paths containing 'tick' are archived
    """
    if not zip_file_name:
        zip_file_name = "data-{}.zip".format(datetime.datetime.today())
    if dst_dir:
        dst_path = os.path.join(dst_dir, zip_file_name)
    else:
        dst_path = os.path.abspath(os.path.join(src_dir, os.pardir, zip_file_name))
    # Use a context manager so the archive is closed (and its central
    # directory flushed) even if an exception is raised while walking.
    with zipfile.ZipFile(dst_path, 'w') as the_zip_file:
        for folder, subfolders, files in os.walk(src_dir):
            for file in files:
                the_path = os.path.join(folder, file)
                # 过滤code (filter by stock code extracted from the path)
                current_code = get_code_from_path(the_path=the_path)
                if current_code:
                    if current_code > end_code or current_code < start_code:
                        continue
                # 只打包tick (only archive tick data)
                if just_tick:
                    if not 'tick' in the_path:
                        continue
                # 不打包tick (skip tick data)
                elif not include_tick and 'tick' in the_path:
                    continue
                print("zip {}".format(the_path))
                the_zip_file.write(the_path,
                                   os.path.relpath(the_path, src_dir),
                                   compress_type=zipfile.ZIP_DEFLATED)
def zip_data(src_dir=FOOLTRADER_STORE_PATH, dst_dir=None, zip_file_name=None):
    """Archive ``src_dir`` into a zip file, always excluding tick data.

    :param src_dir: root directory to walk and archive
    :param dst_dir: directory for the archive; defaults to the parent of src_dir
    :param zip_file_name: archive file name; defaults to "data-<today>.zip"
    """
    if not zip_file_name:
        zip_file_name = "data-{}.zip".format(datetime.datetime.today())
    if dst_dir:
        dst_path = os.path.join(dst_dir, zip_file_name)
    else:
        dst_path = os.path.abspath(os.path.join(src_dir, os.pardir, zip_file_name))
    # Use a context manager so the archive is closed (and its central
    # directory flushed) even if an exception is raised while walking.
    with zipfile.ZipFile(dst_path, 'w') as the_zip_file:
        for folder, subfolders, files in os.walk(src_dir):
            for file in files:
                the_path = os.path.join(folder, file)
                # 不打包tick (skip tick data)
                if 'tick' in the_path:
                    continue
                print("zip {}".format(the_path))
                the_zip_file.write(the_path,
                                   os.path.relpath(the_path, src_dir),
                                   compress_type=zipfile.ZIP_DEFLATED)
def unzip(zip_file, dst_dir):
    """Extract every member of ``zip_file`` into ``dst_dir``.

    The archive is opened with a context manager so the underlying file
    handle is released even if extraction fails part-way.
    """
    print("start unzip {} to {}".format(zip_file, dst_dir))
    with zipfile.ZipFile(zip_file) as the_zip_file:
        the_zip_file.extractall(dst_dir)
    print("finish unzip {} to {}".format(zip_file, dst_dir))
if __name__ == '__main__':
    # Manual entry point: archive the whole data directory (ticks excluded).
    # The commented calls below are kept as usage examples.
    zip_data()
    # zip_dir(zip_file_name="data.zip", just_tick=True, start_code='000002', end_code='000002')
    # unzip(os.path.abspath(os.path.join(FOOLTRADER_STORE_PATH, os.pardir, "data.zip")), FOOLTRADER_STORE_PATH)
    # unzip("/home/xuanqi/workspace/github/fooltrader/data/future/shfe/2009_shfe_history_data.zip",
    #       get_exchange_dir(security_type='future', exchange='shfe'))
| 1,563 |
493 | /**
* Copyright (C) 2016 Turi
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
/*
* Copyright (c) 2009 Carnegie Mellon University.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS
* IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*
* For more about this software visit:
*
* http://www.graphlab.ml.cmu.edu
*
*/
/*
\author <NAME> (ylow)
An implementation of a distributed integer -> integer map with caching
capabilities.
*/
#ifndef GRAPHLAB_LAZY_DHT_HPP
#define GRAPHLAB_LAZY_DHT_HPP
#include <boost/unordered_map.hpp>
#include <boost/intrusive/list.hpp>
#include <rpc/dc.hpp>
#include <parallel/pthread_tools.hpp>
#include <graphlab/util/synchronized_unordered_map.hpp>
#include <util/dense_bitset.hpp>
namespace graphlab {
/**
\internal
\ingroup rpc
This implements a distributed key -> value map with caching
capabilities. It is up to the user to determine cache
invalidation policies. User explicitly calls the invalidate()
function to clear local cache entries. This is an extremely lazy
DHT in that it is up to the user to guarantee that the keys are
unique. Any machine can call set on any key, and the result of
the key will be stored locally. Reads on any unknown keys will be
resolved using a broadcast operation.
*/
template<typename KeyType, typename ValueType>
class lazy_dht{
 public:
  typedef dc_impl::lru_list<KeyType, ValueType> lru_entry_type;
  /// datatype of the data map
  typedef boost::unordered_map<KeyType, ValueType> map_type;
  /// datatype of the local cache map
  typedef boost::unordered_map<KeyType, lru_entry_type* > cache_type;

  // Per-request rendezvous used by get(): the caller blocks on 'cond' until
  // every other machine has replied; a machine that owns the key fills in
  // 'val' and sets 'hasvalue'.
  struct wait_struct {
    mutex mut;
    conditional cond;
    ValueType val;
    size_t numreplies;
    bool hasvalue;
  };

  typedef boost::intrusive::member_hook<lru_entry_type,
                                        typename lru_entry_type::lru_member_hook_type,
                                        &lru_entry_type::member_hook_> MemberOption;
  /// datatype of the intrusive LRU list embedded in the cache map
  typedef boost::intrusive::list<lru_entry_type,
                                 MemberOption,
                                 boost::intrusive::constant_time_size<false> > lru_list_type;

  /// Constructor. Creates the integer map.
  lazy_dht(distributed_control &dc,
           size_t max_cache_size = 65536):rmi(dc, this),data(11) {
    cache.rehash(max_cache_size);
    maxcache = max_cache_size;
    logger(LOG_INFO, "%d Creating distributed_hash_table. Cache Limit = %d",
           dc.procid(), maxcache);
    reqs = 0;
    misses = 0;
    dc.barrier();  // all machines must construct the table before any RPC use
  }

  ~lazy_dht() {
    data.clear();
    // cache entries are heap-allocated; delete each before clearing the map
    typename cache_type::iterator i = cache.begin();
    while (i != cache.end()) {
      delete i->second;
      ++i;
    }
    cache.clear();
  }

  /// Sets the key to the value
  void set(const KeyType& key, const ValueType &newval) {
    datalock.lock();
    data[key] = newval;
    datalock.unlock();
  }

  // Looks the key up in the locally owned data only.
  // Returns (found, value); 'value' is default-constructed when not found.
  std::pair<bool, ValueType> get_owned(const KeyType &key) const {
    std::pair<bool, ValueType> ret;
    datalock.lock();
    typename map_type::const_iterator iter = data.find(key);
    if (iter == data.end()) {
      ret.first = false;
    }
    else {
      ret.first = true;
      ret.second = iter->second;
    }
    datalock.unlock();
    return ret;
  }

  // RPC target: looks the key up locally and replies to 'source' with the
  // result. 'ptr' is the caller's wait_struct address, passed back verbatim.
  void remote_get_owned(const KeyType &key, procid_t source, size_t ptr) const {
    std::pair<bool, ValueType> ret;
    datalock.lock();
    typename map_type::const_iterator iter = data.find(key);
    if (iter == data.end()) {
      ret.first = false;
    }
    else {
      ret.first = true;
      ret.second = iter->second;
    }
    datalock.unlock();
    rmi.RPC_CALL(remote_call, &lazy_dht<KeyType,ValueType>::get_reply)
               (source, ptr, ret.second, ret.first);
  }

  // RPC reply handler: records the value (if any) into the originating
  // wait_struct and wakes the waiting thread once all replies have arrived.
  void get_reply(size_t ptr, ValueType& val, bool hasvalue) {
    wait_struct* w = reinterpret_cast<wait_struct*>(ptr);
    w->mut.lock();
    if (hasvalue) {
      w->val = val;
      w->hasvalue = true;
    }
    w->numreplies--;
    if (w->numreplies == 0) w->cond.signal();
    w->mut.unlock();
  }

  /** Gets the value associated with the key. returns true on success.. */
  std::pair<bool, ValueType> get(const KeyType &key) const {
    std::pair<bool, ValueType> ret = get_owned(key);
    if (ret.first) return ret;
    wait_struct w;
    w.numreplies = rmi.numprocs() - 1;
    size_t ptr = reinterpret_cast<size_t>(&w);
    // otherwise I need to find someone with the key: broadcast to all peers
    for (size_t i = 0;i < rmi.numprocs(); ++i) {
      if (i != rmi.procid()) {
        rmi.RPC_CALL(remote_call,&lazy_dht<KeyType,ValueType>::remote_get_owned)
                    (i, key, rmi.procid(), ptr);
      }
    }
    // block until every peer has replied
    w.mut.lock();
    while (w.numreplies > 0) w.cond.wait(w.mut);
    w.mut.unlock();
    ret.first = w.hasvalue;
    ret.second = w.val;
    if (ret.first) update_cache(key, ret.second);
    return ret;
  }

  /** Gets the value associated with the key, reading from cache if available
      Note that the cache may be out of date. */
  std::pair<bool, ValueType> get_cached(const KeyType &key) const {
    std::pair<bool, ValueType> ret = get_owned(key);
    if (ret.first) return ret;
    // NOTE(review): reqs/misses are incremented without holding a lock;
    // the counters may under-count under concurrency -- confirm acceptable.
    reqs++;
    cachelock.lock();
    // check if it is in the cache
    typename cache_type::iterator i = cache.find(key);
    if (i == cache.end()) {
      // nope. not in cache. Call the regular get
      cachelock.unlock();
      misses++;
      return get(key);
    }
    else {
      // yup. in cache. return the value
      ret.first = true;
      ret.second = i->second->value;
      // shift the cache entry to the head of the LRU list
      lruage.erase(lru_list_type::s_iterator_to(*(i->second)));
      lruage.push_front(*(i->second));
      cachelock.unlock();
      return ret;
    }
  }

  /// Invalidates the cache entry associated with this key
  void invalidate(const KeyType &key) const{
    cachelock.lock();
    // is the key I am invalidating in the cache?
    typename cache_type::iterator i = cache.find(key);
    if (i != cache.end()) {
      // drop it from the lru list
      delete i->second;
      cache.erase(i);
    }
    cachelock.unlock();
  }

  // Fraction of get_cached() calls that fell through to a remote get().
  double cache_miss_rate() {
    return double(misses) / double(reqs);
  }

  size_t num_gets() const {
    return reqs;
  }
  size_t num_misses() const {
    return misses;
  }

  size_t cache_size() const {
    return cache.size();
  }

 private:
  mutable dc_dist_object<lazy_dht<KeyType, ValueType> > rmi;

  mutex datalock;
  map_type data;   /// The actual table data that is distributed

  mutex cachelock; /// lock for the cache datastructures
  mutable cache_type cache;     /// The cache table
  mutable lru_list_type lruage; /// The LRU linked list associated with the cache

  // NOTE(review): 'numprocs' is never initialized or read in this header;
  // rmi.numprocs() is used instead -- confirm before relying on this member.
  procid_t numprocs;            /// Number of processors
  size_t maxcache;              /// Maximum cache size allowed

  mutable size_t reqs;
  mutable size_t misses;

  /// Updates the cache with this new value
  // NOTE(review): cachelock is released between the size check and the
  // insert below, so under concurrency the cache can transiently exceed
  // maxcache; remove_lru() re-acquires the lock itself, hence the unlock.
  void update_cache(const KeyType &key, const ValueType &val) const{
    cachelock.lock();
    typename cache_type::iterator i = cache.find(key);
    // create a new entry
    if (i == cache.end()) {
      cachelock.unlock();
      // if we are out of room, remove the lru entry
      if (cache.size() >= maxcache) remove_lru();
      cachelock.lock();
      // insert the element, remember the iterator so we can push it
      // straight to the LRU list
      std::pair<typename cache_type::iterator, bool> ret = cache.insert(std::make_pair(key, new lru_entry_type(key, val)));
      if (ret.second) lruage.push_front(*(ret.first->second));
    }
    else {
      // modify entry in place
      i->second->value = val;
      // swap to front of list
      //boost::swap_nodes(lru_list_type::s_iterator_to(i->second), lruage.begin());
      lruage.erase(lru_list_type::s_iterator_to(*(i->second)));
      lruage.push_front(*(i->second));
    }
    cachelock.unlock();
  }

  /// Removes the least recently used element from the cache
  void remove_lru() const{
    cachelock.lock();
    KeyType keytoerase = lruage.back().key;
    // is the key I am invalidating in the cache?
    typename cache_type::iterator i = cache.find(keytoerase);
    if (i != cache.end()) {
      // drop it from the lru list
      delete i->second;
      cache.erase(i);
    }
    cachelock.unlock();
  }
};
}
#endif
| 4,044 |
430 | import numpy as np
# @manual=//deeplearning/trt/python:py_tensorrt
import tensorrt as trt
import torch
from ..converter_registry import tensorrt_converter
from .converter_utils import mark_as_int8_layer
def common_activation(
    network, mod, input_val, activation_type, activation_dyn_range_fn, layer_name
):
    """Append an activation layer to ``network`` and return its output tensor.

    If the input tensor carries a dynamic range, the new layer is tagged for
    INT8 execution using the range produced by ``activation_dyn_range_fn``.
    """
    act_layer = network.add_activation(input=input_val, type=activation_type)
    act_layer.name = layer_name

    input_range = input_val.dynamic_range
    if input_range:
        mark_as_int8_layer(act_layer, activation_dyn_range_fn(input_range))

    return act_layer.get_output(0)
@tensorrt_converter(torch.nn.functional.relu)
@tensorrt_converter(torch.nn.modules.activation.ReLU)
def relu(network, submod, args, kwargs, layer_name):
    """Convert a ReLU module / functional call into a TensorRT activation layer."""
    # args/kwargs should have already been normalized to kwargs
    assert len(args) == 0
    input_val = kwargs["input"]

    if not isinstance(input_val, trt.tensorrt.ITensor):
        raise RuntimeError(
            f"ReLU received input {input_val} that is not part "
            "of the TensorRT region!"
        )

    def activation_dyn_range_fn(dyn_range):
        # ReLU clamps negatives to zero, so clamp both range bounds as well.
        low, high = dyn_range
        return max(0, low), max(0, high)

    return common_activation(
        network,
        submod,
        input_val,
        trt.ActivationType.RELU,
        activation_dyn_range_fn,
        layer_name,
    )
@tensorrt_converter(torch.nn.modules.activation.Sigmoid)
def sigmoid(network, submod, args, kwargs, layer_name):
    """Convert a Sigmoid module into a TensorRT activation layer."""
    # args/kwargs should have already been normalized to kwargs
    assert len(args) == 0
    input_val = kwargs["input"]

    if not isinstance(input_val, trt.tensorrt.ITensor):
        raise RuntimeError(
            f"Sigmoid received input {input_val} that is not part "
            "of the TensorRT region!"
        )

    def activation_dyn_range_fn(dyn_range):
        # Map both ends of the input range through the logistic function.
        def sigmoid_fn(x):
            return 1 / (1 + np.exp(-x))

        low, high = dyn_range
        return sigmoid_fn(low), sigmoid_fn(high)

    return common_activation(
        network,
        submod,
        input_val,
        trt.ActivationType.SIGMOID,
        activation_dyn_range_fn,
        layer_name,
    )
| 938 |
685 | <gh_stars>100-1000
import re

# Strip the ASCII "word" characters ([A-Za-z0-9_]) from the string, keeping
# the CJK characters. The original used re.L (re.LOCALE), which raises
# "ValueError: cannot use LOCALE flag with a str pattern" on Python 3;
# re.A (re.ASCII) gives \w its intended byte-oriented semantics instead.
word = "jofwjoifA级哦啊接我金佛安fewfae慰剂serge"
p = re.compile(r'\w', re.A)
result = p.sub("", word)
print(result)
3,084 | //
// Copyright (c) Microsoft Corporation. All Rights Reserved.
//
#include <ndis.h>
#include <netiodef.h>
#include <intsafe.h>
#include <ntintsafe.h>
#include "SxBase.h"
#include "SxApi.h"
#include "SxLibrary.h"
| 98 |
2,661 | <filename>earth_enterprise/src/common/khBackTrace.h
/*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// khBackTrace Implementation
#ifndef COMMON_KHBACKTRACE__H__
#define COMMON_KHBACKTRACE__H__

// Fake dependency for forcing a rebuild based on the build command switch.
#include <enableInternalUseFlag.h>

// This class is only to be used on internal builds using the internal=1
// flag in scons.
#ifdef GEE_INTERNAL_USE_ONLY

// Print a backtrace of the current stack.
// Useful for debugging runtime errors.
void khBackTrace();

// Logs the enclosing function, file, line and a caller-supplied message to
// stderr. NOTE(review): this macro relies on fprintf but the header does not
// include <cstdio> itself -- confirm every user has it in scope.
#define TRACE_MESSAGE(message) \
  fprintf(stderr, "%s: %s : %d : %s\n", \
          __FUNCTION__ , __FILE__, __LINE__, message)

#endif

#endif  // COMMON_KHBACKTRACE__H__
| 390 |
414 | //
// RCDGroupConversationCell.h
// SealTalk
//
// Created by 张改红 on 2019/7/19.
// Copyright © 2019 RongCloud. All rights reserved.
//
#import <RongIMKit/RongIMKit.h>
/// Reuse identifier for RCDGroupConversationCell. Declared const so the
/// identifier cannot be reassigned at runtime; as a static, each translation
/// unit that imports this header gets its own private copy.
static NSString *_Nullable const RCDGroupConversationCellIdentifier = @"RCDGroupConversationCellIdentifier";

NS_ASSUME_NONNULL_BEGIN

/// Conversation-list cell used for group conversations.
@interface RCDGroupConversationCell : RCConversationCell

/// Dequeues (or creates) a reusable group-conversation cell for the table view.
+ (instancetype)cellWithTableView:(UITableView *)tableView;

@end

NS_ASSUME_NONNULL_END
| 171 |
420 | <reponame>MJochim/seahub
#!/usr/bin/env python
# encoding: utf-8
# Copyright (c) 2012-2016 Seafile Ltd.
import sqlite3
import os
import sys
# Usage: update.py <dbname> -- adds the indexes below to an existing database.
if len(sys.argv) != 2:
    print("usage: update.py <dbname>")
    sys.exit(-1)

if not os.access(sys.argv[1], os.F_OK):
    print(("%s does not exist" % sys.argv[1]))
    sys.exit(-1)

conn = sqlite3.connect(sys.argv[1])
c = conn.cursor()

# Create index
c.execute('''CREATE INDEX IF NOT EXISTS "group_groupmessage_425ae3c4" ON "group_groupmessage" ("group_id")''')
c.execute('''CREATE UNIQUE INDEX IF NOT EXISTS "contacts_contact_493fs4f1" ON "contacts_contact" ("user_email", "contact_email")''')

# Commit explicitly and close the connection so the DDL is guaranteed to be
# persisted regardless of the sqlite3 module's isolation-level behavior.
conn.commit()
c.close()
conn.close()
| 277 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.devtestlabs;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.policy.AddDatePolicy;
import com.azure.core.http.policy.BearerTokenAuthenticationPolicy;
import com.azure.core.http.policy.HttpLogOptions;
import com.azure.core.http.policy.HttpLoggingPolicy;
import com.azure.core.http.policy.HttpPipelinePolicy;
import com.azure.core.http.policy.HttpPolicyProviders;
import com.azure.core.http.policy.RequestIdPolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;
import com.azure.core.management.profile.AzureProfile;
import com.azure.core.util.Configuration;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.devtestlabs.fluent.DevTestLabsClient;
import com.azure.resourcemanager.devtestlabs.implementation.ArmTemplatesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.ArtifactSourcesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.ArtifactsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.CostsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.CustomImagesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.DevTestLabsClientBuilder;
import com.azure.resourcemanager.devtestlabs.implementation.DisksImpl;
import com.azure.resourcemanager.devtestlabs.implementation.EnvironmentsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.FormulasImpl;
import com.azure.resourcemanager.devtestlabs.implementation.GalleryImagesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.GlobalSchedulesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.LabsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.NotificationChannelsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.OperationsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.PoliciesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.PolicySetsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.ProviderOperationsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.SchedulesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.SecretsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.ServiceFabricSchedulesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.ServiceFabricsImpl;
import com.azure.resourcemanager.devtestlabs.implementation.ServiceRunnersImpl;
import com.azure.resourcemanager.devtestlabs.implementation.UsersImpl;
import com.azure.resourcemanager.devtestlabs.implementation.VirtualMachineSchedulesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.VirtualMachinesImpl;
import com.azure.resourcemanager.devtestlabs.implementation.VirtualNetworksImpl;
import com.azure.resourcemanager.devtestlabs.models.ArmTemplates;
import com.azure.resourcemanager.devtestlabs.models.ArtifactSources;
import com.azure.resourcemanager.devtestlabs.models.Artifacts;
import com.azure.resourcemanager.devtestlabs.models.Costs;
import com.azure.resourcemanager.devtestlabs.models.CustomImages;
import com.azure.resourcemanager.devtestlabs.models.Disks;
import com.azure.resourcemanager.devtestlabs.models.Environments;
import com.azure.resourcemanager.devtestlabs.models.Formulas;
import com.azure.resourcemanager.devtestlabs.models.GalleryImages;
import com.azure.resourcemanager.devtestlabs.models.GlobalSchedules;
import com.azure.resourcemanager.devtestlabs.models.Labs;
import com.azure.resourcemanager.devtestlabs.models.NotificationChannels;
import com.azure.resourcemanager.devtestlabs.models.Operations;
import com.azure.resourcemanager.devtestlabs.models.Policies;
import com.azure.resourcemanager.devtestlabs.models.PolicySets;
import com.azure.resourcemanager.devtestlabs.models.ProviderOperations;
import com.azure.resourcemanager.devtestlabs.models.Schedules;
import com.azure.resourcemanager.devtestlabs.models.Secrets;
import com.azure.resourcemanager.devtestlabs.models.ServiceFabricSchedules;
import com.azure.resourcemanager.devtestlabs.models.ServiceFabrics;
import com.azure.resourcemanager.devtestlabs.models.ServiceRunners;
import com.azure.resourcemanager.devtestlabs.models.Users;
import com.azure.resourcemanager.devtestlabs.models.VirtualMachineSchedules;
import com.azure.resourcemanager.devtestlabs.models.VirtualMachines;
import com.azure.resourcemanager.devtestlabs.models.VirtualNetworks;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/** Entry point to DevTestLabsManager. The DevTest Labs Client. */
public final class DevTestLabsManager {
    // Resource collection wrappers, created lazily on first access (see the
    // accessor methods below). Not thread-safe; callers are expected to use a
    // single manager instance from one thread at a time during setup.
    private ProviderOperations providerOperations;

    private Labs labs;

    private Operations operations;

    private GlobalSchedules globalSchedules;

    private ArtifactSources artifactSources;

    private ArmTemplates armTemplates;

    private Artifacts artifacts;

    private Costs costs;

    private CustomImages customImages;

    private Formulas formulas;

    private GalleryImages galleryImages;

    private NotificationChannels notificationChannels;

    private PolicySets policySets;

    private Policies policies;

    private Schedules schedules;

    private ServiceRunners serviceRunners;

    private Users users;

    private Disks disks;

    private Environments environments;

    private Secrets secrets;

    private ServiceFabrics serviceFabrics;

    private ServiceFabricSchedules serviceFabricSchedules;

    private VirtualMachines virtualMachines;

    private VirtualMachineSchedules virtualMachineSchedules;

    private VirtualNetworks virtualNetworks;

    // The auto-generated service client wrapped by this manager.
    private final DevTestLabsClient clientObject;

    /**
     * Builds the underlying auto-generated DevTestLabsClient from the given
     * pipeline and profile.
     *
     * @param httpPipeline the HTTP pipeline used for all requests; must not be null.
     * @param profile the Azure profile supplying endpoint and subscription; must not be null.
     * @param defaultPollInterval poll interval used for long-running operations.
     */
    private DevTestLabsManager(HttpPipeline httpPipeline, AzureProfile profile, Duration defaultPollInterval) {
        Objects.requireNonNull(httpPipeline, "'httpPipeline' cannot be null.");
        Objects.requireNonNull(profile, "'profile' cannot be null.");
        this.clientObject =
            new DevTestLabsClientBuilder()
                .pipeline(httpPipeline)
                .endpoint(profile.getEnvironment().getResourceManagerEndpoint())
                .subscriptionId(profile.getSubscriptionId())
                .defaultPollInterval(defaultPollInterval)
                .buildClient();
    }

    /**
     * Creates an instance of DevTestLabs service API entry point.
     *
     * @param credential the credential to use.
     * @param profile the Azure profile for client.
     * @return the DevTestLabs service API instance.
     */
    public static DevTestLabsManager authenticate(TokenCredential credential, AzureProfile profile) {
        Objects.requireNonNull(credential, "'credential' cannot be null.");
        Objects.requireNonNull(profile, "'profile' cannot be null.");
        return configure().authenticate(credential, profile);
    }

    /**
     * Gets a Configurable instance that can be used to create DevTestLabsManager with optional configuration.
     *
     * @return the Configurable instance allowing configurations.
     */
    public static Configurable configure() {
        return new DevTestLabsManager.Configurable();
    }

    /** The Configurable allowing configurations to be set. */
    public static final class Configurable {
        private final ClientLogger logger = new ClientLogger(Configurable.class);

        private HttpClient httpClient;
        private HttpLogOptions httpLogOptions;
        private final List<HttpPipelinePolicy> policies = new ArrayList<>();
        private RetryPolicy retryPolicy;
        private Duration defaultPollInterval;

        private Configurable() {
        }

        /**
         * Sets the http client.
         *
         * @param httpClient the HTTP client.
         * @return the configurable object itself.
         */
        public Configurable withHttpClient(HttpClient httpClient) {
            this.httpClient = Objects.requireNonNull(httpClient, "'httpClient' cannot be null.");
            return this;
        }

        /**
         * Sets the logging options to the HTTP pipeline.
         *
         * @param httpLogOptions the HTTP log options.
         * @return the configurable object itself.
         */
        public Configurable withLogOptions(HttpLogOptions httpLogOptions) {
            this.httpLogOptions = Objects.requireNonNull(httpLogOptions, "'httpLogOptions' cannot be null.");
            return this;
        }

        /**
         * Adds the pipeline policy to the HTTP pipeline.
         *
         * @param policy the HTTP pipeline policy.
         * @return the configurable object itself.
         */
        public Configurable withPolicy(HttpPipelinePolicy policy) {
            this.policies.add(Objects.requireNonNull(policy, "'policy' cannot be null."));
            return this;
        }

        /**
         * Sets the retry policy to the HTTP pipeline.
         *
         * @param retryPolicy the HTTP pipeline retry policy.
         * @return the configurable object itself.
         */
        public Configurable withRetryPolicy(RetryPolicy retryPolicy) {
            this.retryPolicy = Objects.requireNonNull(retryPolicy, "'retryPolicy' cannot be null.");
            return this;
        }

        /**
         * Sets the default poll interval, used when service does not provide "Retry-After" header.
         *
         * @param defaultPollInterval the default poll interval.
         * @return the configurable object itself.
         */
        public Configurable withDefaultPollInterval(Duration defaultPollInterval) {
            // BUGFIX: the messages below previously named the wrong parameters
            // ('retryPolicy' and 'httpPipeline' instead of 'defaultPollInterval').
            this.defaultPollInterval =
                Objects.requireNonNull(defaultPollInterval, "'defaultPollInterval' cannot be null.");
            if (this.defaultPollInterval.isNegative()) {
                throw logger
                    .logExceptionAsError(new IllegalArgumentException("'defaultPollInterval' cannot be negative"));
            }
            return this;
        }

        /**
         * Creates an instance of DevTestLabs service API entry point.
         *
         * @param credential the credential to use.
         * @param profile the Azure profile for client.
         * @return the DevTestLabs service API instance.
         */
        public DevTestLabsManager authenticate(TokenCredential credential, AzureProfile profile) {
            Objects.requireNonNull(credential, "'credential' cannot be null.");
            Objects.requireNonNull(profile, "'profile' cannot be null.");

            // User agent: azsdk-java-<package>/<version>, optionally with
            // platform details unless telemetry is disabled.
            StringBuilder userAgentBuilder = new StringBuilder();
            userAgentBuilder
                .append("azsdk-java")
                .append("-")
                .append("com.azure.resourcemanager.devtestlabs")
                .append("/")
                .append("1.0.0-beta.1");
            if (!Configuration.getGlobalConfiguration().get("AZURE_TELEMETRY_DISABLED", false)) {
                userAgentBuilder
                    .append(" (")
                    .append(Configuration.getGlobalConfiguration().get("java.version"))
                    .append("; ")
                    .append(Configuration.getGlobalConfiguration().get("os.name"))
                    .append("; ")
                    .append(Configuration.getGlobalConfiguration().get("os.version"))
                    .append("; auto-generated)");
            } else {
                userAgentBuilder.append(" (auto-generated)");
            }

            if (retryPolicy == null) {
                retryPolicy = new RetryPolicy("Retry-After", ChronoUnit.SECONDS);
            }
            // Assemble pipeline policies in the conventional azure-core order:
            // user agent / request id -> retry -> date -> auth -> custom -> logging.
            List<HttpPipelinePolicy> policies = new ArrayList<>();
            policies.add(new UserAgentPolicy(userAgentBuilder.toString()));
            policies.add(new RequestIdPolicy());
            HttpPolicyProviders.addBeforeRetryPolicies(policies);
            policies.add(retryPolicy);
            policies.add(new AddDatePolicy());
            policies
                .add(
                    new BearerTokenAuthenticationPolicy(
                        credential, profile.getEnvironment().getManagementEndpoint() + "/.default"));
            policies.addAll(this.policies);
            HttpPolicyProviders.addAfterRetryPolicies(policies);
            policies.add(new HttpLoggingPolicy(httpLogOptions));
            HttpPipeline httpPipeline =
                new HttpPipelineBuilder()
                    .httpClient(httpClient)
                    .policies(policies.toArray(new HttpPipelinePolicy[0]))
                    .build();
            return new DevTestLabsManager(httpPipeline, profile, defaultPollInterval);
        }
    }

    /** @return Resource collection API of ProviderOperations. */
    public ProviderOperations providerOperations() {
        if (this.providerOperations == null) {
            this.providerOperations = new ProviderOperationsImpl(clientObject.getProviderOperations(), this);
        }
        return providerOperations;
    }

    /** @return Resource collection API of Labs. */
    public Labs labs() {
        if (this.labs == null) {
            this.labs = new LabsImpl(clientObject.getLabs(), this);
        }
        return labs;
    }

    /** @return Resource collection API of Operations. */
    public Operations operations() {
        if (this.operations == null) {
            this.operations = new OperationsImpl(clientObject.getOperations(), this);
        }
        return operations;
    }

    /** @return Resource collection API of GlobalSchedules. */
    public GlobalSchedules globalSchedules() {
        if (this.globalSchedules == null) {
            this.globalSchedules = new GlobalSchedulesImpl(clientObject.getGlobalSchedules(), this);
        }
        return globalSchedules;
    }

    /** @return Resource collection API of ArtifactSources. */
    public ArtifactSources artifactSources() {
        if (this.artifactSources == null) {
            this.artifactSources = new ArtifactSourcesImpl(clientObject.getArtifactSources(), this);
        }
        return artifactSources;
    }

    /** @return Resource collection API of ArmTemplates. */
    public ArmTemplates armTemplates() {
        if (this.armTemplates == null) {
            this.armTemplates = new ArmTemplatesImpl(clientObject.getArmTemplates(), this);
        }
        return armTemplates;
    }

    /** @return Resource collection API of Artifacts. */
    public Artifacts artifacts() {
        if (this.artifacts == null) {
            this.artifacts = new ArtifactsImpl(clientObject.getArtifacts(), this);
        }
        return artifacts;
    }

    /** @return Resource collection API of Costs. */
    public Costs costs() {
        if (this.costs == null) {
            this.costs = new CostsImpl(clientObject.getCosts(), this);
        }
        return costs;
    }

    /** @return Resource collection API of CustomImages. */
    public CustomImages customImages() {
        if (this.customImages == null) {
            this.customImages = new CustomImagesImpl(clientObject.getCustomImages(), this);
        }
        return customImages;
    }

    /** @return Resource collection API of Formulas. */
    public Formulas formulas() {
        if (this.formulas == null) {
            this.formulas = new FormulasImpl(clientObject.getFormulas(), this);
        }
        return formulas;
    }

    /** @return Resource collection API of GalleryImages. */
    public GalleryImages galleryImages() {
        if (this.galleryImages == null) {
            this.galleryImages = new GalleryImagesImpl(clientObject.getGalleryImages(), this);
        }
        return galleryImages;
    }

    /** @return Resource collection API of NotificationChannels. */
    public NotificationChannels notificationChannels() {
        if (this.notificationChannels == null) {
            this.notificationChannels = new NotificationChannelsImpl(clientObject.getNotificationChannels(), this);
        }
        return notificationChannels;
    }

    /** @return Resource collection API of PolicySets. */
    public PolicySets policySets() {
        if (this.policySets == null) {
            this.policySets = new PolicySetsImpl(clientObject.getPolicySets(), this);
        }
        return policySets;
    }

    /** @return Resource collection API of Policies. */
    public Policies policies() {
        if (this.policies == null) {
            this.policies = new PoliciesImpl(clientObject.getPolicies(), this);
        }
        return policies;
    }

    /** @return Resource collection API of Schedules. */
    public Schedules schedules() {
        if (this.schedules == null) {
            this.schedules = new SchedulesImpl(clientObject.getSchedules(), this);
        }
        return schedules;
    }

    /** @return Resource collection API of ServiceRunners. */
    public ServiceRunners serviceRunners() {
        if (this.serviceRunners == null) {
            this.serviceRunners = new ServiceRunnersImpl(clientObject.getServiceRunners(), this);
        }
        return serviceRunners;
    }

    /** @return Resource collection API of Users. */
    public Users users() {
        if (this.users == null) {
            this.users = new UsersImpl(clientObject.getUsers(), this);
        }
        return users;
    }

    /** @return Resource collection API of Disks. */
    public Disks disks() {
        if (this.disks == null) {
            this.disks = new DisksImpl(clientObject.getDisks(), this);
        }
        return disks;
    }

    /** @return Resource collection API of Environments. */
    public Environments environments() {
        if (this.environments == null) {
            this.environments = new EnvironmentsImpl(clientObject.getEnvironments(), this);
        }
        return environments;
    }

    /** @return Resource collection API of Secrets. */
    public Secrets secrets() {
        if (this.secrets == null) {
            this.secrets = new SecretsImpl(clientObject.getSecrets(), this);
        }
        return secrets;
    }

    /** @return Resource collection API of ServiceFabrics. */
    public ServiceFabrics serviceFabrics() {
        if (this.serviceFabrics == null) {
            this.serviceFabrics = new ServiceFabricsImpl(clientObject.getServiceFabrics(), this);
        }
        return serviceFabrics;
    }

    /** @return Resource collection API of ServiceFabricSchedules. */
    public ServiceFabricSchedules serviceFabricSchedules() {
        if (this.serviceFabricSchedules == null) {
            this.serviceFabricSchedules =
                new ServiceFabricSchedulesImpl(clientObject.getServiceFabricSchedules(), this);
        }
        return serviceFabricSchedules;
    }

    /** @return Resource collection API of VirtualMachines. */
    public VirtualMachines virtualMachines() {
        if (this.virtualMachines == null) {
            this.virtualMachines = new VirtualMachinesImpl(clientObject.getVirtualMachines(), this);
        }
        return virtualMachines;
    }

    /** @return Resource collection API of VirtualMachineSchedules. */
    public VirtualMachineSchedules virtualMachineSchedules() {
        if (this.virtualMachineSchedules == null) {
            this.virtualMachineSchedules =
                new VirtualMachineSchedulesImpl(clientObject.getVirtualMachineSchedules(), this);
        }
        return virtualMachineSchedules;
    }

    /** @return Resource collection API of VirtualNetworks. */
    public VirtualNetworks virtualNetworks() {
        if (this.virtualNetworks == null) {
            this.virtualNetworks = new VirtualNetworksImpl(clientObject.getVirtualNetworks(), this);
        }
        return virtualNetworks;
    }

    /**
     * @return Wrapped service client DevTestLabsClient providing direct access to the underlying auto-generated API
     *     implementation, based on Azure REST API.
     */
    public DevTestLabsClient serviceClient() {
        return this.clientObject;
    }
}
| 7,484 |
640 |
/* Build-time configuration header: pins the z88dk version macro. */
#ifndef __CONFIG_Z88DK_H_
#define __CONFIG_Z88DK_H_
// Automatically Generated at Library Build Time
// Undefine first so the redefinition below cannot trigger a compiler warning.
#undef __Z88DK
#define __Z88DK 2100 /* z88dk version identifier */
#endif
| 75 |
14,668 | #!/usr/bin/env python3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs captured sites framework recording and tests.
$ tools/captured_sites/control.py [command] [arguments]
Commands:
chrome Starts a Chrome instance with autofill hooks
wpr Starts a WPR server instance to record or replay
run Starts a test for a single site or "*" for all sites
Use "captured_sites [command] -h" for more information about each command.',
This script attempts to simplify the various configuration and override options
that are available in creating and executing the Captured Sites Framework for
Autofill and Password Manager.
This script assumes execution location is the src folder of the chromium
checkout. Commands should be run from chromium/src directory.
Also assumes that built targets are in :
out/Default for is_debug = true
out/Release for is_debug = false
Some environment variables should be set in order to use this script to its
full potential.
CAPTURED_SITES_USER_DATA_DIR - a location to store local information about the
chromium profile. This allows the tester to pull back the address and credit
card profile information without restarting it each time.
CAPTURED_SITES_LOG_DATA_DIR - a location to store log data for easier parsing
after a test run has been completed.
Common tasks:
Recording a new test for site 'youtube' (requires two terminal windows):
Window 1$: tools/captured_sites/control.py wpr record youtube
Window 2$: tools/captured_sites/control.py chrome -w -r
Checking a recorded test for site 'youtube' (requires two terminal windows):
Window 1$ tools/captured_sites/control.py wpr replay youtube
Window 2$ tools/captured_sites/control.py chrome -w -r -u youtube
Running all 'sign_in_pass' tests and saving the logs:
$ tools/captured_sites/control.py run -s sign_in_pass *
Running disabled autofill test 'rei':
$ tools/captured_sites/control.py run -d rei
Running autofill test 'rei' with ability to pause at each step:
$ tools/captured_sites/control.py run -q path/to/pipe rei
"""
from __future__ import print_function
import argparse
import json
import os
import signal
import subprocess
import sys
import time
# Checking for environment variables.
_HOME_DIR = os.environ['HOME']
_DEFAULT_USER_DATA_DIR = os.path.join(_HOME_DIR, 'data/userdir')
_DEFAULT_LOG_DATA_DIR = os.path.join(_HOME_DIR, 'data/local_test_results')

# Chrome profile directory for the spawned browser; overridable via
# $CAPTURED_SITES_USER_DATA_DIR (see module docstring).
if 'CAPTURED_SITES_USER_DATA_DIR' in os.environ:
  _USER_DATA_DIR_PATH = os.environ['CAPTURED_SITES_USER_DATA_DIR']
else:
  _USER_DATA_DIR_PATH = _DEFAULT_USER_DATA_DIR

# Destination for logs/test output when running with --store-log; overridable
# via $CAPTURED_SITES_LOG_DATA_DIR.
if 'CAPTURED_SITES_LOG_DATA_DIR' in os.environ:
  _LOG_DATA_DIR_PATH = os.environ['CAPTURED_SITES_LOG_DATA_DIR']
else:
  _LOG_DATA_DIR_PATH = _DEFAULT_LOG_DATA_DIR

# Long text chunks that will be used in command constructions.
# Extra-verbose --vmodule categories used by `run -v`.
_EXTRA_BROWSER_AUTOFILL = ('autofill_download_manager=1,form_cache=1,'
                           'autofill_agent=1,autofill_handler=1,'
                           'form_structure=1,cache_replayer=2')
# Scripts WPR injects into every replayed page (determinism + automation hooks).
_WPR_INJECT_SCRIPTS = ('--inject_scripts=third_party/catapult/web_page_replay_g'
                       'o/deterministic.js,chrome/test/data/web_page_replay_go_'
                       'helper_scripts/automation_helper.js')
_NORMAL_BROWSER_AUTOFILL = 'cache_replayer=1'
_RUN_BACKGROUND = 'testing/xvfb.py'
_RUN_DISABLED_TESTS = '--gtest_also_run_disabled_tests'
# gtest filter prefixes and --vmodule file names for the two test suites.
_AUTOFILL_TEST = '*/AutofillCapturedSitesInteractiveTest'
_PASSWORD_MANAGER_TEST = '*/CapturedSitesPasswordManagerBrowserTest'
_VMODULE_AUTOFILL_FILE = 'autofill_captured_sites_interactive_uitest'
_VMODULE_PASSWORD_FILE = 'password_manager_captured_sites_interactive_uitest'
# Chrome binaries: stable system install vs. local Release build.
_STABLE_GOOGLE_CHROME = '/usr/bin/google-chrome'
_RELEASE_BUILD_CHROME = 'out/Release/chrome'
# Routes all of Chrome's HTTP/HTTPS traffic to the local WPR ports.
_HOOK_CHROME_TO_WPR = ('--host-resolver-rules="MAP *:80 127.0.0.1:8080,'
                       'MAP *:443 127.0.0.1:8081,EXCLUDE localhost"')
# Maps both long names and single-letter shortcuts to the canonical
# --autofill-server-type value.
_AUTOFILL_CACHE_TYPE_LOOKUP = {
    'SavedCache': 'SavedCache',
    'ProductionServer': 'ProductionServer',
    'OnlyLocalHeuristics': 'OnlyLocalHeuristics',
    'c': 'SavedCache',
    'p': 'ProductionServer',
    'n': 'OnlyLocalHeuristics'
}
class Command():
  """Bundles a sub-command's description, argument builders and launcher."""

  def __init__(self, description, arg_builders, launch_method):
    self.description = description
    self.arg_builders = arg_builders
    self.launch_method = launch_method

  def build_and_execute(self, args):
    """Parses |args| with all registered builders, then invokes the launcher.

    Unknown arguments are not an error; they are forwarded to the launcher.
    """
    command_parser = argparse.ArgumentParser(description=self.description)
    for build_args in self.arg_builders:
      build_args(command_parser)
    known, passthrough = command_parser.parse_known_args(args)
    self.launch_method(known, passthrough)
def _add_chrome_args(parser):
  """Registers the flags understood by the 'chrome' sub-command."""
  # -r swaps the stable system Chrome for the locally built Release binary.
  parser.add_argument('-r',
                      '--release',
                      dest='build_target',
                      action='store_const',
                      const=_RELEASE_BUILD_CHROME,
                      default=_STABLE_GOOGLE_CHROME,
                      help='Start Release build of chrome.')
  # -w points the browser's host resolver at the local WPR ports.
  parser.add_argument('-w',
                      '--wpr',
                      dest='wpr_selection',
                      action='store_true',
                      help='Point chrome instance at wpr service.')
  # -u prints the startingURL of the named recipe before launching.
  parser.add_argument('-u',
                      '--url',
                      dest='start_url',
                      action='store',
                      help='Grab starting URL from test recipe.')
def _add_wpr_args(parser):
parser.add_argument('subhead',
choices=['record', 'replay'],
help=('Whether to record new traffic to an archive, '
'or replay from an existing archive.'))
def _add_run_args(parser):
  """Registers the flags understood by the 'run' sub-command."""
  parser.add_argument('-r',
                      '--release',
                      dest='target',
                      action='store_const',
                      default='Default',
                      const='Release',
                      help='Run tests on Release build of chrome.')
  parser.add_argument('-s',
                      '--store-log',
                      dest='store_log',
                      action='store_true',
                      help='Store the log and output in _LOG_DATA_DIR_PATH.')
  parser.add_argument('-b',
                      '--background',
                      dest='background',
                      action='store_true',
                      help='Run the test in background with xvfb.py.')
  parser.add_argument('-d',
                      '--disabled',
                      dest='add_disabled',
                      action='store_true',
                      help='Also run disabled tests that match the filter.')
  parser.add_argument('-v',
                      '--verbose',
                      dest='verbose_logging',
                      action='store_const',
                      default=_NORMAL_BROWSER_AUTOFILL,
                      const=_EXTRA_BROWSER_AUTOFILL,
                      help='Log verbose Autofill Server information.')
  parser.add_argument('-t',
                      '--test-retry',
                      dest='retry_count',
                      action='store',
                      default=0,
                      type=int,
                      help='How many times to retry failed tests.')
  parser.add_argument('-a',
                      '--autofill-cache-type',
                      dest='autofill_cache_type',
                      choices=_AUTOFILL_CACHE_TYPE_LOOKUP.keys(),
                      action='store',
                      help='Control the autofill cache behavior.')
  # BUGFIX: help text previously read 'Location of "pipe: file' (unbalanced
  # quote / typo).
  parser.add_argument('-q',
                      '--command_file',
                      dest='command_file',
                      action='store',
                      default='',
                      type=str,
                      help='Location of "pipe" file')
def _add_shared_args(parser):
parser.add_argument('-p',
'--print-only',
dest='print_only',
action='store_true',
help='Build the command and print it but do not execute.')
def _add_scenario_site_args(parser):
parser.add_argument('scenario_dir',
nargs='?',
default='',
choices=[
'sign_in_pass', 'sign_up_pass', 'sign_up_fill',
'capture_update_pass', '*', ''
],
help=('Only for password tests to designate the specific '
'test scenario. Use * to indicate all password test'
' scenarios.'))
parser.add_argument('site_name',
help=('The site name which should have a match in '
'testcases.json. Use * to indicate all enumerated '
'sites in that file.'))
def _parse_command_args(command_names):
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter)
parser.usage = __doc__
parser.add_argument('name', choices=command_names)
parser.add_argument('args', nargs=argparse.REMAINDER)
return parser.parse_args()
def _make_process_call(command_args, print_only):
command_text = ' '.join(command_args)
print(command_text)
if print_only:
return
if not os.path.exists(command_args[0]):
raise EnvironmentError('Cannot locate binary to execute. '
'Ensure that working directory is chromium/src')
subprocess.call(command_text, shell=True)
def _print_starting_url(url):
password_path = 'chrome/test/data/password/captured_sites/%s.test'
autofill_path = 'chrome/test/data/autofill/captured_sites/%s.test'
if '-' in url:
path = password_path % url.replace('-', '/')
else:
path = autofill_path % url
if not os.path.exists(path):
print('No file found for "%s"' % url, file=sys.stderr)
return
with open(path, 'r') as read_file:
data = json.load(read_file)
if not 'startingURL' in data:
print('No startingURL found in file for "%s"' % url, file=sys.stderr)
return
print('%s test starts at:' % url, file=sys.stderr)
print(data['startingURL'])
print('')
def _launch_chrome(options, forward_args):
  """Builds and runs the Chrome command line for the 'chrome' sub-command.

  Requires _USER_DATA_DIR_PATH to exist; any unrecognized CLI arguments in
  forward_args are appended verbatim to the Chrome invocation.
  """
  if options.start_url:
    _print_starting_url(options.start_url)
  if not os.path.isdir(_USER_DATA_DIR_PATH):
    print('Required CAPTURED_SITES_USER_DATA_DIR "%s" cannot be found' %
          _USER_DATA_DIR_PATH)
    raise ValueError('Must set environment variable $CAPTURED_SITES_USER_DATA_D'
                     'IR or ensure default _USER_DATA_DIR_PATH exists')
  command_args = [
      options.build_target,
      # The spki-list flag makes Chrome trust the WPR certificate.
      '--ignore-certificate-errors-spki-list='
      'PoNnQAwghMiLUPg1YNFtvTfGreNT8r9oeLEyzgNCJWc=',
      '--user-data-dir="%s"' % _USER_DATA_DIR_PATH,
      '--disable-application-cache', '--show-autofill-signatures',
      '--enable-features=AutofillShowTypePredictions',
      '--disable-features=AutofillCacheQueryResponses'
  ]
  # Optionally route all browser traffic through the local WPR ports.
  if options.wpr_selection:
    command_args.append(_HOOK_CHROME_TO_WPR)
  _make_process_call(command_args + forward_args, options.print_only)
def _launch_wpr(options, forward_args):
  """Builds and runs the WPR command line for the 'wpr' sub-command.

  options.subhead selects record vs replay; the archive path is chosen from
  scenario_dir/site_name (autofill archives when scenario_dir is empty,
  password archives otherwise).
  """
  command_args = [
      'third_party/catapult/telemetry/telemetry/bin/linux/x86_64/wpr',
      options.subhead,
      # Cert/key pair matching the spki hash Chrome is told to trust.
      '--https_cert_file=components/test/data/autofill/'
      'web_page_replay_support_files/wpr_cert.pem',
      '--https_key_file=components/test/data/autofill/'
      'web_page_replay_support_files/wpr_key.pem', '--http_port=8080',
      '--https_port=8081', _WPR_INJECT_SCRIPTS
  ]
  if options.subhead == 'replay':
    command_args.append('--serve_response_in_chronological_sequence')
  # Empty scenario_dir means an autofill archive; otherwise a password one.
  if options.scenario_dir == '':
    command_args.append('chrome/test/data/autofill/captured_sites/%s.wpr' %
                        options.site_name)
  else:
    command_args.append('chrome/test/data/password/captured_sites/%s/%s.wpr' %
                        (options.scenario_dir, options.site_name))
  _make_process_call(command_args + forward_args, options.print_only)
def _launch_run(options, forward_args):
  """Builds and runs the gtest command line for the 'run' sub-command."""
  # Default to the autofill suite; switch to the password-manager suite when a
  # scenario directory was given.
  gtest_filter = _AUTOFILL_TEST
  gtest_parameter = options.site_name
  vmodule_name = _VMODULE_AUTOFILL_FILE
  if options.scenario_dir != '':
    gtest_filter = _PASSWORD_MANAGER_TEST
    gtest_parameter = '%s_%s' % (options.scenario_dir, options.site_name)
    vmodule_name = _VMODULE_PASSWORD_FILE
  command_args = [
      'out/%s/captured_sites_interactive_tests' % options.target,
      '--gtest_filter="%s.Recipe/%s"' % (gtest_filter, gtest_parameter),
      '--test-launcher-interactive', '--enable-pixel-output-in-tests',
      '--vmodule=captured_sites_test_utils=2,%s,%s=1' %
      (options.verbose_logging, vmodule_name)
  ]
  if options.background:
    # xvfb wrapper must come first so it wraps the test binary.
    command_args.insert(0, _RUN_BACKGROUND)
  if options.add_disabled:
    command_args.append(_RUN_DISABLED_TESTS)
  if options.retry_count > 0:
    command_args.append('--test-launcher-retry-limit=%d' % options.retry_count)
  if options.autofill_cache_type:
    full_cache_type = _AUTOFILL_CACHE_TYPE_LOOKUP[options.autofill_cache_type]
    command_args.append('--autofill-server-type=%s ' % full_cache_type)
  if options.command_file:
    command_args.append('--command_file=%s' % options.command_file)
  if options.store_log:
    if not os.path.isdir(_LOG_DATA_DIR_PATH):
      print('Required LOG_DATA_DIR "%s" cannot be found' % _LOG_DATA_DIR_PATH)
      raise ValueError('Must set environment variable $LOG_DATA_DIR or '
                       'ensure default _LOG_DATA_DIR_PATH exists')
    # '*' is not filesystem-friendly, so log file names use 'all' instead.
    logging_scenario_site_param = gtest_parameter.replace('*', 'all')
    command_args.append(
        '--test-launcher-summary-output={}/{}_output.json'.format(
            _LOG_DATA_DIR_PATH, logging_scenario_site_param))
    # NOTE(review): forward_args and the tee redirection appear to apply only
    # when --store-log is set (logging_scenario_site_param is only defined in
    # this branch) -- confirm against the original indentation.
    command_args.extend(forward_args)
    command_args.append('2>&1 | tee {}/{}_capture.log'.format(
        _LOG_DATA_DIR_PATH, logging_scenario_site_param))
  _make_process_call(command_args, options.print_only)
def _handle_signal(sig, _):
"""Handles received signals to make sure spawned test process are killed.
sig (int): An integer representing the received signal, for example SIGTERM.
"""
# Don't do any cleanup here, instead, leave it to the finally blocks.
# Assumption is based on https://docs.python.org/3/library/sys.html#sys.exit:
# cleanup actions specified by finally clauses of try statements are honored.
# https://tldp.org/LDP/abs/html/exitcodes.html:
# Exit code 128+n -> Fatal error signal "n".
print('Signal to quit received, waiting for potential WPR write to complete')
time.sleep(1)
sys.exit(128 + sig)
def main():
  """Installs signal handlers, then dispatches to the selected sub-command."""
  for sig in (signal.SIGTERM, signal.SIGINT):
    signal.signal(sig, _handle_signal)
  # Each command pairs a description and argument builders with a launcher.
  all_commands = {
      'chrome':
          Command('Start a Chrome instance with autofill hooks.',
                  [_add_chrome_args, _add_shared_args], _launch_chrome),
      'wpr':
          Command('Start WPR to replay or record.',
                  [_add_wpr_args, _add_shared_args, _add_scenario_site_args],
                  _launch_wpr),
      'run':
          Command('Start an autofill or password test run.',
                  [_add_run_args, _add_shared_args, _add_scenario_site_args],
                  _launch_run)
  }
  options = _parse_command_args(all_commands.keys())
  all_commands[options.name].build_and_execute(options.args)
# Standard script entry point; keeps the module importable without side effects.
if __name__ == '__main__':
  sys.exit(main())
| 6,805 |
841 | <gh_stars>100-1000
package org.jboss.resteasy.test.resource.param.resource;
import org.jboss.resteasy.test.resource.param.UriParamAsPrimitiveTest;
import org.junit.Assert;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
@Path("/long/wrapper/{arg}")
public class UriParamAsPrimitiveResourceUriLongWrapper {

    // The test drives this endpoint with Long.MAX_VALUE in the path segment.
    private static final long EXPECTED_VALUE = 9223372036854775807L;

    /**
     * Asserts that the boxed Long path parameter arrived intact and returns a
     * fixed body for the client-side check.
     */
    @GET
    public String doGet(@PathParam("arg") Long v) {
        Assert.assertEquals(UriParamAsPrimitiveTest.ERROR_CODE, EXPECTED_VALUE, v.longValue());
        return "content";
    }
}
| 210 |
1,439 | /*******************************************************************************
* Copyright 2018 <NAME> http://galenframework.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.galenframework.tests.validation;
import com.galenframework.page.PageElement;
import com.galenframework.page.Rect;
import com.galenframework.specs.SpecCentered;
import com.galenframework.validation.ValidationObject;
import org.testng.annotations.DataProvider;
import java.util.HashMap;
public class CenteredValidationTest extends ValidationTestBase {
@DataProvider
@Override
public Object[][] provideGoodSamples() {
return new Object[][]{
// Centered Inside
{specCenteredInside("container", SpecCentered.Alignment.ALL).withErrorRate(2), page(new HashMap<String, PageElement>(){{
put("object", element(10, 10, 80, 80));
put("container", element(0, 0, 100, 100));
}})},
{specCenteredInside("container", SpecCentered.Alignment.ALL).withErrorRate(2), page(new HashMap<String, PageElement>(){{
put("object", element(10, 10, 81, 81));
put("container", element(0, 0, 100, 100));
}})},
{specCenteredInside("container", SpecCentered.Alignment.ALL).withErrorRate(2), page(new HashMap<String, PageElement>(){{
put("object", element(9, 9, 80, 80));
put("container", element(0, 0, 100, 100));
}})},
{specCenteredInside("container", SpecCentered.Alignment.HORIZONTALLY).withErrorRate(2), page(new HashMap<String, PageElement>(){{
put("object", element(10, 10, 80, 20));
put("container", element(0, 0, 100, 100));
}})},
{specCenteredInside("container", SpecCentered.Alignment.HORIZONTALLY, 30), page(new HashMap<String, PageElement>(){{
put("object", element(60, 10, 50, 20));
put("container", element(0, 0, 200, 200));
}})},
{specCenteredInside("container", SpecCentered.Alignment.HORIZONTALLY, 30), page(new HashMap<String, PageElement>(){{
put("object", element(10, 10, 80, 20));
put("container", element(0, 0, 100, 200));
}})},
{specCenteredInside("container", SpecCentered.Alignment.VERTICALLY).withErrorRate(2), page(new HashMap<String, PageElement>(){{
put("object", element(10, 10, 20, 80));
put("container", element(0, 0, 100, 100));
}})},
// Centered on
{specCenteredOn("button", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(80, 80, 90, 90));
put("button", element(100, 100, 50, 50));
}})},
{specCenteredOn("button", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(81, 81, 90, 90));
put("button", element(100, 100, 50, 50));
}})},
{specCenteredOn("button", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(80, 80, 89, 91));
put("button", element(100, 100, 50, 50));
}})},
{specCenteredOn("button", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(80, 80, 90, 90));
put("button", element(100, 100, 50, 50));
}})},
{specCenteredOn("button", SpecCentered.Alignment.VERTICALLY), page(new HashMap<String, PageElement>(){{
put("object", element(80, 80, 10, 90));
put("button", element(100, 100, 50, 50));
}})},
{specCenteredOn("button", SpecCentered.Alignment.HORIZONTALLY), page(new HashMap<String, PageElement>(){{
put("object", element(80, 80, 90, 10));
put("button", element(100, 100, 50, 50));
}})},
};
}
@DataProvider
@Override
public Object[][] provideBadSamples() {
return new Object[][]{
// Centered
{validationResult(NO_AREA, messages("\"object\" is not visible on page"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", invisibleElement(10, 40, 10, 10));
put("container", element(10, 60, 10, 10));
}})},
{validationResult(NO_AREA, messages("\"object\" is absent on page"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", absentElement(10, 40, 10, 10));
put("container", element(10, 60, 10, 10));
}})},
{validationResult(NO_AREA, messages("\"container\" is absent on page"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(10, 40, 10, 10));
put("container", absentElement(10, 60, 10, 10));
}})},
{validationResult(NO_AREA, messages("\"container\" is not visible on page"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(10, 40, 10, 10));
put("container", invisibleElement(10, 60, 10, 10));
}})},
{validationResult(areas(new ValidationObject(new Rect(20, 20, 80, 60), "object"), new ValidationObject(new Rect(0, 0, 100, 100), "container")),
messages("\"object\" is not centered horizontally inside \"container\". Offset is 20px"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(20, 20, 80, 60));
put("container", element(0, 0, 100, 100));
}})},
{validationResult(areas(new ValidationObject(new Rect(20, 20, 75, 60), "object"), new ValidationObject(new Rect(0, 0, 100, 100), "container")),
messages("\"object\" is not centered horizontally inside \"container\". Offset is 15px"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.HORIZONTALLY, 10), page(new HashMap<String, PageElement>(){{
put("object", element(20, 20, 75, 60));
put("container", element(0, 0, 100, 100));
}})},
{validationResult(areas(new ValidationObject(new Rect(0, 20, 120, 60), "object"), new ValidationObject(new Rect(10, 10, 100, 100), "container")),
messages("\"object\" is centered but not horizontally inside \"container\""), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.ALL), page(new HashMap<String, PageElement>(){{
put("object", element(0, 20, 120, 60));
put("container", element(10, 10, 100, 100));
}})},
{validationResult(areas(new ValidationObject(new Rect(20, 10, 100, 60), "object"), new ValidationObject(new Rect(10, 10, 100, 100), "container")),
messages("\"object\" is not centered vertically inside \"container\". Offset is 40px"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.VERTICALLY), page(new HashMap<String, PageElement>(){{
put("object", element(20, 10, 100, 60));
put("container", element(10, 10, 100, 100));
}})},
{validationResult(areas(new ValidationObject(new Rect(20, 10, 10, 60), "object"), new ValidationObject(new Rect(10, 10, 100, 100), "container")),
messages("\"object\" is not centered horizontally inside \"container\". Offset is 70px"), NULL_META),
specCenteredInside("container", SpecCentered.Alignment.HORIZONTALLY), page(new HashMap<String, PageElement>(){{
put("object", element(20, 10, 10, 60));
put("container", element(10, 10, 100, 100));
}})},
{validationResult(areas(new ValidationObject(new Rect(20, 10, 10, 60), "object"), new ValidationObject(new Rect(10, 10, 100, 100), "container")),
messages("\"object\" is not centered vertically on \"container\". Offset is 40px"), NULL_META),
specCenteredOn("container", SpecCentered.Alignment.VERTICALLY), page(new HashMap<String, PageElement>(){{
put("object", element(20, 10, 10, 60));
put("container", element(10, 10, 100, 100));
}})},
{validationResult(areas(new ValidationObject(new Rect(20, 10, 10, 60), "object"), new ValidationObject(new Rect(10, 10, 100, 100), "container")),
messages("\"object\" is not centered horizontally on \"container\". Offset is 70px"), NULL_META),
specCenteredOn("container", SpecCentered.Alignment.HORIZONTALLY), page(new HashMap<String, PageElement>(){{
put("object", element(20, 10, 10, 60));
put("container", element(10, 10, 100, 100));
}})}
};
}
// Builds an "on" centering spec with a fixed 2px error-rate tolerance.
private SpecCentered specCenteredOn(String object, SpecCentered.Alignment alignment) {
    SpecCentered spec = new SpecCentered(object, alignment, SpecCentered.Location.ON);
    return spec.withErrorRate(2);
}
// Builds an "inside" centering spec with the default error rate.
private SpecCentered specCenteredInside(String object, SpecCentered.Alignment alignment) {
    return new SpecCentered(object, alignment, SpecCentered.Location.INSIDE);
}
// Builds an "inside" centering spec with an explicit error-rate tolerance.
private SpecCentered specCenteredInside(String object, SpecCentered.Alignment alignment, int errorRate) {
    SpecCentered spec = new SpecCentered(object, alignment, SpecCentered.Location.INSIDE);
    return spec.withErrorRate(errorRate);
}
}
| 4,518 |
2,644 | <reponame>CunningLearner/lk<filename>top/include/lk/list.h
/*
* Copyright (c) 2008 <NAME>
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT
*/
#pragma once
#include <lk/compiler.h>
#include <stdbool.h>
#include <stddef.h>
__BEGIN_CDECLS
/* Recover a pointer to the enclosing structure from a pointer to one of its
 * members (the usual offsetof-based container_of idiom). */
#define containerof(ptr, type, member) \
    ((type *)((addr_t)(ptr) - offsetof(type, member)))
/* Doubly-linked circular list node. The same structure doubles as the list
 * header: an empty list is a header whose prev/next point at itself. */
struct list_node {
    struct list_node *prev;
    struct list_node *next;
};
/* Static initializer for a list header: both links point at the header itself. */
#define LIST_INITIAL_VALUE(list) { &(list), &(list) }
/* Static initializer for a node that is not (yet) a member of any list. */
#define LIST_INITIAL_CLEARED_VALUE { NULL, NULL }
/* Initialize an empty list: header's prev and next both point at itself. */
static inline void list_initialize(struct list_node *list) {
    list->next = list;
    list->prev = list;
}
/* Mark a node as detached (not a member of any list); see list_in_list(). */
static inline void list_clear_node(struct list_node *item) {
    item->next = NULL;
    item->prev = NULL;
}
/* Return true if the node is currently linked into a list, i.e. its links
 * are not in the cleared (both-NULL) state set by list_clear_node(). */
static inline bool list_in_list(struct list_node *item) {
    return !(item->prev == NULL && item->next == NULL);
}
/* Insert |item| immediately after the list header (new head of the list). */
static inline void list_add_head(struct list_node *list, struct list_node *item) {
    item->next = list->next;
    item->prev = list;
    list->next->prev = item;
    list->next = item;
}
/* Inserting after an arbitrary entry is the same pointer surgery as adding at
 * the head of the sub-list that starts at that entry. */
#define list_add_after(entry, new_entry) list_add_head(entry, new_entry)
/* Insert |item| immediately before the list header (new tail of the list). */
static inline void list_add_tail(struct list_node *list, struct list_node *item) {
    item->prev = list->prev;
    item->next = list;
    list->prev->next = item;
    list->prev = item;
}
/* Inserting before an arbitrary entry is the same pointer surgery as adding at
 * the tail of the sub-list that ends at that entry. */
#define list_add_before(entry, new_entry) list_add_tail(entry, new_entry)
/* Unlink |item| from whatever list it is on and clear its links, so that
 * list_in_list() reports false for it afterwards. Must only be called on a
 * node that is actually linked (its prev/next are dereferenced). */
static inline void list_delete(struct list_node *item) {
    item->next->prev = item->prev;
    item->prev->next = item->next;
    item->prev = item->next = 0;
}
/* Unlink and return the first node of the list, or NULL if the list is empty. */
static inline struct list_node *list_remove_head(struct list_node *list) {
    struct list_node *head = list->next;
    if (head == list)
        return NULL;
    list_delete(head);
    return head;
}
/* Like list_remove_head(), but evaluates to a pointer to the containing
 * structure (recovered via containerof), or (type *)0 if the list is empty. */
#define list_remove_head_type(list, type, element) ({\
    struct list_node *__nod = list_remove_head(list);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Unlink and return the last node of the list, or NULL if the list is empty. */
static inline struct list_node *list_remove_tail(struct list_node *list) {
    struct list_node *tail = list->prev;
    if (tail == list)
        return NULL;
    list_delete(tail);
    return tail;
}
/* Like list_remove_tail(), but evaluates to a pointer to the containing
 * structure, or (type *)0 if the list is empty. */
#define list_remove_tail_type(list, type, element) ({\
    struct list_node *__nod = list_remove_tail(list);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Return the first node without removing it, or NULL if the list is empty. */
static inline struct list_node *list_peek_head(struct list_node *list) {
    return (list->next == list) ? NULL : list->next;
}
/* Like list_peek_head(), but evaluates to a pointer to the containing
 * structure, or (type *)0 if the list is empty. */
#define list_peek_head_type(list, type, element) ({\
    struct list_node *__nod = list_peek_head(list);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Return the last node without removing it, or NULL if the list is empty. */
static inline struct list_node *list_peek_tail(struct list_node *list) {
    return (list->prev == list) ? NULL : list->prev;
}
/* Like list_peek_tail(), but evaluates to a pointer to the containing
 * structure, or (type *)0 if the list is empty. */
#define list_peek_tail_type(list, type, element) ({\
    struct list_node *__nod = list_peek_tail(list);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Return the node before |item|, or NULL if |item| is the first node. */
static inline struct list_node *list_prev(struct list_node *list, struct list_node *item) {
    return (item->prev == list) ? NULL : item->prev;
}
/* Like list_prev(), but evaluates to a pointer to the containing structure,
 * or (type *)0 if |item| is the first node. */
#define list_prev_type(list, item, type, element) ({\
    struct list_node *__nod = list_prev(list, item);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Return the node before |item|, wrapping past the list header to the tail
 * when |item| is the head. Note: the wrap skips only the header, so for a
 * single-element list this returns |item| itself rather than NULL; NULL is
 * returned only when no non-header predecessor exists. */
static inline struct list_node *list_prev_wrap(struct list_node *list, struct list_node *item) {
    if (item->prev != list)
        return item->prev;
    else if (item->prev->prev != list)
        return item->prev->prev;
    else
        return NULL;
}
/* Like list_prev_wrap(), but evaluates to a pointer to the containing
 * structure, or (type *)0 when there is no predecessor. */
#define list_prev_wrap_type(list, item, type, element) ({\
    struct list_node *__nod = list_prev_wrap(list, item);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Return the node after |item|, or NULL if |item| is the last node. */
static inline struct list_node *list_next(struct list_node *list, struct list_node *item) {
    return (item->next == list) ? NULL : item->next;
}
/* Like list_next(), but evaluates to a pointer to the containing structure,
 * or (type *)0 if |item| is the last node. */
#define list_next_type(list, item, type, element) ({\
    struct list_node *__nod = list_next(list, item);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
/* Return the node after |item|, wrapping past the list header to the head
 * when |item| is the tail. Mirror of list_prev_wrap(): for a single-element
 * list this returns |item| itself; NULL only when no non-header successor
 * exists. */
static inline struct list_node *list_next_wrap(struct list_node *list, struct list_node *item) {
    if (item->next != list)
        return item->next;
    else if (item->next->next != list)
        return item->next->next;
    else
        return NULL;
}
/* Like list_next_wrap(), but evaluates to a pointer to the containing
 * structure, or (type *)0 when there is no successor. */
#define list_next_wrap_type(list, item, type, element) ({\
    struct list_node *__nod = list_next_wrap(list, item);\
    type *__t;\
    if(__nod)\
        __t = containerof(__nod, type, element);\
    else\
        __t = (type *)0;\
    __t;\
    })
// Iterate over every node in the list; |node| must be a struct list_node *.
// The header itself is not visited.
#define list_for_every(list, node) \
    for(node = (list)->next; node != (list); node = node->next)
// Same as list_for_every() but safe against deletion of the current node:
// the successor is latched in |temp_node| before the body runs.
// |node| and |temp_node| should be struct list_node *.
#define list_for_every_safe(list, node, temp_node) \
    for(node = (list)->next, temp_node = (node)->next;\
    node != (list);\
    node = temp_node, temp_node = (node)->next)
// Iterate over the list by containing entry; |entry| is a type * and |member|
// is the name of the embedded struct list_node field inside that type.
#define list_for_every_entry(list, entry, type, member) \
    for((entry) = containerof((list)->next, type, member);\
    &(entry)->member != (list);\
    (entry) = containerof((entry)->member.next, type, member))
// Entry-based iteration that is safe against deletion of the current entry;
// |entry| and |temp_entry| should be the containing structure type *.
#define list_for_every_entry_safe(list, entry, temp_entry, type, member) \
    for(entry = containerof((list)->next, type, member),\
    temp_entry = containerof((entry)->member.next, type, member);\
    &(entry)->member != (list);\
    entry = temp_entry, temp_entry = containerof((temp_entry)->member.next, type, member))
/* Return true if the list contains no nodes (header points at itself). */
static inline bool list_is_empty(struct list_node *list) {
    return list->next == list;
}
/* Count the nodes in the list by walking it; O(n) in the list length. */
static inline size_t list_length(struct list_node *list) {
    struct list_node *entry;
    size_t count = 0;
    list_for_every(list, entry) {
        count++;
    }
    return count;
}
__END_CDECLS
| 2,857 |
6,304 | <reponame>rhencke/engine<filename>src/third_party/skia/src/gpu/GrDriverBugWorkarounds.cpp
/*
* Copyright 2018 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "include/gpu/GrDriverBugWorkarounds.h"
#include "include/core/SkTypes.h"
GrDriverBugWorkarounds::GrDriverBugWorkarounds() = default;
// Builds a workaround set from a list of workaround ids: each recognized id
// sets the correspondingly named bool member (generated via the
// GPU_DRIVER_BUG_WORKAROUNDS X-macro) to true. Unknown ids abort.
GrDriverBugWorkarounds::GrDriverBugWorkarounds(
        const std::vector<int>& enabled_driver_bug_workarounds) {
    for (auto id : enabled_driver_bug_workarounds) {
        switch (id) {
#define GPU_OP(type, name) \
    case GrDriverBugWorkaroundType::type: \
        name = true; \
        break;
            GPU_DRIVER_BUG_WORKAROUNDS(GPU_OP)
#undef GPU_OP
            default:
                SK_ABORT("Not implemented");
                break;
        }
    }
}
// ORs every workaround flag from |workarounds| into this set, i.e. a
// workaround enabled on either side stays enabled. Flags are never cleared.
void GrDriverBugWorkarounds::applyOverrides(
        const GrDriverBugWorkarounds& workarounds) {
#define GPU_OP(type, name) \
    name |= workarounds.name;
    GPU_DRIVER_BUG_WORKAROUNDS(GPU_OP)
#undef GPU_OP
}
GrDriverBugWorkarounds::~GrDriverBugWorkarounds() = default;
| 539 |
1,091 | /*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.onlab.packet.dhcp;
import com.google.common.collect.Lists;
import org.onlab.packet.DHCP6;
import org.onlab.packet.DeserializationException;
import org.onlab.packet.Deserializer;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Objects;
/**
* DHCPv6 Identity Association for Non-temporary Addresses Option.
* Based on RFC-3315
*/
public final class Dhcp6IaNaOption extends Dhcp6Option {
    // Fixed-size portion of the IA_NA payload: IAID (4) + T1 (4) + T2 (4) bytes.
    public static final int DEFAULT_LEN = 12;
    // Identity Association identifier, unique per client interface.
    private int iaId;
    // Renew time (seconds, relative to now); see getT1().
    private int t1;
    // Rebind time (seconds, relative to now); see getT2().
    private int t2;
    // Sub-options carried inside this IA_NA (e.g. IA Address options).
    private List<Dhcp6Option> options;
    @Override
    public short getCode() {
        return DHCP6.OptionCode.IA_NA.value();
    }
    @Override
    public short getLength() {
        // Fixed header fields plus every sub-option's payload and its own
        // 4-byte code/length header.
        return (short) (DEFAULT_LEN + options.stream()
                .mapToInt(opt -> (int) opt.getLength() + Dhcp6Option.DEFAULT_LEN)
                .sum());
    }
    /**
     * Gets Identity Association ID.
     *
     * @return the Identity Association ID
     */
    public int getIaId() {
        return iaId;
    }
    /**
     * Sets Identity Association ID.
     *
     * @param iaId the Identity Association ID.
     */
    public void setIaId(int iaId) {
        this.iaId = iaId;
    }
    /**
     * Gets time 1.
     * The time at which the client contacts the
     * server from which the addresses in the IA_NA
     * were obtained to extend the lifetimes of the
     * addresses assigned to the IA_NA; T1 is a
     * time duration relative to the current time
     * expressed in units of seconds.
     *
     * @return the value of time 1
     */
    public int getT1() {
        return t1;
    }
    /**
     * Sets time 1.
     *
     * @param t1 the value of time 1
     */
    public void setT1(int t1) {
        this.t1 = t1;
    }
    /**
     * Gets time 2.
     * The time at which the client contacts any
     * available server to extend the lifetimes of
     * the addresses assigned to the IA_NA; T2 is a
     * time duration relative to the current time
     * expressed in units of seconds.
     *
     * @return the value of time 2
     */
    public int getT2() {
        return t2;
    }
    /**
     * Sets time 2.
     *
     * @param t2 the value of time 2
     */
    public void setT2(int t2) {
        this.t2 = t2;
    }
    /**
     * Gets sub-options.
     *
     * @return sub-options of this option
     */
    public List<Dhcp6Option> getOptions() {
        return options;
    }
    /**
     * Sets sub-options.
     *
     * @param options the sub-options of this option
     */
    public void setOptions(List<Dhcp6Option> options) {
        this.options = options;
    }
    /**
     * Default constructor.
     */
    public Dhcp6IaNaOption() {
    }
    /**
     * Constructs a DHCPv6 IA NA option with DHCPv6 option.
     *
     * @param dhcp6Option the DHCPv6 option
     */
    public Dhcp6IaNaOption(Dhcp6Option dhcp6Option) {
        super(dhcp6Option);
    }
    /**
     * Gets deserializer.
     *
     * @return the deserializer
     */
    public static Deserializer<Dhcp6Option> deserializer() {
        return (data, offset, length) -> {
            // First parse the generic option header (code + length + payload).
            Dhcp6Option dhcp6Option =
                    Dhcp6Option.deserializer().deserialize(data, offset, length);
            if (dhcp6Option.getLength() < DEFAULT_LEN) {
                throw new DeserializationException("Invalid IA NA option data");
            }
            Dhcp6IaNaOption iaNaOption = new Dhcp6IaNaOption(dhcp6Option);
            byte[] optionData = iaNaOption.getData();
            ByteBuffer bb = ByteBuffer.wrap(optionData);
            iaNaOption.iaId = bb.getInt();
            iaNaOption.t1 = bb.getInt();
            iaNaOption.t2 = bb.getInt();
            iaNaOption.options = Lists.newArrayList();
            // Parse sub-options until fewer bytes than a sub-option header remain.
            while (bb.remaining() >= Dhcp6Option.DEFAULT_LEN) {
                Dhcp6Option option;
                // Peek the sub-option code and length through a secondary view
                // so that bb's position is advanced only by the bb.get() below.
                ByteBuffer optByteBuffer = ByteBuffer.wrap(optionData,
                                                           bb.position(),
                                                           optionData.length - bb.position());
                short code = optByteBuffer.getShort();
                short len = optByteBuffer.getShort();
                int optLen = UNSIGNED_SHORT_MASK & len;
                // Copy out header + payload of this sub-option from bb.
                byte[] subOptData = new byte[Dhcp6Option.DEFAULT_LEN + optLen];
                bb.get(subOptData);
                // TODO: put more sub-options?
                if (code == DHCP6.OptionCode.IAADDR.value()) {
                    option = Dhcp6IaAddressOption.deserializer()
                            .deserialize(subOptData, 0, subOptData.length);
                } else {
                    option = Dhcp6Option.deserializer()
                            .deserialize(subOptData, 0, subOptData.length);
                }
                iaNaOption.options.add(option);
            }
            return iaNaOption;
        };
    }
    @Override
    public byte[] serialize() {
        // Payload = fixed fields plus each sub-option including its header.
        int payloadLen = DEFAULT_LEN + options.stream()
                .mapToInt(opt -> (int) opt.getLength() + Dhcp6Option.DEFAULT_LEN)
                .sum();
        int len = Dhcp6Option.DEFAULT_LEN + payloadLen;
        ByteBuffer bb = ByteBuffer.allocate(len);
        bb.putShort(DHCP6.OptionCode.IA_NA.value());
        bb.putShort((short) payloadLen);
        bb.putInt(iaId);
        bb.putInt(t1);
        bb.putInt(t2);
        options.stream().map(Dhcp6Option::serialize).forEach(bb::put);
        return bb.array();
    }
    @Override
    public int hashCode() {
        return 31 * super.hashCode() + Objects.hash(iaId, t1, t2, options);
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        if (!super.equals(obj)) {
            return false;
        }
        final Dhcp6IaNaOption other = (Dhcp6IaNaOption) obj;
        return Objects.equals(this.iaId, other.iaId)
                && Objects.equals(this.t1, other.t1)
                && Objects.equals(this.t2, other.t2)
                && Objects.equals(this.options, other.options);
    }
    @Override
    public String toString() {
        return getToStringHelper()
                .add("iaId", iaId)
                .add("t1", t1)
                .add("t2", t2)
                .add("options", options)
                .toString();
    }
}
| 3,318 |
10,225 | package io.quarkus.deployment.configuration.type;
import java.util.Objects;
/**
*
*/
/**
 * Converter type wrapper representing an {@code Optional} of a nested type.
 * Instances are immutable; the hash code is computed lazily and cached.
 */
public final class OptionalOf extends ConverterType {
    private final ConverterType type;
    // Lazily computed; 0 means "not yet computed" (a real hash of 0 is
    // remapped to 0x8000_0000 so the sentinel stays unambiguous).
    private int hashCode;
    public OptionalOf(final ConverterType type) {
        this.type = type;
    }
    /** Returns the wrapped (nested) converter type. */
    public ConverterType getNestedType() {
        return type;
    }
    @Override
    public int hashCode() {
        int cached = this.hashCode;
        if (cached != 0) {
            return cached;
        }
        cached = Objects.hash(type, OptionalOf.class);
        if (cached == 0) {
            cached = 0x8000_0000;
        }
        this.hashCode = cached;
        return cached;
    }
    @Override
    public boolean equals(final Object obj) {
        return obj instanceof OptionalOf && equals((OptionalOf) obj);
    }
    /** Typed equality: equal when the nested types are equal. */
    public boolean equals(final OptionalOf obj) {
        if (this == obj) {
            return true;
        }
        return obj != null && type.equals(obj.type);
    }
    public Class<?> getLeafType() {
        return type.getLeafType();
    }
}
| 436 |
850 | <reponame>jacebrowning/voc<filename>python/common/org/python/exceptions/SyntaxWarning.java
package org.python.exceptions;
/**
 * Java-level implementation of Python's {@code SyntaxWarning} category,
 * mirroring the constructors of its {@code Warning} base class.
 */
public class SyntaxWarning extends org.python.exceptions.Warning {
    /** Constructs the warning with no message. */
    public SyntaxWarning() {
        super();
    }
    /** Constructs the warning with a plain message string. */
    public SyntaxWarning(String msg) {
        super(msg);
    }
    /** Constructs the warning from Python positional and keyword arguments. */
    public SyntaxWarning(org.python.Object[] args, java.util.Map<java.lang.String, org.python.Object> kwargs) {
        super(args, kwargs);
    }
}
| 175 |
384 | /* This code is part of the tng binary trajectory format.
*
* Written by <NAME>
* Copyright (c) 2012-2013, The GROMACS development team.
* Check out http://www.gromacs.org for more information.
*
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the Revised BSD License.
*/
#ifndef TNG_IO_HPP
#define TNG_IO_HPP
#include "tng_io.h"
namespace Tng
{
class Trajectory;
class Atom;
class Residue;
class Chain;
class Molecule;
typedef class Molecule * Molecule_t;
class Trajectory {
private:
tng_trajectory_t traj;
tng_function_status status;
public:
/**
* @brief Add a molecule to the trajectory.
* @param name is a pointer to the string containing the name of the new molecule.
* @param molecule is a pointer to the newly created molecule.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status addMolecule(const char *, Molecule_t);
tng_function_status addMoleculeWithId(const char *, int64_t id, Molecule_t);
tng_function_status findMolecule(const char *name, int64_t id, Molecule_t molecule);
friend class Atom;
friend class Residue;
friend class Chain;
friend class Molecule;
//! Normal constructor
Trajectory()
{ status = tng_trajectory_init(&traj); }
//! Copy constructor
Trajectory(Trajectory * src)
{ status = tng_trajectory_init_from_src(traj,&src->traj); }
//! Detructor
~Trajectory()
{ status = tng_trajectory_destroy(&traj); }
//! Status
tng_function_status getStatus()
{ return status; }
/**
* @brief Get the name of the input file.
* @param file_name the string to fill with the name of the input file,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for file_name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getInputFile (char *file_name, const int max_len)
{
return status = tng_input_file_get(traj, file_name, max_len);
}
/**
* @brief Set the name of the input file.
* @param file_name the name of the input file.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setInputFile(const char *file_name)
{
return status = tng_input_file_set(traj, file_name);
}
/**
* @brief Get the name of the output file.
* @param file_name the string to fill with the name of the output file,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for file_name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getOutputFile(char *file_name, const int max_len)
{
return status = tng_output_file_get(traj, file_name, max_len);
}
/**
* @brief Set the name of the output file.
* @param file_name the name of the output file.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setOutputFile(const char *file_name)
{
return status = tng_output_file_set(traj, file_name);
}
/**
* @brief Get the endianness of the output file.
* current output file.
* @param endianness will contain the enumeration of the endianness.
* @return TNG_SUCCESS (0) if successful or TNG_FAILURE (1) if the endianness
* could not be retrieved.
*/
tng_function_status getOutputFileEndianness
(tng_file_endianness *endianness)
{
return status = tng_output_file_endianness_get(traj, endianness);
}
/**
* @brief Set the endianness of the output file.
* @param endianness the enumeration of the endianness, can be either
* TNG_BIG_ENDIAN (0) or TNG_LITTLE_ENDIAN (1).
* @details The endianness cannot be changed after file output has started.
* @return TNG_SUCCESS (0) if successful or TNG_FAILURE (1) if the endianness
* could not be set.
*/
tng_function_status setOutputFileEndianness
(const tng_file_endianness endianness)
{
return status = tng_output_file_endianness_set(traj, endianness);
}
/**
* @brief Get the name of the program used when creating the trajectory.
* @param name the string to fill with the name of the program,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getFirstProgramName(char *name, const int max_len)
{
return status = tng_first_program_name_get(traj,name,max_len);
}
/**
* @brief Set the name of the program used when creating the trajectory..
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setFirstProgramName(const char *new_name)
{
return status = tng_first_program_name_set(traj, new_name);
}
/**
* @brief Get the name of the program used when last modifying the trajectory.
* @param name the string to fill with the name of the program,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getLastProgramName(char *name, const int max_len)
{
return status = tng_last_program_name_get(traj, name, max_len);
}
/**
* @brief Set the name of the program used when last modifying the trajectory.
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setLastProgramName(const char *new_name)
{
return status = tng_last_program_name_set(traj, new_name);
}
/**
* @brief Get the name of the user who created the trajectory.
* @param name the string to fill with the name of the user,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getFirstUserName(char *name, const int max_len)
{
return status = tng_first_user_name_get(traj,name, max_len);
}
/**
* @brief Set the name of the user who created the trajectory.
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setFirstUserName(const char *new_name)
{
return status = tng_first_user_name_set(traj, new_name);
}
/**
* @brief Get the name of the user who last modified the trajectory.
* @param name the string to fill with the name of the user,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getLastUserName(char *name, const int max_len)
{
return status = tng_last_user_name_get(traj,name,max_len);
}
/**
* @brief Set the name of the user who last modified the trajectory.
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setLastUserName(const char *new_name)
{
return status = tng_last_user_name_set(traj,new_name);
}
/**
* @brief Get the name of the computer used when creating the trajectory.
* @param name the string to fill with the name of the computer,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getFirstComputerName(char *name, const int max_len)
{
return status = tng_first_computer_name_get(traj, name, max_len);
}
/**
* @brief Set the name of the computer used when creating the trajectory.
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setFirstComputerName(const char *new_name)
{
return status = tng_first_computer_name_set(traj, new_name);
}
/**
* @brief Get the name of the computer used when last modifying the trajectory.
* @param name the string to fill with the name of the computer,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getLastComputerName(char *name, const int max_len)
{
return status = tng_last_computer_name_get(traj,name,max_len);
}
/**
* @brief Set the name of the computer used when last modifying the trajectory.
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setLastComputerName(const char *new_name)
{
return status = tng_last_computer_name_set(traj,new_name);
}
/**
* @brief Get the pgp_signature of the user creating the trajectory.
* @param signature the string to fill with the signature,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getFirstSignature(char *signature, const int max_len)
{
return status = tng_last_computer_name_get(traj, signature,max_len);
}
/**
* @brief Set the pgp_signature of the user creating the trajectory.
* @param signature is a string containing the pgp_signature.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setFirstSignature(const char *signature)
{
return status = tng_first_signature_set(traj, signature);
}
/**
* @brief Get the pgp_signature of the user last modifying the trajectory.
* @param signature the string to fill with the signature,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getLastSignature(char *signature, const int max_len)
{
return status = tng_first_signature_get(traj, signature, max_len);
}
/**
* @brief Set the pgp_signature of the user last modifying the trajectory.
* @param signature is a string containing the pgp_signature.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setLastSignature(const char *signature)
{
return status = tng_last_signature_set(traj, signature);
}
/**
* @brief Get the name of the forcefield used in the trajectory.
* @param name the string to fill with the name of the forcefield,
* memory must be allocated before.
* @param max_len maximum char length of the string, i.e. how much memory has
* been reserved for name. This includes \0 terminating character.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (source string longer than destination string).
*/
tng_function_status getForcefieldName(char *name, const int max_len)
{
return status = tng_last_signature_get(traj,name,max_len);
}
/**
* @brief Set the name of the forcefield used in the trajectory.
* @param new_name is a string containing the wanted name.
* @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
* error has occured.
*/
tng_function_status setForcefieldName(const char *new_name)
{
return status = tng_forcefield_name_set(traj, new_name);
}
/**
* @brief Get the medium stride length of the trajectory.
* @param len is pointing to a value set to the stride length.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getMediumStrideLength(int64_t *len)
{
return status = tng_medium_stride_length_get(traj,len);
}
/**
* @brief Set the medium stride length of the trajectory.
* @param len is the wanted medium stride length.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred.
*/
tng_function_status setMediumStrideLength(const int64_t len)
{
return status = tng_medium_stride_length_set(traj,len);
}
/**
* @brief Get the long stride length of the trajectory.
* @param len is pointing to a value set to the stride length.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getLongStrideLength(int64_t *len)
{
return status = tng_long_stride_length_get(traj, len);
}
/**
* @brief Set the long stride length of the trajectory.
* @param len is the wanted long stride length.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred.
*/
tng_function_status setLongStrideLength(const int64_t len)
{
return status = tng_long_stride_length_set(traj,len);
}
/**
* @brief Get the current time per frame of the trajectory.
* @param len is pointing to a value set to the time per frame.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getTimePerFrame(double *time)
{
return status = tng_time_per_frame_get(traj, time);
}
/**
* @brief Set the time per frame of the trajectory.
* @param len is the new time per frame.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred.
*/
tng_function_status setTimePerFrame(const double time)
{
return status = tng_time_per_frame_set(traj, time);
}
/**
* @brief Get the length of the input file.
* @param len is pointing to a value set to the file length.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getInputFileLen(int64_t *len)
{
return status = tng_input_file_len_get(traj, len);
}
/**
* @brief Get the number of frames in the trajectory
* @param n is pointing to a value set to the number of frames.
* @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
* has occurred (could not find last frame set).
*/
tng_function_status getNumFrames(int64_t *n)
{
return status = tng_num_frames_get(traj, n);
}
/**
* @brief Get the current number of particles.
* @param n is pointing to a value set to the number of particles.
* @details If variable number of particles are used this function will return
* the number of particles in the current frame set.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getNumParticles(int64_t *n)
{
return status = tng_num_particles_get(traj, n);
}
/**
* @brief Get the current total number of molecules.
* @param n is pointing to a value set to the number of molecules.
* @details If variable number of particles are used this function will return
* the total number of molecules in the current frame set.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getNumMolecules(int64_t *n)
{
return status = tng_num_molecules_get(traj,n);
}
/**
* @brief Get the exponential used for distances in the trajectory.
* @param exp is pointing to a value set to the distance unit exponential.
* @details Example: If the distances are specified in nm (default) exp is -9.
* If the distances are specified in Å exp is -10.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getDistanceUnitExponential
(int64_t *exp)
{
return status = tng_distance_unit_exponential_get(traj, exp);
}
/**
* @brief Set the exponential used for distances in the trajectory.
* @param exp is the distance unit exponential to use.
* @details Example: If the distances are specified in nm (default) exp is -9.
* If the distances are specified in Å exp is -10.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status setDistanceUnitExponential
(int64_t exp)
{
return status = tng_distance_unit_exponential_set(traj, exp);
}
/**
* @brief Get the number of frames per frame set.
* per frame set.
* @param n is pointing to a value set to the number of frames per frame set.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status getNumFramesPerFrameSet(int64_t *n)
{
return status = tng_num_frames_per_frame_set_get(traj,n);
}
/**
* @brief Set the number of frames per frame set.
* @param n is the number of frames per frame set.
* @details This does not affect already existing frame sets. For
* consistency the number of frames per frame set should be set
* betfore creating any frame sets.
* @return TNG_SUCCESS (0) if successful.
*/
tng_function_status setNumFramesPerFrameSet(const int64_t n)
{
return status = tng_num_frames_per_frame_set_set(traj,n);
}
/**
 * @brief Get the number of frame sets.
 * @param n points to a value that is set to the number of frame sets.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getNumFrameSets(int64_t *n)
{
    status = tng_num_frame_sets_get(traj, n);
    return status;
}
/**
 * @brief Get the current trajectory frame set.
 * @param frame_set_p is set to point at the memory position of the
 * current frame set.
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status getCurrentFrameSet(tng_trajectory_frame_set_t *frame_set_p)
{
    status = tng_current_frame_set_get(traj, frame_set_p);
    return status;
}
/**
 * @brief Find the requested frame set number.
 * @param nr is the frame set number to search for.
 * @details On success tng_data->current_trajectory_frame_set contains
 * the found frame set.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status findFrameSetNr(const int64_t nr)
{
    status = tng_frame_set_nr_find(traj, nr);
    return status;
}
/**
 * @brief Find the frame set containing a specific frame.
 * @param frame is the frame number to search for.
 * @details On success tng_data->current_trajectory_frame_set contains
 * the found frame set.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status findFrameSetOfFrame(const int64_t frame)
{
    status = tng_frame_set_of_frame_find(traj, frame);
    return status;
}
/**
 * @brief Get the file position of the next frame set in the input file.
 * @param frame_set is the frame set of which to get the position of the
 * following frame set.
 * @param pos points to a value that is set to the file position.
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status getNextFrameSetFilePos(const tng_trajectory_frame_set_t frame_set,
                                           int64_t *pos)
{
    status = tng_frame_set_next_frame_set_file_pos_get(traj, frame_set, pos);
    return status;
}
/**
 * @brief Get the file position of the previous frame set in the input file.
 * @param frame_set is the frame set of which to get the position of the
 * previous frame set.
 * @param pos points to a value that is set to the file position.
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status getPrevFrameSetFilePos(const tng_trajectory_frame_set_t frame_set,
                                           int64_t *pos)
{
    status = tng_frame_set_prev_frame_set_file_pos_get(traj, frame_set, pos);
    return status;
}
/**
 * @brief Get the first and last frames of the frame set.
 * @param frame_set is the frame set of which to get the frame range.
 * @param first_frame is set to the first frame of the frame set.
 * @param last_frame is set to the last frame of the frame set.
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status getFrameSetFrameRange(const tng_trajectory_frame_set_t frame_set,
                                          int64_t *first_frame,
                                          int64_t *last_frame)
{
    status = tng_frame_set_frame_range_get(traj, frame_set,
                                           first_frame, last_frame);
    return status;
}
/**
 * @brief Get the molecule name of a real particle number (number in the
 * molecular system).
 * @param nr is the real number of the particle in the molecular system.
 * @param name is a string that is set to the name of the molecule. Memory
 * must be reserved beforehand.
 * @param max_len is the maximum length of name.
 * @return TNG_SUCCESS (0) if successful or TNG_FAILURE (1) if a minor error
 * has occurred.
 */
tng_function_status getMoleculeNameOfParticleNr(const int64_t nr,
                                                char *name,
                                                int max_len)
{
    status = tng_molecule_name_of_particle_nr_get(traj, nr, name, max_len);
    return status;
}
/**
 * @brief Get the chain name of a real particle number (number in the
 * molecular system).
 * @param nr is the real number of the particle in the molecular system.
 * @param name is a string that is set to the name of the chain. Memory
 * must be reserved beforehand.
 * @param max_len is the maximum length of name.
 * @return TNG_SUCCESS (0) if successful or TNG_FAILURE (1) if a minor error
 * has occurred.
 */
tng_function_status getChainNameOfParticleNr(const int64_t nr,
                                             char *name,
                                             int max_len)
{
    status = tng_chain_name_of_particle_nr_get(traj, nr, name, max_len);
    return status;
}
/**
 * @brief Get the residue name of a real particle number (number in the
 * molecular system).
 * @param nr is the real number of the particle in the molecular system.
 * @param name is a string that is set to the name of the residue. Memory
 * must be reserved beforehand.
 * @param max_len is the maximum length of name.
 * @return TNG_SUCCESS (0) if successful or TNG_FAILURE (1) if a minor error
 * has occurred.
 */
tng_function_status getResidueNameOfParticleNr(const int64_t nr,
                                               char *name,
                                               int max_len)
{
    status = tng_residue_name_of_particle_nr_get(traj, nr, name, max_len);
    return status;
}
/**
 * @brief Get the atom name of a real particle number (number in the
 * molecular system).
 * @param nr is the real number of the particle in the molecular system.
 * @param name is a string that is set to the name of the atom. Memory
 * must be reserved beforehand.
 * @param max_len is the maximum length of name.
 * @return TNG_SUCCESS (0) if successful or TNG_FAILURE (1) if a minor error
 * has occurred.
 */
tng_function_status getAtomNameOfParticleNr(const int64_t nr,
                                            char *name,
                                            int max_len)
{
    status = tng_atom_name_of_particle_nr_get(traj, nr, name, max_len);
    return status;
}
/**
 * @brief Add a particle mapping table.
 * @details Each particle mapping table is written as a separate block,
 * followed by the data blocks for the corresponding particles. In most
 * cases there is one particle mapping block per thread writing the
 * trajectory. The mapping information is added to the currently active
 * frame set of tng_data.
 * @param first_particle_number is the first particle number of this
 * mapping block.
 * @param n_particles is the number of particles in this mapping block.
 * @param mapping_table is a list (n_particles long) of the real particle
 * numbers, i.e. the numbers used in the molecular system;
 * mapping_table[0] is the real particle number of the first particle in
 * the following data blocks.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status addParticleMapping(const int64_t first_particle_number,
                                       const int64_t n_particles,
                                       const int64_t *mapping_table)
{
    status = tng_particle_mapping_add(traj, first_particle_number,
                                      n_particles, mapping_table);
    return status;
}
/**
 * @brief Read the header blocks from the input_file of tng_data.
 * @details The trajectory blocks must be read separately and iteratively
 * in chunks to fit in memory. tng_data->input_file_path specifies which
 * file to read from. If the file (input_file) is not open it will be
 * opened.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash written in the file is compared
 * to the md5 hash of the read contents to ensure valid data.
 * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
 * error has occurred.
 */
tng_function_status readFileHeaders(const tng_hash_mode hash_mode)
{
    status = tng_file_headers_read(traj, hash_mode);
    return status;
}
/**
 * @brief Write the header blocks to the output_file of tng_data.
 * @details The trajectory blocks must be written separately and
 * iteratively in chunks to fit in memory. tng_data->output_file_path
 * specifies which file to write to. If the file (output_file) is not
 * open it will be opened.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH an md5 hash is generated for each header
 * block.
 * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
 * error has occurred.
 */
tng_function_status writeFileHeaders(const tng_hash_mode hash_mode)
{
    status = tng_file_headers_write(traj, hash_mode);
    return status;
}
/**
 * @brief Read one (the next) block (of any kind) from the input_file of
 * tng_data.
 * @details tng_data->input_file_path specifies which file to read from.
 * If the file (input_file) is not open it will be opened.
 * If block_data->input_file_pos > 0 the reading starts from that
 * position, otherwise it starts from the current position.
 * @param block_data is a pointer to the struct that will be populated
 * with the data.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash written in the file is compared
 * to the md5 hash of the read contents to ensure valid data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status readNextBlock(const tng_hash_mode hash_mode,
                                  tng_gen_block_t block_data)
{
    status = tng_block_read_next(traj, block_data, hash_mode);
    return status;
}
/**
 * @brief Read one (the next) frame set, including mapping and related
 * data blocks, from the input_file of tng_data.
 * @details tng_data->input_file_path specifies which file to read from.
 * If the file (input_file) is not open it will be opened.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash written in the file is compared
 * to the md5 hash of the read contents to ensure valid data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status readNextFrameSet(const tng_hash_mode hash_mode)
{
    status = tng_frame_set_read_next(traj, hash_mode);
    return status;
}
/**
 * @brief Write one frame set, including mapping and related data blocks,
 * to the output_file of tng_data.
 * @details tng_data->output_file_path specifies which file to write to.
 * If the file (output_file) is not open it will be opened.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH an md5 hash is generated for each header
 * block.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status writeFrameSet(const tng_hash_mode hash_mode)
{
    status = tng_frame_set_write(traj, hash_mode);
    return status;
}
/**
 * @brief Create and initialise a frame set.
 * @param first_frame is the first frame of the frame set.
 * @param n_frames is the number of frames in the frame set.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status newFrameSet(const int64_t first_frame,
                                const int64_t n_frames)
{
    status = tng_frame_set_new(traj, first_frame, n_frames);
    return status;
}
/**
 * @brief Create and initialise a frame set with the time of the first
 * frame specified.
 * @param first_frame is the first frame of the frame set.
 * @param n_frames is the number of frames in the frame set.
 * @param first_frame_time is the time stamp of the first frame
 * (in seconds).
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status newFrameSetWithTime(const int64_t first_frame,
                                        const int64_t n_frames,
                                        const double first_frame_time)
{
    status = tng_frame_set_with_time_new(traj, first_frame,
                                         n_frames, first_frame_time);
    return status;
}
/**
 * @brief Set the time stamp of the first frame of the current frame set.
 * @param first_frame_time is the time stamp of the first frame in the
 * frame set.
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status setTimeOfFirstFrameOfFrameSet(const double first_frame_time)
{
    status = tng_frame_set_first_frame_time_set(traj, first_frame_time);
    return status;
}
/**
 * @brief Add a non-particle dependent data block.
 * @param id is the block ID of the block to add.
 * @param block_name is a descriptive name of the block to add.
 * @param datatype is the datatype of the data in the block
 * (e.g. int/float).
 * @param block_type_flag indicates if this is a non-trajectory block
 * (added directly to tng_data) or a trajectory block (added to the
 * frame set).
 * @param n_frames is the number of frames of the data block
 * (automatically set to 1 if adding a non-trajectory data block).
 * @param n_values_per_frame is how many values are stored per frame
 * (e.g. 9 for a box shape block).
 * @param stride_length is how many frames are between each entry in the
 * data block.
 * @param codec_id is the ID of the codec to compress the data.
 * @param new_data is an array of data values to add.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status addDataBlock(const int64_t id,
                                 const char *block_name,
                                 const tng_data_type datatype,
                                 const tng_block_type block_type_flag,
                                 int64_t n_frames,
                                 const int64_t n_values_per_frame,
                                 const int64_t stride_length,
                                 const int64_t codec_id,
                                 void *new_data)
{
    status = tng_data_block_add(traj, id, block_name, datatype,
                                block_type_flag, n_frames,
                                n_values_per_frame, stride_length,
                                codec_id, new_data);
    return status;
}
/**
 * @brief Add a particle dependent data block.
 * @param id is the block ID of the block to add.
 * @param block_name is a descriptive name of the block to add.
 * @param datatype is the datatype of the data in the block
 * (e.g. int/float).
 * @param block_type_flag indicates if this is a non-trajectory block
 * (added directly to tng_data) or a trajectory block (added to the
 * frame set).
 * @param n_frames is the number of frames of the data block
 * (automatically set to 1 if adding a non-trajectory data block).
 * @param n_values_per_frame is how many values are stored per frame
 * (e.g. 9 for a box shape block).
 * @param stride_length is how many frames are between each entry in the
 * data block.
 * @param first_particle_number is the number of the first particle
 * stored in this data block.
 * @param n_particles is the number of particles stored in this data
 * block.
 * @param codec_id is the ID of the codec to compress the data.
 * @param new_data is an array of data values to add.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status addParticleDataBlock(const int64_t id,
                                         const char *block_name,
                                         const tng_data_type datatype,
                                         const tng_block_type block_type_flag,
                                         int64_t n_frames,
                                         const int64_t n_values_per_frame,
                                         const int64_t stride_length,
                                         const int64_t first_particle_number,
                                         const int64_t n_particles,
                                         const int64_t codec_id,
                                         void *new_data)
{
    status = tng_particle_data_block_add(traj, id, block_name, datatype,
                                         block_type_flag, n_frames,
                                         n_values_per_frame, stride_length,
                                         first_particle_number, n_particles,
                                         codec_id, new_data);
    return status;
}
/**
 * @brief Write data of one trajectory frame to the output_file of
 * tng_data.
 * @param frame_nr is the index number of the frame to write.
 * @param block_id is the ID of the data block to write the data to.
 * @param data is an array of data to write. The length of the array
 * should equal n_values_per_frame.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash written in the file is compared
 * to the md5 hash of the read contents to ensure valid data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status writeFrameData(const int64_t frame_nr,
                                   const int64_t block_id,
                                   const void *data,
                                   const tng_hash_mode hash_mode)
{
    status = tng_frame_data_write(traj, frame_nr, block_id,
                                  data, hash_mode);
    return status;
}
/**
 * @brief Write particle data of one trajectory frame to the output_file
 * of tng_data.
 * @param frame_nr is the index number of the frame to write.
 * @param block_id is the ID of the data block to write the data to.
 * @param val_first_particle is the number of the first particle in the
 * data array.
 * @param val_n_particles is the number of particles in the data array.
 * @param data is a 1D-array of data to write. The length of the array
 * should equal n_particles * n_values_per_frame.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash written in the file is compared
 * to the md5 hash of the read contents to ensure valid data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status writeFrameParticleData(const int64_t frame_nr,
                                           const int64_t block_id,
                                           const int64_t val_first_particle,
                                           const int64_t val_n_particles,
                                           const void *data,
                                           const tng_hash_mode hash_mode)
{
    status = tng_frame_particle_data_write(traj, frame_nr, block_id,
                                           val_first_particle,
                                           val_n_particles,
                                           data, hash_mode);
    return status;
}
/**
 * @brief Free a 2D array of data values.
 * @param values is the 2D array to free; it is set to 0 afterwards.
 * @param n_frames is the number of frames in the data array.
 * @param n_values_per_frame is the number of values per frame in the
 * data array.
 * @param type is the data type of the data in the array
 * (e.g. int/float/char).
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status freeDataValues(union data_values **values,
                                   const int64_t n_frames,
                                   const int64_t n_values_per_frame,
                                   const tng_data_type type)
{
    status = tng_data_values_free(traj, values, n_frames,
                                  n_values_per_frame, type);
    return status;
}
/**
 * @brief Free a 3D array of data values.
 * @param values is the array to free; it is set to 0 afterwards.
 * @param n_frames is the number of frames in the data array.
 * @param n_particles is the number of particles in the data array.
 * @param n_values_per_frame is the number of values per frame in the
 * data array.
 * @param type is the data type of the data in the array
 * (e.g. int/float/char).
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status freeParticleDataValues(union data_values ***values,
                                           const int64_t n_frames,
                                           const int64_t n_particles,
                                           const int64_t n_values_per_frame,
                                           const tng_data_type type)
{
    status = tng_particle_data_values_free(traj, values, n_frames,
                                           n_particles,
                                           n_values_per_frame, type);
    return status;
}
/**
 * @brief Retrieve non-particle data from the last read frame set.
 * Obsolete!
 * @details tng_data->input_file_path specifies which file to read from.
 * If the file (input_file) is not open it will be opened.
 * @param block_id is the id number of the data block to read.
 * @param values is a pointer to a 2-dimensional array (memory
 * unallocated), which will be filled with data. The array will be sized
 * (n_frames * n_values_per_frame). Since ***values is allocated in this
 * function it is the caller's responsibility to free the memory.
 * @param n_frames is set to the number of frames in the returned data.
 * This is needed to properly reach and/or free the data afterwards.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getData(const int64_t block_id,
                            union data_values ***values,
                            int64_t *n_frames,
                            int64_t *n_values_per_frame,
                            char *type)
{
    status = tng_data_get(traj, block_id, values, n_frames,
                          n_values_per_frame, type);
    return status;
}
/**
 * @brief Retrieve a vector (1D array) of non-particle data from the last
 * read frame set.
 * @param block_id is the id number of the data block to read.
 * @param values is a pointer to a 1-dimensional array (memory
 * unallocated), which will be filled with data. The length of the array
 * will be (n_frames * n_values_per_frame). Since **values is allocated
 * in this function it is the caller's responsibility to free the memory.
 * @param n_frames is set to the number of frames in the returned data.
 * This is needed to properly reach and/or free the data afterwards.
 * @param stride_length is set to the stride length of the returned data.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @details This only works for numerical (int, float, double) data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getDataVector(const int64_t block_id,
                                  void **values,
                                  int64_t *n_frames,
                                  int64_t *stride_length,
                                  int64_t *n_values_per_frame,
                                  char *type)
{
    status = tng_data_vector_get(traj, block_id, values, n_frames,
                                 stride_length, n_values_per_frame, type);
    return status;
}
/**
 * @brief Read and retrieve non-particle data in a specific interval.
 * Obsolete!
 * @details tng_data->input_file_path specifies which file to read from.
 * If the file (input_file) is not open it will be opened.
 * @param block_id is the id number of the data block to read.
 * @param start_frame_nr is the index number of the first frame to read.
 * @param end_frame_nr is the index number of the last frame to read.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash in the file is compared to the
 * md5 hash of the read contents to ensure valid data.
 * @param values is a pointer to a 2-dimensional array (memory
 * unallocated), which will be filled with data. The array will be sized
 * (n_frames * n_values_per_frame). Since ***values is allocated in this
 * function it is the caller's responsibility to free the memory.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getDataInterval(const int64_t block_id,
                                    const int64_t start_frame_nr,
                                    const int64_t end_frame_nr,
                                    const tng_hash_mode hash_mode,
                                    union data_values ***values,
                                    int64_t *n_values_per_frame,
                                    char *type)
{
    status = tng_data_interval_get(traj, block_id, start_frame_nr,
                                   end_frame_nr, hash_mode, values,
                                   n_values_per_frame, type);
    return status;
}
/**
 * @brief Read and retrieve a vector (1D array) of non-particle data,
 * in a specific interval.
 * @param block_id is the id number of the data block to read.
 * @param start_frame_nr is the index number of the first frame to read.
 * @param end_frame_nr is the index number of the last frame to read.
 * @param hash_mode is an option to decide whether to use the md5 hash or not.
 * If hash_mode == TNG_USE_HASH the md5 hash in the file will be
 * compared to the md5 hash of the read contents to ensure valid data.
 * @param values is a pointer to a 1-dimensional array (memory unallocated), which
 * will be filled with data. The length of the array will be (n_frames * n_values_per_frame).
 * Since **values is allocated in this function it is the callers
 * responsibility to free the memory.
 * @param stride_length is set to the stride length (writing frequency) of
 * the data.
 * @param n_values_per_frame is set to the number of values per frame in the data.
 * This is needed to properly reach and/or free the data afterwards.
 * @param type is set to the data type of the data in the array.
 * @details This does only work for numerical (int, float, double) data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occured.
 */
tng_function_status getDataVectorInterval
            (const int64_t  block_id,
             const int64_t  start_frame_nr,
             const int64_t  end_frame_nr,
             /* Was declared "const char" while every other wrapper in this
              * class takes tng_hash_mode; the enum converts implicitly to
              * the underlying C API's char parameter, so this is a
              * source-compatible consistency fix for enum-typed callers. */
             const tng_hash_mode hash_mode,
             void **        values,
             int64_t *      stride_length,
             int64_t *      n_values_per_frame,
             char *         type)
{
    return status = tng_data_vector_interval_get(traj, block_id,
                                                 start_frame_nr,
                                                 end_frame_nr,
                                                 hash_mode, values,
                                                 stride_length,
                                                 n_values_per_frame,
                                                 type);
}
/**
 * @brief Retrieve particle data from the last read frame set. Obsolete!
 * @details The particle dimension of the returned values array is
 * translated to real particle numbering, i.e. the numbering of the
 * actual molecular system. tng_data->input_file_path specifies which
 * file to read from. If the file (input_file) is not open it will be
 * opened.
 * @param block_id is the id number of the particle data block to read.
 * @param values is a pointer to a 3-dimensional array (memory
 * unallocated), which will be filled with data. The array will be sized
 * (n_frames * n_particles * n_values_per_frame). Since ****values is
 * allocated in this function it is the caller's responsibility to free
 * the memory.
 * @param n_frames is set to the number of frames in the returned data.
 * This is needed to properly reach and/or free the data afterwards.
 * @param n_particles is set to the number of particles in the returned
 * data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getParticleData(const int64_t block_id,
                                    union data_values ****values,
                                    int64_t *n_frames,
                                    int64_t *n_particles,
                                    int64_t *n_values_per_frame,
                                    char *type)
{
    status = tng_particle_data_get(traj, block_id, values, n_frames,
                                   n_particles, n_values_per_frame, type);
    return status;
}
/**
 * @brief Retrieve a vector (1D array) of particle data from the last
 * read frame set.
 * @details The particle dimension of the returned values array is
 * translated to real particle numbering, i.e. the numbering of the
 * actual molecular system.
 * @param block_id is the id number of the particle data block to read.
 * @param values is a pointer to a 1-dimensional array (memory
 * unallocated), which will be filled with data. The length of the array
 * will be (n_frames * n_particles * n_values_per_frame). Since **values
 * is allocated in this function it is the caller's responsibility to
 * free the memory.
 * @param n_frames is set to the number of frames in the returned data.
 * This is needed to properly reach and/or free the data afterwards.
 * @param stride_length is set to the stride length of the returned data.
 * @param n_particles is set to the number of particles in the returned
 * data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @details This only works for numerical (int, float, double) data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getParticleDataVector(const int64_t block_id,
                                          void **values,
                                          int64_t *n_frames,
                                          int64_t *stride_length,
                                          int64_t *n_particles,
                                          int64_t *n_values_per_frame,
                                          char *type)
{
    status = tng_particle_data_vector_get(traj, block_id, values,
                                          n_frames, stride_length,
                                          n_particles,
                                          n_values_per_frame, type);
    return status;
}
/**
 * @brief Read and retrieve particle data in a specific interval.
 * Obsolete!
 * @details The particle dimension of the returned values array is
 * translated to real particle numbering, i.e. the numbering of the
 * actual molecular system.
 * @param block_id is the id number of the particle data block to read.
 * @param start_frame_nr is the index number of the first frame to read.
 * @param end_frame_nr is the index number of the last frame to read.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash in the file is compared to the
 * md5 hash of the read contents to ensure valid data.
 * @param values is a pointer to a 3-dimensional array (memory
 * unallocated), which will be filled with data. The array will be sized
 * (n_frames * n_particles * n_values_per_frame). Since ****values is
 * allocated in this function it is the caller's responsibility to free
 * the memory.
 * @param n_particles is set to the number of particles in the returned
 * data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getParticleDataInterval(const int64_t block_id,
                                            const int64_t start_frame_nr,
                                            const int64_t end_frame_nr,
                                            const tng_hash_mode hash_mode,
                                            union data_values ****values,
                                            int64_t *n_particles,
                                            int64_t *n_values_per_frame,
                                            char *type)
{
    status = tng_particle_data_interval_get(traj, block_id,
                                            start_frame_nr, end_frame_nr,
                                            hash_mode, values,
                                            n_particles,
                                            n_values_per_frame, type);
    return status;
}
/**
 * @brief Read and retrieve a vector (1D array) of particle data in a
 * specific interval.
 * @details The particle dimension of the returned values array is
 * translated to real particle numbering, i.e. the numbering of the
 * actual molecular system.
 * @param block_id is the id number of the particle data block to read.
 * @param start_frame_nr is the index number of the first frame to read.
 * @param end_frame_nr is the index number of the last frame to read.
 * @param hash_mode decides whether to use the md5 hash or not. If
 * hash_mode == TNG_USE_HASH the md5 hash in the file is compared to the
 * md5 hash of the read contents to ensure valid data.
 * @param values is a pointer to a 1-dimensional array (memory
 * unallocated), which will be filled with data. The length of the array
 * will be (n_frames * n_particles * n_values_per_frame). Since **values
 * is allocated in this function it is the caller's responsibility to
 * free the memory.
 * @param n_particles is set to the number of particles in the returned
 * data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param stride_length is set to the stride length (writing frequency)
 * of the data.
 * @param n_values_per_frame is set to the number of values per frame in
 * the data. This is needed to properly reach and/or free the data
 * afterwards.
 * @param type is set to the data type of the data in the array.
 * @details This only works for numerical (int, float, double) data.
 * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor error
 * has occurred or TNG_CRITICAL (2) if a major error has occurred.
 */
tng_function_status getParticleDataVectorInterval(const int64_t block_id,
                                                  const int64_t start_frame_nr,
                                                  const int64_t end_frame_nr,
                                                  const tng_hash_mode hash_mode,
                                                  void **values,
                                                  int64_t *n_particles,
                                                  int64_t *stride_length,
                                                  int64_t *n_values_per_frame,
                                                  char *type)
{
    status = tng_particle_data_vector_interval_get(traj, block_id,
                                                   start_frame_nr,
                                                   end_frame_nr,
                                                   hash_mode, values,
                                                   n_particles,
                                                   stride_length,
                                                   n_values_per_frame,
                                                   type);
    return status;
}
/**
 * @brief Get the date and time of initial file creation in ISO format
 * (string).
 * @param time is a pointer to the string in which the date will be
 * stored. Memory must be reserved beforehand.
 * @return TNG_SUCCESS (0) if successful.
 */
tng_function_status getTimeStr(char *time)
{
    status = tng_time_get_str(traj, time);
    return status;
}
};
class Molecule
{
private:
    tng_molecule_t mol;          // Underlying TNG molecule handle.
    Trajectory *traj;            // Trajectory this molecule belongs to.
    tng_function_status status;  // Status of the most recent operation.
public:
    tng_function_status addChain(const char *name, Chain *chain);
    tng_function_status findChain(const char *name, int64_t id, Chain *chain);
    friend class Trajectory;

    /** Constructor. Binds the molecule to a trajectory container. */
    Molecule(Trajectory *trajectory)
        : traj(trajectory)
    {
        //status = tng_molecule_init(traj->traj,mol);
    }
    /**
     * Destructor. Destroys the underlying TNG molecule.
     */
    ~Molecule()
    {
        status = tng_molecule_destroy(traj->traj, mol);
    }
    /** @return the status of the most recent operation. */
    tng_function_status getStatus()
    {
        return status;
    }
    /**
     * @brief Set the name of the molecule.
     * @param new_name is a string containing the wanted name.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a
     * major error has occurred.
     */
    tng_function_status setName(const char *new_name)
    {
        status = tng_molecule_name_set(traj->traj, mol, new_name);
        return status;
    }
    /**
     * @brief Get the count of the molecule.
     * @param cnt is a pointer to the variable to be populated with the
     * count.
     * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor
     * error has occurred or TNG_CRITICAL (2) if a major error has
     * occurred.
     */
    tng_function_status getCnt(int64_t *cnt)
    {
        status = tng_molecule_cnt_get(traj->traj, mol, cnt);
        return status;
    }
    /**
     * @brief Set the count of the molecule.
     * @param cnt is the number of instances of this molecule.
     * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if a minor
     * error has occurred or TNG_CRITICAL (2) if a major error has
     * occurred.
     */
    tng_function_status setCnt(int64_t cnt)
    {
        status = tng_molecule_cnt_set(traj->traj, mol, cnt);
        return status;
    }
};
/**
 * @brief Add a molecule to the trajectory.
 * @param name is a string containing the name of the molecule.
 * @param molecule is the molecule wrapper whose C-side handle will be set.
 * @return the status reported by the underlying C library.
 */
tng_function_status Trajectory::addMolecule(const char *name, Molecule_t molecule)
{
    status = tng_molecule_add(traj, name, &molecule->mol);
    return status;
}
/**
 * @brief Add a molecule with a specific id to the trajectory.
 * @param name is a string containing the name of the molecule.
 * @param id is the id to assign to the newly created molecule.
 * @param molecule is the molecule wrapper whose C-side handle will be set.
 * @return the status reported by the underlying C library.
 */
tng_function_status Trajectory::addMoleculeWithId
            (const char *name,
             const int64_t id,
             Molecule_t molecule)
{
    status = tng_molecule_w_id_add(traj, name, id, &molecule->mol);
    return status;
}
/**
* @brief Find a molecule.
* @param tng_data is the trajectory data container containing the molecule.
* @param name is a string containing the name of the molecule. If name is empty
* only id will be used for finding the molecule.
* @param id is the id of the molecule to look for. If id is -1 only the name of
* the molecule will be used for finding the molecule.
* @param molecule is a pointer to the molecule if it was found - otherwise 0.
* @return TNG_SUCCESS (0) if the molecule is found or TNG_FAILURE (1) if the
* molecule is not found.
* @details If name is an empty string and id is -1 the first molecule will be
* found.
*/
tng_function_status Trajectory::findMolecule
            (const char *name,
             int64_t id,
             Molecule_t molecule)
{
    // Look the molecule up by name and/or id (see doc comment above); the C
    // call writes the handle into the wrapper on success.
    status = tng_molecule_find(traj, name, id, &molecule->mol);
    return status;
}
class Atom
{
private:
    tng_atom_t atom;              // Handle into the underlying C library
    Trajectory * traj;            // Owning trajectory wrapper (not owned here)
    tng_function_status status;   // Result of the most recent API call
public:
    friend class Residue;
    // Constructor: binds the wrapper to a trajectory. The atom handle itself
    // is assigned by Residue::addAtom()/addAtomWithId() (Residue is a friend
    // and writes &atom->atom).
    Atom(Trajectory * trajectory)
    {
        traj = trajectory;
    }
    // Destructor
    /**
     * Does nothing: the atom handle is owned and freed by the library
     * together with its molecule, not by this wrapper.
     */
    ~Atom()
    {
        //delete atom;
    }
    //! Status of the last API call made through this wrapper
    tng_function_status getStatus()
    { return status; }
    /**
     * @brief Set the name of an atom.
     * @param new_name is a string containing the wanted name.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
     * error has occurred.
     */
    tng_function_status setName(const char *new_name)
    {
        return status = tng_atom_name_set(traj->traj, atom , new_name);
    }
    /**
     * @brief Set the atom type of an atom.
     * @param new_type is a string containing the atom type.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
     * error has occurred.
     */
    tng_function_status setType(const char *new_type)
    {
        return status = tng_atom_type_set(traj->traj, atom, new_type);
    }
};
class Residue
{
private:
    tng_residue_t residue;        // Handle into the underlying C library
    Trajectory * traj;            // Owning trajectory wrapper (not owned here)
    tng_function_status status;   // Result of the most recent API call
public:
    friend class Chain;
    // Constructor: binds the wrapper to a trajectory. The residue handle
    // itself is assigned by Chain::addResidue()/addResidueWithId()/
    // findResidue() (Chain is a friend and writes &residue->residue).
    Residue(Trajectory * trajectory)
    {
        traj = trajectory;
    }
    // Destructor
    /**
     * Does nothing: the residue handle is owned and freed by the library
     * together with its molecule, not by this wrapper.
     */
    ~Residue()
    {
        //delete residue;
    }
    //! Status of the last API call made through this wrapper
    tng_function_status getStatus()
    { return status; }
    /**
     * @brief Set the name of this residue.
     * @param new_name is a string containing the wanted name.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
     * error has occurred.
     */
    tng_function_status setName(const char *new_name)
    {
        return status = tng_residue_name_set(traj->traj, residue,new_name);
    }
    /**
     * @brief Add an atom to a residue.
     * @param atom_name is a string containing the name of the atom.
     * @param atom_type is a string containing the atom type of the atom.
     * @param atom is a pointer to the newly created atom.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
     * error has occurred.
     */
    tng_function_status addAtom(const char *atom_name,
                                const char *atom_type,
                                Atom * atom)
    {
        return status = tng_residue_atom_add(traj->traj, residue, atom_name,
                                             atom_type, &atom->atom);
    }
    /**
     * @brief Add an atom with a specific ID to a residue.
     * @param atom_name is a string containing the name of the atom.
     * @param atom_type is a string containing the atom type of the atom.
     * @param id is the ID of the created atom.
     * @param atom is a pointer to the newly created atom.
     * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if the ID could
     * not be set properly or TNG_CRITICAL (2) if a major error has occurred.
     */
    tng_function_status addAtomWithId
        (const char *atom_name,
         const char *atom_type,
         const int64_t id,
         Atom * atom)
    {
        return status = tng_residue_atom_w_id_add(traj->traj, residue,
                                                  atom_name, atom_type,
                                                  id, &atom->atom);
    }
};
class Chain
{
private:
    tng_chain_t chain;            // Handle into the underlying C library
    Trajectory * traj;            // Owning trajectory wrapper (not owned here)
    tng_function_status status;   // Result of the most recent API call
public:
    friend class Molecule;
    // Constructor: binds the wrapper to a trajectory. The chain handle itself
    // is assigned by Molecule::addChain()/findChain() (Molecule is a friend
    // and writes &chain->chain).
    Chain(Trajectory * trajectory)
    {
        traj = trajectory;
    }
    // Destructor
    /**
     * Does nothing: the chain handle is owned and freed by the library
     * together with its molecule, not by this wrapper.
     */
    ~Chain()
    {
        //delete chain;
    }
    //! Status of the last API call made through this wrapper
    tng_function_status getStatus()
    { return status; }
    /**
     * @brief Set the name of a chain.
     * @param new_name is a string containing the wanted name.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
     * error has occurred.
     */
    tng_function_status setName(const char *new_name)
    {
        return status = tng_chain_name_set(traj->traj, chain, new_name);
    }
    /**
     * @brief Find a residue in a chain.
     * @param name is a string containing the name of the residue.
     * @param id is the id of the residue to find. If id == -1 the first residue
     * that matches the specified name will be found.
     * @param residue is a pointer to the residue if it was found - otherwise 0.
     * @return TNG_SUCCESS (0) if the residue is found or TNG_FAILURE (1) if the
     * residue is not found.
     * @details If name is an empty string the first residue will be found.
     */
    tng_function_status findResidue
        (const char *name,
         int64_t id,
         Residue *residue)
    {
        return status = tng_chain_residue_find(traj->traj, chain, name,
                                               id, &residue->residue);
    }
    /**
     * @brief Add a residue to a chain.
     * @param name is a string containing the name of the residue.
     * @param residue is a pointer to the newly created residue.
     * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
     * error has occurred.
     */
    tng_function_status addResidue(const char *name,
                                   Residue * residue)
    {
        return status = tng_chain_residue_add(traj->traj, chain,
                                              name, &residue->residue);
    }
    /**
     * @brief Add a residue with a specific ID to a chain.
     * @param name is a string containing the name of the residue.
     * @param id is the ID of the created residue.
     * @param residue is a pointer to the newly created residue.
     * @return TNG_SUCCESS (0) if successful, TNG_FAILURE (1) if the ID could
     * not be set properly or TNG_CRITICAL (2) if a major error has occurred.
     */
    tng_function_status addResidueWithId
        (const char *name,
         const int64_t id,
         Residue * residue)
    {
        return status = tng_chain_residue_w_id_add(traj->traj, chain,
                                                   name, id, &residue->residue);
    }
};
/**
 * @brief Add a chain to a molecule.
 * @param name is a string containing the name of the chain.
 * @param chain is a pointer to the newly created chain.
 * @return TNG_SUCCESS (0) if successful or TNG_CRITICAL (2) if a major
 * error has occurred.
 */
tng_function_status Molecule::addChain(const char *name, Chain *chain)
{
    // Create the chain inside this molecule; the C call writes the new
    // handle into the Chain wrapper (Chain declares Molecule a friend).
    status = tng_molecule_chain_add(traj->traj, mol, name, &chain->chain);
    return status;
}
/**
* @brief Find a chain in a molecule.
* @param name is a string containing the name of the chain. If name is empty
* only id will be used for finding the chain.
* @param id is the id of the chain to look for. If id is -1 only the name of
* the chain will be used for finding the chain.
* @param chain is a pointer to the chain if it was found - otherwise 0.
* @return TNG_SUCCESS (0) if the chain is found or TNG_FAILURE (1) if the
* chain is not found.
* @details If name is an empty string and id is -1 the first chain will be
* found.
*/
tng_function_status Molecule::findChain
            (const char *name,
             int64_t id,
             Chain *chain)
{
    // Look the chain up by name and/or id (see doc comment above) and cache
    // the library's verdict before returning it.
    status = tng_molecule_chain_find(traj->traj, mol, name, id, &chain->chain);
    return status;
}
}
#endif
| 26,248 |
1,144 | package de.metas.contracts.commission.commissioninstance.businesslogic.sales.commissiontrigger;
import de.metas.bpartner.BPartnerId;
import de.metas.contracts.commission.Customer;
import lombok.Builder;
import lombok.NonNull;
import lombok.Value;
/*
* #%L
* de.metas.commission
* %%
* Copyright (C) 2019 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
 * "basically" an invoice candidate; but can be other things in future as well.
 * <p>
 * Immutable ({@code @Value}); instances are created via the lombok-generated builder.
 */
@Value
public class CommissionTrigger
{
	// The customer for whom the triggering document was created.
	Customer customer;

	/**
	 * The direct sales rep.
	 * <p>
	 * Note: used to be the customer's "direct" sales rep or the customer himself. Now it's always the sales rep as it's up to the commission algorithm to decide
	 * whether the customer can get something out of it or not.
	 */
	BPartnerId salesRepId;

	// BPartner representing the organization of the trigger.
	// NOTE(review): presumably the org's own BPartner record — confirm against callers.
	BPartnerId orgBPartnerId;

	// The underlying figures this trigger contributes; see CommissionTriggerData.
	CommissionTriggerData commissionTriggerData;

	// All parameters are mandatory (@NonNull); the builder therefore fails fast on missing values.
	@Builder
	private CommissionTrigger(
			@NonNull final Customer customer,
			@NonNull final BPartnerId salesRepId,
			@NonNull final BPartnerId orgBPartnerId,
			@NonNull final CommissionTriggerData commissionTriggerData)
	{
		this.customer = customer;
		this.salesRepId = salesRepId;
		this.orgBPartnerId = orgBPartnerId;
		this.commissionTriggerData = commissionTriggerData;
	}
}
| 573 |
922 | /*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.internal.engine.messageinterpolation.el;
import jakarta.el.ELException;
/**
 * {@link ELException} subtype signalling that a disabled Expression Language
 * feature was requested during message interpolation.
 * <p>
 * Package-private on purpose: only the EL message-interpolation machinery in
 * this package is expected to raise it.
 */
public class DisabledFeatureELException extends ELException {

	// ELException is Serializable; pin the serialized form explicitly so it
	// does not silently change with recompilation ("serial" lint).
	private static final long serialVersionUID = 1L;

	/**
	 * @param message human-readable description of the disabled feature
	 */
	DisabledFeatureELException(String message) {
		super( message );
	}
}
| 142 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.