max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
3,755 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe
import json
from email.utils import formataddr
from frappe.core.utils import get_parent_doc
from frappe.utils import (get_url, get_formatted_email, cint, list_to_str,
validate_email_address, split_emails, parse_addr, get_datetime)
from frappe.email.email_body import get_message_id
import frappe.email.smtp
import time
from frappe import _
from frappe.utils.background_jobs import enqueue
OUTGOING_EMAIL_ACCOUNT_MISSING = _("""
Unable to send mail because of a missing email account.
Please setup default Email Account from Setup > Email > Email Account
""")
@frappe.whitelist()
def make(doctype=None, name=None, content=None, subject=None, sent_or_received="Sent",
        sender=None, sender_full_name=None, recipients=None, communication_medium="Email", send_email=False,
        print_html=None, print_format=None, attachments='[]', send_me_a_copy=False, cc=None, bcc=None,
        flags=None, read_receipt=None, print_letterhead=True, email_template=None, communication_type=None,
        ignore_permissions=False):
    """Make a new communication.

    :param doctype: Reference DocType.
    :param name: Reference Document name.
    :param content: Communication body.
    :param subject: Communication subject.
    :param sent_or_received: Sent or Received (default **Sent**).
    :param sender: Communication sender (default current user).
    :param sender_full_name: Display name of the sender.
    :param recipients: Communication recipients as list or comma-separated string.
    :param communication_medium: Medium of communication (default **Email**).
    :param send_email: Send via email (default **False**).
    :param print_html: HTML Print format to be sent as attachment.
    :param print_format: Print Format name of parent document to be sent as attachment.
    :param attachments: List of attachments as list of files or JSON string.
    :param send_me_a_copy: Send a copy to the sender (default **False**).
    :param cc: CC recipients as list or comma-separated string.
    :param bcc: BCC recipients as list or comma-separated string.
    :param flags: Optional dict; ``ignore_doctype_permissions`` skips the permission check.
    :param read_receipt: Request a read receipt for the outgoing mail.
    :param print_letterhead: Include letterhead in the attached print (default **True**).
    :param email_template: Template which is used to compose the mail.
    :param communication_type: Communication Type of the new document.
    :param ignore_permissions: Skip the "email" permission check on the reference document.
    :return: dict with the new Communication ``name`` and ``emails_not_sent_to``.
    """
    is_error_report = (doctype == "User" and name == frappe.session.user and subject == "Error Report")
    send_me_a_copy = cint(send_me_a_copy)

    if not ignore_permissions:
        # Sending mail about a document requires "email" permission on it,
        # unless explicitly overridden via flags.
        if doctype and name and not is_error_report and not frappe.has_permission(doctype, "email", name) \
                and not (flags or {}).get('ignore_doctype_permissions'):
            raise frappe.PermissionError("You are not allowed to send emails related to: {doctype} {name}".format(
                doctype=doctype, name=name))

    if not sender:
        sender = get_formatted_email(frappe.session.user)

    # Normalize list inputs to the comma-separated strings stored on Communication.
    recipients = list_to_str(recipients) if isinstance(recipients, list) else recipients
    cc = list_to_str(cc) if isinstance(cc, list) else cc
    bcc = list_to_str(bcc) if isinstance(bcc, list) else bcc

    comm = frappe.get_doc({
        "doctype": "Communication",
        "subject": subject,
        "content": content,
        "sender": sender,
        "sender_full_name": sender_full_name,
        "recipients": recipients,
        "cc": cc or None,
        "bcc": bcc or None,
        "communication_medium": communication_medium,
        "sent_or_received": sent_or_received,
        "reference_doctype": doctype,
        "reference_name": name,
        "email_template": email_template,
        "message_id": get_message_id().strip(" <>"),
        "read_receipt": read_receipt,
        "has_attachment": 1 if attachments else 0,
        "communication_type": communication_type
    }).insert(ignore_permissions=True)
    # NOTE: insert() already persists the document; the former extra
    # comm.save(ignore_permissions=True) was a redundant second write and was removed.

    if isinstance(attachments, str):
        attachments = json.loads(attachments)

    # if not committed, delayed task doesn't find the communication
    if attachments:
        add_attachments(comm.name, attachments)

    if cint(send_email):
        if not comm.get_outgoing_email_account():
            frappe.throw(msg=OUTGOING_EMAIL_ACCOUNT_MISSING, exc=frappe.OutgoingEmailError)

        comm.send_email(print_html=print_html, print_format=print_format,
            send_me_a_copy=send_me_a_copy, print_letterhead=print_letterhead)

    emails_not_sent_to = comm.exclude_emails_list(include_sender=send_me_a_copy)

    return {
        "name": comm.name,
        "emails_not_sent_to": ", ".join(emails_not_sent_to or [])
    }
def validate_email(doc):
    """Validate the email addresses in the Recipients, CC and BCC fields.

    Skipped for incoming mail (``doc.flags.in_receive``) and for anything
    that is not an email Communication. Raises on the first invalid address.
    """
    is_email_communication = (
        doc.communication_type == "Communication"
        and doc.communication_medium == "Email"
    )
    if not is_email_communication or doc.flags.in_receive:
        return

    # Check every address in each of the three recipient fields.
    for field in ("recipients", "cc", "bcc"):
        for address in split_emails(getattr(doc, field)):
            validate_email_address(address, throw=True)
def set_incoming_outgoing_accounts(doc):
    """Resolve and set the incoming/outgoing Email Account fields on *doc*.

    Accounts are matched by the sender address and the reference doctype;
    for sent communications the chosen outgoing account is also persisted
    to the ``email_account`` field via ``db_set``.
    """
    # Imported locally, matching the lazy-import style used for this module.
    from frappe.email.doctype.email_account.email_account import EmailAccount

    incoming_email_account = EmailAccount.find_incoming(
        match_by_email=doc.sender, match_by_doctype=doc.reference_doctype)
    doc.incoming_email_account = incoming_email_account.email_id if incoming_email_account else None

    doc.outgoing_email_account = EmailAccount.find_outgoing(
        match_by_email=doc.sender, match_by_doctype=doc.reference_doctype)

    if doc.sent_or_received == "Sent":
        # NOTE(review): assumes find_outgoing() never returns None here,
        # otherwise the .name access below would fail — confirm upstream.
        doc.db_set("email_account", doc.outgoing_email_account.name)
def add_attachments(name, attachments):
    '''Attach existing File records to the given Communication.

    :param name: Communication document name to attach the files to.
    :param attachments: iterable of File document names; non-string
        entries are skipped.
    '''
    for a in attachments:
        if not isinstance(a, str):
            # FIX: previously a non-string entry fell through and re-used
            # (or hit an unbound) `attach` from a prior iteration; skip it.
            continue
        attach = frappe.db.get_value("File", {"name": a},
            ["file_name", "file_url", "is_private"], as_dict=1)
        # Create a fresh File record pointing at the same stored file,
        # attached to the Communication.
        _file = frappe.get_doc({
            "doctype": "File",
            "file_url": attach.file_url,
            "attached_to_doctype": "Communication",
            "attached_to_name": name,
            "folder": "Home/Attachments",
            "is_private": attach.is_private
        })
        _file.save(ignore_permissions=True)
@frappe.whitelist(allow_guest=True, methods=("GET",))
def mark_email_as_seen(name: str = None):
    """Tracking-pixel endpoint: mark the Communication *name* as read.

    Always responds with a 1x1 transparent PNG — even when the update
    fails — so the email client never renders an error.
    """
    try:
        update_communication_as_read(name)
        frappe.db.commit()  # nosemgrep: this will be called in a GET request
    except Exception:
        # Never propagate: a broken tracking pixel must not surface to the reader.
        frappe.log_error(frappe.get_traceback())
    finally:
        frappe.response.update({
            "type": "binary",
            "filename": "imaginary_pixel.png",
            # Hard-coded bytes of a 1x1 transparent PNG.
            "filecontent": (
                b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00"
                b"\x00\x01\x08\x06\x00\x00\x00\x1f\x15\xc4\x89\x00\x00\x00\r"
                b"IDATx\x9cc\xf8\xff\xff?\x03\x00\x08\xfc\x02\xfe\xa7\x9a\xa0"
                b"\xa0\x00\x00\x00\x00IEND\xaeB`\x82"
            )
        })
def update_communication_as_read(name):
    """Flag the Communication *name* as read by its recipient (idempotent)."""
    if not isinstance(name, str) or not name:
        return

    current = frappe.db.get_value(
        "Communication", name, "read_by_recipient", as_dict=True
    )
    # Nothing to do if the document is missing or already marked read.
    if not current or current.read_by_recipient:
        return

    frappe.db.set_value("Communication", name, {
        "read_by_recipient": 1,
        "delivery_status": "Read",
        "read_by_recipient_on": get_datetime(),
    })
| 2,473 |
775 | // This file is part of MicropolisJ.
// Copyright (C) 2013 <NAME>
// Portions Copyright (C) 1989-2007 Electronic Arts Inc.
//
// MicropolisJ is free software; you can redistribute it and/or modify
// it under the terms of the GNU GPLv3, with additional terms.
// See the README file, included in this distribution, for details.
package micropolisj.engine;
/**
* Enumeration of every possible message for the user generated by the game engine.
*/
public enum MicropolisMessage
{
	// Constant order mirrors the original game's message numbering; the
	// trailing comments record the original number, the generating routine,
	// and when the message was last tested/verified. Do not reorder.
	//orig_num generated last tested/verified
	NEED_RES,           // 1 doMessages 1/19
	NEED_COM,           // 2 doMessages 1/19
	NEED_IND,           // 3 doMessages 1/19
	NEED_ROADS,         // 4 doMessages 1/19
	NEED_RAILS,         // 5 doMessages 1/20
	NEED_POWER,         // 6 doMessages 1/19
	NEED_STADIUM,       // 7 doMessages 1/20
	NEED_SEAPORT,       // 8 doMessages 1/20
	NEED_AIRPORT,       // 9 doMessages
	HIGH_POLLUTION,     // 10 doMessages 1/20
	HIGH_CRIME,         // 11 doMessages 1/19
	HIGH_TRAFFIC,       // 12 doMessages 1/20
	NEED_FIRESTATION,   // 13 doMessages 1/19
	NEED_POLICE,        // 14 doMessages 1/19
	BLACKOUTS,          // 15 doMessages 1/19
	HIGH_TAXES,         // 16 doMessages 1/19
	ROADS_NEED_FUNDING, // 17 doMessages
	FIRE_NEED_FUNDING,  // 18 doMessages
	POLICE_NEED_FUNDING, // 19 doMessages
	FIRE_REPORT,        // 20
	MONSTER_REPORT,
	TORNADO_REPORT,
	EARTHQUAKE_REPORT,  // 23 makeEarthquake
	PLANECRASH_REPORT,
	SHIPWRECK_REPORT,
	TRAIN_CRASH_REPORT,
	COPTER_CRASH_REPORT,
	HIGH_UNEMPLOYMENT,
	OUT_OF_FUNDS_REPORT,
	FIREBOMBING_REPORT, //30
	NEED_PARKS,
	EXPLOSION_REPORT,
	INSUFFICIENT_FUNDS, // 33 MainWindow.applyCurrentTool
	BULLDOZE_FIRST,     // 34 MainWindow.applyCurrentTool
	POP_2K_REACHED,     // 35 checkGrowth 1/19
	POP_10K_REACHED,    // 36 checkGrowth
	POP_50K_REACHED,    // 37 checkGrowth
	POP_100K_REACHED,   // 38 checkGrowth
	POP_500K_REACHED,   // 39 checkGrowth
	BROWNOUTS_REPORT,   // 40 1/20
	HEAVY_TRAFFIC_REPORT, // 41 HelicopterSprite
	FLOOD_REPORT,
	MELTDOWN_REPORT,    // 43 doMeltdown
	RIOTING_REPORT,
	// added by Jason
	NO_NUCLEAR_PLANTS;

	/** Whether the message should be displayed in the notification pane. */
	public boolean useNotificationPane = false;

	// Flags are assigned once at class-load time; all other messages keep
	// the default of false.
	static
	{
		// not location-specific
		POP_2K_REACHED.useNotificationPane = true;
		POP_10K_REACHED.useNotificationPane = true;
		POP_50K_REACHED.useNotificationPane = true;
		POP_100K_REACHED.useNotificationPane = true;
		POP_500K_REACHED.useNotificationPane = true;
		HIGH_CRIME.useNotificationPane = true;
		HIGH_POLLUTION.useNotificationPane = true;

		// location-specific
		FLOOD_REPORT.useNotificationPane = true;
		FIRE_REPORT.useNotificationPane = true;
		MONSTER_REPORT.useNotificationPane = true;
		TORNADO_REPORT.useNotificationPane = true;
		MELTDOWN_REPORT.useNotificationPane = true;
		EARTHQUAKE_REPORT.useNotificationPane = true;
		TRAIN_CRASH_REPORT.useNotificationPane = true;
		SHIPWRECK_REPORT.useNotificationPane = true;
		COPTER_CRASH_REPORT.useNotificationPane = true;
		PLANECRASH_REPORT.useNotificationPane = true;
	}
}
| 1,992 |
1,031 | /* Copyright (c) <2003-2016> <Newton Game Dynamics>
*
* This software is provided 'as-is', without any express or implied
* warranty. In no event will the authors be held liable for any damages
* arising from the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely
*/
//
// Auto generated Parser Generator class: dNewtonLuaParcer.h
//
#ifndef __dNewtonLuaParcer_h__
#define __dNewtonLuaParcer_h__
#include <dList.h>
#include <dString.h>
class dNewtonLuaLex;
// Generated LR parser interface for the Newton Lua front end.
// NOTE(review): dCILInstr and dCIL (used below) are not declared in this
// header's visible includes — presumably provided by an earlier header;
// confirm before reusing in another translation unit.
class dNewtonLuaParcer
{
	public:
	// Token ids handed over by the scanner. Values from _AND upward follow
	// the order emitted by the parser generator (keywords, multi-character
	// operators, then literal/identifier classes).
	enum dToken
	{
		ACCEPTING_TOKEN = 254,
		ERROR_TOKEN = 255,
		_AND = 256,
		_BREAK,
		_DO,
		_ELSE,
		_ELSEIF,
		_END,
		_FALSE,
		_FOR,
		_FUNCTION,
		_GOTO,
		_IF,
		_IN,
		_LOCAL,
		_NIL,
		_NOT,
		_OR,
		_REPEAT,
		_RETURN,
		_THEN,
		_TRUE,
		_UNTIL,
		_WHILE,
		_LEFT_SHIFT,
		_RIGHT_SHIFT,
		_INTEGER_DIVIDE,
		_IDENTICAL,
		_DIFFERENT,
		_LEFT_EQUAL,
		_GREATHER_EQUAL,
		_DOUBLE_COLUMN,
		_DOUBLE_DOT,
		_TRIPLE_DOT,
		_INTEGER,
		_FLOAT,
		_LABEL,
		_STRING
	};

	// Opaque generated types; definitions live in the generated .cpp.
	enum ActionType;
	class dStackPair;    // parse-stack entry
	class dGotoEntry;    // goto-table row
	class dActionEntry;  // action-table row

	// Default semantic value carried on the parse stack: token id,
	// scanner position, and the matched lexeme.
	// (Misspelling "Defualt" is part of the generated API — do not fix.)
	class dDefualtUserVariable
	{
		public:
		dDefualtUserVariable ()
			:m_scannerLine (0), m_scannerIndex(0), m_token (dToken (0)), m_data()
		{
		}

		dDefualtUserVariable (const dDefualtUserVariable& copy)
			:m_scannerLine(copy.m_scannerLine), m_scannerIndex(copy.m_scannerIndex), m_token(copy.m_token), m_data(copy.m_data)
		{
		}

		dDefualtUserVariable (dToken token, const char* const data, int scannerLine, int scannerIndex)
			:m_scannerLine (scannerLine), m_scannerIndex(scannerIndex), m_token(token), m_data (data)
		{
		}

		dDefualtUserVariable& operator= (const dDefualtUserVariable& src)
		{
			m_scannerLine = src.m_scannerLine;
			m_scannerIndex = src.m_scannerIndex;
			m_token = src.m_token;
			m_data = src.m_data;
			return *this;
		}

		dToken GetToken() const
		{
			return m_token;
		}

		const dString& GetString() const
		{
			return m_data;
		}

		int m_scannerLine;   // line where the lexeme was scanned
		int m_scannerIndex;  // column/offset within that line
		dToken m_token;      // token id
		dString m_data;      // matched lexeme text
	};

	// Extended semantic value: adds token and CIL-node lists that are
	// *moved* (not copied) between instances via TranferDataToTarget,
	// hence the mutable members and the move-like copy semantics.
	class dUserVariable: public dDefualtUserVariable
	{
		public:
		dUserVariable ()
			:dDefualtUserVariable ()
			,m_tokenList()
			,m_nodeList()
		{
		}

		dUserVariable (dCILInstr* const instruction)
			:dDefualtUserVariable ()
			,m_tokenList()
			,m_nodeList()
		{
			m_nodeList.Append (instruction->GetNode());
		}

		dUserVariable (const dUserVariable& src)
			:dDefualtUserVariable (src)
			,m_tokenList()
			,m_nodeList()
		{
			// transfers ownership of list contents from src
			src.m_tokenList.TranferDataToTarget (m_tokenList);
			src.m_nodeList.TranferDataToTarget (m_nodeList);
		}

		dUserVariable& operator= (const dUserVariable& src)
		{
			dDefualtUserVariable& me = *this;
			me = src;
			src.m_nodeList.TranferDataToTarget (m_nodeList);
			src.m_tokenList.TranferDataToTarget (m_tokenList);
			return *this;
		}

		dUserVariable (dToken token, const char* const text, int scannerLine, int scannerIndex)
			:dDefualtUserVariable (token, text, scannerLine, scannerIndex)
			,m_tokenList()
			,m_nodeList()
		{
		}

		mutable dList<dString> m_tokenList;
		mutable dList<dCIL::dListNode*> m_nodeList;
	};

	dNewtonLuaParcer();
	virtual ~dNewtonLuaParcer();

	// Run the LR parse loop over tokens from *scanner*;
	// returns false on grammar error.
	virtual bool Parse(dNewtonLuaLex& scanner);

	private:
	const dGotoEntry* FindGoto (const dGotoEntry* const gotoList, int count, dToken token) const;
	const dActionEntry* FindAction (const dActionEntry* const list, int count, dToken token) const;
	const dActionEntry* GetNextAction (dList<dStackPair>& stack, dToken token, dNewtonLuaLex& scanner) const;

	bool m_grammarError;
};
#endif
| 1,594 |
337 | import org.jetbrains.annotations.NotNull;
class J {
	/**
	 * Appears to be a test fixture: a minimal subclass of A whose
	 * constructor pairs a primitive parameter with a {@code @NotNull}
	 * annotated reference parameter.
	 */
	public static class Foo extends A {
		public Foo(int i, @NotNull String s) {
		}
	}
}
9,614 | <reponame>keshavashiya/react-native-ui-kitten<filename>src/showcases/app.json
{
"expo": {
"name": "UI Kitten Showcases",
"slug": "ui-kitten-showcases",
"description": "UI Kitten Documentation Showcases",
"privacy": "public",
"platforms": [
"web"
],
"version": "1.0.0",
"orientation": "portrait",
"icon": "assets/icon.png",
"assetBundlePatterns": [
"assets/*"
],
"entryPoint": "node_modules/expo/AppEntry.js"
}
}
| 214 |
599 | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.conf.urls import url
from .form_mode import views as form_views
from .yaml_mode import views as yaml_views
# URL routes for form-mode and YAML-mode template APIs.
urlpatterns = [
    # Fetch a form template as of a specific show version.
    url(
        r"^form_templates/(?P<template_id>\d+)/show_versions/(?P<show_version_id>\d+)/$",
        form_views.TemplateViewSet.as_view({"get": "get_template_by_show_version"}),
    ),
    # Create a release from a show version of a form template.
    url(
        r"^form_templates/(?P<template_id>\d+)/show_versions/(?P<show_version_id>\d+)/releases/$",
        form_views.TemplateReleaseViewSet.as_view({"post": "create_release"}),
    ),
    # Update the resource of an existing release.
    url(
        r"^form_templates/(?P<template_id>\d+)/releases/(?P<release_id>\d+)/$",
        form_views.TemplateReleaseViewSet.as_view({"put": "update_resource"}),
    ),
    # List all releases of a form template.
    url(
        r"^form_templates/(?P<template_id>\d+)/releases/$",
        form_views.TemplateReleaseViewSet.as_view({"get": "list_releases"}),
    ),
    # Fetch the most recent release of a form template.
    url(
        r"^form_templates/(?P<template_id>\d+)/releases/latest/$",
        form_views.TemplateReleaseViewSet.as_view({"get": "get_latest_release"}),
    ),
    # Apply a YAML-mode template release.
    url(r"^yaml_templates/releases/$", yaml_views.TemplateReleaseViewSet.as_view({"post": "apply"})),
]
| 704 |
488 | <reponame>ouankou/rose
/* Compiler test fixture: declarations/definitions of functions taking C99
   variable-length array parameters, mixing the unspecified-size form
   `int array[*]` (prototype scope) with the named-size form
   `int array[size]` (definition). The #if 0 blocks preserve disabled
   variants of the same test. Tokens are deliberately left untouched. */
#if 1
/* prototype uses [*]; the definition names the size parameter */
void foobar1 ( int size, int array[*] );
void foobar1 ( int size, int array[size] )
{
}
#endif

#if 0
void foobar1 ( int size1, int array1[size1] );
void foobar2 ( int size, int array[size] );
#endif

#if 0
void foobar2 ( int size, int array[size] );
#endif

#if 1
// void foobar2 ( int sizeVar, int array[sizeVar] );
// void foobar2 ( int sizeVar, int array[*] );
void foobar2 ( int size, int array[*] );
#endif

#if 1
void foobar2 ( int sizeVar, int array[sizeVar] )
{
}
#endif
| 211 |
339 | <filename>convlab2/util/camrest/lexicalize.py
from copy import deepcopy
def delexicalize_da(da, requestable):
    """Replace concrete slot values in a dialog act with placeholders.

    Requestable intents get '?', 'none' slots keep 'none', and every other
    (intent, slot) pair gets a running occurrence index ('1', '2', ...).

    :param da: list of [intent, slot, value] triples
    :param requestable: collection of intents whose values become '?'
    :return: new list of [intent, slot, placeholder] triples
    """
    occurrence = {}
    result = []
    for intent, slot, _value in da:
        if intent in requestable:
            placeholder = '?'
        elif slot == 'none':
            placeholder = 'none'
        else:
            key = '-'.join([intent, slot])
            occurrence[key] = occurrence.get(key, 0) + 1
            placeholder = str(occurrence[key])
        result.append([intent, slot, placeholder])
    return result
def flat_da(delexicalized_da):
    """Join each [intent, slot, value] triple into a single 'i-s-v' string."""
    return list(map('-'.join, delexicalized_da))
def deflat_da(meta):
    """Inverse of flat_da: group 'intent-slot-value' strings by intent.

    :param meta: list of 'intent-slot-value' strings
    :return: dict mapping intent -> list of [slot, value] pairs
    """
    grouped = {}
    for entry in deepcopy(meta):
        intent, slot, value = entry.split('-')
        grouped.setdefault(intent, []).append([slot, value])
    return grouped
def lexicalize_da(meta, entities, state, requestable):
    """Fill placeholder values in a delexicalized dialog act with real values.

    Requestable intents become '?'; nooffer/nobook intents pull values from
    *state*; all other placeholders index into *entities* ('1' -> first
    entity), with 'choice' replaced by the entity count. Missing lookups
    fall back to 'none'.

    :param meta: dict intent -> list of [slot, placeholder] pairs (not mutated)
    :param entities: list of entity dicts, in ranked order
    :param state: dict of slot -> value used for nooffer/nobook intents
    :param requestable: collection of intents whose values stay '?'
    :return: flat list of [intent, slot, value] triples
    """
    meta = deepcopy(meta)
    for intent, pairs in meta.items():
        if intent in requestable:
            for pair in pairs:
                pair[1] = '?'
        elif intent.lower() in ('nooffer', 'nobook'):
            for pair in pairs:
                pair[1] = state[pair[0]] if pair[0] in state else 'none'
        else:
            for pair in pairs:
                slot, placeholder = pair
                if placeholder == 'none':
                    continue
                if slot.lower() == 'choice':
                    pair[1] = str(len(entities))
                else:
                    idx = int(placeholder) - 1
                    if idx < len(entities) and slot in entities[idx]:
                        pair[1] = entities[idx][slot]
                    else:
                        pair[1] = 'none'
    # Flatten back to [intent, slot, value] triples, preserving order.
    return [[intent, slot, value]
            for intent, pairs in meta.items()
            for slot, value in pairs]
| 1,104 |
1,235 | <reponame>cjdans5545/khaiii<gh_stars>1000+
/**
* @author Jamie (<EMAIL>)
* @copyright Copyright (C) 2018-, Kakao Corp. All rights reserved.
*/
#include "khaiii/Resource.hpp"
//////////////
// includes //
//////////////
#include <exception>
#include <memory>
#include "khaiii/Config.hpp"
#include "khaiii/KhaiiiApi.hpp"
#include "khaiii/nn/tensor.hpp"
namespace khaiii {
using std::exception;
using std::shared_ptr;
using std::string;
////////////////////
// static members //
////////////////////
shared_ptr<spdlog::logger> Resource::_log = spdlog::stderr_color_mt("Resource");
////////////////////
// ctors and dtor //
////////////////////
/**
 * dtor: releases the loaded resources via close().
 */
Resource::~Resource() {
    close();
}
/////////////
// methods //
/////////////
/**
 * Load the neural-network model parts and tagger dictionaries from *dir*.
 * @param cfg model configuration (embedding/hidden dimensions, class count)
 * @param dir resource directory containing the model files
 */
void Resource::open(const Config& cfg, std::string dir) {
    embed.open(cfg, dir);
    // Convolution filters with kernel sizes 2..5, one file per size.
    for (int kernel_size = 2; kernel_size < 6; ++kernel_size) {
        string path = fmt::format("{}/conv.{}.fil", dir, kernel_size);
        convs[kernel_size].open(path, cfg.embed_dim, cfg.embed_dim, kernel_size, &nn::RELU);
    }
    // Linear layers: concatenation of the 4 conv outputs -> hidden -> tag scores.
    cnv2hdn.open(dir + "/cnv2hdn.lin", 4 * cfg.embed_dim, cfg.hidden_dim, true, &nn::RELU);
    string path = fmt::format("{}/hdn2tag.lin", dir);
    hdn2tag.open(path, cfg.hidden_dim, cfg.class_num, true);
    _log->info("NN model loaded");
    // Dictionary-based components: pre-analysis, error patches, restore rules.
    preanal.open(dir);
    errpatch.open(dir);
    restore.open(dir);
    _log->info("PoS tagger opened");
}
/**
 * Release the loaded resources.
 * NOTE(review): convs[] and cnv2hdn opened in open() are not closed here —
 * confirm whether their destructors release the underlying data.
 */
void Resource::close() {
    embed.close();
    hdn2tag.close();
    preanal.close();
    errpatch.close();
    restore.close();
    _log->debug("PoS tagger closed");
}
} // namespace khaiii
| 638 |
9,402 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
/*++
Module Name:
miscpalapi.c
Abstract:
Implementation misc PAL APIs
Revision History:
--*/
#include "pal/palinternal.h"
#include "pal/dbgmsg.h"
#include "pal/file.h"
#include "pal/process.h"
#include "pal/module.h"
#include "pal/malloc.hpp"
#include "pal/stackstring.hpp"
#include <errno.h>
#include <unistd.h>
#include <time.h>
#include <pthread.h>
#include <dlfcn.h>
#include <pal_endian.h>
#ifdef __APPLE__
#include <mach-o/dyld.h>
#endif // __APPLE__
SET_DEFAULT_DEBUG_CHANNEL(MISC);
static const char URANDOM_DEVICE_NAME[]="/dev/urandom";
/*++
Function:
  PAL_Random

  Fill lpBuffer with dwLength pseudo-random bytes. Bytes are read from
  /dev/urandom when available; the buffer is then always XORed with
  mrand48() output as an additional (fallback) entropy source.
--*/
VOID
PALAPI
PAL_Random(
    IN OUT LPVOID lpBuffer,
    IN DWORD dwLength)
{
    int rand_des = -1;
    DWORD i;
    long num = 0;
    static BOOL sMissingDevURandom;   /* set once if the device is absent */
    static BOOL sInitializedMRand;    /* mrand48 seeded exactly once */

    PERF_ENTRY(PAL_Random);
    ENTRY("PAL_Random(lpBuffer=%p, dwLength=%d)\n", lpBuffer, dwLength);

    if (!sMissingDevURandom)
    {
        /* Retry open() on EINTR; use the file-level constant instead of
           repeating the device path literal. */
        do
        {
            rand_des = open(URANDOM_DEVICE_NAME, O_RDONLY | O_CLOEXEC);
        }
        while ((rand_des == -1) && (errno == EINTR));

        if (rand_des == -1)
        {
            if (errno == ENOENT)
            {
                sMissingDevURandom = TRUE;
            }
            else
            {
                /* FIX: message previously named PAL__open(), but plain
                   open() is what is called above. */
                ASSERT("open() failed, errno:%d (%s)\n", errno, strerror(errno));
            }

            // Back off and try mrand48.
        }
        else
        {
            /* Loop until the whole buffer is filled; read() may return
               short counts and transient EINTR. */
            DWORD offset = 0;
            do
            {
                ssize_t n = read(rand_des, (BYTE*)lpBuffer + offset , dwLength - offset);
                if (n == -1)
                {
                    if (errno == EINTR)
                    {
                        continue;
                    }
                    ASSERT("read() failed, errno:%d (%s)\n", errno, strerror(errno));

                    break;
                }

                offset += n;
            }
            while (offset != dwLength);

            _ASSERTE(offset == dwLength);

            close(rand_des);
        }
    }

    if (!sInitializedMRand)
    {
        srand48(time(NULL));
        sInitializedMRand = TRUE;
    }

    // always xor srand48 over the whole buffer to get some randomness
    // in case /dev/urandom is not really random
    for (i = 0; i < dwLength; i++)
    {
        if (i % sizeof(long) == 0) {
            num = mrand48();    /* refill one long's worth of bits */
        }

        *(((BYTE*)lpBuffer) + i) ^= num;
        num >>= 8;              /* consume one byte per iteration */
    }

    LOGEXIT("PAL_Random\n");
    PERF_EXIT(PAL_Random);
}
| 1,409 |
4,119 | from ignite.metrics.nlp.bleu import Bleu
from ignite.metrics.nlp.rouge import Rouge, RougeL, RougeN
__all__ = [
"Bleu",
"Rouge",
"RougeN",
"RougeL",
]
| 81 |
1,346 | <gh_stars>1000+
from plugin.models.core import db
from plugin.models.m_plex import *
from plugin.models.m_sync import *
from plugin.models.m_trakt import *
from plugin.models.account import Account
from plugin.models.action_history import ActionHistory
from plugin.models.action_queue import ActionQueue
from plugin.models.client import Client
from plugin.models.client_rule import ClientRule
from plugin.models.configuration_option import ConfigurationOption
from plugin.models.exception import Exception
from plugin.models.message import Message
from plugin.models.scheduler_job import SchedulerJob
from plugin.models.scheduler_task import SchedulerTask
from plugin.models.session import Session
from plugin.models.user import User
from plugin.models.user_rule import UserRule
| 199 |
1,403 | <reponame>beldenfox/LLGL
/*
* DisplayFlags.cpp
*
* This file is part of the "LLGL" project (Copyright (c) 2015-2019 by <NAME>)
* See "LICENSE.txt" for license information.
*/
#include <LLGL/DisplayFlags.h>
#include "../Core/HelperMacros.h"
namespace LLGL
{
/* ----- Operators ----- */
/* Returns true if both display modes agree on resolution and refresh rate.
   LLGL_COMPARE_MEMBER_EQ presumably expands to a member-wise lhs/rhs
   equality test (defined in HelperMacros.h) — confirm there. */
LLGL_EXPORT bool operator == (const DisplayModeDescriptor& lhs, const DisplayModeDescriptor& rhs)
{
    return
    (
        LLGL_COMPARE_MEMBER_EQ( resolution  ) &&
        LLGL_COMPARE_MEMBER_EQ( refreshRate )
    );
}
/* Inequality, defined in terms of the equality operator above. */
LLGL_EXPORT bool operator != (const DisplayModeDescriptor& lhs, const DisplayModeDescriptor& rhs)
{
    return !(lhs == rhs);
}
/* ----- Functions ----- */
/* Strict-weak-ordering comparator for display modes:
   orders primarily by total pixel count, then by refresh rate. */
LLGL_EXPORT bool CompareSWO(const DisplayModeDescriptor& lhs, const DisplayModeDescriptor& rhs)
{
    const auto lhsNumPixels = lhs.resolution.width * lhs.resolution.height;
    const auto rhsNumPixels = rhs.resolution.width * rhs.resolution.height;

    if (lhsNumPixels < rhsNumPixels)
        return true;
    if (lhsNumPixels > rhsNumPixels)
        return false;

    /* Same pixel count: fall back to refresh rate. */
    return (lhs.refreshRate < rhs.refreshRate);
}
/*
Computes the greatest common divisor (GCD) for the two parameters
see https://stackoverflow.com/questions/10956543/gcd-function-in-c-sans-cmath-library
*/
static std::uint32_t ComputeGCD(std::uint32_t a, std::uint32_t b)
{
    /* Euclid's algorithm in recursive form:
       gcd(a, 0) = a;  gcd(a, b) = gcd(b, a mod b). */
    return (b == 0 ? a : ComputeGCD(b, a % b));
}
/* Returns the extent reduced to its lowest terms, i.e. the aspect ratio:
   both components are divided by their greatest common divisor
   (e.g. {1920, 1080} -> {16, 9}). */
LLGL_EXPORT Extent2D GetExtentRatio(const Extent2D& extent)
{
    auto gcd = ComputeGCD(extent.width, extent.height);
    return { extent.width / gcd, extent.height / gcd };
}
} // /namespace LLGL
// ================================================================================
| 669 |
9,953 | #
# This file is part of pyasn1-modules software.
#
# Copyright (c) 2005-2019, <NAME> <<EMAIL>>
# License: http://snmplabs.com/pyasn1/license.html
#
# OCSP request/response syntax
#
# Derived from a minimal OCSP library (RFC2560) code written by
# <NAME> <<EMAIL>>
# Copyright: Ancitel, S.p.a, Rome, Italy
# License: BSD
#
#
# current limitations:
# * request and response works only for a single certificate
# * only some values are parsed out of the response
# * the request does't set a nonce nor signature
# * there is no signature validation of the response
# * dates are left as strings in GeneralizedTime format -- datetime.datetime
# would be nicer
#
from pyasn1.type import namedtype
from pyasn1.type import namedval
from pyasn1.type import tag
from pyasn1.type import univ
from pyasn1.type import useful
from pyasn1_modules import rfc2459
# Start of OCSP module definitions
# This should be in directory Authentication Framework (X.509) module
class CRLReason(univ.Enumerated):
    """X.509 CRLReason: reason code for a certificate revocation.

    Value 7 is deliberately absent, matching the ASN.1 definition.
    """
    namedValues = namedval.NamedValues(
        ('unspecified', 0),
        ('keyCompromise', 1),
        ('cACompromise', 2),
        ('affiliationChanged', 3),
        ('superseded', 4),
        ('cessationOfOperation', 5),
        ('certificateHold', 6),
        ('removeFromCRL', 8),
        ('privilegeWithdrawn', 9),
        ('aACompromise', 10)
    )
# end of directory Authentication Framework (X.509) module
# This should be in PKIX Certificate Extensions module
class GeneralName(univ.OctetString):
    """Simplified GeneralName: carried as an opaque OCTET STRING rather
    than the full PKIX CHOICE (see the module's limitations note)."""
    pass
# end of PKIX Certificate Extensions module
id_kp_OCSPSigning = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 3, 9))
id_pkix_ocsp = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1))
id_pkix_ocsp_basic = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 1))
id_pkix_ocsp_nonce = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 2))
id_pkix_ocsp_crl = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 3))
id_pkix_ocsp_response = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 4))
id_pkix_ocsp_nocheck = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 5))
id_pkix_ocsp_archive_cutoff = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 6))
id_pkix_ocsp_service_locator = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 7))
class AcceptableResponses(univ.SequenceOf):
    """SEQUENCE OF OBJECT IDENTIFIER: response types a client accepts."""
    componentType = univ.ObjectIdentifier()
class ArchiveCutoff(useful.GeneralizedTime):
    """GeneralizedTime for the OCSP archive-cutoff extension."""
    pass
class UnknownInfo(univ.Null):
    """NULL placeholder for the 'unknown' certificate status."""
    pass
class RevokedInfo(univ.Sequence):
    """Details for a revoked certificate: when, and optionally why."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('revocationTime', useful.GeneralizedTime()),
        namedtype.OptionalNamedType('revocationReason', CRLReason().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )
class CertID(univ.Sequence):
    """Identifies a certificate by issuer hashes and serial number."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('hashAlgorithm', rfc2459.AlgorithmIdentifier()),
        namedtype.NamedType('issuerNameHash', univ.OctetString()),
        namedtype.NamedType('issuerKeyHash', univ.OctetString()),
        namedtype.NamedType('serialNumber', rfc2459.CertificateSerialNumber())
    )
class CertStatus(univ.Choice):
    """CHOICE of certificate status: good (NULL), revoked, or unknown."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('good',
                            univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.NamedType('revoked',
                            RevokedInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.NamedType('unknown',
                            UnknownInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
    )
class SingleResponse(univ.Sequence):
    """Status of one certificate: id, status, and validity window."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('certID', CertID()),
        namedtype.NamedType('certStatus', CertStatus()),
        namedtype.NamedType('thisUpdate', useful.GeneralizedTime()),
        namedtype.OptionalNamedType('nextUpdate', useful.GeneralizedTime().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.OptionalNamedType('singleExtensions', rfc2459.Extensions().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
    )
class KeyHash(univ.OctetString):
    """Hash of the responder's public key, as an OCTET STRING."""
    pass
class ResponderID(univ.Choice):
    """Responder identity: either a distinguished name or a key hash."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('byName',
                            rfc2459.Name().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.NamedType('byKey',
                            KeyHash().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
    )
class Version(univ.Integer):
    """OCSP protocol version; only v1 (0) is defined."""
    namedValues = namedval.NamedValues(('v1', 0))
class ResponseData(univ.Sequence):
    """To-be-signed body of a basic OCSP response."""
    componentType = namedtype.NamedTypes(
        namedtype.DefaultedNamedType('version', Version('v1').subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.NamedType('responderID', ResponderID()),
        namedtype.NamedType('producedAt', useful.GeneralizedTime()),
        namedtype.NamedType('responses', univ.SequenceOf(componentType=SingleResponse())),
        namedtype.OptionalNamedType('responseExtensions', rfc2459.Extensions().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))
    )
class BasicOCSPResponse(univ.Sequence):
    """Signed response data plus optional certificate chain."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('tbsResponseData', ResponseData()),
        namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()),
        namedtype.NamedType('signature', univ.BitString()),
        namedtype.OptionalNamedType('certs', univ.SequenceOf(componentType=rfc2459.Certificate()).subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )
class ResponseBytes(univ.Sequence):
    """Typed, DER-encoded response payload (e.g. a BasicOCSPResponse)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('responseType', univ.ObjectIdentifier()),
        namedtype.NamedType('response', univ.OctetString())
    )
class OCSPResponseStatus(univ.Enumerated):
    """Top-level OCSP response status code."""
    namedValues = namedval.NamedValues(
        ('successful', 0),
        ('malformedRequest', 1),
        ('internalError', 2),
        ('tryLater', 3),
        ('undefinedStatus', 4),  # should never occur
        ('sigRequired', 5),
        ('unauthorized', 6)
    )
class OCSPResponse(univ.Sequence):
    """Top-level OCSP response: status plus optional response bytes."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('responseStatus', OCSPResponseStatus()),
        namedtype.OptionalNamedType('responseBytes', ResponseBytes().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )
class Request(univ.Sequence):
    """Status request for a single certificate."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('reqCert', CertID()),
        namedtype.OptionalNamedType('singleRequestExtensions', rfc2459.Extensions().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )
class Signature(univ.Sequence):
    """Optional request signature plus the certificates to verify it."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()),
        namedtype.NamedType('signature', univ.BitString()),
        namedtype.OptionalNamedType('certs', univ.SequenceOf(componentType=rfc2459.Certificate()).subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )
class TBSRequest(univ.Sequence):
    """To-be-signed body of an OCSP request: requestor and request list."""
    componentType = namedtype.NamedTypes(
        namedtype.DefaultedNamedType('version', Version('v1').subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.OptionalNamedType('requestorName', GeneralName().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.NamedType('requestList', univ.SequenceOf(componentType=Request())),
        namedtype.OptionalNamedType('requestExtensions', rfc2459.Extensions().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
    )
class OCSPRequest(univ.Sequence):
    """Top-level OCSP request: TBS body plus optional signature."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('tbsRequest', TBSRequest()),
        namedtype.OptionalNamedType('optionalSignature', Signature().subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))
    )
| 3,334 |
460 | <gh_stars>100-1000
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "BT_Common.h"
#include "BT_SpinnerWidget.h"
#include "moc/moc_BT_SpinnerWidget.cpp"
// Construct a spinner overlay on *w* using the animation at *source*,
// with the default translucent white backdrop.
SpinnerWidget::SpinnerWidget(QString source, QWidget *w)
    : QWidget(w)
    , _source(source)
{
    _background = QColor(255, 255, 255, 150); //default white translucent
    init();
}
// Construct a spinner overlay on *w* with a caller-supplied backdrop color.
SpinnerWidget::SpinnerWidget(QString source, QWidget *w, QColor c)
    : QWidget(w)
    , _source(source)
    , _background(c)
{
    init();
}
// dtor: releases the movie and label owned by this widget.
SpinnerWidget::~SpinnerWidget()
{
    SAFE_DELETE(_movie);
    SAFE_DELETE(_label);
}
// Shared ctor body: size to the parent, load the animation, and center
// a label that plays it.
void SpinnerWidget::init()
{
    resize(parentWidget()->width(), parentWidget()->height());
    _movie = new QMovie(_source);
    // Asserts that the animation file at _source exists and is decodable.
    assert(_movie->isValid());
    _label = new QLabel(this);
    // Center the animation within the parent widget.
    _label->move(parentWidget()->width()/2 - _movie->scaledSize().width()/2, parentWidget()->height()/2 - _movie->scaledSize().height()/2);
    _label->setMovie(_movie);
}
// Set the backdrop color used by paintEvent (takes effect on next repaint).
void SpinnerWidget::setBackground(QColor c)
{
    _background = c;
}
// Start playing the spinner animation.
void SpinnerWidget::start()
{
    _movie->start();
}
// Stop playing the spinner animation.
void SpinnerWidget::stop()
{
    _movie->stop();
}
// Fill the whole widget with the (typically translucent) backdrop; the
// animation itself is drawn by the child QLabel. `e` is unused — the
// full rect is repainted regardless of the dirty region.
void SpinnerWidget::paintEvent(QPaintEvent *e)
{
    QPainter painter(this);
    painter.fillRect(QRect(0, 0, this->width(), this->height()), _background);
}
// Show the overlay and start the animation together.
// NOTE(review): QWidget::show() is not virtual, so this only runs when
// called through a SpinnerWidget pointer — calls via QWidget* will show
// the widget without starting the movie. Confirm call sites.
void SpinnerWidget::show()
{
    QWidget::show();
    start();
}
// Hide the overlay and stop the animation (saves CPU while hidden).
// Same non-virtual shadowing caveat as show().
void SpinnerWidget::hide()
{
    QWidget::hide();
    stop();
}
1,909 | package org.knowm.xchange.coinbasepro.dto.account;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
 * Immutable request body for the Coinbase Pro "withdraw to crypto address"
 * endpoint. Field names are serialized by Jackson via the
 * {@code @JsonProperty} annotations.
 */
public class CoinbaseProWithdrawFundsRequest {
    /** Amount to withdraw, in units of {@link #currency}. */
    public final @JsonProperty("amount") BigDecimal amount;
    /** Currency code of the withdrawal (e.g. "BTC"). */
    public final @JsonProperty("currency") String currency;
    /** Destination crypto address. */
    public final @JsonProperty("crypto_address") String address;
    /** Optional destination tag/memo for currencies that use one. */
    public final @JsonProperty("destination_tag") String destinationTag;
    /**
     * A boolean flag to opt out of using a destination tag for currencies that support one. This is
     * required when not providing a destination tag.
     */
    public final @JsonProperty("no_destination_tag") boolean noDestinationTag;

    /**
     * @param amount           amount to withdraw
     * @param currency         currency code
     * @param address          destination crypto address
     * @param destinationTag   destination tag, or null to omit
     * @param noDestinationTag true to explicitly opt out of a destination tag
     */
    public CoinbaseProWithdrawFundsRequest(
        BigDecimal amount,
        String currency,
        String address,
        String destinationTag,
        boolean noDestinationTag) {
        this.amount = amount;
        this.currency = currency;
        this.address = address;
        this.destinationTag = destinationTag;
        this.noDestinationTag = noDestinationTag;
    }
}
| 317 |
3,655 | <filename>packages/now-python/test/fixtures/14-unicode-handler/index.py<gh_stars>1000+
from http.server import BaseHTTPRequestHandler
import json
class handler(BaseHTTPRequestHandler):
    """Minimal JSON echo endpoint used as a unicode-handling test fixture."""

    def do_POST(self):
        """Read a JSON body and reply with a JSON greeting for its 'name'."""
        length = int(self.headers['content-length'])
        body = self.rfile.read(length).decode('utf-8')
        name = json.loads(body).get('name', 'someone')
        self.send_response(200)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()
        payload = json.dumps({'greeting': f'hello, {name}'})
        self.wfile.write(payload.encode('utf-8'))
        return

    def do_GET(self):
        """Health-check style plain 'ok' response."""
        self.send_response(200)
        self.end_headers()
        self.wfile.write('ok'.encode('utf-8'))
        return
| 326 |
2,504 | #pragma once
namespace RandomNumberService
{
    // WinRT background task that produces random numbers for the sample
    // app. WebHostHidden: not projected into JavaScript hosts.
    [Windows::Foundation::Metadata::WebHostHidden]
    public ref class RandomNumberGeneratorTask sealed : Windows::ApplicationModel::Background::IBackgroundTask
    {
    public:
        // Entry point invoked by the background task infrastructure.
        virtual void Run(Windows::ApplicationModel::Background::IBackgroundTaskInstance^ taskInstance);
        // Cancellation callback registered against the task instance.
        void OnTaskCanceled(Windows::ApplicationModel::Background::IBackgroundTaskInstance^ sender, Windows::ApplicationModel::Background::BackgroundTaskCancellationReason reason);
    };
}
2,151 | // Copyright 2016 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "Direct3DStateBlock8.hpp"
#include "Direct3DDevice8.hpp"
#include "Direct3DBaseTexture8.hpp"
#include "Direct3DVertexBuffer8.hpp"
#include "Direct3DIndexBuffer8.hpp"
#include "Debug.hpp"
#include <assert.h>
// Software implementation of a Direct3D 8 state block: snapshots a
// selected subset of device state (render states, texture-stage states,
// shaders, bindings, transforms, ...) so it can later be re-applied or
// re-captured. COM-style reference counting: every Get* from the device
// AddRefs the returned interface, so previously held references are
// Released before overwriting.
namespace D3D8
{
    // Record which states belong to this block (per `type`) and take the
    // initial snapshot. Pointer members are zeroed before clear() so that
    // clear()'s Release calls see a defined state.
    Direct3DStateBlock8::Direct3DStateBlock8(Direct3DDevice8 *device, D3DSTATEBLOCKTYPE type) : device(device), type(type)
    {
        vertexShaderHandle = 0;
        pixelShaderHandle = 0;
        indexBuffer = 0;
        for(int stream = 0; stream < 16; stream++)
        {
            streamSource[stream].vertexBuffer = 0;
        }
        for(int stage = 0; stage < 8; stage++)
        {
            texture[stage] = 0;
        }
        clear();
        if(type == D3DSBT_PIXELSTATE || type == D3DSBT_ALL)
        {
            capturePixelRenderStates();
            capturePixelTextureStates();
            capturePixelShaderStates();
        }
        if(type == D3DSBT_VERTEXSTATE || type == D3DSBT_ALL)
        {
            captureVertexRenderStates();
            captureVertexTextureStates();
            captureLightStates();
            captureVertexShaderStates();
        }
        if(type == D3DSBT_ALL)   // Capture remaining states
        {
            captureTextures();
            captureVertexTextures();
            captureDisplacementTextures();
            captureTexturePalette();
            captureVertexStreams();
            captureIndexBuffer();
            captureViewport();
            captureTransforms();
            captureTextureTransforms();
            captureClippingPlanes();
            captureMaterial();
        }
    }

    // clear() releases all held COM references.
    Direct3DStateBlock8::~Direct3DStateBlock8()
    {
        clear();
    }

    // State blocks are internal objects in D3D8 (no public interface), so
    // QueryInterface is never expected to be called.
    long Direct3DStateBlock8::QueryInterface(const IID &iid, void **object)
    {
        TRACE("");

        ASSERT(false);   // Internal object

        return NOINTERFACE(iid);
    }

    unsigned long Direct3DStateBlock8::AddRef()
    {
        TRACE("");

        return Unknown::AddRef();
    }

    unsigned long Direct3DStateBlock8::Release()
    {
        TRACE("");

        return Unknown::Release();
    }

    // Push every captured state back into the device. Only states whose
    // capture flag is set are touched.
    long Direct3DStateBlock8::Apply()
    {
        TRACE("");

        if(vertexShaderCaptured)
        {
            device->SetVertexShader(vertexShaderHandle);
        }

        if(pixelShaderCaptured)
        {
            device->SetPixelShader(pixelShaderHandle);
        }

        if(indexBufferCaptured)
        {
            device->SetIndices(indexBuffer, baseVertexIndex);
        }

        for(int state = 0; state < D3DRS_NORMALORDER + 1; state++)
        {
            if(renderStateCaptured[state])
            {
                device->SetRenderState((D3DRENDERSTATETYPE)state, renderState[state]);
            }
        }

        for(int stage = 0; stage < 8; stage++)
        {
            for(int state = 0; state < D3DTSS_RESULTARG + 1; state++)
            {
                if(textureStageStateCaptured[stage][state])
                {
                    device->SetTextureStageState(stage, (D3DTEXTURESTAGESTATETYPE)state, textureStageState[stage][state]);
                }
            }
        }

        for(int stream = 0; stream < 16; stream++)
        {
            if(streamSourceCaptured[stream])
            {
                device->SetStreamSource(stream, streamSource[stream].vertexBuffer, streamSource[stream].stride);
            }
        }

        for(int stage = 0; stage < 8; stage++)
        {
            if(textureCaptured[stage])
            {
                device->SetTexture(stage, texture[stage]);
            }
        }

        for(int state = 0; state < 512; state++)
        {
            if(transformCaptured[state])
            {
                device->SetTransform((D3DTRANSFORMSTATETYPE)state, &transform[state]);
            }
        }

        if(viewportCaptured)
        {
            device->SetViewport(&viewport);
        }

        for(int index = 0; index < 6; index++)
        {
            if(clipPlaneCaptured[index])
            {
                device->SetClipPlane(index, clipPlane[index]);
            }
        }

        return D3D_OK;
    }

    // Refresh all captured states from the current device state. Held COM
    // references are Released before being overwritten by the Get* calls
    // (which AddRef the objects they return).
    long Direct3DStateBlock8::Capture()
    {
        TRACE("");

        if(vertexShaderCaptured)
        {
            device->GetVertexShader(&vertexShaderHandle);
        }

        if(pixelShaderCaptured)
        {
            device->GetPixelShader(&pixelShaderHandle);
        }

        if(indexBufferCaptured)
        {
            if(indexBuffer)
            {
                indexBuffer->Release();
            }

            device->GetIndices(reinterpret_cast<IDirect3DIndexBuffer8**>(&indexBuffer), &baseVertexIndex);
        }

        for(int state = 0; state < D3DRS_NORMALORDER + 1; state++)
        {
            if(renderStateCaptured[state])
            {
                device->GetRenderState((D3DRENDERSTATETYPE)state, &renderState[state]);
            }
        }

        for(int stage = 0; stage < 8; stage++)
        {
            for(int state = 0; state < D3DTSS_RESULTARG + 1; state++)
            {
                if(textureStageStateCaptured[stage][state])
                {
                    device->GetTextureStageState(stage, (D3DTEXTURESTAGESTATETYPE)state, &textureStageState[stage][state]);
                }
            }
        }

        for(int stream = 0; stream < 16; stream++)
        {
            if(streamSourceCaptured[stream])
            {
                if(streamSource[stream].vertexBuffer)
                {
                    streamSource[stream].vertexBuffer->Release();
                }

                device->GetStreamSource(stream, reinterpret_cast<IDirect3DVertexBuffer8**>(&streamSource[stream].vertexBuffer), &streamSource[stream].stride);
            }
        }

        for(int stage = 0; stage < 8; stage++)
        {
            if(textureCaptured[stage])
            {
                if(texture[stage])
                {
                    texture[stage]->Release();
                }

                device->GetTexture(stage, reinterpret_cast<IDirect3DBaseTexture8**>(&texture[stage]));
            }
        }

        for(int state = 0; state < 512; state++)
        {
            if(transformCaptured[state])
            {
                device->GetTransform((D3DTRANSFORMSTATETYPE)state, &transform[state]);
            }
        }

        if(viewportCaptured)
        {
            device->GetViewport(&viewport);
        }

        for(int index = 0; index < 6; index++)
        {
            if(clipPlaneCaptured[index])
            {
                device->GetClipPlane(index, clipPlane[index]);
            }
        }

        return D3D_OK;
    }

    // Return the owning device, AddRef'd per COM convention.
    long Direct3DStateBlock8::GetDevice(IDirect3DDevice8 **device)
    {
        TRACE("");

        if(!device)
        {
            return INVALIDCALL();
        }

        this->device->AddRef();
        *device = this->device;

        return D3D_OK;
    }

    // Recording of light-enable changes is not implemented.
    void Direct3DStateBlock8::lightEnable(unsigned long index, int enable)
    {
        UNIMPLEMENTED();
    }

    void Direct3DStateBlock8::setClipPlane(unsigned long index, const float *plane)
    {
        clipPlaneCaptured[index] = true;
        clipPlane[index][0] = plane[0];
        clipPlane[index][1] = plane[1];
        clipPlane[index][2] = plane[2];
        clipPlane[index][3] = plane[3];
    }

    void Direct3DStateBlock8::setCurrentTexturePalette(unsigned int paletteNumber)
    {
        UNIMPLEMENTED();
    }

    void Direct3DStateBlock8::setFVF(unsigned long FVF)
    {
        UNIMPLEMENTED();
    }

    // Record an index-buffer binding; takes a reference on `indexData`.
    // NOTE(review): the previously recorded indexBuffer is not Released
    // here — presumably recording setters are only invoked once per state
    // while the block is being recorded. Confirm against the device's
    // recording path.
    void Direct3DStateBlock8::setIndices(Direct3DIndexBuffer8 *indexData, unsigned int baseVertexIndex)
    {
        if(indexData) indexData->AddRef();

        indexBufferCaptured = true;
        indexBuffer = indexData;
        this->baseVertexIndex = baseVertexIndex;
    }

    void Direct3DStateBlock8::setLight(unsigned long index, const D3DLIGHT8 *light)
    {
        UNIMPLEMENTED();
    }

    void Direct3DStateBlock8::setMaterial(const D3DMATERIAL8 *material)
    {
        UNIMPLEMENTED();
    }

    void Direct3DStateBlock8::setPixelShader(unsigned long shaderHandle)
    {
        pixelShaderCaptured = true;
        pixelShaderHandle = shaderHandle;
    }

    void Direct3DStateBlock8::setPixelShaderConstant(unsigned int startRegister, const void *constantData, unsigned int count)
    {
        UNIMPLEMENTED();
    }

    void Direct3DStateBlock8::setRenderState(D3DRENDERSTATETYPE state, unsigned long value)
    {
        renderStateCaptured[state] = true;
        renderState[state] = value;
    }

    void Direct3DStateBlock8::setScissorRect(const RECT *rect)
    {
        UNIMPLEMENTED();
    }

    // Record a stream-source binding; takes a reference on `data`.
    void Direct3DStateBlock8::setStreamSource(unsigned int stream, Direct3DVertexBuffer8 *data, unsigned int stride)
    {
        if(data) data->AddRef();

        streamSourceCaptured[stream] = true;
        streamSource[stream].vertexBuffer = data;
        streamSource[stream].stride = stride;
    }

    // Record a texture binding; takes a reference on `texture`.
    void Direct3DStateBlock8::setTexture(unsigned long stage, Direct3DBaseTexture8 *texture)
    {
        if(texture) texture->AddRef();

        textureCaptured[stage] = true;
        this->texture[stage] = texture;
    }

    void Direct3DStateBlock8::setTextureStageState(unsigned long stage, D3DTEXTURESTAGESTATETYPE type, unsigned long value)
    {
        textureStageStateCaptured[stage][type] = true;
        textureStageState[stage][type] = value;
    }

    void Direct3DStateBlock8::setTransform(D3DTRANSFORMSTATETYPE state, const D3DMATRIX *matrix)
    {
        transformCaptured[state] = true;
        transform[state] = *matrix;
    }

    void Direct3DStateBlock8::setViewport(const D3DVIEWPORT8 *viewport)
    {
        viewportCaptured = true;
        this->viewport = *viewport;
    }

    void Direct3DStateBlock8::setVertexShader(unsigned long shaderHandle)
    {
        vertexShaderCaptured = true;
        vertexShaderHandle = shaderHandle;
    }

    void Direct3DStateBlock8::setVertexShaderConstant(unsigned int startRegister, const void *constantData, unsigned int count)
    {
        UNIMPLEMENTED();
    }

    // Reset all capture flags and release every held COM reference,
    // returning the block to an empty state.
    void Direct3DStateBlock8::clear()
    {
        // Erase capture flags
        vertexShaderCaptured = false;
        pixelShaderCaptured = false;

        indexBufferCaptured = false;

        for(int state = 0; state < D3DRS_NORMALORDER + 1; state++)
        {
            renderStateCaptured[state] = false;
        }

        for(int stage = 0; stage < 8; stage++)
        {
            for(int state = 0; state < D3DTSS_RESULTARG + 1; state++)
            {
                textureStageStateCaptured[stage][state] = false;
            }
        }

        for(int stream = 0; stream < 16; stream++)
        {
            streamSourceCaptured[stream] = false;
        }

        for(int stage = 0; stage < 8; stage++)
        {
            textureCaptured[stage] = false;
        }

        for(int state = 0; state < 512; state++)
        {
            transformCaptured[state] = false;
        }

        viewportCaptured = false;

        for(int index = 0; index < 6; index++)
        {
            clipPlaneCaptured[index] = false;
        }

        // Release resources
        vertexShaderHandle = 0;
        pixelShaderHandle = 0;

        if(indexBuffer)
        {
            indexBuffer->Release();
            indexBuffer = 0;
        }

        for(int stream = 0; stream < 16; stream++)
        {
            if(streamSource[stream].vertexBuffer)
            {
                streamSource[stream].vertexBuffer->Release();
                streamSource[stream].vertexBuffer = 0;
            }
        }

        for(int stage = 0; stage < 8; stage++)
        {
            if(texture[stage])
            {
                texture[stage]->Release();
                texture[stage] = 0;
            }
        }
    }

    // Snapshot a single render state from the device and mark it captured.
    void Direct3DStateBlock8::captureRenderState(D3DRENDERSTATETYPE state)
    {
        device->GetRenderState(state, &renderState[state]);
        renderStateCaptured[state] = true;
    }

    void Direct3DStateBlock8::captureTextureStageState(unsigned long stage, D3DTEXTURESTAGESTATETYPE type)
    {
        device->GetTextureStageState(stage, type, &textureStageState[stage][type]);
        textureStageStateCaptured[stage][type] = true;
    }

    void Direct3DStateBlock8::captureTransform(D3DTRANSFORMSTATETYPE state)
    {
        device->GetTransform(state, &transform[state]);
        transformCaptured[state] = true;
    }

    // The pixel-state subset per the D3D8 D3DSBT_PIXELSTATE definition.
    void Direct3DStateBlock8::capturePixelRenderStates()
    {
        captureRenderState(D3DRS_ZENABLE);
        captureRenderState(D3DRS_FILLMODE);
        captureRenderState(D3DRS_SHADEMODE);
        captureRenderState(D3DRS_ZWRITEENABLE);
        captureRenderState(D3DRS_ALPHATESTENABLE);
        captureRenderState(D3DRS_LASTPIXEL);
        captureRenderState(D3DRS_SRCBLEND);
        captureRenderState(D3DRS_DESTBLEND);
        captureRenderState(D3DRS_ZFUNC);
        captureRenderState(D3DRS_ALPHAREF);
        captureRenderState(D3DRS_ALPHAFUNC);
        captureRenderState(D3DRS_DITHERENABLE);
        captureRenderState(D3DRS_FOGSTART);
        captureRenderState(D3DRS_FOGEND);
        captureRenderState(D3DRS_FOGDENSITY);
        captureRenderState(D3DRS_ALPHABLENDENABLE);
        captureRenderState(D3DRS_ZBIAS);
        captureRenderState(D3DRS_STENCILENABLE);
        captureRenderState(D3DRS_STENCILFAIL);
        captureRenderState(D3DRS_STENCILZFAIL);
        captureRenderState(D3DRS_STENCILPASS);
        captureRenderState(D3DRS_STENCILFUNC);
        captureRenderState(D3DRS_STENCILREF);
        captureRenderState(D3DRS_STENCILMASK);
        captureRenderState(D3DRS_STENCILWRITEMASK);
        captureRenderState(D3DRS_TEXTUREFACTOR);
        captureRenderState(D3DRS_WRAP0);
        captureRenderState(D3DRS_WRAP1);
        captureRenderState(D3DRS_WRAP2);
        captureRenderState(D3DRS_WRAP3);
        captureRenderState(D3DRS_WRAP4);
        captureRenderState(D3DRS_WRAP5);
        captureRenderState(D3DRS_WRAP6);
        captureRenderState(D3DRS_WRAP7);
        captureRenderState(D3DRS_COLORWRITEENABLE);
        captureRenderState(D3DRS_BLENDOP);
    }

    // Pixel-pipeline texture-stage states (includes sampler-like states,
    // which in D3D8 live on the texture stage).
    void Direct3DStateBlock8::capturePixelTextureStates()
    {
        for(int stage = 0; stage < 8; stage++)
        {
            captureTextureStageState(stage, D3DTSS_COLOROP);
            captureTextureStageState(stage, D3DTSS_COLORARG1);
            captureTextureStageState(stage, D3DTSS_COLORARG2);
            captureTextureStageState(stage, D3DTSS_ALPHAOP);
            captureTextureStageState(stage, D3DTSS_ALPHAARG1);
            captureTextureStageState(stage, D3DTSS_ALPHAARG2);
            captureTextureStageState(stage, D3DTSS_BUMPENVMAT00);
            captureTextureStageState(stage, D3DTSS_BUMPENVMAT01);
            captureTextureStageState(stage, D3DTSS_BUMPENVMAT10);
            captureTextureStageState(stage, D3DTSS_BUMPENVMAT11);
            captureTextureStageState(stage, D3DTSS_TEXCOORDINDEX);
            captureTextureStageState(stage, D3DTSS_BUMPENVLSCALE);
            captureTextureStageState(stage, D3DTSS_BUMPENVLOFFSET);
            captureTextureStageState(stage, D3DTSS_TEXTURETRANSFORMFLAGS);
            captureTextureStageState(stage, D3DTSS_COLORARG0);
            captureTextureStageState(stage, D3DTSS_ALPHAARG0);
            captureTextureStageState(stage, D3DTSS_RESULTARG);
            captureTextureStageState(stage, D3DTSS_ADDRESSU);
            captureTextureStageState(stage, D3DTSS_ADDRESSV);
            captureTextureStageState(stage, D3DTSS_ADDRESSW);
            captureTextureStageState(stage, D3DTSS_BORDERCOLOR);
            captureTextureStageState(stage, D3DTSS_MAGFILTER);
            captureTextureStageState(stage, D3DTSS_MINFILTER);
            captureTextureStageState(stage, D3DTSS_MIPFILTER);
            captureTextureStageState(stage, D3DTSS_MIPMAPLODBIAS);
            captureTextureStageState(stage, D3DTSS_MAXMIPLEVEL);
            captureTextureStageState(stage, D3DTSS_MAXANISOTROPY);
        }
    }

    void Direct3DStateBlock8::capturePixelShaderStates()
    {
        pixelShaderCaptured = true;
        device->GetPixelShader(&pixelShaderHandle);

        device->GetPixelShaderConstant(0, pixelShaderConstant, 8);
    }

    // The vertex-state subset per the D3D8 D3DSBT_VERTEXSTATE definition.
    void Direct3DStateBlock8::captureVertexRenderStates()
    {
        captureRenderState(D3DRS_CULLMODE);
        captureRenderState(D3DRS_FOGENABLE);
        captureRenderState(D3DRS_FOGCOLOR);
        captureRenderState(D3DRS_FOGTABLEMODE);
        captureRenderState(D3DRS_FOGSTART);
        captureRenderState(D3DRS_FOGEND);
        captureRenderState(D3DRS_FOGDENSITY);
        captureRenderState(D3DRS_RANGEFOGENABLE);
        captureRenderState(D3DRS_AMBIENT);
        captureRenderState(D3DRS_COLORVERTEX);
        captureRenderState(D3DRS_FOGVERTEXMODE);
        captureRenderState(D3DRS_CLIPPING);
        captureRenderState(D3DRS_LIGHTING);
        captureRenderState(D3DRS_LOCALVIEWER);
        captureRenderState(D3DRS_EMISSIVEMATERIALSOURCE);
        captureRenderState(D3DRS_AMBIENTMATERIALSOURCE);
        captureRenderState(D3DRS_DIFFUSEMATERIALSOURCE);
        captureRenderState(D3DRS_SPECULARMATERIALSOURCE);
        captureRenderState(D3DRS_VERTEXBLEND);
        captureRenderState(D3DRS_CLIPPLANEENABLE);
        captureRenderState(D3DRS_POINTSIZE);
        captureRenderState(D3DRS_POINTSIZE_MIN);
        captureRenderState(D3DRS_POINTSPRITEENABLE);
        captureRenderState(D3DRS_POINTSCALEENABLE);
        captureRenderState(D3DRS_POINTSCALE_A);
        captureRenderState(D3DRS_POINTSCALE_B);
        captureRenderState(D3DRS_POINTSCALE_C);
        captureRenderState(D3DRS_MULTISAMPLEANTIALIAS);
        captureRenderState(D3DRS_MULTISAMPLEMASK);
        captureRenderState(D3DRS_PATCHEDGESTYLE);
        captureRenderState(D3DRS_POINTSIZE_MAX);
        captureRenderState(D3DRS_INDEXEDVERTEXBLENDENABLE);
        captureRenderState(D3DRS_TWEENFACTOR);
        captureRenderState(D3DRS_NORMALIZENORMALS);
        captureRenderState(D3DRS_SPECULARENABLE);
        captureRenderState(D3DRS_SHADEMODE);
    }

    void Direct3DStateBlock8::captureVertexTextureStates()
    {
        for(int stage = 0; stage < 8; stage++)
        {
            captureTextureStageState(stage, D3DTSS_TEXCOORDINDEX);
            captureTextureStageState(stage, D3DTSS_TEXTURETRANSFORMFLAGS);
        }
    }

    void Direct3DStateBlock8::captureLightStates()
    {
        for(int index = 0; index < 8; index++)   // FIXME: Support unlimited index
        {
            device->GetLight(index, &light[index]);
            lightCaptured[index] = true;
        }

        for(int index = 0; index < 8; index++)   // FIXME: Support unlimited index
        {
            lightEnableState[index] = false;
            device->GetLightEnable(index, &lightEnableState[index]);
            lightEnableCaptured[index] = true;
        }
    }

    void Direct3DStateBlock8::captureVertexShaderStates()
    {
        vertexShaderCaptured = true;
        device->GetVertexShader(&vertexShaderHandle);

        device->GetVertexShaderConstant(0, vertexShaderConstant[0], 256);
    }

    // Capture currently bound textures. GetTexture AddRefs; bind() then
    // Release() converts that external reference into the renderer's
    // internal "bound" reference — NOTE(review): confirm against
    // sw::Unknown/bind() semantics.
    void Direct3DStateBlock8::captureTextures()
    {
        for(int sampler = 0; sampler < 8; sampler++)
        {
            textureCaptured[sampler] = true;
            device->GetTexture(sampler, reinterpret_cast<IDirect3DBaseTexture8**>(&texture[sampler]));

            if(texture[sampler])
            {
                texture[sampler]->bind();
                texture[sampler]->Release();
            }
        }
    }

    void Direct3DStateBlock8::captureVertexTextures()
    {
        // FIXME
    }

    void Direct3DStateBlock8::captureDisplacementTextures()
    {
        // FIXME
    }

    void Direct3DStateBlock8::captureTexturePalette()
    {
        paletteNumberCaptured = true;
        device->GetCurrentTexturePalette(&paletteNumber);
    }

    // Same bind()/Release() reference-transfer idiom as captureTextures().
    void Direct3DStateBlock8::captureVertexStreams()
    {
        for(int stream = 0; stream < 16; stream++)
        {
            streamSourceCaptured[stream] = true;
            device->GetStreamSource(stream, reinterpret_cast<IDirect3DVertexBuffer8**>(&streamSource[stream].vertexBuffer), &streamSource[stream].stride);

            if(streamSource[stream].vertexBuffer)
            {
                streamSource[stream].vertexBuffer->bind();
                streamSource[stream].vertexBuffer->Release();
            }
        }
    }

    void Direct3DStateBlock8::captureIndexBuffer()
    {
        indexBufferCaptured = true;
        device->GetIndices(reinterpret_cast<IDirect3DIndexBuffer8**>(&indexBuffer), &baseVertexIndex);

        if(indexBuffer)
        {
            indexBuffer->bind();
            indexBuffer->Release();
        }
    }

    void Direct3DStateBlock8::captureViewport()
    {
        device->GetViewport(&viewport);
        viewportCaptured = true;
    }

    void Direct3DStateBlock8::captureTransforms()
    {
        captureTransform(D3DTS_VIEW);
        captureTransform(D3DTS_PROJECTION);
        captureTransform(D3DTS_WORLD);
    }

    void Direct3DStateBlock8::captureTextureTransforms()
    {
        captureTransform(D3DTS_TEXTURE0);
        captureTransform(D3DTS_TEXTURE1);
        captureTransform(D3DTS_TEXTURE2);
        captureTransform(D3DTS_TEXTURE3);
        captureTransform(D3DTS_TEXTURE4);
        captureTransform(D3DTS_TEXTURE5);
        captureTransform(D3DTS_TEXTURE6);
        captureTransform(D3DTS_TEXTURE7);
    }

    void Direct3DStateBlock8::captureClippingPlanes()
    {
        for(int index = 0; index < 6; index++)
        {
            device->GetClipPlane(index, (float*)&clipPlane[index]);
            clipPlaneCaptured[index] = true;
        }
    }

    void Direct3DStateBlock8::captureMaterial()
    {
        device->GetMaterial(&material);
        materialCaptured = true;
    }
}
557 | #include "DataStructures.h"
// Scaffold an empty (value-initialized) tensor shaped
// [state_count][action_count][state_count] for transition tables.
tensor makeTensor(int state_count, int action_count) {
    tensor t;
    t.resize(state_count);
    for (auto &actions : t) {
        actions.resize(action_count);
        for (auto &next_states : actions) {
            next_states.resize(state_count);
        }
    }
    return t;
}
1,550 | <reponame>ATikhonov2/leo-editor
#@+leo-ver=5-thin
#@+node:ekr.20170428084207.285: * @file ../external/npyscreen/fmFormWithMenus.py
#!/usr/bin/env python
# encoding: utf-8
#@+others
#@+node:ekr.20170428084207.286: ** Declarations
import curses
from . import fmForm
from . import fmActionForm
from . import fmActionFormV2
from . import wgNMenuDisplay
#@+node:ekr.20170428084207.287: ** class FormBaseNewWithMenus
class FormBaseNewWithMenus(fmForm.FormBaseNew, wgNMenuDisplay.HasMenus):
    """The FormBaseNew class, but with a handling system for menus as well. See the HasMenus class for details."""
    #@+others
    #@+node:ekr.20170428084207.288: *3* __init__
    def __init__(self, *args, **keywords):
        # Standard form setup, then wire up the menu machinery (HasMenus).
        super(FormBaseNewWithMenus, self).__init__(*args, **keywords)
        self.initialize_menus()
    #@+node:ekr.20170428084207.289: *3* display_menu_advert_at
    def display_menu_advert_at(self):
        # (y, x) position of the "<key>: Menu" hint: bottom-left of the form.
        return self.lines-1, 1
    #@+node:ekr.20170428084207.290: *3* draw_form
    def draw_form(self):
        # Draw the form normally, then overlay the menu-key advert line.
        super(FormBaseNewWithMenus, self).draw_form()
        menu_advert = " " + self.__class__.MENU_KEY + ": Menu "
        if isinstance(menu_advert, bytes):
            # Defensive: decode if MENU_KEY concatenation produced bytes.
            menu_advert = menu_advert.decode('utf-8', 'replace')
        y, x = self.display_menu_advert_at()
        self.add_line(y, x,
            menu_advert,
            self.make_attributes_list(menu_advert, curses.A_NORMAL),
            self.columns - x - 1
        )
    #@-others
#@+node:ekr.20170428084207.291: ** class FormWithMenus
class FormWithMenus(fmForm.Form, wgNMenuDisplay.HasMenus):
    """The Form class, but with a handling system for menus as well. See the HasMenus class for details."""
    #@+others
    #@+node:ekr.20170428084207.292: *3* __init__
    def __init__(self, *args, **keywords):
        # Standard form setup, then wire up the menu machinery (HasMenus).
        super(FormWithMenus, self).__init__(*args, **keywords)
        self.initialize_menus()
    #@+node:ekr.20170428084207.293: *3* display_menu_advert_at
    def display_menu_advert_at(self):
        # (y, x) position of the "<key>: Menu" hint: bottom-left of the form.
        return self.lines-1, 1
    #@+node:ekr.20170428084207.294: *3* draw_form
    def draw_form(self):
        # Same advert overlay as FormBaseNewWithMenus.draw_form.
        super(FormWithMenus, self).draw_form()
        menu_advert = " " + self.__class__.MENU_KEY + ": Menu "
        y, x = self.display_menu_advert_at()
        if isinstance(menu_advert, bytes):
            menu_advert = menu_advert.decode('utf-8', 'replace')
        self.add_line(y, x,
            menu_advert,
            self.make_attributes_list(menu_advert, curses.A_NORMAL),
            self.columns - x - 1
        )
    #@-others
# The following class does not inherit from FormWithMenus and so some code is duplicated.
# The pig is getting to inherit edit() from ActionForm, but draw_form from FormWithMenus
#@+node:ekr.20170428084207.295: ** class ActionFormWithMenus
class ActionFormWithMenus(fmActionForm.ActionForm, wgNMenuDisplay.HasMenus):
    """ActionForm (OK/Cancel handling) combined with the HasMenus system.

    Inherits edit() from ActionForm but duplicates the draw_form advert
    logic of FormWithMenus (see the note above the class in the file)."""
    #@+others
    #@+node:ekr.20170428084207.296: *3* __init__
    def __init__(self, *args, **keywords):
        super(ActionFormWithMenus, self).__init__(*args, **keywords)
        self.initialize_menus()
    #@+node:ekr.20170428084207.297: *3* display_menu_advert_at
    def display_menu_advert_at(self):
        # (y, x) position of the "<key>: Menu" hint: bottom-left of the form.
        return self.lines-1, 1
    #@+node:ekr.20170428084207.298: *3* draw_form
    def draw_form(self):
        super(ActionFormWithMenus, self).draw_form()
        menu_advert = " " + self.__class__.MENU_KEY + ": Menu "
        y, x = self.display_menu_advert_at()
        if isinstance(menu_advert, bytes):
            menu_advert = menu_advert.decode('utf-8', 'replace')
        self.add_line(y, x,
            menu_advert,
            self.make_attributes_list(menu_advert, curses.A_NORMAL),
            self.columns - x - 1
        )
    #@-others
#@-others
#@+node:ekr.20170428084207.299: ** class ActionFormV2WithMenus
class ActionFormV2WithMenus(fmActionFormV2.ActionFormV2, wgNMenuDisplay.HasMenus):
    """ActionFormV2 combined with the HasMenus system; V2 forms draw their
    own furniture, so no draw_form override is needed here."""
    #@+others
    #@+node:ekr.20170428084207.300: *3* __init__
    def __init__(self, *args, **keywords):
        super(ActionFormV2WithMenus, self).__init__(*args, **keywords)
        self.initialize_menus()
    #@-others
#@-others
#@+node:ekr.20170428084207.301: ** class SplitFormWithMenus
class SplitFormWithMenus(fmForm.SplitForm, FormWithMenus):
    """Just the same as the Title Form, but with a horizontal line"""
    #@+others
    #@+node:ekr.20170428084207.302: *3* draw_form
    def draw_form(self):
        # Delegates to the MRO (SplitForm's line + FormWithMenus' advert).
        super(SplitFormWithMenus, self).draw_form()
    #@-others
#@-others
#@@language python
#@@tabwidth -4
#@-leo
| 2,018 |
2,201 | <gh_stars>1000+
"""
Copyright (c) 2018-2022 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pickle # nosec - disable B403:import-pickle check
from pathlib import Path
import numpy as np
from ..representation import ClassificationAnnotation
from ..config import NumberField, StringField, PathField, BoolField
from ..utils import get_path
from .format_converter import BaseFormatConverter
from .format_converter import ConverterReturn
class DataIterator:
    """Batched iterator over the DIEN/Amazon product review dataset.

    Reads tab-separated behavior records, maps user/item/category strings
    to integer ids via pickled vocabularies, and yields
    (source, target) batches where each source row is
    [uid, mid, cat, mid_history_list, cat_history_list] and each target
    row is [click, 1 - click].

    NOTE(review): several calls below use ``split("")`` — an empty
    separator raises ValueError in Python, so the original separator
    character (presumably a non-printable delimiter such as '\\x02')
    appears to have been lost; confirm against the upstream dataset
    format before running.
    """
    def __init__(self, source,
                 uid_voc,
                 mid_voc,
                 cat_voc,
                 item_info,
                 reviews_info,
                 batch_size=128,
                 maxlen=100):
        # Kept open for the iterator's lifetime; reset() seeks back to 0.
        self.source = open(source, 'r', encoding='UTF-8')  # pylint: disable=R1732
        # source_dicts[0]=uid, [1]=mid (item), [2]=cat vocabularies.
        self.source_dicts = []
        for source_dict in [uid_voc, mid_voc, cat_voc]:
            # disable B301:pickle check
            with open(source_dict, 'rb') as source_content:
                self.source_dicts.append(pickle.load(source_content, encoding='UTF-8'))  # nosec
        with open(item_info, "r", encoding='UTF-8') as f_meta:
            # Map item string -> category string (first occurrence wins).
            meta_map = {}
            for line in f_meta:
                arr = line.strip().split("\t")
                if arr[0] not in meta_map:
                    meta_map[arr[0]] = arr[1]
            # Translate to id space: item id -> category id (0 = unknown).
            self.meta_id_map = {}
            for key, val in meta_map.items():
                if key in self.source_dicts[1]:
                    mid_idx = self.source_dicts[1][key]
                else:
                    mid_idx = 0
                if val in self.source_dicts[2]:
                    cat_idx = self.source_dicts[2][val]
                else:
                    cat_idx = 0
                self.meta_id_map[mid_idx] = cat_idx
        with open(reviews_info, "r", encoding='UTF-8') as f_review:
            # Pool of item ids used for negative sampling elsewhere.
            self.mid_list_for_random = []
            for line in f_review:
                arr = line.strip().split("\t")
                tmp_idx = 0
                if arr[1] in self.source_dicts[1]:
                    tmp_idx = self.source_dicts[1][arr[1]]
                self.mid_list_for_random.append(tmp_idx)
        self.batch_size = batch_size
        self.maxlen = maxlen
        self.n_uid = len(self.source_dicts[0])
        self.n_mid = len(self.source_dicts[1])
        self.n_cat = len(self.source_dicts[2])
        self.shuffle = False  # kept for interface parity; never enabled here
        self.source_buffer = []
        # Read-ahead buffer size: 20 batches worth of lines.
        self.k = batch_size * 20
        self.end_of_data = False

    def __iter__(self):
        return self

    def reset(self):
        # Rewind the data file so iteration can start over.
        self.source.seek(0)

    def next(self):
        # Python-2 style alias for __next__.
        return self.__next__()

    def __next__(self):
        """Return the next (source, target) batch; StopIteration at EOF."""
        if self.end_of_data:
            self.end_of_data = False
            self.reset()
            raise StopIteration
        source = []
        target = []
        if len(self.source_buffer) == 0:
            # Refill the read-ahead buffer with up to k raw records.
            for _ in range(self.k):
                ss = self.source.readline()
                if ss == "":
                    break
                self.source_buffer.append(ss.strip("\n").split("\t"))
            # sort by history behavior length
            # (pop() then takes longest histories first)
            his_length = np.array([len(s[4].split("")) for s in self.source_buffer])
            tidx = his_length.argsort()
            _sbuf = [self.source_buffer[i] for i in tidx]
            self.source_buffer = _sbuf
        if len(self.source_buffer) == 0:
            self.end_of_data = False
            self.reset()
            raise StopIteration
        try:
            # actual work here
            while True:
                # read from source file and map to word index
                try:
                    ss = self.source_buffer.pop()
                except IndexError:
                    break
                # Fields: ss[0]=label, ss[1]=user, ss[2]=item, ss[3]=cat,
                # ss[4]=item history, ss[5]=cat history. Unknown tokens -> 0.
                uid = self.source_dicts[0][ss[1]] if ss[1] in self.source_dicts[0] else 0
                mid = self.source_dicts[1][ss[2]] if ss[2] in self.source_dicts[1] else 0
                cat = self.source_dicts[2][ss[3]] if ss[3] in self.source_dicts[2] else 0
                tmp = []
                for fea in ss[4].split(""):
                    m = self.source_dicts[1][fea] if fea in self.source_dicts[1] else 0
                    tmp.append(m)
                mid_list = tmp
                tmp1 = []
                for fea in ss[5].split(""):
                    c = self.source_dicts[2][fea] if fea in self.source_dicts[2] else 0
                    tmp1.append(c)
                cat_list = tmp1
                # read from source file and map to word index
                source.append([uid, mid, cat, mid_list, cat_list])
                target.append([float(ss[0]), 1-float(ss[0])])
                if len(source) >= self.batch_size or len(target) >= self.batch_size:
                    break
        except IOError:
            self.end_of_data = True
        # all sentence pairs in maxibatch filtered out because of length
        if len(source) == 0 or len(target) == 0:
            source, target = self.next()
        return source, target
class AmazonProductData(BaseFormatConverter):
__provider__ = 'amazon_product_data'
annotation_types = (ClassificationAnnotation, )
@classmethod
def parameters(cls):
parameters = super().parameters()
parameters.update({
"data_dir": PathField(optional=False, is_directory=True, check_exists=True,
description="Dataset root"),
"preprocessed_dir": PathField(optional=False, is_directory=True, check_exists=True,
description="Preprocessed dataset location"),
"separator": StringField(optional=True, default='#',
description="Separator between input identifier and file identifier"),
"test_data": StringField(optional=True, default='local_test_splitByUser',
description="test data filename."),
"batch": NumberField(optional=True, default=1, description="Batch size", value_type=int),
"max_len": NumberField(optional=True, default=None, description="Maximum sequence length", value_type=int),
"subsample_size": NumberField(
optional=True, default=0, description="Number of sentences to process", value_type=int
),
"uid_voc": StringField(optional=True, default='uid_voc.pkl', description="uid_voc filename"),
"mid_voc": StringField(optional=True, default='mid_voc.pkl', description="mid_voc filename"),
"cat_voc": StringField(optional=True, default='cat_voc.pkl', description="cat_voc filename"),
"item_info": StringField(optional=True, default='item-info', description="item info filename"),
"reviews_info": StringField(optional=True, default='reviews-info', description="reviews info filename"),
"mid_his_batch": StringField(optional=True, default="Inputs/mid_his_batch_ph",
description="mid_his_batch input identifier"),
"cat_his_batch": StringField(optional=True, default="Inputs/cat_his_batch_ph",
description="cat_his_batch input identifier"),
"uid_batch": StringField(optional=True, default="Inputs/uid_batch_ph",
description="uid_batch input identifier"),
"mid_batch": StringField(optional=True, default="Inputs/mid_batch_ph",
description="mid_batch input identifier"),
"cat_batch": StringField(optional=True, default="Inputs/cat_batch_ph",
description="cat_batch input identifier"),
"mask": StringField(optional=True, default="Inputs/mask",
description="mask input identifier"),
"seq_len": StringField(optional=True, default="Inputs / seq_len_ph",
description="seq_len input identifier"),
"skip_dump": BoolField(optional=True, default=True, description='Annotate without saving features')
})
return parameters
    def configure(self):
        """Copy validated configuration values onto the converter instance.

        Purely mechanical: each attribute mirrors the identically named
        parameter declared in parameters().
        """
        self.data_dir = self.get_value_from_config('data_dir')
        self.test_data = self.get_value_from_config('test_data')
        self.separator = self.get_value_from_config('separator')
        self.preprocessed_dir = self.get_value_from_config('preprocessed_dir')
        self.uid_voc = self.get_value_from_config('uid_voc')
        self.mid_voc = self.get_value_from_config('mid_voc')
        self.cat_voc = self.get_value_from_config('cat_voc')
        self.item_info = self.get_value_from_config('item_info')
        self.reviews_info = self.get_value_from_config('reviews_info')
        self.mid_his_batch = self.get_value_from_config('mid_his_batch')
        self.cat_his_batch = self.get_value_from_config('cat_his_batch')
        self.cat_batch = self.get_value_from_config('cat_batch')
        self.mid_batch = self.get_value_from_config('mid_batch')
        self.uid_batch = self.get_value_from_config('uid_batch')
        self.mask = self.get_value_from_config('mask')
        self.seq_len = self.get_value_from_config('seq_len')
        self.skip_dump = self.get_value_from_config('skip_dump')
        self.batch = self.get_value_from_config('batch')
        self.max_len = self.get_value_from_config('max_len')
        self.subsample_size = self.get_value_from_config('subsample_size')
@staticmethod
def prepare_data(source, target, maxlen=None):
# x: a list of sentences
lengths_x = [len(s[4]) for s in source]
seqs_mid = [inp[3] for inp in source]
seqs_cat = [inp[4] for inp in source]
if maxlen is not None:
new_seqs_mid = []
new_seqs_cat = []
new_lengths_x = []
for l_x, inp in zip(lengths_x, source):
if l_x > maxlen:
new_seqs_mid.append(inp[3][l_x - maxlen:])
new_seqs_cat.append(inp[4][l_x - maxlen:])
new_lengths_x.append(maxlen)
else:
new_seqs_mid.append(inp[3])
new_seqs_cat.append(inp[4])
new_lengths_x.append(l_x)
lengths_x = new_lengths_x
seqs_mid = new_seqs_mid
seqs_cat = new_seqs_cat
n_samples = len(seqs_mid)
maxlen_x = np.max(lengths_x)
maxlen_x = max(maxlen, maxlen_x) if maxlen is not None else maxlen_x
mid_his = np.zeros((n_samples, maxlen_x)).astype('int64')
cat_his = np.zeros((n_samples, maxlen_x)).astype('int64')
mid_mask = np.zeros((n_samples, maxlen_x)).astype('float32')
for idx, [s_x, s_y] in enumerate(zip(seqs_mid, seqs_cat)):
mid_mask[idx, :lengths_x[idx]] = 1.
mid_his[idx, :lengths_x[idx]] = s_x
cat_his[idx, :lengths_x[idx]] = s_y
uids = np.array([inp[0] for inp in source])
mids = np.array([inp[1] for inp in source])
cats = np.array([inp[2] for inp in source])
return uids, mids, cats, mid_his, cat_his, mid_mask, np.array(target), np.array(lengths_x)
def convert(self, check_content=False, **kwargs):
    """Iterate the DIEN test set and build classification annotations.

    When ``skip_dump`` is False, every prepared batch is also written to
    ``<preprocessed_dir>/bs<batch>/input/<subfolder>/<iteration>.npz`` so
    the network inputs can be replayed later.  Annotation identifiers
    always encode ``<input name><separator><relative npz path>``.

    NOTE(review): ``check_content`` and ``kwargs`` appear unused here;
    presumably kept for interface compatibility with the converter base
    class -- confirm.
    """
    # Resolve and validate all dataset file paths up front.
    test_file = get_path(self.data_dir / self.test_data, is_directory=False)
    uid_voc = get_path(self.data_dir / self.uid_voc, is_directory=False)
    mid_voc = get_path(self.data_dir / self.mid_voc, is_directory=False)
    cat_voc = get_path(self.data_dir / self.cat_voc, is_directory=False)
    item_info = get_path(self.data_dir / self.item_info, is_directory=False)
    reviews_info = get_path(self.data_dir / self.reviews_info, is_directory=False)
    test_data = DataIterator(str(test_file), str(uid_voc), str(mid_voc), str(cat_voc), str(item_info),
                             str(reviews_info), self.batch, self.max_len)
    preprocessed_folder = Path(self.preprocessed_dir)
    if not self.skip_dump and not preprocessed_folder.exists():
        preprocessed_folder.mkdir(exist_ok=True, parents=True)
    input_folder = preprocessed_folder / "bs{}".format(self.batch) / 'input'
    if not input_folder.exists() and not self.skip_dump:
        input_folder.mkdir(parents=True)
    annotations = []
    subfolder = 0
    filecnt = 0
    iteration = 0
    for src, tgt in test_data:
        uids, mids, cats, mid_his, cat_his, mid_mask, gt, sl = self.prepare_data(src, tgt, maxlen=self.max_len)
        # Path of this batch's dump: <subfolder>/<iteration>.npz.
        c_input = input_folder / "{:02d}".format(subfolder)
        c_input = c_input / "{:06d}.npz".format(iteration)
        if not self.skip_dump:
            if not c_input.parent.exists():
                c_input.parent.mkdir(parents=True)
            # Keys are the network input names, so the npz can be fed directly.
            sample = {
                self.mid_his_batch: mid_his,
                self.cat_his_batch: cat_his,
                self.uid_batch: uids,
                self.mid_batch: mids,
                self.cat_batch: cats,
                self.mask: mid_mask,
                self.seq_len: sl
            }
            np.savez_compressed(str(c_input), **sample)
        # At most 0x100 files per subfolder, then roll over to the next one.
        filecnt += 1
        filecnt %= 0x100
        subfolder = subfolder + 1 if filecnt == 0 else subfolder
        c_file = str(c_input.relative_to(preprocessed_folder))
        identifiers = [
            "{}_{}{}".format(self.mid_his_batch, self.separator, c_file),
            "{}_{}{}".format(self.cat_his_batch, self.separator, c_file),
            "{}_{}{}".format(self.uid_batch, self.separator, c_file),
            "{}_{}{}".format(self.mid_batch, self.separator, c_file),
            "{}_{}{}".format(self.cat_batch, self.separator, c_file),
            "{}_{}{}".format(self.mask, self.separator, c_file),
            "{}_{}{}".format(self.seq_len, self.separator, c_file),
        ]
        # Only keep up to subsample_size annotations when subsampling.
        if not self.subsample_size or (self.subsample_size and (iteration < self.subsample_size)):
            annotations.append(ClassificationAnnotation(identifiers, gt[:, 0].tolist()))
        iteration += 1
        if self.subsample_size and (iteration > self.subsample_size):
            break
    return ConverterReturn(annotations, None, None)
| 7,266 |
14,668 | <gh_stars>1000+
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from page_sets.login_helpers import login_utils
def LoginDesktopAccount(action_runner, credential,
                        credentials_path=login_utils.DEFAULT_CREDENTIAL_PATH):
  """Logs in into a Tumblr account via the desktop login flow."""
  user_name, password = login_utils.GetAccountNameAndPassword(
      credential, credentials_path=credentials_path)

  next_button = '.signup_determine_btn'
  enter_password_button = '.forgot_password_link'

  def _WaitAndClick(selector):
    # Wait for the element, click it, then give the page a moment to react.
    action_runner.WaitForElement(selector=selector)
    action_runner.ClickElement(selector=selector)
    action_runner.Wait(1)

  action_runner.Navigate('https://www.tumblr.com/login')
  login_utils.InputWithSelector(
      action_runner, user_name, 'input[type=email]')
  _WaitAndClick(next_button)
  _WaitAndClick(enter_password_button)
  login_utils.InputWithSelector(
      action_runner, password, 'input[type=password]')
  action_runner.Wait(1)
  action_runner.WaitForElement(selector=next_button)
  action_runner.ClickElement(selector=next_button)
| 408 |
//===-- ThreadSafeSTLVector.h -----------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef liblldb_ThreadSafeSTLVector_h_
#define liblldb_ThreadSafeSTLVector_h_
#include <mutex>
#include <vector>
#include "lldb/lldb-defines.h"
namespace lldb_private {
/// A std::vector wrapper whose individual operations are serialized by a
/// recursive mutex.  Note that compound operations (e.g. check-then-append)
/// still need external locking via GetMutex().
template <typename _Object> class ThreadSafeSTLVector {
public:
  typedef std::vector<_Object> collection;
  typedef typename collection::iterator iterator;
  typedef typename collection::const_iterator const_iterator;

  // Constructors and Destructors
  ThreadSafeSTLVector() : m_collection(), m_mutex() {}

  ~ThreadSafeSTLVector() = default;

  /// Return true if the vector currently holds no elements.
  bool IsEmpty() const {
    std::lock_guard<std::recursive_mutex> guard(m_mutex);
    return m_collection.empty();
  }

  /// Remove all elements.
  void Clear() {
    std::lock_guard<std::recursive_mutex> guard(m_mutex);
    // Fixed: clear() returns void; do not "return" its result.
    m_collection.clear();
  }

  /// Return the number of elements.  Made const; m_mutex is mutable so
  /// locking is still possible from const contexts.
  size_t GetCount() const {
    std::lock_guard<std::recursive_mutex> guard(m_mutex);
    return m_collection.size();
  }

  /// Append a copy of \p object.  Takes a const reference (the element is
  /// never modified), which also allows temporaries to be appended.
  void AppendObject(const _Object &object) {
    std::lock_guard<std::recursive_mutex> guard(m_mutex);
    m_collection.push_back(object);
  }

  /// Return a copy of the element at \p index.
  /// Throws std::out_of_range for invalid indices (std::vector::at).
  _Object GetObject(size_t index) {
    std::lock_guard<std::recursive_mutex> guard(m_mutex);
    return m_collection.at(index);
  }

  /// Overwrite the element at \p index.
  /// Throws std::out_of_range for invalid indices (std::vector::at).
  void SetObject(size_t index, const _Object &object) {
    std::lock_guard<std::recursive_mutex> guard(m_mutex);
    m_collection.at(index) = object;
  }

  /// Expose the mutex so callers can hold it across compound operations.
  std::recursive_mutex &GetMutex() { return m_mutex; }

private:
  collection m_collection;
  // Mutable so const accessors (IsEmpty, GetCount) can lock.
  mutable std::recursive_mutex m_mutex;

  // For ThreadSafeSTLVector only
  DISALLOW_COPY_AND_ASSIGN(ThreadSafeSTLVector);
};
} // namespace lldb_private
#endif // liblldb_ThreadSafeSTLVector_h_
| 686 |
1,755 | """
A vtkTkImageViewerWidget for python, which is based on the
vtkTkImageWindowWidget.
Specify double=1 to get a double-buffered window.
Created by <NAME>, Nov 1999
"""
from __future__ import absolute_import
import math, os, sys
from vtkmodules.vtkCommonExecutionModel import vtkStreamingDemandDrivenPipeline
from vtkmodules.vtkInteractionImage import vtkImageViewer
from vtkmodules.vtkRenderingCore import vtkActor2D, vtkTextMapper
if sys.hexversion < 0x03000000:
# for Python2
import Tkinter as tkinter
else:
# for Python3
import tkinter
from .vtkLoadPythonTkWidgets import vtkLoadPythonTkWidgets
class vtkTkImageViewerWidget(tkinter.Widget):
    """
    A vtkTkImageViewerWidget for Python.

    Use GetImageViewer() to get the vtkImageViewer.

    Create with the keyword double=1 in order to generate a
    double-buffered viewer.

    Create with the keyword focus_on_enter=1 to enable
    focus-follows-mouse.  The default is for a click-to-focus mode.
    """
    # cnf={} follows the tkinter.Widget convention; it is forwarded
    # unmodified, never mutated here.
    def __init__(self, master, cnf={}, **kw):
        """
        Constructor.

        Keyword arguments:

          iv -- Use passed image viewer instead of creating a new one.

          double -- If True, generate a double-buffered viewer.
          Defaults to False.

          focus_on_enter -- If True, use a focus-follows-mouse mode.
          Defaults to False where the widget will use a click-to-focus
          mode.
        """
        # load the necessary extensions into tk
        vtkLoadPythonTkWidgets(master.tk)

        try:  # use specified vtkImageViewer
            imageViewer = kw['iv']
        except KeyError:  # or create one if none specified
            imageViewer = vtkImageViewer()

        doubleBuffer = 0
        # Fixed: catch only the expected KeyError instead of a bare except.
        try:
            if kw['double']:
                doubleBuffer = 1
            del kw['double']
        except KeyError:
            pass

        # check if focus should follow mouse
        if kw.get('focus_on_enter'):
            self._FocusOnEnter = 1
            del kw['focus_on_enter']
        else:
            self._FocusOnEnter = 0

        kw['iv'] = imageViewer.GetAddressAsString("vtkImageViewer")
        tkinter.Widget.__init__(self, master, 'vtkTkImageViewerWidget',
                                cnf, kw)
        if doubleBuffer:
            imageViewer.GetRenderWindow().DoubleBufferOn()

        self.BindTkImageViewer()

    def __getattr__(self, attr):
        # because the tk part of vtkTkImageViewerWidget must have
        # the only remaining reference to the ImageViewer when
        # it is destroyed, we can't actually store the ImageViewer
        # as an attribute but instead have to get it from the tk-side
        if attr == '_ImageViewer':
            addr = self.tk.call(self._w, 'GetImageViewer')[5:]
            return vtkImageViewer('_%s_vtkImageViewer_p' % addr)
        raise AttributeError(self.__class__.__name__ +
                             " has no attribute named " + attr)

    # Fixed: GetImageViewer/Render were defined twice (identically); the
    # duplicates after BindTkImageViewer have been removed.
    def GetImageViewer(self):
        """Return the underlying vtkImageViewer."""
        return self._ImageViewer

    def Render(self):
        """Render the viewer's scene."""
        self._ImageViewer.Render()

    def BindTkImageViewer(self):
        """Create the window/level overlay actors and set up all mouse and
        keyboard bindings."""
        imager = self._ImageViewer.GetRenderer()

        # stuff for window level text.
        mapper = vtkTextMapper()
        mapper.SetInput("none")
        t_prop = mapper.GetTextProperty()
        t_prop.SetFontFamilyToTimes()
        t_prop.SetFontSize(18)
        t_prop.BoldOn()
        t_prop.ShadowOn()
        self._LevelMapper = mapper

        actor = vtkActor2D()
        actor.SetMapper(mapper)
        actor.SetLayerNumber(1)
        actor.GetPositionCoordinate().SetValue(4, 22)
        actor.GetProperty().SetColor(1, 1, 0.5)
        actor.SetVisibility(0)
        imager.AddActor2D(actor)
        self._LevelActor = actor

        mapper = vtkTextMapper()
        mapper.SetInput("none")
        t_prop = mapper.GetTextProperty()
        t_prop.SetFontFamilyToTimes()
        t_prop.SetFontSize(18)
        t_prop.BoldOn()
        t_prop.ShadowOn()
        self._WindowMapper = mapper

        actor = vtkActor2D()
        actor.SetMapper(mapper)
        actor.SetLayerNumber(1)
        actor.GetPositionCoordinate().SetValue(4, 4)
        actor.GetProperty().SetColor(1, 1, 0.5)
        actor.SetVisibility(0)
        imager.AddActor2D(actor)
        self._WindowActor = actor

        self._LastX = 0
        self._LastY = 0
        self._OldFocus = None
        self._InExpose = 0

        # bindings
        # window level
        self.bind("<ButtonPress-1>",
                  lambda e, s=self: s.StartWindowLevelInteraction(e.x, e.y))
        self.bind("<B1-Motion>",
                  lambda e, s=self: s.UpdateWindowLevelInteraction(e.x, e.y))
        self.bind("<ButtonRelease-1>",
                  lambda e, s=self: s.EndWindowLevelInteraction())

        # Get the value
        self.bind("<ButtonPress-3>",
                  lambda e, s=self: s.StartQueryInteraction(e.x, e.y))
        self.bind("<B3-Motion>",
                  lambda e, s=self: s.UpdateQueryInteraction(e.x, e.y))
        self.bind("<ButtonRelease-3>",
                  lambda e, s=self: s.EndQueryInteraction())

        self.bind("<Expose>",
                  lambda e, s=self: s.ExposeTkImageViewer())
        self.bind("<Enter>",
                  lambda e, s=self: s.EnterTkViewer())
        self.bind("<Leave>",
                  lambda e, s=self: s.LeaveTkViewer())
        self.bind("<KeyPress-e>",
                  lambda e, s=self: s.quit())
        self.bind("<KeyPress-r>",
                  lambda e, s=self: s.ResetTkImageViewer())

    def _GrabFocus(self):
        # Remember the previously focused widget so Leave can restore it.
        self._OldFocus = self.focus_get()
        self.focus()

    def EnterTkViewer(self):
        if self._FocusOnEnter:
            self._GrabFocus()

    def LeaveTkViewer(self):
        # Fixed: identity comparison with None instead of "!=".
        if self._FocusOnEnter and (self._OldFocus is not None):
            self._OldFocus.focus()

    def ExposeTkImageViewer(self):
        # _InExpose guards against re-entrant expose events during Render.
        if (self._InExpose == 0):
            self._InExpose = 1
            if (not self._ImageViewer.GetRenderWindow().
                    IsA('vtkCocoaRenderWindow')):
                self.update()
            self._ImageViewer.Render()
            self._InExpose = 0

    def StartWindowLevelInteraction(self, x, y):
        if not self._FocusOnEnter:
            self._GrabFocus()
        viewer = self._ImageViewer
        self._LastX = x
        self._LastY = y
        self._Window = float(viewer.GetColorWindow())
        self._Level = float(viewer.GetColorLevel())

        # make the window level text visible
        self._LevelActor.SetVisibility(1)
        self._WindowActor.SetVisibility(1)
        self.UpdateWindowLevelInteraction(x, y)

    def EndWindowLevelInteraction(self):
        # make the window level text invisible
        self._LevelActor.SetVisibility(0)
        self._WindowActor.SetVisibility(0)
        self.Render()

    def UpdateWindowLevelInteraction(self, x, y):
        # compute normalized delta
        dx = 4.0 * (x - self._LastX) / self.winfo_width() * self._Window
        dy = 4.0 * (self._LastY - y) / self.winfo_height() * self._Level

        # abs so that direction does not flip
        if (self._Window < 0.0):
            dx = -dx
        if (self._Level < 0.0):
            dy = -dy

        # compute new window level
        window = self._Window + dx
        if (window < 0.0):
            level = self._Level + dy
        else:
            level = self._Level - dy

        viewer = self._ImageViewer
        viewer.SetColorWindow(window)
        viewer.SetColorLevel(level)

        self._WindowMapper.SetInput("Window: %g" % window)
        self._LevelMapper.SetInput("Level: %g" % level)
        self.Render()

    def ResetTkImageViewer(self):
        # Reset: Set window level to show all values
        viewer = self._ImageViewer
        # Fixed: renamed local from "input" (shadows builtin), "is None" test.
        img = viewer.GetInput()
        if (img is None):
            return
        # Get the extent in viewer
        z = viewer.GetZSlice()
        img.UpdateInformation()
        info = img.GetOutputInformation(0)
        ext = info.Get(vtkStreamingDemandDrivenPipeline.WHOLE_EXTENT())
        ext[4] = z
        ext[5] = z
        img.Update(0, 1, 0, ext)
        (low, high) = img.GetScalarRange()
        viewer.SetColorWindow(high - low)
        viewer.SetColorLevel((high + low) * 0.5)
        self.Render()

    def StartQueryInteraction(self, x, y):
        if not self._FocusOnEnter:
            self._GrabFocus()
        # Query PixelValue stuff
        self._WindowActor.SetVisibility(1)
        self.UpdateQueryInteraction(x, y)

    def EndQueryInteraction(self):
        self._WindowActor.SetVisibility(0)
        self.Render()

    def UpdateQueryInteraction(self, x, y):
        viewer = self._ImageViewer
        img = viewer.GetInput()
        z = viewer.GetZSlice()

        # y is flipped upside down
        y = self.winfo_height() - y

        # make sure point is in the extent of the image.
        (xMin, xMax, yMin, yMax, zMin, zMax) = img.GetExtent()
        if (x < xMin or x > xMax or y < yMin or
                y > yMax or z < zMin or z > zMax):
            return

        numComps = img.GetNumberOfScalarComponents()
        text = ""
        # Fixed: xrange is Python 2 only; the module explicitly supports
        # Python 3, so use range (works on both).
        for i in range(numComps):
            val = img.GetScalarComponentAsDouble(x, y, z, i)
            text = "%s %.1f" % (text, val)

        self._WindowMapper.SetInput("(%d, %d): %s" % (x, y, text))
        self.Render()
#-----------------------------------------------------------------------------
# an example of how to use this widget
# an example of how to use this widget: draw a variety of primitives onto a
# 2D canvas and display the result in a vtkTkImageViewerWidget.
if __name__ == "__main__":
    from vtkmodules.vtkImagingSources import vtkImageCanvasSource2D

    # Build a 512x512 test image with the 2D canvas source.
    canvas = vtkImageCanvasSource2D()
    canvas.SetNumberOfScalarComponents(3)
    # NOTE(review): 3 here is a VTK scalar-type enum value, not a component
    # count -- confirm against the vtkImageCanvasSource2D docs.
    canvas.SetScalarType(3)
    canvas.SetExtent(0, 511, 0, 511, 0, 0)
    canvas.SetDrawColor(100, 100, 0)
    canvas.FillBox(0, 511, 0, 511)
    canvas.SetDrawColor(200, 0, 200)
    canvas.FillBox(32, 511, 100, 500)
    canvas.SetDrawColor(100, 0, 0)
    # Tube center x=550 lies outside the 0..511 extent; exercises clipping.
    canvas.FillTube(550, 20, 30, 400, 5)
    canvas.SetDrawColor(255, 255, 255)
    canvas.DrawSegment3D(10, 20, 0, 90, 510, 0)
    canvas.SetDrawColor(200, 50, 50)
    canvas.DrawSegment3D(510, 90, 0, 10, 20, 0)

    # Check segment clipping
    canvas.SetDrawColor(0, 200, 0)
    canvas.DrawSegment(-10, 30, 30, -10)
    canvas.DrawSegment(-10, 481, 30, 521)
    canvas.DrawSegment(481, -10, 521, 30)
    canvas.DrawSegment(481, 521, 521, 481)

    # Check Filling a triangle
    canvas.SetDrawColor(20, 200, 200)
    canvas.FillTriangle(-100, 100, 190, 150, 40, 300)

    # Check drawing a circle
    canvas.SetDrawColor(250, 250, 10)
    canvas.DrawCircle(350, 350, 200.0)

    # Check drawing a point
    canvas.SetDrawColor(250, 250, 250)
    canvas.DrawPoint(350, 350)
    canvas.DrawPoint(350, 550)

    # Test filling functionality
    canvas.SetDrawColor(55, 0, 0)
    canvas.DrawCircle(450, 350, 80.0)
    canvas.SetDrawColor(100, 255, 100)
    canvas.FillPixel(450, 350)

    # Create the GUI: two renderer widgets and a quit button
    frame = tkinter.Frame()

    # Double-buffered viewer widget hooked up to the canvas output.
    widget = vtkTkImageViewerWidget(frame, width=512, height=512, double=1)

    viewer = widget.GetImageViewer()
    viewer.SetInputConnection(canvas.GetOutputPort())
    viewer.SetColorWindow(256)
    viewer.SetColorLevel(127.5)

    button = tkinter.Button(frame, text="Quit", command=frame.quit)

    widget.pack(side='top', padx=3, pady=3, fill='both', expand='t')
    frame.pack(fill='both', expand='t')
    button.pack(fill='x')

    frame.mainloop()
| 5,251 |
2,637 | <filename>vendors/nordic/nRF5_SDK_15.2.0/external/freertos/portable/GCC/nrf52/port.c
/*
* FreeRTOS Kernel V10.0.0
* Copyright (C) 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software. If you wish to use our Amazon
* FreeRTOS name, please do so in a fair use way that does not cause confusion.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* http://www.FreeRTOS.org
* http://aws.amazon.com/freertos
*
* 1 tab == 4 spaces!
*/
/*-----------------------------------------------------------
* Implementation of functions defined in portable.h for the ARM CM4F port.
*----------------------------------------------------------*/
/* Scheduler includes. */
#include "FreeRTOS.h"
#include "task.h"
/*
* Start first task is a separate function so it can be tested in isolation.
*/
void vPortStartFirstTask( void ) __attribute__ (( naked ));
/*
* Exception handlers.
*/
void vPortSVCHandler( void ) __attribute__ (( naked ));
void xPortPendSVHandler( void ) __attribute__ (( naked ));
/*-----------------------------------------------------------*/
/*
 * Start the scheduler's first task.
 *
 * Resets the main stack pointer (MSP) from the first vector-table entry,
 * re-enables interrupts and faults, and issues an SVC instruction which is
 * serviced by vPortSVCHandler() to restore the first task's context.
 * Declared naked, so the body is a single inline-asm block.
 */
void vPortStartFirstTask( void )
{
    __asm volatile(
#if defined(__SES_ARM)
    " ldr r0, =_vectors       \n" /* Locate the stack using _vectors table (SEGGER toolchain). */
#else
    " ldr r0, =__isr_vector   \n" /* Locate the stack using __isr_vector table. */
#endif
    " ldr r0, [r0]            \n"
    " msr msp, r0             \n" /* Set the msp back to the start of the stack. */
    " cpsie i                 \n" /* Globally enable interrupts. */
    " cpsie f                 \n"
    " dsb                     \n"
    " isb                     \n"
#ifdef SOFTDEVICE_PRESENT
    /* Block kernel interrupts only (PendSV) before calling SVC, so the
     * SoftDevice's higher-priority interrupts stay enabled. */
    " mov r0, %0              \n"
    " msr basepri, r0         \n"
#endif
    " svc 0                   \n" /* System call to start first task. */
    "                         \n"
    " .align 2                \n"
#ifdef SOFTDEVICE_PRESENT
    ::"i"(configKERNEL_INTERRUPT_PRIORITY << (8 - configPRIO_BITS))
#endif
    );
}
/*-----------------------------------------------------------*/
/*
 * SVC exception handler: restore the context of the task pointed to by
 * pxCurrentTCB (used once, to launch the first task).  Pops r4-r11 and the
 * exception return value (r14) from the task stack, sets PSP, unmasks
 * basepri and returns to the task via the exception return in r14.
 */
void vPortSVCHandler( void )
{
    __asm volatile (
    " ldr r3, =pxCurrentTCB       \n" /* Restore the context. */
    " ldr r1, [r3]                \n" /* Use pxCurrentTCBConst to get the pxCurrentTCB address. */
    " ldr r0, [r1]                \n" /* The first item in pxCurrentTCB is the task top of stack. */
    " ldmia r0!, {r4-r11, r14}    \n" /* Pop the registers that are not automatically saved on exception entry and the critical nesting count. */
    " msr psp, r0                 \n" /* Restore the task stack pointer. */
    " isb                         \n"
    " mov r0, #0                  \n"
    " msr basepri, r0             \n" /* Re-enable all interrupt priorities. */
    " bx r14                      \n"
    "                             \n"
    " .align 2                    \n"
    );
}
/*-----------------------------------------------------------*/
/*
 * PendSV exception handler: performs the actual context switch.
 *
 * Saves the outgoing task's remaining context (high VFP registers only when
 * bit 4 of the EXC_RETURN value in r14 indicates FPU use, then r4-r11/r14)
 * onto its stack, stores the stack pointer in its TCB, calls
 * vTaskSwitchContext() with basepri raised to mask kernel-level interrupts,
 * then restores the incoming task's context the same way in reverse.
 */
void xPortPendSVHandler( void )
{
    /* This is a naked function. */
    __asm volatile
    (
    " mrs r0, psp                  \n"
    " isb                          \n"
    "                              \n"
    " ldr r3, =pxCurrentTCB        \n" /* Get the location of the current TCB. */
    " ldr r2, [r3]                 \n"
    "                              \n"
    " tst r14, #0x10               \n" /* Is the task using the FPU context? If so, push high vfp registers. */
    " it eq                        \n"
    " vstmdbeq r0!, {s16-s31}      \n"
    "                              \n"
    " stmdb r0!, {r4-r11, r14}     \n" /* Save the core registers. */
    "                              \n"
    " str r0, [r2]                 \n" /* Save the new top of stack into the first member of the TCB. */
    "                              \n"
    " stmdb sp!, {r3}              \n"
    " mov r0, %0                   \n" /* Mask interrupts up to the kernel priority while switching. */
    " msr basepri, r0              \n"
    " dsb                          \n"
    " isb                          \n"
    " bl vTaskSwitchContext        \n"
    " mov r0, #0                   \n"
    " msr basepri, r0              \n"
    " ldmia sp!, {r3}              \n"
    "                              \n"
    " ldr r1, [r3]                 \n" /* The first item in pxCurrentTCB is the task top of stack. */
    " ldr r0, [r1]                 \n"
    "                              \n"
    " ldmia r0!, {r4-r11, r14}     \n" /* Pop the core registers. */
    "                              \n"
    " tst r14, #0x10               \n" /* Is the task using the FPU context? If so, pop the high vfp registers too. */
    " it eq                        \n"
    " vldmiaeq r0!, {s16-s31}      \n"
    "                              \n"
    " msr psp, r0                  \n"
    " isb                          \n"
    "                              \n"
    "                              \n"
    " bx r14                       \n"
    "                              \n"
    " .align 2                     \n"
    ::"i"(configMAX_SYSCALL_INTERRUPT_PRIORITY << (8 - configPRIO_BITS))
    );
}
| 3,881 |
1,742 | <reponame>yzpopulation/sage
"""
Quantum Groups Using GAP's QuaGroup Package
AUTHORS:
- <NAME> (03-2017): initial version
The documentation for GAP's QuaGroup package, originally authored by
<NAME>, can be found at
https://www.gap-system.org/Packages/quagroup.html.
"""
# ****************************************************************************
# Copyright (C) 2017 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# https://www.gnu.org/licenses/
# ****************************************************************************
from sage.misc.lazy_attribute import lazy_attribute
from sage.misc.cachefunc import cached_method
from sage.structure.parent import Parent
from sage.structure.element import Element
from sage.structure.unique_representation import UniqueRepresentation
from sage.structure.sage_object import SageObject
from sage.structure.richcmp import op_EQ, op_NE, richcmp
from sage.sets.non_negative_integers import NonNegativeIntegers
from sage.sets.family import Family
from sage.combinat.root_system.cartan_type import CartanType
from sage.libs.gap.libgap import libgap
from sage.features.gap import GapPackage
from sage.graphs.digraph import DiGraph
from sage.rings.rational_field import QQ
from sage.categories.algebras import Algebras
from sage.categories.cartesian_product import cartesian_product
from sage.categories.fields import Fields
from sage.categories.homset import HomsetWithBase, Hom
from sage.categories.hopf_algebras import HopfAlgebras
from sage.categories.modules import Modules
from sage.categories.morphism import Morphism
from sage.categories.rings import Rings
from copy import copy
import re
class QuaGroupModuleElement(Element):
    """
    Base class for elements created using QuaGroup.

    Wraps a libgap handle (``self._libgap``) to the corresponding GAP
    object; arithmetic and comparisons are delegated to GAP.
    """
    def __init__(self, parent, libgap_elt):
        """
        Initialize ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: TestSuite(Q.an_element()).run() # optional - gap_packages
        """
        # Handle to the underlying GAP object; all operations go through it.
        self._libgap = libgap(libgap_elt)
        Element.__init__(self, parent)

    def _repr_(self):
        """
        Return a string representation of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: Q.an_element() # optional - gap_packages
            1 + (q)*F[a1] + E[a1] + (q^2-1-q^-2 + q^-4)*[ K1 ; 2 ] + K1
             + (-q^-1 + q^-3)*K1[ K1 ; 1 ]

            sage: Q = QuantumGroup(['D',4]) # optional - gap_packages
            sage: Q.F_simple() # optional - gap_packages
            Finite family {1: F[a1], 2: F[a2], 3: F[a3], 4: F[a4]}
        """
        # We add some space between the terms
        # FIXME: This doesn't work to avoid within the () for the coeff's
        c = re.compile(r"\+(?! [^(]* \))")
        ret = re.sub(c, ' + ', repr(self._libgap))
        # Replace Ei and Fi with the corresponding root in short form.
        # Do the largest index first so, e.g., F12 gets replaced as 12
        # instead of as 1.
        for i,al in reversed(list(enumerate(self.parent()._pos_roots))):
            short = '+'.join('%s*a%s'%(coeff,index) if coeff != 1 else 'a%s'%index
                             for index,coeff in al)
            ret = ret.replace('F%s'%(i+1), 'F[%s]'%short)
            ret = ret.replace('E%s'%(i+1), 'E[%s]'%short)
        return ret

    def _latex_(self):
        r"""
        Return a latex representation of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: latex(Q.an_element()) # optional - gap_packages
            1+{(q)} F_{\alpha_{1}}+E_{\alpha_{1}}+{(q^{2}-1-q^{-2}+q^{-4})}
            [ K_{1} ; 2 ]+K_{1}+{(-q^{-1}+q^{-3})} K_{1}[ K_{1} ; 1 ]

            sage: Q = QuantumGroup(['D',4]) # optional - gap_packages
            sage: latex(list(Q.F_simple())) # optional - gap_packages
            \left[F_{\alpha_{1}}, F_{\alpha_{2}},
            F_{\alpha_{3}}, F_{\alpha_{4}}\right]
        """
        from sage.misc.latex import latex
        ret = repr(self._libgap)
        # Do the largest index first so, e.g., F12 gets replaced as 12
        # instead of as 1.
        for i,al in reversed(list(enumerate(self.parent()._pos_roots))):
            ret = ret.replace('F%s'%(i+1), 'F_{%s}'%latex(al))
            ret = ret.replace('E%s'%(i+1), 'E_{%s}'%latex(al))
        for i,ii in reversed(list(enumerate(self.parent()._cartan_type.index_set()))):
            ret = ret.replace('K%s'%(i+1), 'K_{%s}'%ii)
        # Fugly string parsing to get good looking latex
        # TODO: Find a better way
        ret = ret.replace('(', '{(')
        ret = ret.replace(')', ')}')
        ret = ret.replace('v0', 'v_0')
        ret = ret.replace('*', ' ')
        # Wrap the exponent of each q-power in braces: q^-12 -> q^{-12}.
        c = re.compile(r"q\^-?[0-9]*")
        for m in reversed(list(c.finditer(ret))):
            ret = ret[:m.start()+2] + '{' + ret[m.start()+2:m.end()] + '}' + ret[m.end():]
        return ret

    def __reduce__(self):
        """
        Used in pickling.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: x = Q.an_element() # optional - gap_packages
            sage: loads(dumps(x)) == x # optional - gap_packages
            True
        """
        # GAP's external representation alternates monomial data and
        # coefficients; coefficients are rebuilt from strings over the
        # Sage base ring so the pickle does not depend on a GAP session.
        data = self._libgap.ExtRepOfObj()
        R = self.base_ring()
        ret = []
        for i in range(len(data)//2):
            ret.append(data[2*i].sage())
            ret.append( R(str(data[2*i+1])) )
        return (_unpickle_generic_element, (self.parent(), ret))

    def __hash__(self):
        r"""
        Return the hash of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',3]) # optional - gap_packages
            sage: x = Q.an_element() # optional - gap_packages
            sage: hash(x) == hash(x.gap()) # optional - gap_packages
            True
        """
        return hash(self._libgap)

    def _richcmp_(self, other, op):
        """
        Rich comparison of ``self`` and ``other`` by ``op``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: x = Q.an_element() # optional - gap_packages
            sage: F1, F12, F2 = Q.F() # optional - gap_packages
            sage: q = Q.q() # optional - gap_packages
            sage: x == F1 # optional - gap_packages
            False
            sage: x != F1 # optional - gap_packages
            True
            sage: F2 * F1 # optional - gap_packages
            (q)*F[a1]*F[a2] + F[a1+a2]
            sage: F2 * F1 == q * F1 * F2 + F12 # optional - gap_packages
            True
        """
        # Comparison is delegated to the wrapped GAP objects.
        return richcmp(self._libgap, other._libgap, op)

    def gap(self):
        r"""
        Return the gap representation of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',3]) # optional - gap_packages
            sage: x = Q.an_element() # optional - gap_packages
            sage: x.gap() # optional - gap_packages
            1+(q)*F1+E1+(q^4-1-q^-4+q^-8)*[ K1 ; 2 ]+K1+(-q^-2+q^-6)*K1[ K1 ; 1 ]
        """
        return self._libgap

    # Aliases so the libgap()/gap() coercion machinery finds the GAP object.
    _libgap_ = _gap_ = gap

    def _add_(self, other):
        r"""
        Add ``self`` and ``other``.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: F1, F2 = Q.F_simple() # optional - gap_packages
            sage: F1 * F2 + F2 * F1 # optional - gap_packages
            (q^3 + 1)*F[a1]*F[a2] + F[a1+a2]
        """
        return self.__class__(self.parent(), self._libgap + other._libgap)

    def _sub_(self, other):
        r"""
        Subtract ``self`` and ``other``.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: F1, F2 = Q.F_simple() # optional - gap_packages
            sage: F1 * F2 - F2 * F1 # optional - gap_packages
            (-q^3 + 1)*F[a1]*F[a2] + (-1)*F[a1+a2]
        """
        return self.__class__(self.parent(), self._libgap - other._libgap)

    def _acted_upon_(self, scalar, self_on_left=True):
        r"""
        Return the action of ``scalar`` on ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2]) # optional - gap_packages
            sage: q = Q.q() # optional - gap_packages
            sage: x = Q.one().f_tilde([1,2,1,1,2,2]); x # optional - gap_packages
            F[a1+a2]^(3)
            sage: 3 * x # optional - gap_packages
            (3)*F[a1+a2]^(3)
            sage: x * (5/3) # optional - gap_packages
            (5/3)*F[a1+a2]^(3)
            sage: q^-10 * x # optional - gap_packages
            (q^-10)*F[a1+a2]^(3)
            sage: (1 + q^2 - q^-1) * x # optional - gap_packages
            (q^2 + 1-q^-1)*F[a1+a2]^(3)
        """
        # Returning None signals to the coercion framework that this
        # action does not apply to the given scalar.
        try:
            scalar = self.parent().base_ring()(scalar)
            # Rewrite the scalar in terms of GAP's own q before converting.
            scalar = scalar.subs(q=self.parent()._libgap_q)
        except (TypeError, ValueError):
            return None
        return self.__class__(self.parent(), self._libgap * libgap(scalar))

    def e_tilde(self, i):
        r"""
        Return the action of the Kashiwara operator
        `\widetilde{e}_i` on ``self``.

        INPUT:

        - ``i`` -- an element of the index set or a list to
          perform a string of operators

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2]) # optional - gap_packages
            sage: x = Q.one().f_tilde([1,2,1,1,2,2]) # optional - gap_packages
            sage: x.e_tilde([2,2,1,2]) # optional - gap_packages
            F[a1]^(2)
        """
        # Do not override this method, instead implement _et
        # (subclasses provide _et; it is not defined on this base class).
        if isinstance(i, (list, tuple)):
            ret = self
            for j in i:
                if not ret: # ret == 0
                    return ret
                ret = ret._et(j)
            return ret
        return self._et(i)

    def f_tilde(self, i):
        r"""
        Return the action of the Kashiwara operator
        `\widetilde{f}_i` on ``self``.

        INPUT:

        - ``i`` -- an element of the index set or a list to
          perform a string of operators

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2]) # optional - gap_packages
            sage: Q.one().f_tilde(1) # optional - gap_packages
            F[a1]
            sage: Q.one().f_tilde(2) # optional - gap_packages
            F[a2]
            sage: Q.one().f_tilde([1,2,1,1,2]) # optional - gap_packages
            F[a1]*F[a1+a2]^(2)
        """
        # Do not override this method, instead implement _ft
        # (subclasses provide _ft; it is not defined on this base class).
        if isinstance(i, (list, tuple)):
            ret = self
            for j in i:
                if not ret: # ret == 0
                    return ret
                ret = ret._ft(j)
            return ret
        return self._ft(i)
class QuantumGroup(UniqueRepresentation, Parent):
r"""
A Drinfel'd-Jimbo quantum group (implemented using the optional GAP
package ``QuaGroup``).
EXAMPLES:
We check the quantum Serre relations. We first we import the
`q`-binomial using the `q`-int for quantum groups::
sage: from sage.algebras.quantum_groups.q_numbers import q_binomial
We verify the Serre relations for type `A_2`::
sage: Q = algebras.QuantumGroup(['A',2]) # optional - gap_packages
sage: F1,F12,F2 = Q.F() # optional - gap_packages
sage: q = Q.q() # optional - gap_packages
sage: F1^2*F2 - q_binomial(2,1,q) * F1*F2*F1 + F2*F1^2 # optional - gap_packages
0
We verify the Serre relations for type `B_2`::
sage: Q = algebras.QuantumGroup(['B',2]) # optional - gap_packages
sage: F1, F12, F122, F2 = Q.F() # optional - gap_packages
sage: F1^2*F2 - q_binomial(2,1,q^2) * F1*F2*F1 + F2*F1^2 # optional - gap_packages
0
sage: (F2^3*F1 - q_binomial(3,1,q) * F2^2*F1*F2 # optional - gap_packages
....: + q_binomial(3,2,q) * F2*F1*F2^2 - F1*F2^3)
0
REFERENCES:
- :wikipedia:`Quantum_group`
"""
@staticmethod
def __classcall_private__(cls, cartan_type, q=None):
    """
    Normalize the input so equal quantum groups are identical objects.

    TESTS::

        sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
        sage: Q is QuantumGroup('A2', None) # optional - gap_packages
        True
    """
    normalized_type = CartanType(cartan_type)
    return super(QuantumGroup, cls).__classcall__(cls, normalized_type, q)
def __init__(self, cartan_type, q):
    """
    Initialize ``self``.

    TESTS::

        sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
        sage: TestSuite(Q).run() # long time # optional - gap_packages

        sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
        sage: TestSuite(Q).run() # long time # optional - gap_packages
    """
    self._cartan_type = cartan_type
    # Fail early with an informative error if QuaGroup is not installed.
    GapPackage("QuaGroup", spkg="gap_packages").require()
    libgap.LoadPackage('QuaGroup')
    R = libgap.eval('RootSystem("%s",%s)'%(cartan_type.type(), cartan_type.rank()))
    Q = self._cartan_type.root_system().root_lattice()
    I = cartan_type.index_set()
    # Positive roots in QuaGroup's convex order, re-expressed as elements
    # of Sage's root lattice (used by element _repr_/_latex_).
    self._pos_roots = [Q.sum_of_terms([(ii, root[i]) for i,ii in enumerate(I)
                                       if root[i] != 0])
                       for root in R.PositiveRootsInConvexOrder().sage()]
    if q is None:
        # Default: generic q; mirror GAP's own '_q' over QQ(q).
        self._libgap = R.QuantizedUEA()
        self._libgap_q = libgap.eval('_q')
        self._libgap_base = libgap.eval('QuantumField')
        base_field = QQ['q'].fraction_field()
        q = base_field.gen()
    else:
        # Specialized q: build the quantized UEA over q's parent field.
        base_field = q.parent()
        self._libgap = R.QuantizedUEA(base_field, q)
        self._libgap_base = libgap(base_field)
        self._libgap_q = libgap(q)
    self._q = q
    Parent.__init__(self, base=base_field, category=HopfAlgebras(Fields()))
def _repr_(self):
"""
Return a string representation of ``self``.
EXAMPLES::
sage: QuantumGroup(['A',2]) # optional - gap_packages
Quantum Group of type ['A', 2] with q=q
"""
return "Quantum Group of type {} with q={}".format(self._cartan_type, self._q)
def _latex_(self):
r"""
Return a latex representation of ``self``.
EXAMPLES::
sage: latex(QuantumGroup(['A',3])) # optional - gap_packages
U_{q}(A_{3})
sage: zeta3 = CyclotomicField(3).gen() # optional - gap_packages
sage: latex(QuantumGroup(['G',2], q=zeta3)) # optional - gap_packages
U_{\zeta_{3}}(G_2)
"""
from sage.misc.latex import latex
return "U_{%s}(%s)"%(latex(self._q), latex(self._cartan_type))
    def gap(self):
        """
        Return the gap representation of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q.gap()  # optional - gap_packages
            QuantumUEA( <root system of type A2>, Qpar = q )
        """
        return self._libgap
    # Aliases so the generic gap()/libgap() conversion protocols find
    # the underlying GAP object.
    _libgap_ = _gap_ = gap
    def cartan_type(self):
        """
        Return the Cartan type of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q.cartan_type()  # optional - gap_packages
            ['A', 2]
        """
        return self._cartan_type
    def _element_constructor_(self, elt):
        """
        Construct an element of ``self`` from ``elt``.

        Falsy input (``0``, ``None``, the GAP zero, ...) maps to the
        zero element; base-ring elements are taken times the identity;
        anything else is assumed to be a GAP element of the quantized
        UEA and is wrapped directly.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q(0)  # optional - gap_packages
            0
            sage: Q(4)  # optional - gap_packages
            (4)*1
            sage: Q(4).parent() is Q  # optional - gap_packages
            True
            sage: Q(Q.q()).parent() is Q  # optional - gap_packages
            True
            sage: Q(Q.an_element()) == Q.an_element()  # optional - gap_packages
            True
        """
        if not elt:
            return self.zero()
        if elt in self.base_ring():
            # Scalars embed via multiplication by the identity; the
            # product coerces ``elt`` into the base ring.
            return elt * self.one()
        return self.element_class(self, elt)
# Special elements
# ----------------
@cached_method
def one(self):
"""
Return the multiplicative identity of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: Q.one() # optional - gap_packages
1
"""
return self.element_class(self, self._libgap.One())
    @cached_method
    def zero(self):
        """
        Return the additive identity of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q.zero()  # optional - gap_packages
            0
        """
        return self.element_class(self, self._libgap.ZeroImmutable())
@cached_method
def gens(self):
"""
Return the generators of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: Q.gens() # optional - gap_packages
(F[a1], F[a1+a2], F[a2],
K1, (-q + q^-1)*[ K1 ; 1 ] + K1,
K2, (-q + q^-1)*[ K2 ; 1 ] + K2,
E[a1], E[a1+a2], E[a2])
"""
return tuple([self.element_class(self, gen)
for gen in self._libgap.GeneratorsOfAlgebra()])
    def E(self):
        r"""
        Return the family of generators `\{E_{\alpha}\}_{\alpha \in \Phi}`,
        where `\Phi` is the root system of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2])  # optional - gap_packages
            sage: list(Q.E())  # optional - gap_packages
            [E[a1], E[a1+a2], E[a1+2*a2], E[a2]]
        """
        # gens() lists the F's (one per positive root), then the pairs
        # K_i, K_i^{-1}, then the E's; so the first E sits at offset N.
        N = len(self._pos_roots) + len(self._cartan_type.index_set())*2
        d = {al: self.gens()[N+i] for i,al in enumerate(self._pos_roots)}
        return Family(self._pos_roots, d.__getitem__)
def E_simple(self):
r"""
Return the family of generators `\{E_i := E_{\alpha_i}\}_{i \in I}`.
EXAMPLES::
sage: Q = QuantumGroup(['B',2]) # optional - gap_packages
sage: Q.E_simple() # optional - gap_packages
Finite family {1: E[a1], 2: E[a2]}
"""
I = self._cartan_type.index_set()
gens = self.algebra_generators()
d = {i: gens['E%s'%i] for i in I}
return Family(I, d.__getitem__)
    def F(self):
        r"""
        Return the family of generators `\{F_{\alpha}\}_{\alpha \in \Phi}`,
        where `\Phi` is the root system of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['G',2])  # optional - gap_packages
            sage: list(Q.F())  # optional - gap_packages
            [F[a1], F[3*a1+a2], F[2*a1+a2], F[3*a1+2*a2], F[a1+a2], F[a2]]
        """
        # The F generators occupy the first len(self._pos_roots) slots
        # of gens(), in the same convex order as self._pos_roots.
        d = {al: self.gens()[i] for i,al in enumerate(self._pos_roots)}
        return Family(self._pos_roots, d.__getitem__)
def F_simple(self):
r"""
Return the family of generators `\{F_i := F_{\alpha_i}\}_{i \in I}`.
EXAMPLES::
sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
sage: Q.F_simple() # optional - gap_packages
Finite family {1: F[a1], 2: F[a2]}
"""
I = self._cartan_type.index_set()
gens = self.algebra_generators()
d = {i: gens['F%s'%i] for i in I}
return Family(I, d.__getitem__)
    def K(self):
        r"""
        Return the family of generators `\{K_i\}_{i \in I}`.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',3])  # optional - gap_packages
            sage: Q.K()  # optional - gap_packages
            Finite family {1: K1, 2: K2, 3: K3}
            sage: Q.K_inverse()  # optional - gap_packages
            Finite family {1: (-q + q^-1)*[ K1 ; 1 ] + K1,
                           2: (-q + q^-1)*[ K2 ; 1 ] + K2,
                           3: (-q + q^-1)*[ K3 ; 1 ] + K3}
        """
        # After the F's, gens() interleaves K_i and K_i^{-1}:
        # K_i at index N + 2*i and K_i^{-1} at index N + 2*i + 1.
        N = len(self._pos_roots)
        I = self._cartan_type.index_set()
        d = {ii: self.gens()[N+2*i] for i,ii in enumerate(I)}
        return Family(I, d.__getitem__)
    def K_inverse(self):
        r"""
        Return the family of generators `\{K_i^{-1}\}_{i \in I}`.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',3])  # optional - gap_packages
            sage: Q.K_inverse()  # optional - gap_packages
            Finite family {1: (-q + q^-1)*[ K1 ; 1 ] + K1,
                           2: (-q + q^-1)*[ K2 ; 1 ] + K2,
                           3: (-q + q^-1)*[ K3 ; 1 ] + K3}
        """
        # K_i^{-1} sits immediately after K_i in gens(); see K().
        N = len(self._pos_roots)
        I = self._cartan_type.index_set()
        d = {ii: self.gens()[N+2*i+1] for i,ii in enumerate(I)}
        return Family(I, d.__getitem__)
    @cached_method
    def algebra_generators(self):
        """
        Return the algebra generators of ``self``.

        The returned family is keyed by the strings ``'Fi'``, ``'Ki'``,
        ``'Kii'`` (the inverse), and ``'Ei'`` for each index ``i``,
        listed in that order grouped by letter.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: list(Q.algebra_generators())  # optional - gap_packages
            [F[a1], F[a2],
             K1, K2,
             (-q + q^-1)*[ K1 ; 1 ] + K1, (-q + q^-1)*[ K2 ; 1 ] + K2,
             E[a1], E[a2]]
        """
        I = self._cartan_type.index_set()
        simples = self._cartan_type.root_system().root_lattice().simple_roots()
        ret = {}
        for i,al in enumerate(simples):
            ii = I[i]
            ret['F%s'%ii] = self.F()[al]
            ret['K%s'%ii] = self.K()[ii]
            ret['Ki%s'%ii] = self.K_inverse()[ii]
            ret['E%s'%ii] = self.E()[al]
        # The key order determines the iteration order of the family.
        keys = (['F%s'%i for i in I] + ['K%s'%i for i in I]
                + ['Ki%s'%i for i in I] + ['E%s'%i for i in I])
        return Family(keys, ret.__getitem__)
    def _an_element_(self):
        """
        Return an element of ``self``.

        The element mixes an `E`, a `K`, a `K^{-1}` power and a
        `q`-multiple of an `F` so that generic code exercises several
        parts of the algebra.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q.an_element()  # optional - gap_packages
            1 + (q)*F[a1] + E[a1] + (q^2-1-q^-2 + q^-4)*[ K1 ; 2 ]
             + K1 + (-q^-1 + q^-3)*K1[ K1 ; 1 ]
        """
        i = self._cartan_type.index_set()[0]
        al = self._cartan_type.root_system().root_lattice().simple_root(i)
        return self.E()[al] + self.K()[i] + self.K_inverse()[i]**2 + self.q()*self.F()[al]
def some_elements(self):
"""
Return some elements of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',1]) # optional - gap_packages
sage: Q.some_elements() # optional - gap_packages
[1 + (q)*F[a1] + E[a1] + (q^2-1-q^-2 + q^-4)*[ K1 ; 2 ]
+ K1 + (-q^-1 + q^-3)*K1[ K1 ; 1 ],
K1, F[a1], E[a1]]
"""
return ([self.an_element()] + list(self.K())
+ list(self.F_simple()) + list(self.E_simple()))
    def q(self):
        """
        Return the parameter `q`.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',3])  # optional - gap_packages
            sage: Q.q()  # optional - gap_packages
            q
            sage: zeta3 = CyclotomicField(3).gen()  # optional - gap_packages
            sage: Q = QuantumGroup(['B',2], q=zeta3)  # optional - gap_packages
            sage: Q.q()  # optional - gap_packages
            zeta3
        """
        return self._q
# Misc
# ----
    def _Hom_(self, Y, category):
        """
        Return the homset from ``self`` to ``Y`` in the category
        ``category``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: B = Q.lower_half()  # optional - gap_packages
            sage: H = Hom(Q, B); H  # optional - gap_packages
            Set of Morphisms from Quantum Group of type ['A', 2] with q=q to
             Lower Half of Quantum Group of type ['A', 2] with q=q in Category of rings
            sage: type(H)  # optional - gap_packages
            <class '...QuantumGroupHomset_with_category_with_equality_by_id'>
        """
        if category is not None and not category.is_subcategory(Rings()):
            raise TypeError("%s is not a subcategory of Rings()"%category)
        if Y not in Rings():
            raise TypeError("%s is not a ring"%Y)
        return QuantumGroupHomset(self, Y, category=category)
    def highest_weight_module(self, weight):
        """
        Return the highest weight module of weight ``weight`` of ``self``.

        INPUT:

        - ``weight`` -- a dominant weight, given by its coefficients
          on the fundamental weights

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q.highest_weight_module([1,3])  # optional - gap_packages
            Highest weight module of weight Lambda[1] + 3*Lambda[2] of
             Quantum Group of type ['A', 2] with q=q
        """
        return HighestWeightModule(self, weight)
    def lower_half(self):
        """
        Return the lower half of the quantum group ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: Q.lower_half()  # optional - gap_packages
            Lower Half of Quantum Group of type ['A', 2] with q=q
        """
        return LowerHalfQuantumGroup(self)
# Hopf structure
# --------------
    def coproduct(self, elt, n=1):
        r"""
        Return the coproduct of ``elt`` (iterated ``n`` times).

        The comultiplication `\Delta \colon U_q(\mathfrak{g}) \to
        U_q(\mathfrak{g}) \otimes U_q(\mathfrak{g})` is defined by

        .. MATH::

            \begin{aligned}
            \Delta(E_i) &= E_i \otimes 1 + K_i \otimes E_i, \\
            \Delta(F_i) &= F_i \otimes K_i^{-1} + 1 \otimes F_i, \\
            \Delta(K_i) &= K_i \otimes K_i.
            \end{aligned}

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2])  # optional - gap_packages
            sage: [Q.coproduct(e) for e in Q.E()]  # optional - gap_packages
            [1*(E[a1]<x>1) + 1*(K1<x>E[a1]),
             1*(E[a1+a2]<x>1) + 1*(K1*K2<x>E[a1+a2]) + q^2-q^-2*(K2*E[a1]<x>E[a2]),
             q^4-q^2-1 + q^-2*(E[a1]<x>E[a2]^(2)) + 1*(E[a1+2*a2]<x>1)
              + 1*(K1<x>E[a1+2*a2]) + q-q^-1*(K1*K2[ K2 ; 1 ]<x>E[a1+2*a2])
              + q-q^-1*(K2*E[a1+a2]<x>E[a2]) + q^5-2*q^3
              + 2*q^-1-q^-3*(K2[ K2 ; 1 ]*E[a1]<x>E[a2]^(2)),
             1*(E[a2]<x>1) + 1*(K2<x>E[a2])]
            sage: [Q.coproduct(f, 2) for f in Q.F_simple()]  # optional - gap_packages
            [1*(1<x>1<x>F[a1]) + -q^2 + q^-2*(1<x>F[a1]<x>[ K1 ; 1 ])
              + 1*(1<x>F[a1]<x>K1) + q^4-2 + q^-4*(F[a1]<x>[ K1 ; 1 ]<x>[ K1 ; 1 ])
              + -q^2 + q^-2*(F[a1]<x>[ K1 ; 1 ]<x>K1) + -q^2
              + q^-2*(F[a1]<x>K1<x>[ K1 ; 1 ]) + 1*(F[a1]<x>K1<x>K1),
             1*(1<x>1<x>F[a2]) + -q + q^-1*(1<x>F[a2]<x>[ K2 ; 1 ])
              + 1*(1<x>F[a2]<x>K2) + q^2-2 + q^-2*(F[a2]<x>[ K2 ; 1 ]<x>[ K2 ; 1 ])
              + -q + q^-1*(F[a2]<x>[ K2 ; 1 ]<x>K2) + -q
              + q^-1*(F[a2]<x>K2<x>[ K2 ; 1 ]) + 1*(F[a2]<x>K2<x>K2)]
        """
        # QuaGroup's comultiplication map targets the (n+1)-fold tensor
        # power, i.e. n iterations of Delta.
        D = self._libgap.ComultiplicationMap(n+1)
        # TODO: This is not the correct parent. Need to create it.
        return self.element_class(self, libgap.Image(D, elt._libgap))
    def antipode(self, elt):
        r"""
        Return the antipode of ``elt``.

        The antipode `S \colon U_q(\mathfrak{g}) \to U_q(\mathfrak{g})`
        is the anti-automorphism defined by

        .. MATH::

            S(E_i) = -K_i^{-1}E_i, \qquad
            S(F_i) = -F_iK_i, \qquad
            S(K_i) = K_i^{-1}.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2])  # optional - gap_packages
            sage: [Q.antipode(f) for f in Q.F()]  # optional - gap_packages
            [(-1)*F[a1]*K1,
             (-q^6 + q^2)*F[a1]*F[a2]*K1*K2 + (-q^4)*F[a1+a2]*K1*K2,
             (-q^8 + q^6 + q^4-q^2)*F[a1]*F[a2]^(2)*K1
              + (-q^9 + 2*q^7-2*q^3 + q)*F[a1]*F[a2]^(2)*K1*K2[ K2 ; 1 ]
              + (-q^5 + q^3)*F[a1+a2]*F[a2]*K1
              + (-q^6 + 2*q^4-q^2)*F[a1+a2]*F[a2]*K1*K2[ K2 ; 1 ]
              + (-q^4)*F[a1+2*a2]*K1 + (-q^5 + q^3)*F[a1+2*a2]*K1*K2[ K2 ; 1 ],
             (-1)*F[a2]*K2]
        """
        # Delegate to QuaGroup's antipode map and wrap the image.
        S = self._libgap.AntipodeMap()
        return self.element_class(self, libgap.Image(S, elt._libgap))
    def counit(self, elt):
        r"""
        Return the counit of ``elt``.

        The counit `\varepsilon \colon U_q(\mathfrak{g}) \to \QQ(q)` is
        defined by

        .. MATH::

            \varepsilon(E_i) = \varepsilon(F_i) = 0, \qquad
            \varepsilon(K_i) = 1.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2])  # optional - gap_packages
            sage: x = Q.an_element()^2  # optional - gap_packages
            sage: Q.counit(x)  # optional - gap_packages
            4
            sage: Q.counit(Q.one())  # optional - gap_packages
            1
            sage: Q.counit(Q.zero())  # optional - gap_packages
            0
        """
        # We need to extract the constant coefficient because the
        # counit in QuaGroup doesn't support it
        R = self.base_ring()
        # The external representation alternates [monomial, coefficient];
        # a monomial of length 0 is the constant term.
        ext_rep = list(elt._libgap.ExtRepOfObj())
        constant = R.zero()
        for i in range(len(ext_rep)//2):
            if ext_rep[2*i].Length() == 0:
                ext_rep.pop(2*i) # Pop the key
                constant = R(str(ext_rep.pop(2*i))) # Pop the coefficient
                break
        # To reconstruct, we need the following
        F = libgap.eval('ElementsFamily')(libgap.eval('FamilyObj')(self._libgap))
        elt = F.ObjByExtRep(ext_rep)
        co = self._libgap.CounitMap()
        # Apply the counit to the constant-free part and add the
        # constant term back in.
        return R( str(co(elt)) ) + constant
    class Element(QuaGroupModuleElement):
        # Elements of the quantum group; thin wrappers around the GAP
        # (QuaGroup) elements with Sage-side arithmetic and (anti)
        # automorphisms.
        def _mul_(self, other):
            r"""
            Multiply ``self`` and ``other``.

            The product is computed by GAP, which rewrites it in the
            PBW basis.

            EXAMPLES::

                sage: Q = QuantumGroup(['G',2])  # optional - gap_packages
                sage: F1, F2 = Q.F_simple()  # optional - gap_packages
                sage: F1 * F2 * F1 * F2  # optional - gap_packages
                F[a1]*F[a1+a2]*F[a2] + (q^7 + q^5 + q + q^-1)*F[a1]^(2)*F[a2]^(2)
                sage: E1, E2 = Q.E_simple()  # optional - gap_packages
                sage: F1 * E1  # optional - gap_packages
                F[a1]*E[a1]
                sage: E1 * F1  # optional - gap_packages
                F[a1]*E[a1] + [ K1 ; 1 ]
            """
            return self.__class__(self.parent(), self._libgap * other._libgap)

        def bar(self):
            r"""
            Return the bar involution on ``self``.

            The bar involution is defined by

            .. MATH::

                \overline{E_i} = E_i, \qquad\qquad
                \overline{F_i} = F_i, \qquad\qquad
                \overline{K_i} = K_i^{-1}.

            EXAMPLES::

                sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
                sage: [gen.bar() for gen in Q.gens()]  # optional - gap_packages
                [F[a1],
                 (q-q^-1)*F[a1]*F[a2] + F[a1+a2],
                 F[a2],
                 (-q + q^-1)*[ K1 ; 1 ] + K1, K1,
                 (-q + q^-1)*[ K2 ; 1 ] + K2, K2,
                 E[a1],
                 (-q^2 + 1)*E[a1]*E[a2] + (q^2)*E[a1+a2],
                 E[a2]]
            """
            bar = self.parent()._libgap.BarAutomorphism()
            return self.__class__(self.parent(), libgap.Image(bar, self._libgap))

        def omega(self):
            r"""
            Return the action of the `\omega` automorphism on ``self``.

            The `\omega` automorphism is defined by

            .. MATH::

                \omega(E_i) = F_i, \qquad\qquad
                \omega(F_i) = E_i, \qquad\qquad
                \omega(K_i) = K_i^{-1}.

            EXAMPLES::

                sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
                sage: [gen.omega() for gen in Q.gens()]  # optional - gap_packages
                [E[a1],
                 (-q)*E[a1+a2],
                 E[a2],
                 (-q + q^-1)*[ K1 ; 1 ] + K1,
                 K1,
                 (-q + q^-1)*[ K2 ; 1 ] + K2,
                 K2,
                 F[a1],
                 (-q^-1)*F[a1+a2],
                 F[a2]]
            """
            omega = self.parent()._libgap.AutomorphismOmega()
            return self.__class__(self.parent(), libgap.Image(omega, self._libgap))

        def tau(self):
            r"""
            Return the action of the `\tau` anti-automorphism on ``self``.

            The `\tau` anti-automorphism is defined by

            .. MATH::

                \tau(E_i) = E_i, \qquad\qquad
                \tau(F_i) = F_i, \qquad\qquad
                \tau(K_i) = K_i^{-1}.

            EXAMPLES::

                sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
                sage: [gen.tau() for gen in Q.gens()]  # optional - gap_packages
                [F[a1],
                 (-q^2 + 1)*F[a1]*F[a2] + (-q)*F[a1+a2],
                 F[a2],
                 (-q + q^-1)*[ K1 ; 1 ] + K1,
                 K1,
                 (-q + q^-1)*[ K2 ; 1 ] + K2,
                 K2,
                 E[a1],
                 (q-q^-1)*E[a1]*E[a2] + (-q)*E[a1+a2],
                 E[a2]]
            """
            tau = self.parent()._libgap.AntiAutomorphismTau()
            return self.__class__(self.parent(), libgap.Image(tau, self._libgap))

        def braid_group_action(self, braid):
            r"""
            Return the action of the braid group element ``braid``.

            The braid group operator `T_i \colon U_q(\mathfrak{g}) \to
            U_q(\mathfrak{g})` is defined by

            .. MATH::

                \begin{aligned}
                T_i(E_i) &= -F_iK_i, \\
                T_i(E_j) &= \sum_{k=0}^{-a_{ij}} (-1)^k q_i^{-k} E_i^{(-a_{ij}-k)} E_j E_i^{(k)} \text{ if } i \neq j,\\
                T_i(K_j) &= K_jK_i^{a_{ij}}, \\
                T_i(F_i) &= -K_i^{-1}E_i, \\
                T_i(F_j) &= \sum_{k=0}^{-a_{ij}} (-1)^k q_i^{-k} F_i^{(k)} F_j F_i^{(-a_{ij}-k)} \text{ if } i \neq j,
                \end{aligned}

            where `a_{ij} = \langle \alpha_j, \alpha_i^\vee \rangle` is the
            `(i,j)`-entry of the Cartan matrix associated to `\mathfrak{g}`.

            INPUT:

            - ``braid`` -- a reduced word of a braid group element;
              a negative letter ``-i`` denotes the inverse operator
              `T_i^{-1}`

            EXAMPLES::

                sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
                sage: F1 = Q.F_simple()[1]  # optional - gap_packages
                sage: F1.braid_group_action([1])  # optional - gap_packages
                (q-q^-1)*[ K1 ; 1 ]*E[a1] + (-1)*K1*E[a1]
                sage: F1.braid_group_action([1,2])  # optional - gap_packages
                F[a2]
                sage: F1.braid_group_action([2,1])  # optional - gap_packages
                (-q^3 + 3*q-3*q^-1 + q^-3)*[ K1 ; 1 ]*[ K2 ; 1 ]*E[a1]*E[a2]
                 + (q^3-2*q + q^-1)*[ K1 ; 1 ]*[ K2 ; 1 ]*E[a1+a2]
                 + (q^2-2 + q^-2)*[ K1 ; 1 ]*K2*E[a1]*E[a2]
                 + (-q^2 + 1)*[ K1 ; 1 ]*K2*E[a1+a2]
                 + (q^2-2 + q^-2)*K1*[ K2 ; 1 ]*E[a1]*E[a2]
                 + (-q^2 + 1)*K1*[ K2 ; 1 ]*E[a1+a2]
                 + (-q + q^-1)*K1*K2*E[a1]*E[a2] + (q)*K1*K2*E[a1+a2]
                sage: F1.braid_group_action([1,2,1]) == F1.braid_group_action([2,1,2])  # optional - gap_packages
                True
                sage: F1.braid_group_action([]) == F1  # optional - gap_packages
                True
            """
            if not braid:
                return self
            QU = self.parent()._libgap
            tau = QU.AntiAutomorphismTau()
            # Compose the T_i maps on the GAP side; the inverse braid
            # operator is realized as the conjugate tau * T_i * tau.
            ret = QU.IdentityMapping()
            for i in braid:
                if i < 0:
                    i = -i
                    T = QU.AutomorphismTalpha(i)
                    ret *= tau * T * tau
                else:
                    ret *= QU.AutomorphismTalpha(i)
            return self.__class__(self.parent(), libgap.Image(ret, self._libgap))

        def _et(self, i):
            r"""
            Return the action of the Kashiwara operator `\widetilde{e}_i`
            on ``self``.

            EXAMPLES::

                sage: Q = QuantumGroup(['G',2])  # optional - gap_packages
                sage: [(g.e_tilde(1), g.e_tilde(2)) for g in Q.F()]  # optional - gap_packages
                [(1, 0), (0, F[a1]^(3)), (0, F[a1]^(2)),
                 (0, F[3*a1+a2]), (0, F[a1]), (0, 1)]

            TESTS::

                sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
                sage: Q.one()._et(1)  # optional - gap_packages
                0
                sage: Q.zero().e_tilde(1)  # optional - gap_packages
                0
            """
            if not self: # self == 0
                return self
            ret = self._libgap.Ealpha(i)
            # GAP returns a false/zero value when the operator kills
            # the element; translate that to the Sage zero.
            if not ret:
                return self.parent().zero()
            return self.__class__(self.parent(), ret)

        def _ft(self, i):
            r"""
            Return the action of the Kashiwara operator `\widetilde{f}_i`
            on ``self``.

            EXAMPLES::

                sage: Q = QuantumGroup(['G',2])  # optional - gap_packages
                sage: [(g._ft(1), g._ft(2)) for g in Q.F()]  # optional - gap_packages
                [(F[a1]^(2), F[a1+a2]),
                 (F[a1]*F[3*a1+a2], F[3*a1+2*a2]),
                 (F[a1]*F[2*a1+a2], F[a1+a2]^(2)),
                 (F[a1]*F[3*a1+2*a2], F[a1+a2]^(3)),
                 (F[a1]*F[a1+a2], F[a1+a2]*F[a2]),
                 (F[a1]*F[a2], F[a2]^(2))]
                sage: Q.one().f_tilde([1,2,1,1,2,2])  # optional - gap_packages
                F[2*a1+a2]*F[a1+a2]*F[a2]

            TESTS::

                sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
                sage: Q.zero().f_tilde(1)  # optional - gap_packages
                0
            """
            if not self: # self == 0
                return self
            ret = self._libgap.Falpha(i)
            # See _et: a falsy GAP result means the element was killed.
            if not ret:
                return self.parent().zero()
            return self.__class__(self.parent(), ret)
#####################################################################
## Morphisms
class QuantumGroupMorphism(Morphism):
    r"""
    A morphism whose domain is a quantum group.

    The morphism is determined by the images of the algebra
    generators of the domain; the actual computation is delegated to
    QuaGroup's ``QEAHomomorphism``.
    """
    def __init__(self, parent, im_gens, check=True):
        r"""
        Initialize ``self``.

        INPUT:

        - ``parent`` -- the homset containing this morphism
        - ``im_gens`` -- the images of the algebra generators, in the
          same order as ``domain().algebra_generators()``
        - ``check`` -- (default: ``True``) verify the number of images

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = Q.hom([E, Ki, K, F])  # optional - gap_packages
            sage: TestSuite(phi).run(skip="_test_category")  # optional - gap_packages
        """
        self._repr_type_str = "Quantum group homomorphism"
        Morphism.__init__(self, parent)
        Q = parent.domain()
        self._im_gens = tuple(im_gens)
        if check and len(im_gens) != len(Q.algebra_generators()):
            raise ValueError("number of images must equal the number of generators")
        self._libgap = Q._libgap.QEAHomomorphism(parent.codomain(), im_gens)

    def __reduce__(self):
        r"""
        For pickling.

        Reconstructs the morphism by calling the homset on the stored
        generator images.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = Q.hom([E, Ki, K, F])  # optional - gap_packages
            sage: loads(dumps(phi)) == phi  # optional - gap_packages
            True
        """
        return (self.parent(), (self._im_gens,))

    def _call_(self, val):
        r"""
        Return the image of ``val`` under ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = Q.hom([E, Ki, K, F])  # optional - gap_packages
            sage: phi(F)  # optional - gap_packages
            E[a1]
            sage: phi(E*F)  # optional - gap_packages
            F[a1]*E[a1]
            sage: phi(F*E)  # optional - gap_packages
            F[a1]*E[a1] + [ K1 ; 1 ]
            sage: phi(E*K)  # optional - gap_packages
            (-q + q^-1)*F[a1]*[ K1 ; 1 ] + F[a1]*K1
            sage: phi(F*E) == phi(F) * phi(E)  # optional - gap_packages
            True
        """
        # Fall back to string conversion when the codomain cannot
        # accept the raw GAP image directly.
        try:
            return self.codomain()(self._libgap.ImageElm(val))
        except TypeError:
            return self.codomain()(str(self._libgap.ImageElm(val)))

    def __richcmp__(self, other, op):
        r"""
        Rich comparison of ``self`` and ``other`` by ``op``.

        Two morphisms are equal when they have the same type, the same
        domain (by identity), and the same generator images; this is
        why equal image tuples over different domains compare unequal.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = Q.hom([E, Ki, K, F])  # optional - gap_packages
            sage: psi = Q.hom([F, K, Ki, E])  # optional - gap_packages
            sage: phi == Q.hom([E, Ki, K, F])  # optional - gap_packages
            True
            sage: phi == psi  # optional - gap_packages
            False
            sage: psi != Q.hom([F, K, Ki, E])  # optional - gap_packages
            False
            sage: phi != psi  # optional - gap_packages
            True

            sage: QB = QuantumGroup(['B',3])  # optional - gap_packages
            sage: QC = QuantumGroup(['C',3])  # optional - gap_packages
            sage: x = ZZ.one()  # optional - gap_packages
            sage: phi = QB.hom([x]*len(QB.algebra_generators()))  # optional - gap_packages
            sage: psi = QC.hom([x]*len(QC.algebra_generators()))  # optional - gap_packages
            sage: phi.im_gens() == psi.im_gens()  # optional - gap_packages
            True
            sage: phi == psi  # optional - gap_packages
            False
        """
        if op == op_EQ:
            return (type(self) == type(other)
                    and self.domain() is other.domain()
                    and self._im_gens == other._im_gens)
        if op == op_NE:
            return not (self == other)
        return NotImplemented

    def im_gens(self):
        r"""
        Return the image of the generators under ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = Q.hom([E, Ki, K, F])  # optional - gap_packages
            sage: phi.im_gens()  # optional - gap_packages
            (E[a1], (-q + q^-1)*[ K1 ; 1 ] + K1, K1, F[a1])
        """
        return self._im_gens

    def _repr_defn(self):
        r"""
        Used in constructing the string representation of ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = Q.hom([E, Ki, K, F])  # optional - gap_packages
            sage: print(phi._repr_defn())  # optional - gap_packages
            F[a1] |--> E[a1]
            K1 |--> (-q + q^-1)*[ K1 ; 1 ] + K1
            (-q + q^-1)*[ K1 ; 1 ] + K1 |--> K1
            E[a1] |--> F[a1]
        """
        return '\n'.join('%s |--> %s'%(gen, self._im_gens[i])
                         for i, gen in enumerate(self.domain().algebra_generators()))
class QuantumGroupHomset(HomsetWithBase):
    r"""
    The homset whose domain is a quantum group.
    """
    def __call__(self, im_gens, check=True):
        r"""
        Construct an element of ``self``.

        INPUT:

        - ``im_gens`` -- either a list of images of the algebra
          generators, or an existing :class:`QuantumGroupMorphism`
          (which is rebuilt in this homset if its parent differs)
        - ``check`` -- (default: ``True``) passed through to the
          morphism constructor

        EXAMPLES::

            sage: Q = QuantumGroup(['A',1])  # optional - gap_packages
            sage: H = Hom(Q, Q)  # optional - gap_packages
            sage: F, K, Ki, E = Q.gens()  # optional - gap_packages
            sage: phi = H([E, Ki, K, F]); phi  # optional - gap_packages
            Quantum group homomorphism endomorphism of Quantum Group of type ['A', 1] with q=q
              Defn: F[a1] |--> E[a1]
                    K1 |--> (-q + q^-1)*[ K1 ; 1 ] + K1
                    (-q + q^-1)*[ K1 ; 1 ] + K1 |--> K1
                    E[a1] |--> F[a1]
            sage: H(phi) == phi  # optional - gap_packages
            True
            sage: H2 = Hom(Q, Q, Modules(Fields()))  # optional - gap_packages
            sage: H == H2  # optional - gap_packages
            False
            sage: H2(phi)  # optional - gap_packages
            Quantum group homomorphism endomorphism of Quantum Group of type ['A', 1] with q=q
              Defn: F[a1] |--> E[a1]
                    K1 |--> (-q + q^-1)*[ K1 ; 1 ] + K1
                    (-q + q^-1)*[ K1 ; 1 ] + K1 |--> K1
                    E[a1] |--> F[a1]
        """
        if isinstance(im_gens, QuantumGroupMorphism):
            if im_gens.parent() is self:
                return im_gens
            if im_gens.parent() != self:
                # Rebuild the morphism from its generator images so it
                # lives in this homset.
                return QuantumGroupMorphism(self, im_gens.im_gens())
            # NOTE(review): reached only when the parent compares equal
            # but is not identical; looks like it should also rebuild —
            # confirm before changing.
            raise TypeError("unable to coerce {}".format(im_gens))
        return QuantumGroupMorphism(self, im_gens)
def projection_lower_half(Q):
    r"""
    Return the projection onto the lower half of the quantum group.

    The projection fixes each simple `F_i` and sends every `K_i`,
    `K_i^{-1}`, and `E_i` to zero.

    EXAMPLES::

        sage: from sage.algebras.quantum_groups.quantum_group_gap import projection_lower_half
        sage: Q = QuantumGroup(['G',2])  # optional - gap_packages
        sage: phi = projection_lower_half(Q); phi  # optional - gap_packages
        Quantum group homomorphism endomorphism of Quantum Group of type ['G', 2] with q=q
          Defn: F[a1] |--> F[a1]
                F[a2] |--> F[a2]
                K1 |--> 0
                K2 |--> 0
                (-q + q^-1)*[ K1 ; 1 ] + K1 |--> 0
                (-q^3 + q^-3)*[ K2 ; 1 ] + K2 |--> 0
                E[a1] |--> 0
                E[a2] |--> 0
        sage: all(phi(f) == f for f in Q.F())  # optional - gap_packages
        True
        sage: all(phi(e) == Q.zero() for e in Q.E())  # optional - gap_packages
        True
        sage: all(phi(K) == Q.zero() for K in Q.K())  # optional - gap_packages
        True
    """
    # Generator order is F_i, K_i, K_i^{-1}, E_i: keep the F's, kill
    # the remaining 3 * rank generators.
    rank = len(Q._cartan_type.index_set())
    images = list(Q.F_simple()) + [Q.zero()] * (3 * rank)
    return Hom(Q, Q)(images)
#####################################################################
## Representations
class QuaGroupRepresentationElement(QuaGroupModuleElement):
    """
    Element of a quantum group representation.
    """
    def __reduce__(self):
        """
        Used in pickling.

        Reconstructs the element from its monomial coefficients with
        respect to the module basis.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2])  # optional - gap_packages
            sage: F1, F2 = Q.F_simple()  # optional - gap_packages
            sage: q = Q.q()  # optional - gap_packages
            sage: V = Q.highest_weight_module([2,1])  # optional - gap_packages
            sage: v = V.highest_weight_vector()  # optional - gap_packages
            sage: x = (2 - q) * v + F1*v + q*F2*F1*v  # optional - gap_packages
            sage: loads(dumps(x)) == x  # optional - gap_packages
            True
        """
        return (self.parent(), (self.monomial_coefficients(),))

    def _acted_upon_(self, scalar, self_on_left=False):
        r"""
        Return the action of ``scalar`` on ``self``.

        Elements of the acting quantum group act on the left via GAP's
        ``^`` action; everything else is deferred to the generic
        module-element action.

        EXAMPLES::

            sage: Q = QuantumGroup(['B',2])  # optional - gap_packages
            sage: F1, F2 = Q.F_simple()  # optional - gap_packages
            sage: q = Q.q()  # optional - gap_packages
            sage: V = Q.highest_weight_module([2,1])  # optional - gap_packages
            sage: v = V.highest_weight_vector()  # optional - gap_packages
            sage: F1 * v  # optional - gap_packages
            F[a1]*v0
            sage: F2 * v  # optional - gap_packages
            F[a2]*v0
            sage: F1^2 * v  # optional - gap_packages
            (q^2 + q^-2)*F[a1]^(2)*v0
            sage: F2^2 * v  # optional - gap_packages
            0*v0
            sage: (F1 * F2) * v  # optional - gap_packages
            F[a1]*F[a2]*v0
            sage: F1 * (F2 * v)  # optional - gap_packages
            F[a1]*F[a2]*v0
            sage: (2 - q) * v + F1*v + q*F2*F1*v  # optional - gap_packages
            (-q + 2)*1*v0 + F[a1]*v0 + (q^3)*F[a1]*F[a2]*v0 + (q)*F[a1+a2]*v0
        """
        try:
            if scalar.parent() is self.parent()._Q:
                if self_on_left: # Only act: scalar * v
                    return None
                return self.__class__(self.parent(), scalar._libgap ** self._libgap)
        except AttributeError:
            pass
        return QuaGroupModuleElement._acted_upon_(self, scalar, self_on_left)

    # Left scalar multiplication goes through the same action.
    _lmul_ = _acted_upon_

    def _et(self, i):
        r"""
        Return the action of `\widetilde{e}_i` on ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1])  # optional - gap_packages
            sage: v = V.highest_weight_vector()  # optional - gap_packages
            sage: v._et(1)  # optional - gap_packages
            0*v0
            sage: V.zero().e_tilde(1)  # optional - gap_packages
            0*v0
        """
        if not self: # self == 0
            return self
        V = self.parent()
        ret = V._libgap.Ealpha(self._libgap, i)
        return self.__class__(V, ret)

    def _ft(self, i):
        r"""
        Return the action of `\widetilde{f}_i` on ``self``.

        EXAMPLES::

            sage: Q = QuantumGroup(['C',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1])  # optional - gap_packages
            sage: v = V.highest_weight_vector()  # optional - gap_packages
            sage: v._ft(1)  # optional - gap_packages
            F[a1]*v0
            sage: v._ft(2)  # optional - gap_packages
            F[a2]*v0
            sage: v.f_tilde([1,1])  # optional - gap_packages
            0*v0
            sage: v.f_tilde([2,2])  # optional - gap_packages
            0*v0
            sage: v.f_tilde([2,1,1])  # optional - gap_packages
            (-q^-3)*F[a1]*F[a1+a2]*v0 + (-q^-4)*F[2*a1+a2]*v0
            sage: v.f_tilde([1,2,2])  # optional - gap_packages
            F[a1+a2]*F[a2]*v0
            sage: V.zero().f_tilde(1)  # optional - gap_packages
            0*v0
        """
        if not self: # self == 0
            return self
        V = self.parent()
        ret = V._libgap.Falpha(self._libgap, i)
        return self.__class__(V, ret)

    def monomial_coefficients(self, copy=True):
        r"""
        Return the dictionary of ``self`` whose keys are the basis indices
        and the values are coefficients.

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1])  # optional - gap_packages
            sage: v = V.highest_weight_vector()  # optional - gap_packages
            sage: F1, F2 = Q.F_simple()  # optional - gap_packages
            sage: q = Q.q()  # optional - gap_packages
            sage: x = v + F1*v + q*F2*F1*v; x  # optional - gap_packages
            1*v0 + F[a1]*v0 + (q^2)*F[a1]*F[a2]*v0 + (q)*F[a1+a2]*v0
            sage: sorted(x.monomial_coefficients().items(), key=str)  # optional - gap_packages
            [(0, 1), (1, 1), (3, q^2), (4, q)]
        """
        # Coefficients come back as GAP objects; convert through their
        # string form into the Sage base ring and drop the zeros.
        R = self.parent()._Q.base_ring()
        B = self.parent()._libgap.Basis()
        data = [R(str(c)) for c in libgap.Coefficients(B, self._libgap)]
        return {i: c for i,c in enumerate(data) if c != 0}

    def _vector_(self, R=None):
        """
        Return ``self`` as a vector.

        INPUT:

        - ``R`` -- (optional) the base ring of the free module

        EXAMPLES::

            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1])  # optional - gap_packages
            sage: v = V.highest_weight_vector()  # optional - gap_packages
            sage: vector(v)  # optional - gap_packages
            (1, 0, 0, 0, 0, 0, 0, 0)
            sage: F1, F2 = Q.F_simple()  # optional - gap_packages
            sage: q = Q.q()  # optional - gap_packages
            sage: x = v + F1*v + q*F2*F1*v; x  # optional - gap_packages
            1*v0 + F[a1]*v0 + (q^2)*F[a1]*F[a2]*v0 + (q)*F[a1+a2]*v0
            sage: vector(x)  # optional - gap_packages
            (1, 1, 0, q^2, q, 0, 0, 0)
        """
        # Copy the zero vector so we do not mutate the module's cached
        # zero, then fill in the nonzero coordinates.
        V = self.parent()._dense_free_module(R)
        v = copy(V.zero())
        for i,c in self.monomial_coefficients().items():
            v[i] = c
        return v
class CrystalGraphVertex(SageObject):
    r"""
    Helper class used as the vertices of a crystal graph.

    A vertex wraps the module ``V`` it belongs to together with the
    string representation ``s`` of the basis vector; hashing and
    equality are determined by ``s`` alone.
    """
    def __init__(self, V, s):
        """
        Initialize ``self``.

        INPUT:

        - ``V`` -- the module containing the vector
        - ``s`` -- the string representation of the vector

        EXAMPLES::

            sage: from sage.algebras.quantum_groups.quantum_group_gap import CrystalGraphVertex
            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0])  # optional - gap_packages
            sage: v = CrystalGraphVertex(V, '<F2*v0>')  # optional - gap_packages
            sage: TestSuite(v).run()  # optional - gap_packages
        """
        self.V = V
        self.s = s

    def __hash__(self):
        """
        Return the hash of ``self``.

        Consistent with ``__eq__``: both depend only on ``self.s``.

        EXAMPLES::

            sage: from sage.algebras.quantum_groups.quantum_group_gap import CrystalGraphVertex
            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0])  # optional - gap_packages
            sage: v = CrystalGraphVertex(V, '<F2*v0>')  # optional - gap_packages
            sage: hash(v) == hash('<F2*v0>')  # optional - gap_packages
            True
        """
        return hash(self.s)

    def __eq__(self, other):
        """
        Check equality of ``self`` and ``other``.

        Two vertices are equal when their strings agree; the module
        ``V`` is deliberately not compared.

        EXAMPLES::

            sage: from sage.algebras.quantum_groups.quantum_group_gap import CrystalGraphVertex
            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0])  # optional - gap_packages
            sage: v = CrystalGraphVertex(V, '<F2*v0>')  # optional - gap_packages
            sage: vp = CrystalGraphVertex(V, '<F2*v0>')  # optional - gap_packages
            sage: v == vp  # optional - gap_packages
            True
            sage: vpp = CrystalGraphVertex(V, '<1*v0>')  # optional - gap_packages
            sage: v == vpp  # optional - gap_packages
            False
        """
        return isinstance(other, CrystalGraphVertex) and self.s == other.s

    def _repr_(self):
        """
        Return a string representation of ``self``.

        EXAMPLES::

            sage: from sage.algebras.quantum_groups.quantum_group_gap import CrystalGraphVertex
            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0])  # optional - gap_packages
            sage: CrystalGraphVertex(V, '<F2*v0>')  # optional - gap_packages
            <F2*v0>
        """
        return self.s

    def _latex_(self):
        r"""
        Return a latex representation of ``self``.

        EXAMPLES::

            sage: from sage.algebras.quantum_groups.quantum_group_gap import CrystalGraphVertex
            sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0])  # optional - gap_packages
            sage: v = CrystalGraphVertex(V, '<F2*v0>')  # optional - gap_packages
            sage: latex(v)  # optional - gap_packages
            \langle F_{\alpha_{1} + \alpha_{2}} v_0 \rangle
        """
        # Essentially same as QuaGroupModuleElement._latex_
        from sage.misc.latex import latex
        ret = self.s[1:-1] # Strip leading '<' and trailing '>'
        # QuaGroup numbers the F/E generators by positive-root index;
        # replace them by the corresponding root in latex form.
        for i,al in enumerate(self.V._pos_roots):
            ret = ret.replace('F%s'%(i+1), 'F_{%s}'%latex(al))
            ret = ret.replace('E%s'%(i+1), 'E_{%s}'%latex(al))
        for i,ii in enumerate(self.V._cartan_type.index_set()):
            ret = ret.replace('K%s'%(i+1), 'K_{%s}'%ii)
        # Fugly string parsing to get good looking latex
        # TODO: Find a better way
        ret = ret.replace('(', '{(')
        ret = ret.replace(')', ')}')
        ret = ret.replace('v0', 'v_0')
        ret = ret.replace('*', ' ')
        ret = ret.replace('<x>', ' \\otimes ')
        # Wrap each q-exponent in braces: q^-12 -> q^{-12}.
        c = re.compile(r"q\^-?[0-9]*")
        for m in reversed(list(c.finditer(ret))):
            ret = ret[:m.start()+2]+'{'+ret[m.start()+2:m.end()]+'}'+ret[m.end():]
        return '\\langle {} \\rangle'.format(ret)
class QuantumGroupModule(Parent, UniqueRepresentation):
    r"""
    Abstract base class for quantum group representations.

    Subclasses are expected to set the attribute ``_libgap`` (the
    underlying GAP module object) before calling :meth:`__init__`.
    """
    def __init__(self, Q, category):
        r"""
        Initialize ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: TestSuite(V).run() # optional - gap_packages
        """
        # Cache handles to the ambient quantum group and its GAP data so
        # methods need not go through ``Q`` repeatedly.
        self._Q = Q
        self._libgap_q = Q._libgap_q
        self._libgap_base = Q._libgap_base
        self._cartan_type = Q._cartan_type
        self._pos_roots = Q._pos_roots
        Parent.__init__(self, base=Q.base_ring(), category=category)
    def _latex_(self):
        r"""
        Return a latex representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[0] # optional - gap_packages
            sage: latex(S) # optional - gap_packages # random (depends on dot2tex)
            \begin{tikzpicture}
            ...
            \end{tikzpicture}
        """
        # The module is rendered by drawing its crystal graph.
        from sage.misc.latex import latex
        return latex(self.crystal_graph())
    def gap(self):
        r"""
        Return the gap representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: V.gap() # optional - gap_packages
            <8-dimensional left-module over QuantumUEA( <root system of type A2>,
             Qpar = q )>
        """
        # ``_libgap`` is populated by the concrete subclass.
        return self._libgap
    _libgap_ = _gap_ = gap
    def _element_constructor_(self, elt):
        """
        Construct an element of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: q = Q.q() # optional - gap_packages
            sage: V(0) # optional - gap_packages
            0*v0
            sage: V({1: q^2 - q^-2, 3: 2}) # optional - gap_packages
            (q^2-q^-2)*F[a1]*F[a2]*v0 + (2)*F[a1]*F[a2]*v0
        """
        # Anything falsy (0, None, empty dict) maps to the zero element.
        if not elt:
            return self.zero()
        # A dict is interpreted as {basis index: coefficient}.
        if isinstance(elt, dict):
            return self._from_dict(elt)
        # Otherwise assume ``elt`` is a GAP element and wrap it directly.
        return self.element_class(self, elt)
    @cached_method
    def basis(self):
        r"""
        Return a basis of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: V.basis() # optional - gap_packages
            Family (1*v0, F[a1]*v0, F[a2]*v0, F[a1]*F[a2]*v0, F[a1+a2]*v0,
             F[a1]*F[a1+a2]*v0, F[a1+a2]*F[a2]*v0, F[a1+a2]^(2)*v0)
        """
        # Wrap each GAP basis vector as a Sage element; cached since the
        # GAP call is relatively expensive.
        return Family([self.element_class(self, b) for b in self._libgap.Basis()])
    @cached_method
    def crystal_basis(self):
        r"""
        Return the crystal basis of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: V.crystal_basis() # optional - gap_packages
            Family (1*v0, F[a1]*v0, F[a2]*v0, F[a1]*F[a2]*v0,
             (q)*F[a1]*F[a2]*v0 + F[a1+a2]*v0, F[a1+a2]*F[a2]*v0,
             (-q^-2)*F[a1]*F[a1+a2]*v0, (-q^-1)*F[a1+a2]^(2)*v0)
        """
        return Family([self.element_class(self, b) for b in self._libgap.CrystalBasis()])
    @cached_method
    def R_matrix(self):
        """
        Return the `R`-matrix of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',1]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1]) # optional - gap_packages
            sage: V.R_matrix() # optional - gap_packages
            [ 1 0 0 0]
            [ 0 q -q^2 + 1 0]
            [ 0 0 q 0]
            [ 0 0 0 1]
        """
        R = self._libgap.RMatrix()
        F = self._Q.base_ring()
        from sage.matrix.constructor import matrix
        # GAP entries are converted to the Sage base ring via their
        # string representation.
        M = matrix(F, [[F(str(elt)) for elt in row] for row in R])
        M.set_immutable()
        return M
    def crystal_graph(self):
        r"""
        Return the crystal graph of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: G = V.crystal_graph(); G # optional - gap_packages
            Digraph on 8 vertices
            sage: B = crystals.Tableaux(['A',2], shape=[2,1]) # optional - gap_packages
            sage: G.is_isomorphic(B.digraph(), edge_labels=True) # optional - gap_packages
            True
        """
        G = self._libgap.CrystalGraph()
        vertices = [CrystalGraphVertex(self, repr(p)) for p in G['points']]
        # GAP uses 1-based indexing, hence the ``-1`` when looking up the
        # endpoints of each edge.
        edges = [[vertices[e[0][0]-1], vertices[e[0][1]-1], e[1]]
                 for e in G['edges'].sage()]
        G = DiGraph([vertices, edges], format='vertices_and_edges')
        from sage.graphs.dot2tex_utils import have_dot2tex
        if have_dot2tex():
            G.set_latex_options(format="dot2tex",
                                edge_labels=True,
                                color_by_label=self._cartan_type._index_set_coloring)
        return G
    @cached_method
    def zero(self):
        r"""
        Return the zero element of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: V.zero() # optional - gap_packages
            0*v0
        """
        return self.element_class(self, self._libgap.ZeroImmutable())
class HighestWeightModule(QuantumGroupModule):
    """
    A highest weight module of a quantum group.
    """
    @staticmethod
    def __classcall_private__(cls, Q, weight):
        """
        Normalize input to ensure a unique representation.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: La = Q.cartan_type().root_system().weight_lattice().fundamental_weights() # optional - gap_packages
            sage: V = Q.highest_weight_module([1,3]) # optional - gap_packages
            sage: V is Q.highest_weight_module(La[1]+3*La[2]) # optional - gap_packages
            True
        """
        P = Q._cartan_type.root_system().weight_lattice()
        if isinstance(weight, (list, tuple)):
            # A list/tuple gives the coefficients of the fundamental weights.
            La = P.fundamental_weights()
            weight = P.sum(la*weight[i] for i,la in enumerate(La))
        else:
            # Otherwise coerce into the weight lattice.
            weight = P(weight)
        return super(HighestWeightModule, cls).__classcall__(cls, Q, weight)
    def __init__(self, Q, weight):
        """
        Initialize ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: TestSuite(V).run() # optional - gap_packages
        """
        # Build the GAP module before calling the base __init__, which
        # expects ``_libgap`` to be available.
        self._libgap = Q._libgap.HighestWeightModule(list(weight.to_vector()))
        self._weight = weight
        cat = Modules(Q.base_ring()).FiniteDimensional().WithBasis()
        QuantumGroupModule.__init__(self, Q, cat)
    def _repr_(self):
        """
        Return a string representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: Q.highest_weight_module([1,1]) # optional - gap_packages
            Highest weight module of weight Lambda[1] + Lambda[2] of
             Quantum Group of type ['A', 2] with q=q
        """
        return "Highest weight module of weight {} of {}".format(self._weight, self._Q)
    def _latex_(self):
        r"""
        Return a latex representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,2]) # optional - gap_packages
            sage: latex(V) # optional - gap_packages
            V(\Lambda_{1} + 2\Lambda_{2})
        """
        from sage.misc.latex import latex
        return "V({})".format(latex(self._weight))
    @cached_method
    def highest_weight_vector(self):
        """
        Return the highest weight vector of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: V.highest_weight_vector() # optional - gap_packages
            1*v0
        """
        # GAP returns [weights, vectors]; take the first vector of the
        # first (and only) weight.
        return self.element_class(self, self._libgap.HighestWeightsAndVectors()[1][0][0])
    an_element = highest_weight_vector
    def tensor(self, *V, **options):
        """
        Return the tensor product of ``self`` with ``V``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: Vp = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: Vp.tensor(V) # optional - gap_packages
            Highest weight module of weight Lambda[1] of Quantum Group of type ['A', 2] with q=q
             # Highest weight module of weight Lambda[1] + Lambda[2] of Quantum Group of type ['A', 2] with q=q
        """
        return TensorProductOfHighestWeightModules(self, *V, **options)
    Element = QuaGroupRepresentationElement
class TensorProductOfHighestWeightModules(QuantumGroupModule):
    """
    Tensor product of highest weight modules of a quantum group.
    """
    def __init__(self, *modules, **options):
        """
        Initialize ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,1]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: TestSuite(T).run() # optional - gap_packages
        """
        # All factors are assumed to share the same quantum group; take
        # it from the first factor.
        Q = modules[0]._Q
        self._modules = tuple(modules)
        self._libgap = libgap.TensorProductOfAlgebraModules([m._libgap for m in modules])
        cat = Modules(Q.base_ring()).TensorProducts().FiniteDimensional().WithBasis()
        QuantumGroupModule.__init__(self, Q, category=cat)
    def _repr_(self):
        """
        Return a string representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: T # optional - gap_packages
            Highest weight module of weight Lambda[1] of Quantum Group of type ['A', 2] with q=q
             # Highest weight module of weight Lambda[1] of Quantum Group of type ['A', 2] with q=q
        """
        return " # ".join(repr(M) for M in self._modules)
    def _latex_(self):
        r"""
        Return a latex representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: latex(T) # optional - gap_packages
            V(\Lambda_{1}) \otimes V(\Lambda_{1})
        """
        from sage.misc.latex import latex
        return " \\otimes ".join(latex(M) for M in self._modules)
    @lazy_attribute
    def _highest_weights_and_vectors(self):
        """
        Return the highest weights and the corresponding vectors.
        .. NOTE::
            The resulting objects are GAP objects.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([0,1]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: T._highest_weights_and_vectors # optional - gap_packages
            [ [ [ 0, 2 ], [ 1, 0 ] ],
              [ [ 1*(1*v0<x>1*v0) ], [ -q^-1*(1*v0<x>F3*v0)+1*(F3*v0<x>1*v0) ] ] ]
        """
        return self._libgap.HighestWeightsAndVectors()
    def highest_weight_vectors(self):
        r"""
        Return the highest weight vectors of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: T.highest_weight_vectors() # optional - gap_packages
            [1*(1*v0<x>1*v0), -q^-1*(1*v0<x>F[a1]*v0) + 1*(F[a1]*v0<x>1*v0)]
        """
        # Flatten the per-weight lists of GAP vectors into one list of
        # Sage elements.
        return [self.element_class(self, v)
                for vecs in self._highest_weights_and_vectors[1]
                for v in vecs]
    some_elements = highest_weight_vectors
    def _an_element_(self):
        """
        Return an element of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: T.an_element() # optional - gap_packages
            1*(1*v0<x>1*v0)
        """
        return self.highest_weight_vectors()[0]
    @cached_method
    def highest_weight_decomposition(self):
        """
        Return the highest weight decomposition of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: T.highest_weight_decomposition() # optional - gap_packages
            [Highest weight submodule with weight 2*Lambda[1] generated by 1*(1*v0<x>1*v0),
             Highest weight submodule with weight Lambda[2] generated by -q^-1*(1*v0<x>F[a1]*v0) + 1*(F[a1]*v0<x>1*v0)]
        """
        # One submodule per highest weight vector, tagged with its weight
        # (converted from GAP to a Sage tuple).
        return [HighestWeightSubmodule(self, self.element_class(self, v), tuple(wt.sage()))
                for wt,vecs in zip(*self._highest_weights_and_vectors)
                for v in vecs]
    Element = QuaGroupRepresentationElement
class HighestWeightSubmodule(QuantumGroupModule):
    """
    A highest weight submodule of a tensor product of highest weight
    modules, generated by a given highest weight vector.
    """
    def __init__(self, ambient, gen, weight):
        """
        Initialize ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[0] # optional - gap_packages
            sage: TestSuite(S).run() # optional - gap_packages
        """
        self._ambient = ambient
        cat = ambient.category()
        QuantumGroupModule.__init__(self, ambient._Q, cat.Subobjects())
        # ``gen`` is the generating highest weight vector inside ``ambient``.
        self._gen = gen
        self._libgap = self._ambient._libgap.HWModuleByGenerator(gen, weight)
        # Convert the weight to an element of the weight lattice
        P = self._Q._cartan_type.root_system().weight_lattice()
        La = P.fundamental_weights()
        self._weight = P.sum(la*weight[i] for i,la in enumerate(La))
    def _repr_(self):
        """
        Return a string representation of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: T.highest_weight_decomposition() # optional - gap_packages
            [Highest weight submodule with weight 2*Lambda[1]
              generated by 1*(1*v0<x>1*v0),
             Highest weight submodule with weight Lambda[2]
              generated by -q^-1*(1*v0<x>F[a1]*v0) + 1*(F[a1]*v0<x>1*v0)]
        """
        return "Highest weight submodule with weight {} generated by {}".format(self._weight, self._gen)
    @lazy_attribute
    def _ambient_basis_map(self):
        """
        A dict that maps the basis of ``self`` to the ambient module.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[0] # optional - gap_packages
            sage: S._ambient_basis_map # optional - gap_packages
            {0: 1*(1*v0<x>1*v0),
             1: 1*(1*v0<x>F[a1]*v0) + q^-1*(F[a1]*v0<x>1*v0),
             2: 1*(F[a1]*v0<x>F[a1]*v0),
             3: 1*(1*v0<x>F[a1+a2]*v0) + q^-1*(F[a1+a2]*v0<x>1*v0),
             4: 1*(F[a1]*v0<x>F[a1+a2]*v0) + q^-1*(F[a1+a2]*v0<x>F[a1]*v0),
             5: 1*(F[a1+a2]*v0<x>F[a1+a2]*v0)}
        """
        # Breadth-first traversal of the crystal: starting from the
        # highest weight vector, apply each f_tilde in parallel in both
        # ``self`` and the ambient module, recording the correspondence.
        B = list(self.basis())
        d = {self.highest_weight_vector(): self._gen}
        todo = set([self.highest_weight_vector()])
        I = self._cartan_type.index_set()
        while todo:
            x = todo.pop()
            for i in I:
                y = x.f_tilde(i)
                if y and y not in d:
                    d[y] = d[x].f_tilde(i)
                    todo.add(y)
        # Re-key by position in the basis of ``self``.
        return {B.index(k): d[k] for k in d}
    def ambient(self):
        """
        Return the ambient module of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[0] # optional - gap_packages
            sage: S.ambient() is T # optional - gap_packages
            True
        """
        return self._ambient
    @lazy_attribute
    def lift(self):
        """
        The lift morphism from ``self`` to the ambient space.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[0] # optional - gap_packages
            sage: S.lift # optional - gap_packages
            Generic morphism:
              From: Highest weight submodule with weight 2*Lambda[1] generated by 1*(1*v0<x>1*v0)
              To:   Highest weight module ... # Highest weight module ...
            sage: x = sum(S.basis()) # optional - gap_packages
            sage: x.lift() # optional - gap_packages
            1*(1*v0<x>1*v0) + 1*(1*v0<x>F[a1]*v0) + 1*(1*v0<x>F[a1+a2]*v0)
             + q^-1*(F[a1]*v0<x>1*v0) + 1*(F[a1]*v0<x>F[a1]*v0)
             + 1*(F[a1]*v0<x>F[a1+a2]*v0) + q^-1*(F[a1+a2]*v0<x>1*v0)
             + q^-1*(F[a1+a2]*v0<x>F[a1]*v0) + 1*(F[a1+a2]*v0<x>F[a1+a2]*v0)
        """
        # The basis map is unitriangular, which allows inverting it for
        # the retraction.
        return self.module_morphism(self._ambient_basis_map.__getitem__,
                                    codomain=self._ambient, unitriangular="lower")
    def retract(self, elt):
        """
        The retract map from the ambient space to ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: all(S.retract(S.lift(x)) == x # optional - gap_packages
            ....:     for S in T.highest_weight_decomposition()
            ....:     for x in S.basis())
            True
        """
        # Solve the linear system given by the lift's matrix to express
        # ``elt`` in the basis of ``self``.
        c = self.lift.matrix().solve_right(elt._vector_())
        return self._from_dict(c.dict(), coerce=False, remove_zeros=False)
    def highest_weight_vector(self):
        """
        Return the highest weight vector of ``self``.
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[1] # optional - gap_packages
            sage: u = S.highest_weight_vector(); u # optional - gap_packages
            (1)*e.1
            sage: u.lift() # optional - gap_packages
            -q^-1*(1*v0<x>F[a1]*v0) + 1*(F[a1]*v0<x>1*v0)
        """
        # The highest weight vector is the basis element killed by every
        # raising operator E_alpha.
        I = self._cartan_type.index_set()
        zero = self._libgap.ZeroImmutable()
        for v in self.basis():
            if all(self._libgap.Ealpha(v._libgap, i) == zero for i in I):
                return v
        return self.zero()
    an_element = highest_weight_vector
    def crystal_graph(self, use_ambient=True):
        """
        Return the crystal graph of ``self``.
        INPUT:
        - ``use_ambient`` -- boolean (default: ``True``); if ``True``,
          the vertices are given in terms of the ambient module
        EXAMPLES::
            sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
            sage: V = Q.highest_weight_module([1,0]) # optional - gap_packages
            sage: T = tensor([V,V]) # optional - gap_packages
            sage: S = T.highest_weight_decomposition()[1] # optional - gap_packages
            sage: G = S.crystal_graph() # optional - gap_packages
            sage: sorted(G.vertices(sort=False), key=str) # optional - gap_packages
            [<-q^-1*(1*v0<x>F[a1+a2]*v0) + 1*(F[a1+a2]*v0<x>1*v0)>,
             <-q^-1*(1*v0<x>F[a1]*v0) + 1*(F[a1]*v0<x>1*v0)>,
             <-q^-1*(F[a1]*v0<x>F[a1+a2]*v0) + 1*(F[a1+a2]*v0<x>F[a1]*v0)>]
            sage: sorted(S.crystal_graph(False).vertices(sort=False), key=str) # optional - gap_packages
            [<(1)*e.1>, <(1)*e.2>, <(1)*e.3>]
        """
        # NOTE(review): ``G`` is computed before the early return below,
        # so when ``use_ambient`` is False this GAP call is wasted (the
        # base class recomputes it).
        G = self._libgap.CrystalGraph()
        if not use_ambient:
            return QuantumGroupModule.crystal_graph(self)
        # Mostly a copy; there is likely a better way with a helper function
        B = self.basis()
        # Relabel each internal basis element by its image in the ambient
        # module.
        d = {repr(B[k]._libgap): '<{!r}>'.format(self._ambient_basis_map[k])
             for k in self._ambient_basis_map}
        vertices = [CrystalGraphVertex(self, d[repr(p)[1:-1]])
                    for p in G['points']]
        # GAP uses 1-based indexing, hence the ``-1``.
        edges = [[vertices[e[0][0]-1], vertices[e[0][1]-1], e[1]]
                 for e in G['edges'].sage()]
        G = DiGraph([vertices, edges], format='vertices_and_edges')
        from sage.graphs.dot2tex_utils import have_dot2tex
        if have_dot2tex():
            G.set_latex_options(format="dot2tex",
                                edge_labels=True,
                                color_by_label=self._cartan_type._index_set_coloring)
        return G
    Element = QuaGroupRepresentationElement
# TODO: Generalize this to Verma modules.
class LowerHalfQuantumGroup(Parent, UniqueRepresentation):
"""
The lower half of the quantum group.
"""
@staticmethod
def __classcall_private__(cls, Q):
"""
Initialize ``self``.
EXAMPLES::
sage: from sage.algebras.quantum_groups.quantum_group_gap import LowerHalfQuantumGroup
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: Q.lower_half() is LowerHalfQuantumGroup(Q) # optional - gap_packages
True
"""
from sage.combinat.root_system.cartan_type import CartanType_abstract
if isinstance(Q, CartanType_abstract):
Q = QuantumGroup(Q)
return super(LowerHalfQuantumGroup, cls).__classcall__(cls, Q)
def __init__(self, Q):
"""
Initialize ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: TestSuite(B).run() # optional - gap_packages
"""
self._Q = Q
self._libgap = Q._libgap
self._libgap_q = Q._libgap_q
self._libgap_base = Q._libgap_base
self._cartan_type = Q._cartan_type
self._pos_roots = Q._pos_roots
self._proj = projection_lower_half(Q)
B = Q.base_ring()
Parent.__init__(self, base=B, category=Algebras(B).WithBasis().Subobjects())
def _repr_(self):
r"""
Return a string representation of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: Q.lower_half() # optional - gap_packages
Lower Half of Quantum Group of type ['A', 2] with q=q
"""
return "Lower Half of {}".format(self._Q)
def _latex_(self):
r"""
Return a latex representation of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: latex(Q.lower_half()) # optional - gap_packages
U^-_{q}(A_{2})
"""
from sage.misc.latex import latex
return "U^-_{%s}(%s)"%(latex(self._Q._q), latex(self._cartan_type))
def _element_constructor_(self, elt):
r"""
Construct an element of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: q = Q.q() # optional - gap_packages
sage: B(0) # optional - gap_packages
0
sage: B(1 + q^2) # optional - gap_packages
(q^2 + 1)*1
sage: B({(1,2,0): q, (0,0,2): q^2 - 2}) # optional - gap_packages
(q)*F[a1]*F[a1+a2]^(2) + (q^2-2)*F[a2]^(2)
"""
if not elt:
return self.zero()
if isinstance(elt, dict):
return self._from_dict(elt)
if elt in self.base_ring():
return elt * self.one()
if elt.parent() is self._Q:
return self.element_class(self, self._proj(elt)._libgap)
return self.element_class(self, elt)
def ambient(self):
r"""
Return the ambient quantum group of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: B.ambient() is Q # optional - gap_packages
True
"""
return self._Q
@cached_method
def highest_weight_vector(self):
"""
Return the highest weight vector of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: B.highest_weight_vector() # optional - gap_packages
1
"""
return self.element_class(self, self._Q.one()._libgap)
one = highest_weight_vector
an_element = highest_weight_vector
@cached_method
def zero(self):
"""
Return the zero element of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: B.zero() # optional - gap_packages
0
"""
return self.element_class(self, self._Q._libgap.ZeroImmutable())
@cached_method
def algebra_generators(self):
r"""
Return the algebra generators of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: B.algebra_generators() # optional - gap_packages
Finite family {1: F[a1], 2: F[a2]}
"""
F = self._Q.F_simple()
keys = F.keys()
d = {i: self.element_class(self, F[i]._libgap) for i in keys}
return Family(keys, d.__getitem__)
gens = algebra_generators
def _construct_monomial(self, k):
"""
Construct a monomial of ``self`` indexed by ``k``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: B._construct_monomial((1,2,1)) # optional - gap_packages
F[a1]*F[a1+a2]^(2)*F[a2]
sage: B._construct_monomial((3,0,1)) # optional - gap_packages
F[a1]^(3)*F[a2]
"""
F = libgap.eval('ElementsFamily')(libgap.eval('FamilyObj')(self._libgap))
one = self._libgap_base.One()
data = []
for i,val in enumerate(k):
if val == 0:
continue
data.append(i+1)
data.append(val)
return self.element_class(self, F.ObjByExtRep([data, one]))
@cached_method
def basis(self):
r"""
Return the basis of ``self``.
This returns the PBW basis of ``self``, which is given by
monomials in `\{F_{\alpha}\}`, where `\alpha` runs over all
positive roots.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: basis = B.basis(); basis # optional - gap_packages
Lazy family (monomial(i))_{i in The Cartesian product of
(Non negative integers, Non negative integers, Non negative integers)}
sage: basis[1,2,1] # optional - gap_packages
F[a1]*F[a1+a2]^(2)*F[a2]
sage: basis[1,2,4] # optional - gap_packages
F[a1]*F[a1+a2]^(2)*F[a2]^(4)
sage: basis[1,0,4] # optional - gap_packages
F[a1]*F[a2]^(4)
"""
I = cartesian_product([NonNegativeIntegers()]*len(self._pos_roots))
return Family(I, self._construct_monomial, name="monomial")
def _construct_canonical_basis_elts(self, k):
r"""
Construct the monomial elements of ``self`` indexed by ``k``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: B._construct_canonical_basis_elts((1,2)) # optional - gap_packages
[F[a1]*F[a2]^(2), (q^2)*F[a1]*F[a2]^(2) + F[a1+a2]*F[a2]]
"""
B = self._libgap.CanonicalBasis()
return [self.element_class(self, v) for v in B.PBWElements(k)]
@cached_method
def canonical_basis_elements(self):
r"""
Construct the monomial elements of ``self`` indexed by ``k``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: C = B.canonical_basis_elements(); C # optional - gap_packages
Lazy family (Canonical basis(i))_{i in The Cartesian product of
(Non negative integers, Non negative integers)}
sage: C[2,1] # optional - gap_packages
[F[a1]^(2)*F[a2], F[a1]*F[a1+a2] + (q^2)*F[a1]^(2)*F[a2]]
sage: C[1,2] # optional - gap_packages
[F[a1]*F[a2]^(2), (q^2)*F[a1]*F[a2]^(2) + F[a1+a2]*F[a2]]
"""
I = cartesian_product([NonNegativeIntegers()]*len(self._cartan_type.index_set()))
return Family(I, self._construct_canonical_basis_elts, name='Canonical basis')
def lift(self, elt):
r"""
Lift ``elt`` to the ambient quantum group of ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = B.lift(B.an_element()); x # optional - gap_packages
1
sage: x.parent() is Q # optional - gap_packages
True
"""
return self._Q.element_class(self._Q, elt._libgap)
def retract(self, elt):
r"""
Retract ``elt`` from the ambient quantum group to ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = Q.an_element(); x # optional - gap_packages
1 + (q)*F[a1] + E[a1] + (q^2-1-q^-2 + q^-4)*[ K1 ; 2 ]
+ K1 + (-q^-1 + q^-3)*K1[ K1 ; 1 ]
sage: B.retract(x) # optional - gap_packages
1 + (q)*F[a1]
"""
return self.element_class(self, self._proj(elt)._libgap)
class Element(QuaGroupModuleElement):
"""
An element of the lower half of the quantum group.
"""
def _acted_upon_(self, scalar, self_on_left=False):
r"""
Return the action of ``scalar`` on ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: F1, F2 = Q.F_simple() # optional - gap_packages
sage: v = B.highest_weight_vector(); v # optional - gap_packages
1
sage: 2 * v # optional - gap_packages
(2)*1
sage: v * (3/2) # optional - gap_packages
(3/2)*1
sage: F1 * v # optional - gap_packages
F[a1]
sage: F2 * (F1 * v) # optional - gap_packages
(q)*F[a1]*F[a2] + F[a1+a2]
sage: (F1 * v) * F2 # optional - gap_packages
F[a1]*F[a2]
"""
try:
if scalar.parent() is self.parent()._Q:
if self_on_left:
ret = self._libgap * scalar._libgap
else:
ret = scalar._libgap * self._libgap
return self.__class__(self.parent(), self.parent()._proj(ret)._libgap)
except AttributeError:
pass
return QuaGroupModuleElement._acted_upon_(self, scalar, self_on_left)
_lmul_ = _acted_upon_
def _mul_(self, other):
r"""
Multiply ``self`` and ``other``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: F1, F2 = Q.F_simple() # optional - gap_packages
sage: v = B.highest_weight_vector() # optional - gap_packages
sage: f1, f2 = F1 * v, F2 * v # optional - gap_packages
sage: f1 * f2 # optional - gap_packages
F[a1]*F[a2]
sage: f1^2 * f2 # optional - gap_packages
(q + q^-1)*F[a1]^(2)*F[a2]
sage: f2 * f1^2 * f2 # optional - gap_packages
(q + q^-1)*F[a1]*F[a1+a2]*F[a2]
+ (q^4 + 2*q^2 + 1)*F[a1]^(2)*F[a2]^(2)
"""
ret = self.parent()._proj(self._libgap * other._libgap)
return self.__class__(self.parent(), ret._libgap)
def monomial_coefficients(self, copy=True):
r"""
Return the dictionary of ``self`` whose keys are the basis
indices and the values are coefficients.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = B.retract(Q.an_element()); x # optional - gap_packages
1 + (q)*F[a1]
sage: sorted(x.monomial_coefficients().items(), key=str) # optional - gap_packages
[((0, 0, 0), 1), ((1, 0, 0), q)]
"""
ext_rep = self._libgap.ExtRepOfObj()
num_pos_roots = len(self.parent()._pos_roots)
R = self.parent().base_ring()
d = {}
for i in range(len(ext_rep)//2):
exp = [0] * num_pos_roots
mon = ext_rep[2*i].sage()
for j in range(len(mon)//2):
exp[mon[2*j]-1] = mon[2*j+1]
d[tuple(exp)] = R(str(ext_rep[2*i+1]))
return d
def bar(self):
r"""
Return the bar involution on ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: F1, F2 = Q.F_simple() # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = B(Q.an_element()); x # optional - gap_packages
1 + (q)*F[a1]
sage: x.bar() # optional - gap_packages
1 + (q^-1)*F[a1]
sage: (F1*x).bar() == F1 * x.bar() # optional - gap_packages
True
sage: (F2*x).bar() == F2 * x.bar() # optional - gap_packages
True
sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
sage: F1, F2 = Q.F_simple() # optional - gap_packages
sage: q = Q.q() # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = B(q^-2*F1*F2^2*F1) # optional - gap_packages
sage: x # optional - gap_packages
(q + q^-5)*F[a1]*F[a1+a2]*F[a2]
+ (q^8 + q^6 + q^2 + 1)*F[a1]^(2)*F[a2]^(2)
sage: x.bar() # optional - gap_packages
(q^5 + q^-1)*F[a1]*F[a1+a2]*F[a2]
+ (q^12 + q^10 + q^6 + q^4)*F[a1]^(2)*F[a2]^(2)
"""
bar = self.parent()._libgap.BarAutomorphism()
# bar does not introduce E/K/Ki's
return self.__class__(self.parent(), libgap.Image(bar, self._libgap))
def tau(self):
r"""
Return the action of the `\tau` anti-automorphism on ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: F1, F2 = Q.F_simple() # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = B(Q.an_element()); x # optional - gap_packages
1 + (q)*F[a1]
sage: x.tau() # optional - gap_packages
1 + (q)*F[a1]
sage: (F1*x).tau() == x.tau() * F1.tau() # optional - gap_packages
True
sage: (F2*x).tau() == x.tau() * F2.tau() # optional - gap_packages
True
sage: Q = QuantumGroup(['G',2]) # optional - gap_packages
sage: F1, F2 = Q.F_simple() # optional - gap_packages
sage: q = Q.q() # optional - gap_packages
sage: B = Q.lower_half() # optional - gap_packages
sage: x = B(q^-2*F1*F2^2*F1) # optional - gap_packages
sage: x # optional - gap_packages
(q + q^-5)*F[a1]*F[a1+a2]*F[a2]
+ (q^8 + q^6 + q^2 + 1)*F[a1]^(2)*F[a2]^(2)
sage: x.tau() # optional - gap_packages
(q + q^-5)*F[a1]*F[a1+a2]*F[a2]
+ (q^8 + q^6 + q^2 + 1)*F[a1]^(2)*F[a2]^(2)
"""
tau = self.parent()._libgap.AntiAutomorphismTau()
# tau does not introduce E/K/Ki's
return self.__class__(self.parent(), libgap.Image(tau, self._libgap))
def braid_group_action(self, braid):
r"""
Return the action of the braid group element ``braid``
projected into ``self``.
INPUT:
- ``braid`` -- a reduced word of a braid group element
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: L = Q.lower_half() # optional - gap_packages
sage: v = L.highest_weight_vector().f_tilde([1,2,2,1]); v # optional - gap_packages
F[a1]*F[a1+a2]*F[a2]
sage: v.braid_group_action([1]) # optional - gap_packages
(-q^3-q)*F[a2]^(2)
sage: v.braid_group_action([]) == v # optional - gap_packages
True
"""
if not braid:
return self
Q = self.parent()
QU = Q._libgap
tau = QU.AntiAutomorphismTau()
f = QU.IdentityMapping()
for i in braid:
if i < 0:
i = -i
T = QU.AutomorphismTalpha(i)
f *= tau * T * tau
else:
f *= QU.AutomorphismTalpha(i)
ret = libgap.Image(f, self._libgap)
return self.__class__(Q, Q._proj(ret)._libgap)
def _et(self, i):
r"""
Return the action of `\widetilde{e}_i` on ``self``.
EXAMPLES::
sage: Q = QuantumGroup(['A',2]) # optional - gap_packages
sage: L = Q.lower_half() # optional - gap_packages
sage: v = L.highest_weight_vector() # optional - gap_packages
sage: v._et(1) # optional - gap_packages
0
sage: w = v.f_tilde([1,2,1]); w # optional - gap_packages
F[a1]*F[a1+a2]
sage: w._et(1) # optional - gap_packages
F[a1+a2]
sage: w._et(2) # optional - gap_packages
F[a1]^(2)
sage: L.zero().e_tilde(1) # optional - gap_packages
0
"""
if not self: # self == 0
return self
Q = self.parent()
ret = self._libgap.Ealpha(i)
if not ret:
return self.parent().zero()
return self.__class__(Q, Q._proj(ret)._libgap)
def _ft(self, i):
    r"""
    Return the action of `\widetilde{f}_i` on ``self``.

    EXAMPLES::

        sage: Q = QuantumGroup(['A',2])  # optional - gap_packages
        sage: L = Q.lower_half()  # optional - gap_packages
        sage: v = L.highest_weight_vector()  # optional - gap_packages
        sage: v._ft(1)  # optional - gap_packages
        F[a1]
        sage: L.zero().f_tilde(1)  # optional - gap_packages
        0
    """
    if not self:  # self == 0
        return self
    Q = self.parent()
    # Falpha applies the lowering operator on the GAP side.
    ret = self._libgap.Falpha(i)
    if not ret:
        return self.parent().zero()
    return self.__class__(Q, Q._proj(ret)._libgap)
def _unpickle_generic_element(parent, data):
    """
    Used to unpickle an element of ``parent`` using ``data``.

    EXAMPLES::

        sage: Q = QuantumGroup(['D',4])  # optional - gap_packages
        sage: x = Q.an_element()  # optional - gap_packages
        sage: loads(dumps(x)) == x  # indirect doctest  # optional - gap_packages
        True
    """
    family = libgap.eval('ElementsFamily')(libgap.eval('FamilyObj')(parent._libgap))
    # Multiplying by this gets the coefficients into the right GAP type.
    one = parent._libgap_base.One()
    # ``data`` alternates (monomial, coefficient) pairs; rebuild the
    # external representation expected by GAP.
    ext_rep = []
    for monomial, coeff in zip(data[0::2], data[1::2]):
        ext_rep.append(libgap(monomial))
        ext_rep.append(one * libgap(coeff.subs(q=parent._libgap_q)))
    return parent.element_class(parent, family.ObjByExtRep(ext_rep))
| 55,181 |
343 | <reponame>nzeh/syzygy
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Declares utility functions for building mappings between two distinct
// BlockGraphs. This is intended for use in generating mappings between two
// BlockGraphs that represent different versions of the same binary but it
// should work for arbitrary BlockGraphs. See compare.cc for a full description
// of the algorithm.
#ifndef SYZYGY_EXPERIMENTAL_COMPARE_COMPARE_H_
#define SYZYGY_EXPERIMENTAL_COMPARE_COMPARE_H_

#include <map>
#include <vector>

#include "syzygy/block_graph/block_graph.h"

namespace experimental {

// A partial bijection from blocks of one BlockGraph to blocks of another.
typedef std::map<const block_graph::BlockGraph::Block*,
                 const block_graph::BlockGraph::Block*> BlockGraphMapping;

// Builds a mapping between two related BlockGraphs. The mapping will be a
// partial bijection between the blocks in each BlockGraph. If provided,
// unmapped1 and unmapped2 will be populated with a list of blocks that were
// not mapped from each block graph.
bool BuildBlockGraphMapping(const block_graph::BlockGraph& bg1,
                            const block_graph::BlockGraph& bg2,
                            BlockGraphMapping* mapping,
                            block_graph::ConstBlockVector* unmapped1,
                            block_graph::ConstBlockVector* unmapped2);

// Reverses a block mapping. This can not be done in-place, so
// @p reverse_mapping and @p mapping must not be the same object.
bool ReverseBlockGraphMapping(const BlockGraphMapping& mapping,
                              BlockGraphMapping* reverse_mapping);

}  // namespace experimental

#endif  // SYZYGY_EXPERIMENTAL_COMPARE_COMPARE_H_
| 743 |
450 | /******************************************************************
*
* fthash.h - fast dynamic hash tables
*
* Copyright 2002 by
* <NAME>, <NAME>, and <NAME>
*
* This file is part of the FreeType project, and may only be used,
* modified, and distributed under the terms of the FreeType project
* license, LICENSE.TXT. By continuing to use, modify, or distribute
* this file you indicate that you have read the license and
* understand and accept it fully.
*
*
* This header is used to define dynamic hash tables as described
* by the article "Main-Memory Linear Hashing - Some Enhancements
* of Larson's Algorithm" by <NAME>.
*
* Basically, linear hashing prevents big "stalls" during
* resizes of the buckets array by only splitting one bucket
* at a time. This ensures excellent response time even when
* the table is frequently resized..
*
*
* Note that the use of the FT_Hash type is rather unusual in order
* to be as generic and efficient as possible. See the comments in the
* following definitions for more details.
*/
#ifndef __FT_HASH_H__
#define __FT_HASH_H__
#include <ft2build.h>
#include FT_TYPES_H
FT_BEGIN_HEADER
/***********************************************************
*
* @type: FT_Hash
*
* @description:
* handle to a @FT_HashRec structure used to model a
* dynamic hash table
*/
typedef struct FT_HashRec_* FT_Hash;
/***********************************************************
*
* @type: FT_HashNode
*
* @description:
* handle to a @FT_HashNodeRec structure used to model a
* single node of a hash table
*/
typedef struct FT_HashNodeRec_* FT_HashNode;
/***********************************************************
*
* @type: FT_HashLookup
*
* @description:
* handle to a @FT_HashNode pointer. This is returned by
* the @ft_hash_lookup function and can later be used by
* @ft_hash_add or @ft_hash_remove
*/
typedef FT_HashNode* FT_HashLookup;
/***********************************************************
*
* @type: FT_Hash_EqualFunc
*
* @description:
* a function used to compare two nodes of the hash table
*
* @input:
* node1 :: handle to first node
* node2 :: handle to second node
*
* @return:
* 1 iff the 'keys' in 'node1' and 'node2' are identical.
* 0 otherwise.
*/
typedef FT_Int (*FT_Hash_EqualFunc)( FT_HashNode node1,
FT_HashNode node2 );
/***********************************************************
*
* @struct: FT_HashRec
*
* @description:
* a structure used to model a dynamic hash table.
*
* @fields:
* memory :: memory manager used to allocate
* the buckets array and the hash nodes
*
* buckets :: array of hash buckets
*
* node_size :: size of node in bytes
* node_compare :: a function used to compare two nodes
* node_hash :: a function used to compute the hash
* value of a given node
* p ::
* mask ::
* slack ::
*
* @note:
* 'p', 'mask' and 'slack' are control values managed by
* the hash table. Do not try to interpret them directly.
*
* You can grab the hash table size by calling
* '@ft_hash_get_size'.
*/
typedef struct FT_HashRec_
{
    FT_HashNode*       buckets;     /* array of hash bucket heads            */
    FT_UInt            p;           /* split index: buckets below it must be */
                                    /* re-hashed with the next-larger mask   */
    FT_UInt            mask;        /* really maxp-1 */
    FT_Long            slack;       /* resize control value -- see fthash.c  */
    FT_Hash_EqualFunc  node_equal;  /* key comparison callback               */
    FT_Memory          memory;      /* allocator for buckets and nodes       */
} FT_HashRec;
/***********************************************************
*
* @struct: FT_HashNodeRec
*
* @description:
* a structure used to model the root fields of a dynamic
* hash table node.
*
* it's up to client applications to "sub-class" this
* structure to add relevant (key,value) definitions
*
* @fields:
* link :: pointer to next node in bucket's collision list
* hash :: 32-bit hash value for this node
*
* @note:
* it's up to client applications to "sub-class" this structure
* to add relevant (key,value) type definitions. For example,
* if we want to build a "string -> int" mapping, we could use
* something like:
*
* {
* typedef struct MyNodeRec_
* {
* FT_HashNodeRec hnode;
* const char* key;
* int value;
*
* } MyNodeRec, *MyNode;
* }
*
*/
typedef struct FT_HashNodeRec_
{
FT_HashNode link;
FT_UInt32 hash;
} FT_HashNodeRec;
/****************************************************************
*
* @function: ft_hash_init
*
* @description:
* initialize a dynamic hash table
*
* @input:
* table :: handle to target hash table structure
* node_equal :: node comparison function
* memory :: memory manager handle used to allocate the
* buckets array within the hash table
*
* @return:
* error code. 0 means success
*
* @note:
* the node comparison function should only compare node _keys_
* and ignore values !! with good hashing computation (which the
* user must perform itself), the comparison function should be
* pretty seldom called.
*
* here is a simple example:
*
* {
* static int my_equal( MyNode node1,
* MyNode node2 )
* {
* // compare keys of 'node1' and 'node2'
* return (strcmp( node1->key, node2->key ) == 0);
* }
*
* ....
*
* ft_hash_init( &hash, (FT_Hash_EqualFunc) my_compare, memory );
* ....
* }
*/
FT_BASE( FT_Error )
ft_hash_init( FT_Hash table,
FT_Hash_EqualFunc compare,
FT_Memory memory );
/****************************************************************
*
* @function: ft_hash_lookup
*
* @description:
* search a hash table to find a node corresponding to a given
* key.
*
* @input:
* table :: handle to target hash table structure
* keynode :: handle to a reference hash node that will be
* only used for key comparisons with the table's
* elements
*
* @return:
* a pointer-to-hash-node value, which must be used as followed:
*
* - if '*result' is NULL, the key wasn't found in the hash
* table. The value of 'result' can be used to add new elements
* through @ft_hash_add however..
*
* - if '*result' is not NULL, it's a handle to the first table
* node that corresponds to the search key. The value of 'result'
* can be used to remove this element through @ft_hash_remove
*
* @note:
* here is an example:
*
* {
* // maps a string to an integer with a hash table
* // returns -1 in case of failure
* //
* int my_lookup( FT_Hash table,
* const char* key )
* {
* MyNode* pnode;
* MyNodeRec noderec;
*
* // set-up key node. It's 'hash' and 'key' fields must
* // be set correctly.. we ignore 'link' and 'value'
* //
* noderec.hnode.hash = strhash( key );
* noderec.key = key;
*
* // perform search - return value
* //
* pnode = (MyNode) ft_hash_lookup( table, &noderec );
* if ( *pnode )
* {
* // we found it
* return (*pnode)->value;
* }
* return -1;
* }
* }
*/
FT_BASE_DEF( FT_HashLookup )
ft_hash_lookup( FT_Hash table,
FT_HashNode keynode );
/****************************************************************
*
* @function: ft_hash_add
*
* @description:
* add a new node to a dynamic hash table. the user must
* call @ft_hash_lookup and allocate a new node before calling
* this function.
*
* @input:
* table :: hash table handle
* lookup :: pointer-to-hash-node value returned by @ft_hash_lookup
* new_node :: handle to new hash node. All its fields must be correctly
* set, including 'hash'.
*
* @return:
* error code. 0 means success
*
* @note:
* this function should always be used _after_ a call to @ft_hash_lookup
* that returns a pointer to a NULL handle. Here's an example:
*
* {
* // sets the value corresponding to a given string key
* //
* void my_set( FT_Hash table,
* const char* key,
* int value )
* {
* MyNode* pnode;
* MyNodeRec noderec;
* MyNode node;
*
* // set-up key node. It's 'hash' and 'key' fields must
* // be set correctly..
* noderec.hnode.hash = strhash( key );
* noderec.key = key;
*
* // perform search - return value
* pnode = (MyNode) ft_hash_lookup( table, &noderec );
* if ( *pnode )
* {
* // we found it, simply replace the value in the node
* (*pnode)->value = value;
* return;
* }
*
* // allocate a new node - and set it up
* node = (MyNode) malloc( sizeof(*node) );
* if ( node == NULL ) .....
*
* node->hnode.hash = noderec.hnode.hash;
* node->key = key;
* node->value = value;
*
* // add it to the hash table
* error = ft_hash_add( table, pnode, node );
* if (error) ....
* }
*/
FT_BASE( FT_Error )
ft_hash_add( FT_Hash table,
FT_HashLookup lookup,
FT_HashNode new_node );
/****************************************************************
*
* @function: ft_hash_remove
*
* @description:
* try to remove the node corresponding to a given key from
* a hash table. This must be called after @ft_hash_lookup
*
* @input:
* table :: hash table handle
* lookup :: pointer-to-hash-node value returned by @ft_hash_lookup
*
* @note:
* this function doesn't free the node itself !! Here's an example:
*
* {
* // sets the value corresponding to a given string key
* //
* void my_remove( FT_Hash table,
* const char* key )
* {
* MyNodeRec noderec;
* MyNode node;
*
* noderec.hnode.hash = strhash(key);
* noderec.key = key;
* node = &noderec;
*
* pnode = ft_hash_lookup( table, &noderec );
* node = *pnode;
* if ( node != NULL )
* {
* error = ft_hash_remove( table, pnode );
* if ( !error )
* free( node );
* }
* }
* }
*/
FT_BASE( FT_Error )
ft_hash_remove( FT_Hash table,
FT_HashLookup lookup );
/****************************************************************
*
* @function: ft_hash_get_size
*
* @description:
* return the number of elements in a given hash table
*
* @input:
* table :: handle to target hash table structure
*
* @return:
* number of elements. 0 if empty
*/
FT_BASE( FT_UInt )
ft_hash_get_size( FT_Hash table );
/****************************************************************
*
* @functype: FT_Hash_ForeachFunc
*
* @description:
* a function used to iterate over all elements of a given
* hash table
*
* @input:
* node :: handle to target @FT_HashNodeRec node structure
* data :: optional argument to iteration routine
*
* @also: @ft_hash_foreach
*/
typedef void (*FT_Hash_ForeachFunc)( const FT_HashNode node,
const FT_Pointer data );
/****************************************************************
*
* @function: ft_hash_foreach
*
* @description:
* parse over all elements in a hash table
*
* @input:
* table :: handle to target hash table structure
* foreach_func :: iteration routine called for each element
* foreach_data :: optional argument to the iteration routine
*
* @note:
* this function is often used to release all elements from a
* hash table. See the example given for @ft_hash_done
*/
FT_BASE( void )
ft_hash_foreach( FT_Hash table,
FT_Hash_ForeachFunc foreach_func,
const FT_Pointer foreach_data );
/****************************************************************
*
* @function: ft_hash_done
*
* @description:
* finalize a given hash table
*
* @input:
* table :: handle to target hash table structure
* node_func :: optional iteration function pointer. this
* can be used to destroy all nodes explicitly
* node_data :: optional argument to the node iterator
*
* @note:
* this function simply frees the hash table's buckets.
* you probably will need to call @ft_hash_foreach to
* destroy all its elements before @ft_hash_done, as in
* the following example:
*
* {
* static void my_node_clear( const MyNode node )
* {
* free( node );
* }
*
* static void my_done( FT_Hash table )
* {
* ft_hash_done( table, (FT_Hash_ForeachFunc) my_node_clear, NULL );
* }
* }
*/
FT_BASE( void )
ft_hash_done( FT_Hash table,
FT_Hash_ForeachFunc item_func,
const FT_Pointer item_data );
/* */
/* compute bucket index from hash value in a dynamic hash table */
/* this is only used to break encapsulation to speed lookups in */
/* the FreeType cache manager !! */
/* */
/* Fixes two defects in the original macro: a stray closing parenthesis */
/* after the first assignment (a syntax error at every expansion site), */
/* and the miscapitalized `FT_uInt' cast (an undeclared type).          */
#define FT_HASH_COMPUTE_INDEX( _table, _hash, _index )          \
  {                                                             \
    FT_UInt  _mask  = (_table)->mask;                           \
    FT_UInt  _hash0 = (_hash);                                  \
                                                                \
    (_index) = (FT_UInt)( (_hash0) & _mask );                   \
    /* buckets below the split point `p' were already split; */ \
    /* re-hash with the next-larger mask to pick the right   */ \
    /* (possibly split) bucket                               */  \
    if ( (_index) < (_table)->p )                               \
      (_index) = (FT_UInt)( (_hash0) & ( 2 * _mask + 1 ) );     \
  }
FT_END_HEADER
#endif /* __FT_HASH_H__ */
| 6,036 |
11,024 | #
# plot.py
#
# This source file is part of the FoundationDB open source project
#
# Copyright 2013-2020 Apple Inc. and the FoundationDB project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import matplotlib.pyplot as plt
class Plotter:
    """Renders simulation results (release/queue rates, queue sizes,
    latencies, and ratekeeper state) as a 3x3 matplotlib dashboard."""

    def __init__(self, results):
        # ``results`` is expected to expose dict-of-dict attributes
        # (started, queued, unprocessed_queue_sizes, latencies, rate,
        # released, limit, limit_and_budget, budget) keyed by priority
        # -- TODO confirm against the producer of ``results``.
        self.results = results

    @staticmethod
    def _aggregate(data, time_resolution, use_avg=False):
        """Bucket a ``{time: value}`` mapping into bins of width
        ``time_resolution``.

        Values landing in the same bin are summed; when ``use_avg`` is
        true the per-bin sum is divided by the number of samples in that
        bin. Returns a ``{bin_start_time: value}`` dict.
        """
        out_data = {}
        counts = {}
        for t, value in data.items():
            # Snap the timestamp down to the start of its bin.
            bucket = t // time_resolution * time_resolution
            out_data[bucket] = out_data.get(bucket, 0) + value
            counts[bucket] = counts.get(bucket, 0) + 1
        if use_avg:
            out_data = {t: v / counts[t] for t, v in out_data.items()}
        return out_data

    # NOTE: add_plot/add_plot_with_times were previously declared without
    # ``self`` or @staticmethod; they only worked when invoked via the
    # class object. @staticmethod preserves that call form and also makes
    # instance calls safe.
    @staticmethod
    def add_plot(data, time_resolution, label, use_avg=False):
        """Plot ``data`` re-binned into ``time_resolution``-wide buckets."""
        out_data = Plotter._aggregate(data, time_resolution, use_avg)
        plt.plot(list(out_data.keys()), list(out_data.values()), label=label)

    @staticmethod
    def add_plot_with_times(data, label):
        """Plot ``data`` against its exact timestamps (no re-binning)."""
        plt.plot(list(data.keys()), list(data.values()), label=label)

    def display(self, time_resolution=0.1):
        """Render all panels and show the figure.

        :param time_resolution: bin width (seconds) used to aggregate samples.
        """
        plt.figure(figsize=(40, 9))

        # Panel 1: transactions released per second, by priority.
        plt.subplot(3, 3, 1)
        for priority in self.results.started.keys():
            Plotter.add_plot(self.results.started[priority], time_resolution, priority)
        plt.xlabel('Time (s)')
        plt.ylabel('Released/s')
        plt.legend()

        # Panel 2: requests queued per second, by priority.
        plt.subplot(3, 3, 2)
        for priority in self.results.queued.keys():
            Plotter.add_plot(self.results.queued[priority], time_resolution, priority)
        plt.xlabel('Time (s)')
        plt.ylabel('Requests/s')
        plt.legend()

        # Panel 3: maximum unprocessed queue size per bin, by priority.
        plt.subplot(3, 3, 3)
        for priority in self.results.unprocessed_queue_sizes.keys():
            data = {k: max(v) for (k, v) in self.results.unprocessed_queue_sizes[priority].items()}
            Plotter.add_plot(data, time_resolution, priority)
        plt.xlabel('Time (s)')
        plt.ylabel('Max queue size')
        plt.legend()

        # Panels 4+: median / 90th percentile / max latency per priority,
        # on a log scale.
        num = 4
        for priority in self.results.latencies.keys():
            plt.subplot(3, 3, num)
            # Indexing at 0.5*len / 0.9*len assumes each latency list is
            # sorted -- TODO confirm against the producer of ``results``.
            median_latencies = {k: v[int(0.5 * len(v))] if len(v) > 0 else 0
                                for (k, v) in self.results.latencies[priority].items()}
            percentile90_latencies = {k: v[int(0.9 * len(v))] if len(v) > 0 else 0
                                      for (k, v) in self.results.latencies[priority].items()}
            max_latencies = {k: max(v) if len(v) > 0 else 0
                             for (k, v) in self.results.latencies[priority].items()}
            Plotter.add_plot(median_latencies, time_resolution, 'median')
            Plotter.add_plot(percentile90_latencies, time_resolution, '90th percentile')
            Plotter.add_plot(max_latencies, time_resolution, 'max')
            plt.xlabel('Time (s)')
            plt.ylabel(str(priority) + ' Latency (s)')
            plt.yscale('log')
            plt.legend()
            num += 1

        # Remaining panels: ratekeeper state, averaged per bin, per priority.
        for priority in self.results.rate.keys():
            plt.subplot(3, 3, num)
            if len(self.results.rate[priority]) > 0:
                Plotter.add_plot(self.results.rate[priority], time_resolution,
                                 'Rate', use_avg=True)
            if len(self.results.released[priority]) > 0:
                Plotter.add_plot(self.results.released[priority], time_resolution,
                                 'Released', use_avg=True)
            if len(self.results.limit[priority]) > 0:
                Plotter.add_plot(self.results.limit[priority], time_resolution,
                                 'Limit', use_avg=True)
            if len(self.results.limit_and_budget[priority]) > 0:
                Plotter.add_plot(self.results.limit_and_budget[priority], time_resolution,
                                 'Limit and budget', use_avg=True)
            if len(self.results.budget[priority]) > 0:
                Plotter.add_plot(self.results.budget[priority], time_resolution,
                                 'Budget', use_avg=True)
            plt.xlabel('Time (s)')
            plt.ylabel('Value (' + str(priority) + ')')
            plt.legend()
            num += 1

        plt.show()
32,544 | package com.baeldung.batch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.item.ItemProcessor;
public class CoffeeItemProcessor implements ItemProcessor<Coffee, Coffee> {

    private static final Logger LOGGER = LoggerFactory.getLogger(CoffeeItemProcessor.class);

    /**
     * Transforms a coffee record by upper-casing all of its text fields.
     */
    @Override
    public Coffee process(final Coffee coffee) throws Exception {
        final String brand = coffee.getBrand().toUpperCase();
        final String origin = coffee.getOrigin().toUpperCase();
        final String characteristics = coffee.getCharacteristics().toUpperCase();

        final Coffee transformedCoffee = new Coffee(brand, origin, characteristics);
        LOGGER.info("Converting ( {} ) into ( {} )", coffee, transformedCoffee);

        return transformedCoffee;
    }
}
| 268 |
12,278 | <gh_stars>1000+
// Copyright (C) 2005 <NAME>, <NAME>.
// Use, modification and distribution is subject to the Boost Software
// License, Version 1.0. (http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_TYPEOF_STD_map_hpp_INCLUDED
#define BOOST_TYPEOF_STD_map_hpp_INCLUDED

#include <map>
#include <boost/typeof/typeof.hpp>
#include <boost/typeof/std/memory.hpp>
#include <boost/typeof/std/functional.hpp>
#include <boost/typeof/std/utility.hpp>

#include BOOST_TYPEOF_INCREMENT_REGISTRATION_GROUP()

// Register std::map and std::multimap with Boost.Typeof for each arity:
// (key, mapped type) plus the optional comparator and allocator arguments.
BOOST_TYPEOF_REGISTER_TEMPLATE(std::map, 2)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::map, 3)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::map, 4)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::multimap, 2)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::multimap, 3)
BOOST_TYPEOF_REGISTER_TEMPLATE(std::multimap, 4)

#endif//BOOST_TYPEOF_STD_map_hpp_INCLUDED
| 379 |
841 | package cgeo.geocaching.maps.mapsforge.v6;
import cgeo.geocaching.location.Geopoint;
import org.mapsforge.core.model.LatLong;
public final class MapsforgeUtils {

    private MapsforgeUtils() {
        // Do not instantiate, utility class
    }

    /** Converts a c:geo {@link Geopoint} into a Mapsforge {@link LatLong}. */
    public static LatLong toLatLong(final Geopoint geopoint) {
        return new LatLong(geopoint.getLatitude(), geopoint.getLongitude());
    }

    /** Converts a Mapsforge {@link LatLong} into a c:geo {@link Geopoint}. */
    public static Geopoint toGeopoint(final LatLong latLong) {
        return new Geopoint(latLong.getLatitude(), latLong.getLongitude());
    }
}
| 201 |
1,056 | <filename>javafx/javafx2.editor/src/org/netbeans/modules/javafx2/editor/completion/impl/SimpleClassItem.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.javafx2.editor.completion.impl;
import java.util.Collection;
import java.util.logging.Logger;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.ElementFilter;
import javax.swing.ImageIcon;
import org.netbeans.api.editor.mimelookup.MimeRegistration;
import org.netbeans.modules.javafx2.editor.JavaFXEditorUtils;
import org.netbeans.modules.javafx2.editor.completion.beans.FxBean;
import org.netbeans.spi.editor.completion.CompletionItem;
import org.openide.util.ImageUtilities;
import org.openide.util.NbBundle;
/**
* Simple class completion: just insert the (right now fully qualified) classname
*
* @author sdedic
*/
public class SimpleClassItem extends AbstractCompletionItem {
    private static final Logger LOG = Logger.getLogger(SimpleClassItem.class.getName()); // NOI18N

    /** Resource path of the class icon shown in the completion popup. */
    private static final String ICON_CLASS = "org/netbeans/modules/javafx2/editor/resources/class.png"; // NOI18N

    // Simple (unqualified) class name.
    private String className;
    // Fully qualified class name.
    private String fullClassName;
    // Cached HTML for the left column of the completion popup.
    private String leftText;
    // True when the class is blacklisted; rendered with a "deprecated" style.
    private boolean deprecated;
    // Completion sort priority.
    private int priority;

    // Lazily loaded icon, shared by all items.
    private static ImageIcon ICON;

    public SimpleClassItem(CompletionContext ctx, String text) {
        super(ctx, text);
    }

    void setDeprecated(boolean deprecated) {
        this.deprecated = deprecated;
    }

    void setPriority(int priority) {
        this.priority = priority;
    }

    void setClassName(String n) {
        this.className = n;
    }

    void setFullClassName(String n) {
        fullClassName = n;
    }

    public String getClassName() {
        return className;
    }

    public String getFullClassName() {
        return fullClassName;
    }

    @Override
    public int getSortPriority() {
        return priority;
    }

    @Override
    public CharSequence getSortText() {
        return className;
    }

    /**
     * Builds (and caches) the HTML shown in the popup's left column:
     * the class name, optionally styled as deprecated, followed by
     * its package name.
     */
    @Override
    protected String getLeftHtmlText() {
        if (leftText != null) {
            return leftText;
        }
        String s;
        if (deprecated) {
            s = NbBundle.getMessage(SimpleClassItem.class, "FMT_Deprecated", className);
        } else {
            s = className;
        }
        // Append the package: FQN minus the simple name and the trailing dot.
        s = NbBundle.getMessage(SimpleClassItem.class,
                "FMT_AddPackage", s,
                fullClassName.substring(0, fullClassName.length() - className.length() - 1));
        return this.leftText = s;
    }

    @Override
    protected ImageIcon getIcon() {
        if (ICON == null) {
            ICON = ImageUtilities.loadImageIcon(ICON_CLASS, false);
        }
        return ICON;
    }

    @Override
    public CharSequence getInsertPrefix() {
        return className;
    }

    @Override
    protected String getSubstituteText() {
        // the opening < is a part of the replacement area
        return "<" + super.getSubstituteText() + (ctx.isReplaceExisting() ? "" : " "); // NOI18N
    }

    /** Creates {@link SimpleClassItem}s for instantiable classes in FXML completion. */
    @MimeRegistration(mimeType=JavaFXEditorUtils.FXML_MIME_TYPE, service=ClassItemFactory.class)
    public static class ItemFactory implements ClassItemFactory {

        @Override
        public CompletionItem convert(TypeElement elem, CompletionContext ctx, int priorityHint) {
            // ignore Strings, they are written as char content
            if (elem.getQualifiedName().contentEquals("java.lang.String")) { // NOI18N
                return null;
            }
            boolean ok = false;
            Collection<? extends ExecutableElement> execs = ElementFilter.constructorsIn(elem.getEnclosedElements());
            for (ExecutableElement e : execs) {
                if (!e.getModifiers().contains(Modifier.PUBLIC)) {
                    // ignore non-public ctors
                    continue;
                }
                if (e.getModifiers().contains(Modifier.ABSTRACT)) {
                    // ignore abstract ctors
                    continue;
                }
                if (e.getParameters().isEmpty()) {
                    ok = true;
                }
            }
            // public no-arg ctor found -> provide an item
            String fqn = elem.getQualifiedName().toString();
            if (!ok) {
                // last chance - try to find a Builder
                FxBean bean = ctx.getBeanInfo(fqn);
                if (bean != null && !bean.isFxInstance()
                        // not entirely correct, since Builder can create a non-abstract subclass,
                        // but eliminates abominations like Node and Parent. Next step will be a
                        // blacklist for classes.
                        && !elem.getModifiers().contains(Modifier.ABSTRACT)
                        && bean.getBuilder() != null) {
                    ok = true;
                }
            }
            if (!ok) {
                return null;
            }
            String sn = ctx.getSimpleClassName(fqn);
            return setup(new SimpleClassItem(ctx,
                    sn == null ? fqn : sn),
                    elem, ctx, priorityHint);
        }
    }

    /** Populates an item's fields from the resolved type element. */
    static SimpleClassItem setup(
            SimpleClassItem item, TypeElement elem, CompletionContext ctx, int priority) {
        item.setFullClassName(elem.getQualifiedName().toString());
        item.setClassName(elem.getSimpleName().toString());
        item.setDeprecated(ctx.isBlackListed(elem));
        item.setPriority(priority);
        return item;
    }

    public String toString() {
        return "simple-class[" + getFullClassName() + "]";
    }
}
| 2,833 |
2,594 | <reponame>deenu713/bundletool
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.tools.build.bundletool.device;
import com.android.bundle.Devices.DeviceSpec;
import com.android.bundle.Targeting.DeviceGroupModuleTargeting;
import com.android.bundle.Targeting.ModuleTargeting;
import java.util.Collections;
/**
* A {@link TargetingDimensionMatcher} that provides module matching on a set of device groups.
*
* <p>Device groups are an artificial concept and they are explicitly defined in the {@link
* DeviceSpec}.
*/
public class DeviceGroupModuleMatcher
    extends TargetingDimensionMatcher<DeviceGroupModuleTargeting> {

  public DeviceGroupModuleMatcher(DeviceSpec deviceSpec) {
    super(deviceSpec);
  }

  /**
   * A module matches when it declares no device-group targeting at all, or
   * when at least one of its targeted groups is present in the device spec.
   */
  @Override
  public boolean matchesTargeting(DeviceGroupModuleTargeting targetingValue) {
    boolean noTargeting =
        targetingValue.equals(DeviceGroupModuleTargeting.getDefaultInstance());
    return noTargeting
        || !Collections.disjoint(
            targetingValue.getValueList(), getDeviceSpec().getDeviceGroupsList());
  }

  @Override
  protected boolean isDeviceDimensionPresent() {
    return !getDeviceSpec().getDeviceGroupsList().isEmpty();
  }

  @Override
  protected void checkDeviceCompatibleInternal(DeviceGroupModuleTargeting targetingValue) {
    // Every device is compatible with every device-group targeting.
  }

  @Override
  protected DeviceGroupModuleTargeting getTargetingValue(ModuleTargeting moduleTargeting) {
    return moduleTargeting.getDeviceGroupTargeting();
  }
}
| 565 |
377 | /*******************************************************************************
* * Copyright 2012 Impetus Infotech.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
******************************************************************************/
package com.impetus.kundera.persistence.context;
import com.impetus.kundera.graph.Node;
/**
* Logs CRUD events,
*
* @author vivek.mishra
*
*/
public class EventLog
{
    // Kind of CRUD operation that was logged.
    private EventType eventType;

    // Time this log entry was created, in milliseconds since the epoch.
    private long timeinMillies;

    // Node the event was performed on.
    private Node node;

    EventLog(EventType eventType, Node transactional)
    {
        this.node = transactional;
        this.eventType = eventType;
        // Capture the creation time of this entry.
        this.timeinMillies = System.currentTimeMillis();
    }

    /**
     * @return the original (pre-event) node state, usable as a save point
     */
    Node getSavePointData()
    {
        return node.getOriginalNode();
    }

    /**
     * @return the entityId
     */
    Object getEntityId()
    {
        return node.getNodeId();
    }

    /**
     * @return the eventType
     */
    EventType getEventType()
    {
        return eventType;
    }

    /**
     * @return the timeinMillies
     */
    long getTimeinMillies()
    {
        return timeinMillies;
    }

    /**
     * @return the node
     */
    Node getNode()
    {
        return node;
    }

    /** Supported CRUD event kinds. */
    public enum EventType
    {
        INSERT, UPDATE, DELETE;
    }
}
| 660 |
518 | <filename>Source/Kernel/iSCSIPDUShared.h
/*
* Copyright (c) 2016, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __ISCSI_PDU_SHARED_H__
#define __ISCSI_PDU_SHARED_H__
// Reverse DNS notation is necessary for kernel code to prevent namespace
// collisions. For convenience, we abridge the name here.
#define iSCSIPDU com_NSinenian_iSCSIPDU
// If used in user-space, this header will need to include additional
// headers that define primitive fixed-size types. If used with the kernel,
// IOLib must be included for kernel memory allocation
#ifdef KERNEL
#include <IOKit/IOLib.h>
#else
#include <stdlib.h>
#include <MacTypes.h>
#include <CoreFoundation/CoreFoundation.h>
#endif
///////////////////// BYTE SIZE OF VARIOUS PDU FIELDS //////////////////////
/*! Byte size of the data segment length field in all iSCSI PDUs. */
static const unsigned short kiSCSIPDUDataSegmentLengthSize = 3;
/*! Basic header segment size for a PDU. */
static const unsigned short kiSCSIPDUBasicHeaderSegmentSize = 48;
/*! Each PDU must be a multiple of this many bytes. If the data contained
 * in a PDU is less than this value it is padded with zeros. */
static const unsigned short kiSCSIPDUByteAlignment = 4;
/*! Reserved target transfer tag value. */
static const UInt32 kiSCSIPDUTargetTransferTagReserved = 0xFFFFFFFF;
/*! Reserved initiator task tag value. */
static const UInt32 kiSCSIPDUInitiatorTaskTagReserved = 0xFFFFFFFF;
/*! Bit offset within opcode byte for request PDUs that should be set to
 * '1' to indicate immediate delivery of the PDU. */
static const UInt8 kiSCSIPDUImmediateDeliveryFlag = 0x40;
/*! Fields that are common to the basic header segment of all PDUs.
 * The ordering of these fields must not be changed as it matches
 * the ordering of respective fields in the PDU (struct is packed so it
 * can be overlaid directly on the wire format). */
typedef struct __iSCSIPDUCommonBHS {
    UInt8 opCodeAndDeliveryMarker;
    UInt8 opCodeFields[3];
    UInt8 totalAHSLength;
    UInt8 dataSegmentLength[kiSCSIPDUDataSegmentLengthSize];
    // Interpreted as either a LUN or opcode-specific fields, depending on
    // the opcode (hence the combined name).
    UInt64 LUNorOpCodeFields;
    UInt32 initiatorTaskTag;
    UInt64 reserved;
    UInt64 reserved2;
    UInt64 reserved3;
    UInt32 reserved4;
} __attribute__((packed)) iSCSIPDUCommonBHS;
/*! Fields that are common to the basic header segment of initiator PDUs.
 * The ordering of these fields must not be changed as it matches
 * the ordering of respective fields in the PDU. */
typedef struct __iSCSIPDUInitiatorBHS {
    UInt8 opCodeAndDeliveryMarker;
    UInt8 opCodeFields[3];
    UInt8 totalAHSLength;
    UInt8 dataSegmentLength[kiSCSIPDUDataSegmentLengthSize];
    UInt64 LUNorOpCodeFields;
    UInt32 initiatorTaskTag;
    UInt32 reserved;
    UInt32 cmdSN;
    UInt32 expStatSN;
    UInt32 reserved2;
    UInt64 reserved3;
    UInt32 reserved4;
} __attribute__((packed)) iSCSIPDUInitiatorBHS;
/*! Fields that are common to the basic header segment of target PDUs.
 * The ordering of these fields must not be changed as it matches
 * the ordering of respective fields in the PDU. */
typedef struct __iSCSIPDUTargetBHS {
    UInt8 opCode;
    UInt8 opCodeFields[3];
    UInt8 totalAHSLength;
    UInt8 dataSegmentLength[kiSCSIPDUDataSegmentLengthSize];
    UInt64 LUNorOpCodeFields;
    UInt32 initiatorTaskTag;
    UInt32 reserved;
    UInt32 statSN;
    UInt32 expCmdSN;
    UInt32 maxCmdSN;
    UInt64 reserved2;
    UInt32 reserved3;
} __attribute__((packed)) iSCSIPDUTargetBHS;
/*! Possible reject codes that may be issued throughout the login process. */
enum iSCSIPDURejectCode {
    /*! Reserved reject code (not used). */
    kiSCSIPDURejectReserved = 0x01,
    /*! Data digest error (may resend original PDU). */
    kiSCSIPDURejectDataDigestError = 0x02,
    /*! Sequence ack was rejected (may resend original PDU). */
    kiSCSIPDURejectSNACKReject = 0x03,
    /*! iSCSI protocol error has occurred (e.g., SNACK was issued
     * for something that was already acknowledged). */
    kiSCSIPDURejectProtoError = 0x04,
    /*! The command is not supported. */
    kiSCSIPDURejectCmdNotSupported = 0x05,
    /*! Too many commands. */
    kiSCSIPDURejectTooManyImmediateCmds = 0x06,
    /*! There is a task in progress. */
    kiSCSIPDURejectTaskInProgress = 0x07,
    /*! Invalid data acknowledgement. */
    kiSCSIPDURejectInvalidDataACK = 0x08,
    /*! A PDU field was invalid. */
    kiSCSIPDURejectInvalidPDUField = 0x09,
    /*! Can't generate target transfer tag; out of resources. */
    kiSCSIPDURejectLongOperationReject = 0x0a,
    /*! Negotiation was reset. */
    kiSCSIPDURejectNegotiationReset = 0x0b,
    /*! Waiting to logout. */
    kiSCSIPDURejectWaitingForLogout = 0x0c
};
/*! Asynchronous iSCSI events to be handled by the session layer. */
enum iSCSIPDUAsyncMsgEvent {
    /*! SCSI asynchronous event (with sense data). */
    kiSCSIPDUAsyncMsgSCSIAsyncMsg = 0x00,
    /*! Target requests logout. */
    kiSCSIPDUAsyncMsgLogout = 0x01,
    /*! Target will drop connection.
     * (NOTE: identifier is missing the 'c' in "Async"; kept as-is for
     * source compatibility with existing callers.) */
    kiSCSIPDUAsynMsgDropConnection = 0x02,
    /*! Target will drop all connections. */
    kiSCSIPDUAsyncMsgDropAllConnections = 0x03,
    /*! Target requests parameter renegotiation. */
    kiSCSIPDUAsyncMsgNegotiateParams = 0x04,
    /*! Vendor specific event. */
    kiSCSIPDUAsyncMsgVendorCode = 0xFF
};
/*! Op codes are used to code PDUs sent from the initiator to the target.
 * They specify the type of commands or data encoded within the PDU. */
enum iSCSIPDUInitiatorOpCodes {
    /*! Initiator command for a ping. */
    kiSCSIPDUOpCodeNOPOut = 0x00,
    /*! SCSI command sent by the initiator. */
    kiSCSIPDUOpCodeSCSICmd = 0x01,
    /*! Task management request sent by the initiator. */
    kiSCSIPDUOpCodeTaskMgmtReq= 0x02,
    /*! Login request sent by the initiator. */
    kiSCSIPDUOpCodeLoginReq = 0x03,
    /*! Text request sent by the initiator. */
    kiSCSIPDUOpCodeTextReq = 0x04,
    /*! Data sent to a target. */
    kiSCSIPDUOpCodeDataOut = 0x05,
    /*! Logout request sent by the initiator. */
    kiSCSIPDUOpCodeLogoutReq = 0x06,
    /*! SNACK request sent by the initiator. */
    kiSCSIPDUOpCodeSNACKReq = 0x10,
    /*! Maximum allowable initiator op code for error-checking. */
    kiSCSIPDUMaxInitiatorOpCode
};
/*! Op codes are used to code PDUs sent from the target to the initiator.
 * They specify the type of responses or data encoded within the PDU. */
enum iSCSIPDUTargetOpCodes {
    /*! Target response for a ping from the initiator. */
    kiSCSIPDUOpCodeNOPIn = 0x20,
    /*! Target response for a SCSI command. */
    kiSCSIPDUOpCodeSCSIRsp = 0x21,
    /*! Target response to a task management request. */
    kiSCSIPDUOpCodeTaskMgmtRsp = 0x22,
    /*! Target response to a login request. */
    kiSCSIPDUOpCodeLoginRsp = 0x23,
    /*! Target response to a text request. */
    kiSCSIPDUOpCodeTextRsp = 0x24,
    /*! Target response with data (e.g., to a SCSI read request). */
    kiSCSIPDUOpCodeDataIn = 0x25,
    /*! Target response to a logout request. */
    kiSCSIPDUOpCodeLogoutRsp = 0x26,
    /*! Target response indicating it is ready to transfer. */
    kiSCSIPDUOpCodeR2T = 0x31,
    /*! Asynchronous message from the target. */
    kiSCSIPDUOpCodeAsyncMsg = 0x32,
    /*! Response indicating last PDU was rejected. */
    kiSCSIPDUOpCodeReject = 0x3F,
    /*! Maximum allowable target op code for error-checking. */
    kiSCSIPDUMaxTargetOpCode
};
#endif
| 3,293 |
605 | // SubModule2.h - Main header with same name as directory.
#include "SubModule2/Header3.h"
#include "SubModule2/Header4.h"
| 42 |
2,151 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CONTENT_RENDERER_RENDER_FRAME_METADATA_OBSERVER_IMPL_H_
#define CONTENT_RENDERER_RENDER_FRAME_METADATA_OBSERVER_IMPL_H_
#include "cc/trees/render_frame_metadata.h"
#include "cc/trees/render_frame_metadata_observer.h"
#include "content/common/render_frame_metadata.mojom.h"
#include "mojo/public/cpp/bindings/binding.h"
namespace cc {
class FrameTokenAllocator;
} // namespace cc
namespace content {
// Implementation of cc::RenderFrameMetadataObserver which lives in the
// renderer and observes frame submission. It then notifies the
// mojom::RenderFrameMetadataObserverClient, which is expected to be in the
// browser process, of the metadata associated with the frame.
//
// BindToCurrentThread should be called from the Compositor thread so that the
// Mojo pipe is properly bound.
//
// Subsequent usage should only be from the Compositor thread.
class RenderFrameMetadataObserverImpl
    : public cc::RenderFrameMetadataObserver,
      public mojom::RenderFrameMetadataObserver {
 public:
  RenderFrameMetadataObserverImpl(
      mojom::RenderFrameMetadataObserverRequest request,
      mojom::RenderFrameMetadataObserverClientPtrInfo client_info);
  ~RenderFrameMetadataObserverImpl() override;
  // cc::RenderFrameMetadataObserver:
  void BindToCurrentThread(
      cc::FrameTokenAllocator* frame_token_allocator) override;
  void OnRenderFrameSubmission(cc::RenderFrameMetadata metadata) override;
  // mojom::RenderFrameMetadataObserver:
  void ReportAllFrameSubmissionsForTesting(bool enabled) override;
 private:
  // When true this will notify |render_frame_metadata_observer_client_| of all
  // frame submissions.
  bool report_all_frame_submissions_for_testing_enabled_ = false;
  // Token of the last submitted frame. NOTE(review): presumably used to
  // de-duplicate reports — confirm in the .cc file.
  uint32_t last_frame_token_ = 0;
  // Metadata from the most recent submission; unset before the first one.
  base::Optional<cc::RenderFrameMetadata> last_render_frame_metadata_;
  // Not owned.
  cc::FrameTokenAllocator* frame_token_allocator_ = nullptr;
  // These are destroyed when BindToCurrentThread() is called.
  mojom::RenderFrameMetadataObserverRequest request_;
  mojom::RenderFrameMetadataObserverClientPtrInfo client_info_;
  mojo::Binding<mojom::RenderFrameMetadataObserver>
      render_frame_metadata_observer_binding_;
  mojom::RenderFrameMetadataObserverClientPtr
      render_frame_metadata_observer_client_;
  DISALLOW_COPY_AND_ASSIGN(RenderFrameMetadataObserverImpl);
};
} // namespace content
#endif // CONTENT_RENDERER_RENDER_FRAME_METADATA_OBSERVER_IMPL_H_
| 827 |
351 | <gh_stars>100-1000
# Copyright: (c) 2018, <NAME> (@jborean93) <<EMAIL>>
# MIT License (see LICENSE or https://opensource.org/licenses/MIT)
import base64
import struct
from thirdparty.ntlm_auth.constants import NegotiateFlags
from thirdparty.ntlm_auth.exceptions import NoAuthContextError
from thirdparty.ntlm_auth.messages import AuthenticateMessage, ChallengeMessage, \
NegotiateMessage
from thirdparty.ntlm_auth.session_security import SessionSecurity
class NtlmContext(object):
    """Stateful NTLM authentication context.

    Drives the three-message NTLM handshake (NEGOTIATE -> CHALLENGE ->
    AUTHENTICATE) via :meth:`step` and, once authentication is complete,
    provides signing and sealing through the negotiated session security.
    """

    def __init__(self, username, password, domain=None, workstation=None,
                 cbt_data=None, ntlm_compatibility=3):
        r"""
        Initialises an NTLM context to use when authenticating using the NTLM
        protocol.

        Initialises the NTLM context to use when sending and receiving
        messages to and from the server. It supports NTLMv2 authentication,
        signing and sealing with session_security, and generating a MIC
        structure.

        :param username: The username to authenticate with
        :param password: The password of the user
        :param domain: The domain part of the username (None if n/a)
        :param workstation: The local workstation (None if n/a)
        :param cbt_data: A GssChannelBindingsStruct or None to bind channel
            data with the auth process
        :param ntlm_compatibility: (Default 3)
            The Lan Manager Compatibility Level to use with the auth message
            This is set by an Administrator in the registry key
            'HKLM\SYSTEM\CurrentControlSet\Control\Lsa\LmCompatibilityLevel'
            The values correspond to the following;
                0 : LM and NTLMv1
                1 : LM, NTLMv1 and NTLMv1 with Extended Session Security
                2 : NTLMv1 and NTLMv1 with Extended Session Security
                3-5 : NTLMv2 Only
            Note: Values 3 to 5 are no different from a client perspective
        :raises ValueError: if ntlm_compatibility is outside the range 0-5
        """
        self.username = username
        self.password = password
        self.domain = domain
        self.workstation = workstation
        self.cbt_data = cbt_data
        self._server_certificate_hash = None  # deprecated for backwards compat
        self.ntlm_compatibility = ntlm_compatibility
        self.complete = False
        # Request the target info block in the challenge plus the strongest
        # signing/sealing options; the server masks out unsupported flags.
        self.negotiate_flags = (
            NegotiateFlags.NTLMSSP_NEGOTIATE_TARGET_INFO |
            NegotiateFlags.NTLMSSP_NEGOTIATE_128 |
            NegotiateFlags.NTLMSSP_NEGOTIATE_56 |
            NegotiateFlags.NTLMSSP_NEGOTIATE_UNICODE |
            NegotiateFlags.NTLMSSP_NEGOTIATE_VERSION |
            NegotiateFlags.NTLMSSP_NEGOTIATE_KEY_EXCH |
            NegotiateFlags.NTLMSSP_NEGOTIATE_ALWAYS_SIGN |
            NegotiateFlags.NTLMSSP_NEGOTIATE_SIGN |
            NegotiateFlags.NTLMSSP_NEGOTIATE_SEAL
        )
        # Add the LM/NTLM/ESS flags implied by the compatibility level.
        self._set_ntlm_compatibility_flags(self.ntlm_compatibility)
        self._negotiate_message = None
        self._challenge_message = None
        self._authenticate_message = None
        self._session_security = None

    @property
    def mic_present(self):
        """Whether the AUTHENTICATE message carried a MIC."""
        if self._authenticate_message:
            return bool(self._authenticate_message.mic)
        return False

    @property
    def session_key(self):
        """The exported session key, or None before authentication."""
        if self._authenticate_message:
            return self._authenticate_message.exported_session_key

    def reset_rc4_state(self, outgoing=True):
        """ Resets the signing cipher for the incoming or outgoing cipher. For SPNEGO for calculating mechListMIC. """
        if self._session_security:
            self._session_security.reset_rc4_state(outgoing=outgoing)

    def step(self, input_token=None):
        """Advance the handshake by one message.

        The first call returns the raw NEGOTIATE message; the second call
        consumes the server's CHALLENGE token and returns the AUTHENTICATE
        message, setting up session security if signing or sealing was
        negotiated.

        :param input_token: The raw CHALLENGE message (second call only)
        :return: The next raw NTLM message to send to the server
        """
        if self._negotiate_message is None:
            self._negotiate_message = NegotiateMessage(self.negotiate_flags,
                                                       self.domain,
                                                       self.workstation)
            return self._negotiate_message.get_data()
        else:
            self._challenge_message = ChallengeMessage(input_token)
            self._authenticate_message = AuthenticateMessage(
                self.username, self.password, self.domain, self.workstation,
                self._challenge_message, self.ntlm_compatibility,
                server_certificate_hash=self._server_certificate_hash,
                cbt_data=self.cbt_data
            )
            self._authenticate_message.add_mic(self._negotiate_message,
                                               self._challenge_message)
            # Use the flags from the AUTHENTICATE message as they may have
            # been negotiated down from what was originally requested.
            flag_bytes = self._authenticate_message.negotiate_flags
            flags = struct.unpack("<I", flag_bytes)[0]
            if flags & NegotiateFlags.NTLMSSP_NEGOTIATE_SEAL or \
                    flags & NegotiateFlags.NTLMSSP_NEGOTIATE_SIGN:
                self._session_security = SessionSecurity(
                    flags, self.session_key
                )
            self.complete = True
            return self._authenticate_message.get_data()

    def sign(self, data):
        """Return the NTLM signature over data.

        :raises NoAuthContextError: if no security context is established
        """
        if self._session_security is None:
            # Mirror wrap()/unwrap(): raise a clear error instead of an
            # AttributeError on the missing session security object.
            raise NoAuthContextError("Cannot sign data as no security context "
                                     "has been established")
        return self._session_security.get_signature(data)

    def verify(self, data, signature):
        """Verify an NTLM signature over data.

        :raises NoAuthContextError: if no security context is established
        """
        if self._session_security is None:
            raise NoAuthContextError("Cannot verify data as no security "
                                     "context has been established")
        self._session_security.verify_signature(data, signature)

    def wrap(self, data):
        """Seal (encrypt) data, returning the signature header + sealed data.

        :raises NoAuthContextError: if no security context is established
        """
        if self._session_security is None:
            raise NoAuthContextError("Cannot wrap data as no security context "
                                     "has been established")
        data, header = self._session_security.wrap(data)
        return header + data

    def unwrap(self, data):
        """Unseal (decrypt) data produced by :meth:`wrap`.

        :raises NoAuthContextError: if no security context is established
        """
        if self._session_security is None:
            raise NoAuthContextError("Cannot unwrap data as no security "
                                     "context has been established")
        # The first 16 bytes are the signature header; the rest is sealed.
        header = data[0:16]
        data = data[16:]
        message = self._session_security.unwrap(data, header)
        return message

    def _set_ntlm_compatibility_flags(self, ntlm_compatibility):
        """Add the negotiate flags implied by the LM compatibility level.

        :raises ValueError: if ntlm_compatibility is outside the range 0-5
        """
        if (ntlm_compatibility >= 0) and (ntlm_compatibility <= 5):
            if ntlm_compatibility == 0:
                self.negotiate_flags |= \
                    NegotiateFlags.NTLMSSP_NEGOTIATE_NTLM | \
                    NegotiateFlags.NTLMSSP_NEGOTIATE_LM_KEY
            elif ntlm_compatibility == 1:
                self.negotiate_flags |= \
                    NegotiateFlags.NTLMSSP_NEGOTIATE_NTLM | \
                    NegotiateFlags.NTLMSSP_NEGOTIATE_EXTENDED_SESSIONSECURITY
            else:
                self.negotiate_flags |= \
                    NegotiateFlags.NTLMSSP_NEGOTIATE_EXTENDED_SESSIONSECURITY
        else:
            # ValueError (a subclass of the previously raised bare Exception,
            # so backwards compatible) is the conventional error for an
            # out-of-range argument.
            raise ValueError("Unknown ntlm_compatibility level - "
                             "expecting value between 0 and 5")
# Deprecated in favour of NtlmContext - this current class is heavily geared
# towards a HTTP API which is not always the case with NTLM. This is currently
# just a thin wrapper over NtlmContext and will be removed in future ntlm-auth
# versions
class Ntlm(object):
    """Deprecated HTTP-oriented facade over :class:`NtlmContext`.

    Messages are exchanged as base64-encoded strings and the wrapped
    context's internals are re-exposed as read/write properties so legacy
    callers that poked at these attributes keep working.
    """
    def __init__(self, ntlm_compatibility=3):
        # Username/password are supplied later via create_authenticate_message.
        self._context = NtlmContext(None, None,
                                    ntlm_compatibility=ntlm_compatibility)
        self._challenge_token = None
    # The properties below delegate directly to the wrapped NtlmContext for
    # backwards compatibility with code that accessed them on this class.
    @property
    def negotiate_flags(self):
        return self._context.negotiate_flags
    @negotiate_flags.setter
    def negotiate_flags(self, value):
        self._context.negotiate_flags = value
    @property
    def ntlm_compatibility(self):
        return self._context.ntlm_compatibility
    @ntlm_compatibility.setter
    def ntlm_compatibility(self, value):
        self._context.ntlm_compatibility = value
    @property
    def negotiate_message(self):
        return self._context._negotiate_message
    @negotiate_message.setter
    def negotiate_message(self, value):
        self._context._negotiate_message = value
    @property
    def challenge_message(self):
        return self._context._challenge_message
    @challenge_message.setter
    def challenge_message(self, value):
        self._context._challenge_message = value
    @property
    def authenticate_message(self):
        return self._context._authenticate_message
    @authenticate_message.setter
    def authenticate_message(self, value):
        self._context._authenticate_message = value
    @property
    def session_security(self):
        return self._context._session_security
    @session_security.setter
    def session_security(self, value):
        self._context._session_security = value
    def create_negotiate_message(self, domain_name=None, workstation=None):
        """Return the base64-encoded NEGOTIATE message."""
        self._context.domain = domain_name
        self._context.workstation = workstation
        msg = self._context.step()
        return base64.b64encode(msg)
    def parse_challenge_message(self, msg2):
        """Store the server's base64-encoded CHALLENGE token for the next step."""
        self._challenge_token = base64.b64decode(msg2)
    def create_authenticate_message(self, user_name, password,
                                    domain_name=None, workstation=None,
                                    server_certificate_hash=None):
        """Return the base64-encoded AUTHENTICATE message.

        Must be called after parse_challenge_message().
        """
        self._context.username = user_name
        self._context.password = password
        self._context.domain = domain_name
        self._context.workstation = workstation
        self._context._server_certificate_hash = server_certificate_hash
        msg = self._context.step(self._challenge_token)
        return base64.b64encode(msg)
| 4,177 |
303 | <reponame>ofZach/landlinesApp<filename>www/draw/metadata/6/6176.json
{"id":6176,"line-1":"<NAME>","line-2":"Marshall Islands","attribution":"©2015 CNES / Astrium, DigitalGlobe","url":"https://www.google.com/maps/@9.397633,167.474357,17z/data=!3m1!1e3"} | 103 |
975 | <reponame>zzqcn/joy
/*
*
* Copyright (c) 2016 Cisco Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* Neither the name of the Cisco Systems, Inc. nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
/**
 * \file ppi.h
 *
 * \brief per-packet information (ppi) module using the generic
 * programming interface defined in feature.h.
 *
 */
#ifndef PPI_H
#define PPI_H
#include <stdio.h>
#include "output.h"
#include "feature.h"
/*! Maximum number of packets recorded per flow in a ppi structure. */
#define MAX_NUM_PKT 200
/** usage string */
#define ppi_usage "  ppi=1                      include per-packet info (ppi)\n"
/** ppi filter key (always true: every record is eligible) */
#define ppi_filter(record) 1
/*! Number of bytes of TCP options captured per packet. */
#define TCP_OPT_LEN 24
/*! Per-packet record captured for a flow. */
struct pkt_info {
    struct timeval time;                /*!< packet timestamp */
    unsigned int ack;                   /*!< TCP acknowledgement number */
    unsigned int seq;                   /*!< TCP sequence number */
    unsigned short len;                 /*!< payload length */
    unsigned char flags;                /*!< TCP flags byte */
    unsigned short opt_len;             /*!< bytes used in opts[] */
    unsigned char opts[TCP_OPT_LEN];    /*!< raw TCP options (truncated) */
};
/** ppi structure */
typedef struct ppi {
    unsigned int np;                        /*!< number of entries in pkt_info[] */
    struct pkt_info pkt_info[MAX_NUM_PKT];  /*!< per-packet records */
} ppi_t;
/*! Render TCP flags as a human-readable string. */
void tcp_flags_to_string(unsigned char flags, char *string);
/*! Print TCP options as JSON to the given zfile. */
void tcp_opt_print_json(zfile f,
                        const unsigned char *options,
                        unsigned int total_len);
declare_feature(ppi);
/** initialization function */
void ppi_init(struct ppi **ppi_handle);
/** update ppi */
void ppi_update(struct ppi *ppi,
                const struct pcap_pkthdr *header,
                const void *data,
                unsigned int len,
                unsigned int report_ppi);
/** JSON print ppi */
void ppi_print_json(const struct ppi *w1,
                    const struct ppi *w2,
                    zfile f);
/** delete ppi */
void ppi_delete(struct ppi **ppi_handle);
/** ppi unit test entry point */
void ppi_unit_test(void);
#endif /* PPI_H */
| 1,083 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.core.windows.actions;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.KeyStroke;
import org.netbeans.core.windows.Constants;
import org.netbeans.core.windows.ModeImpl;
import org.netbeans.core.windows.TopComponentTracker;
import org.netbeans.core.windows.WindowManagerImpl;
import org.netbeans.core.windows.view.ui.popupswitcher.KeyboardPopupSwitcher;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
import org.openide.util.WeakListeners;
import org.openide.windows.TopComponent;
/**
 * Invokes the Recent View List popup switcher (Ctrl+Tab-style window
 * switching) or jumps directly to the most recently used window.
 *
 * @author <NAME>
 */
public final class RecentViewListAction extends AbstractAction
            implements PropertyChangeListener {
    // When true, only editor (document) windows are offered for switching.
    private final boolean documentsOnly;
    /** Creates a new instance of RecentViewListAction */
    public RecentViewListAction() {
        this( false );
    }
    /** Factory for a variant restricted to editor (document) windows. */
    public static Action createDocumentsOnlyInstance() {
        return new RecentViewListAction( true );
    }
    private RecentViewListAction( boolean documentsOnly ) {
        this.documentsOnly = documentsOnly;
        putValue(NAME, NbBundle.getMessage(RecentViewListAction.class,
                documentsOnly ? "CTL_RecentDocumentListAction" : "CTL_RecentViewListAction"));
        // Weak listener so this action does not pin the registry.
        TopComponent.getRegistry().addPropertyChangeListener(
                WeakListeners.propertyChange(this, TopComponent.getRegistry()));
        updateEnabled();
    }
    @Override
    public void actionPerformed(ActionEvent evt) {
        boolean editors = true;
        boolean views = !documentsOnly;
        // "immediately" means: switch straight away within the same window
        // category (documents vs. views) as the currently active component.
        if( "immediately".equals( evt.getActionCommand() ) ) {
            TopComponent activeTc = TopComponent.getRegistry().getActivated();
            if( null != activeTc ) {
                if( TopComponentTracker.getDefault().isEditorTopComponent( activeTc ) ) {
                    //switching in a document, go to some other document
                    views = false;
                } else {
                    //switching in a view, go to some other view
                    editors = false;
                    views = true;
                }
            }
        }
        TopComponent[] documents = getRecentWindows(editors, views);
        // Nothing to switch to unless there are at least two candidates.
        if (documents.length < 2) {
            return;
        }
        // Keyboard invocation (not from a menu item): show the popup switcher
        // that stays open while the modifier key is held down.
        if(!"immediately".equals(evt.getActionCommand()) && // NOI18N
           !(evt.getSource() instanceof javax.swing.JMenuItem)) {
            // #46800: fetch key directly from action command
            KeyStroke keyStroke = Utilities.stringToKey(evt.getActionCommand());
            if(keyStroke != null) {
                int triggerKey = keyStroke.getKeyCode();
                int reverseKey = KeyEvent.VK_SHIFT;
                int releaseKey = 0;
                int modifiers = keyStroke.getModifiers();
                // The popup is dismissed when the modifier key is released.
                if((InputEvent.CTRL_MASK & modifiers) != 0) {
                    releaseKey = KeyEvent.VK_CONTROL;
                } else if((InputEvent.ALT_MASK & modifiers) != 0) {
                    releaseKey = KeyEvent.VK_ALT;
                } else if((InputEvent.META_MASK & modifiers) != 0) {
                    releaseKey = KeyEvent.VK_META;
                }
                if(releaseKey != 0) {
                    if (!KeyboardPopupSwitcher.isShown()) {
                        KeyboardPopupSwitcher.showPopup(documentsOnly, releaseKey, triggerKey, (evt.getModifiers() & KeyEvent.SHIFT_MASK) == 0);
                    }
                    return;
                }
            }
        }
        // No popup: jump directly. Shift reverses direction (least recent).
        int documentIndex = (evt.getModifiers() & KeyEvent.SHIFT_MASK) == 0 ? 1 : documents.length-1;
        TopComponent tc = documents[documentIndex];
        // #37226 Unmaximize the other mode if needed.
        WindowManagerImpl wm = WindowManagerImpl.getInstance();
        ModeImpl mode = (ModeImpl) wm.findMode(tc);
        if(mode != null && mode != wm.getCurrentMaximizedMode()) {
            wm.switchMaximizedMode(null);
        }
        tc.requestActive();
    }
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
        // Re-evaluate enablement whenever windows are opened or closed.
        if(TopComponent.Registry.PROP_OPENED.equals(evt.getPropertyName())) {
            updateEnabled();
        }
    }
    /** Only here for fix #41477:, called from layer.xml:
     * For KDE on unixes, Ctrl+TAB is occupied by OS,
     * so we also register Ctrl+BACK_QUOTE as recent view list action shortcut.
     * For other OS's, Ctrl+TAB is the only default, because we create link
     * not pointing to anything by returning null
     */
    public static String getStringRep4Unixes() {
        if (Utilities.isUnix() && !Utilities.isMac()) {
            return "Actions/Window/org-netbeans-core-windows-actions-RecentViewListAction.instance"; //NOI18N
        }
        return null;
    }
    /**
     * Update enable state of this action.
     */
    private void updateEnabled() {
        setEnabled(isMoreThanOneViewOpened());
    }
    private boolean isMoreThanOneViewOpened() {
        if( !documentsOnly ) {
            return TopComponent.getRegistry().getOpened().size() > 1;
        }
        // NOTE(review): returns based on the first editor mode encountered —
        // appears to assume a single editor mode; confirm that assumption.
        for(Iterator it = WindowManagerImpl.getInstance().getModes().iterator(); it.hasNext(); ) {
            ModeImpl mode = (ModeImpl)it.next(); {
                if (mode.getKind() == Constants.MODE_KIND_EDITOR)
                    return (mode.getOpenedTopComponents().size() > 1);
            }
        }
        return false;
    }
    /**
     * Collects the recent-window list filtered by category.
     *
     * @param editors include editor (document) windows
     * @param views include non-editor view windows
     * @return recent windows, most recently used first, never null entries
     */
    private static TopComponent[] getRecentWindows( boolean editors, boolean views) {
        WindowManagerImpl wm = WindowManagerImpl.getInstance();
        TopComponent[] documents = wm.getRecentViewList();
        TopComponentTracker tcTracker = TopComponentTracker.getDefault();
        List<TopComponent> docsList = new ArrayList<TopComponent>();
        for (int i = 0; i < documents.length; i++) {
            TopComponent tc = documents[i];
            if (tc == null) {
                continue;
            }
            // Skip windows that are not docked in any mode.
            ModeImpl mode = (ModeImpl)wm.findMode(tc);
            if (mode == null) {
                continue;
            }
            if( (editors && tcTracker.isEditorTopComponent( tc ))
                    || (views && tcTracker.isViewTopComponent( tc )) ) {
                docsList.add(tc);
            }
        }
        return docsList.toArray(new TopComponent[0]);
    }
}
| 3,206 |
903 | package json;
import org.develnext.jphp.core.compiler.jvm.JvmCompilerCase;
import org.develnext.jphp.ext.jsoup.JsoupExtension;
import php.runtime.env.CompileScope;
public class JsoupJvmTestCase extends JvmCompilerCase {
    /** Extends the base compile scope with the Jsoup extension under test. */
    @Override
    protected CompileScope newScope() {
        final CompileScope compileScope = super.newScope();
        compileScope.registerExtension(new JsoupExtension());
        return compileScope;
    }
}
| 156 |
2,372 | <filename>physx/samples/sampleframework/renderer/src/d3d11/D3D11RendererTexture2D.h<gh_stars>1000+
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of NVIDIA CORPORATION nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ''AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Copyright (c) 2008-2021 NVIDIA Corporation. All rights reserved.
#ifndef D3D11_RENDERER_TEXTURE_2D_H
#define D3D11_RENDERER_TEXTURE_2D_H
#include <RendererConfig.h>
#if defined(RENDERER_ENABLE_DIRECT3D11)
#include <RendererTexture2D.h>
#include "D3D11Renderer.h"
#include "D3D11RendererTextureCommon.h"
namespace SampleRenderer
{
class D3D11RendererTexture2D : public RendererTexture2D, public D3D11RendererResource
{
	// Render targets and spot lights need direct access to the D3D views below.
	friend class D3D11RendererTarget;
	friend class D3D11RendererSpotLight;
public:
	D3D11RendererTexture2D(ID3D11Device& d3dDevice, ID3D11DeviceContext& d3dDeviceContext, const RendererTexture2DDesc& desc);
	virtual ~D3D11RendererTexture2D(void);
public:
	// CPU access to a mip level; returns a pointer and the row pitch.
	virtual void* lockLevel(PxU32 level, PxU32& pitch);
	virtual void unlockLevel(PxU32 level);
	void bind(PxU32 samplerIndex, PxU32 flags = BIND_PIXEL);
	// select() simply forwards to bind() with default (pixel-stage) flags.
	virtual void select(PxU32 stageIndex)
	{
		bind(stageIndex);
	}
private:
	virtual void onDeviceLost(void);
	virtual void onDeviceReset(void);
	// Helpers that translate the renderer-agnostic RendererTexture2DDesc
	// into the D3D11 descriptor structs cached below.
	void loadTextureDesc(const RendererTexture2DDesc&);
	void loadSamplerDesc(const RendererTexture2DDesc&);
	void loadResourceDesc(const RendererTexture2DDesc&);
	void loadTargetDesc(const RendererTexture2DDesc&);
	void loadDepthStencilDesc(const RendererTexture2DDesc&);
private:
	ID3D11Device& m_d3dDevice;
	ID3D11DeviceContext& m_d3dDeviceContext;
	ID3D11Texture2D* m_d3dTexture;
	D3D11_TEXTURE2D_DESC m_d3dTextureDesc;
	ID3D11SamplerState* m_d3dSamplerState;
	D3D11_SAMPLER_DESC m_d3dSamplerDesc;
	ID3D11ShaderResourceView* m_d3dSRV;
	D3D11_SHADER_RESOURCE_VIEW_DESC m_d3dSRVDesc;
	ID3D11RenderTargetView* m_d3dRTV;
	D3D11_RENDER_TARGET_VIEW_DESC m_d3dRTVDesc;
	ID3D11DepthStencilView* m_d3dDSV;
	D3D11_DEPTH_STENCIL_VIEW_DESC m_d3dDSVDesc;
	// NOTE(review): appears to hold per-mip-level CPU-side staging data used
	// by lockLevel/unlockLevel — confirm in the .cpp file.
	PxU8** m_data;
	D3D11_SUBRESOURCE_DATA* m_resourceData;
};
#endif // #if defined(RENDERER_ENABLE_DIRECT3D11)
#endif
| 1,352 |
348 | <gh_stars>100-1000
{"nom":"Noyarey","circ":"3ème circonscription","dpt":"Isère","inscrits":1736,"abs":1070,"votants":666,"blancs":32,"nuls":14,"exp":620,"res":[{"nuance":"REM","nom":"<NAME>","voix":407},{"nuance":"FI","nom":"<NAME>","voix":213}]} | 100 |
763 | package org.batfish.vendor.check_point_management.parsing;
import static org.batfish.common.BfConsts.RELPATH_CHECKPOINT_MANAGEMENT_DIR;
import static org.batfish.common.matchers.WarningMatchers.hasText;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_GATEWAYS_AND_SERVERS;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_GROUPS;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_NAT_RULEBASE;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_SERVICES_ICMP;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_SERVICES_TCP;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_SERVICES_UDP;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.RELPATH_CHECKPOINT_SHOW_SERVICE_GROUPS;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.buildObjectsList;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.mergeAccessLayers;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.mergeNatRuleOrSection;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.mergeNatRulebasePages;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.readGatewaysAndServers;
import static org.batfish.vendor.check_point_management.parsing.CheckpointManagementParser.readNatRulebase;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import javax.annotation.Nonnull;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.answers.ParseVendorConfigurationAnswerElement;
import org.batfish.vendor.check_point_management.AccessLayer;
import org.batfish.vendor.check_point_management.AccessRule;
import org.batfish.vendor.check_point_management.AccessSection;
import org.batfish.vendor.check_point_management.AllInstallationTargets;
import org.batfish.vendor.check_point_management.CpmiAnyObject;
import org.batfish.vendor.check_point_management.Domain;
import org.batfish.vendor.check_point_management.GatewayOrServerPolicy;
import org.batfish.vendor.check_point_management.GatewaysAndServers;
import org.batfish.vendor.check_point_management.Group;
import org.batfish.vendor.check_point_management.NatMethod;
import org.batfish.vendor.check_point_management.NatRule;
import org.batfish.vendor.check_point_management.NatRulebase;
import org.batfish.vendor.check_point_management.NatSection;
import org.batfish.vendor.check_point_management.Original;
import org.batfish.vendor.check_point_management.Package;
import org.batfish.vendor.check_point_management.ServiceGroup;
import org.batfish.vendor.check_point_management.ServiceIcmp;
import org.batfish.vendor.check_point_management.ServiceTcp;
import org.batfish.vendor.check_point_management.ServiceUdp;
import org.batfish.vendor.check_point_management.SimpleGateway;
import org.batfish.vendor.check_point_management.Uid;
import org.junit.Test;
/** Test of {@link CheckpointManagementParser}. */
public final class CheckpointManagementParserTest {

  // Shared fixture names and UIDs used across the tests below.
  private static final String DOMAIN_NAME = "domain1";
  private static final String PACKAGE_NAME = "package1";
  private static final String SERVER_NAME = "server1";
  private static final Uid UID_ANY = Uid.of("100");
  private static final Uid UID_ORIG = Uid.of("101");
  private static final Uid UID_GW1 = Uid.of("102");
  private static final Uid UID_GW2 = Uid.of("103");

  /**
   * Builds a {@link Package} fixture in {@code DOMAIN_NAME}; {@code nat} controls whether the
   * package's nat-policy is enabled.
   */
  private static @Nonnull Package testPackage(boolean nat) {
    return new Package(
        new Domain(DOMAIN_NAME, Uid.of("domain1Uid")),
        AllInstallationTargets.instance(),
        PACKAGE_NAME,
        false,
        nat,
        Uid.of("package1Uid"));
  }

  /** A defined NAT rulebase must be ignored when the package's nat-policy is false. */
  @Test
  public void testReadNatRulebaseNatPolicyFalse() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Package pakij = testPackage(false);
    String natRulebaseInput =
        "[" // show-nat-rulebase
            + "{" // NatRulebase page
            + "\"uid\":\"0\","
            + "\"objects-dictionary\":[],"
            + "\"rulebase\":["
            + "{" // nat-rule
            + "\"type\":\"nat-rule\","
            + "\"auto-generated\":true,"
            + "\"uid\":\"0\","
            + "\"comments\":\"foo\","
            + "\"enabled\":true,"
            + "\"install-on\":[\"100\"],"
            + "\"method\":\"hide\","
            + "\"original-destination\":\"1\","
            + "\"original-service\":\"2\","
            + "\"original-source\":\"3\","
            + "\"rule-number\":1,"
            + "\"translated-destination\":\"4\","
            + "\"translated-service\":\"5\","
            + "\"translated-source\":\"6\""
            + "}" // nat-rule
            + "]," // rulebase
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}" // NatRulebase page
            + "]"; // show-nat-rulebase
    // NAT rulebase should be null despite definition because package nat-policy is false.
    assertNull(
        readNatRulebase(
            pakij,
            DOMAIN_NAME,
            ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_NAT_RULEBASE, natRulebaseInput),
            pvcae,
            SERVER_NAME));
  }

  /** An empty show-nat-rulebase file yields no rulebase. */
  @Test
  public void testReadNatRulebaseEmpty() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Package pakij = testPackage(true);
    String natRulebaseInput = "[]"; // show-nat-rulebase
    // NAT rulebase should be null since there are no rulebase pages.
    assertNull(
        readNatRulebase(
            pakij,
            DOMAIN_NAME,
            ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_NAT_RULEBASE, natRulebaseInput),
            pvcae,
            SERVER_NAME));
  }

  /** A missing show-nat-rulebase file yields no rulebase. */
  @Test
  public void testReadNatRulebaseMissingFile() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Package pakij = testPackage(true);
    // NAT rulebase should be null since the file is missing.
    assertNull(readNatRulebase(pakij, DOMAIN_NAME, ImmutableMap.of(), pvcae, SERVER_NAME));
  }

  /** A single NatRulebase page is parsed into the corresponding {@link NatRulebase}. */
  @Test
  public void testReadNatRulebaseSinglePage() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Package pakij = testPackage(true);
    String natRulebaseInput =
        "[" // show-nat-rulebase
            + "{" // NatRulebase page
            + "\"uid\":\"0\","
            + "\"objects-dictionary\":[],"
            + "\"rulebase\":["
            + "{" // nat-rule
            + "\"type\":\"nat-rule\","
            + "\"auto-generated\":true,"
            + "\"uid\":\"0\","
            + "\"comments\":\"foo\","
            + "\"enabled\":true,"
            + "\"install-on\":[\"100\"],"
            + "\"method\":\"hide\","
            + "\"original-destination\":\"1\","
            + "\"original-service\":\"2\","
            + "\"original-source\":\"3\","
            + "\"rule-number\":1,"
            + "\"translated-destination\":\"4\","
            + "\"translated-service\":\"5\","
            + "\"translated-source\":\"6\""
            + "}" // nat-rule
            + "]," // rulebase
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}" // NatRulebase page
            + "]"; // show-nat-rulebase
    // NAT rulebase should be populated.
    assertThat(
        readNatRulebase(
            pakij,
            DOMAIN_NAME,
            ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_NAT_RULEBASE, natRulebaseInput),
            pvcae,
            SERVER_NAME),
        equalTo(
            new NatRulebase(
                ImmutableMap.of(),
                ImmutableList.of(
                    new NatRule(
                        true,
                        "foo",
                        true,
                        ImmutableList.of(Uid.of("100")),
                        NatMethod.HIDE,
                        Uid.of("1"),
                        Uid.of("2"),
                        Uid.of("3"),
                        1,
                        Uid.of("4"),
                        Uid.of("5"),
                        Uid.of("6"),
                        Uid.of("0"))),
                Uid.of("0"))));
  }

  /** Two pages with the same rulebase UID are merged: rules and objects-dictionary combined. */
  @Test
  public void testReadNatRulebaseTwoPages() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Package pakij = testPackage(true);
    String natRulebaseInput =
        "[" // show-nat-rulebase
            + "{" // NatRulebase page1
            + "\"uid\":\"0\","
            + "\"objects-dictionary\":["
            + "{" // CpmiAnyObject
            + "\"uid\": \"100\","
            + "\"type\": \"CpmiAnyObject\","
            + "\"name\": \"Any\""
            + "}" // CpmiAnyObject
            + "],"
            + "\"rulebase\":["
            + "{" // nat-rule
            + "\"type\":\"nat-rule\","
            + "\"auto-generated\":true,"
            + "\"uid\":\"10\","
            + "\"comments\":\"foo\","
            + "\"enabled\":true,"
            + "\"install-on\":[\"100\"],"
            + "\"method\":\"hide\","
            + "\"original-destination\":\"1\","
            + "\"original-service\":\"2\","
            + "\"original-source\":\"3\","
            + "\"rule-number\":1,"
            + "\"translated-destination\":\"4\","
            + "\"translated-service\":\"5\","
            + "\"translated-source\":\"6\""
            + "}" // nat-rule
            + "]," // rulebase
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}," // NatRulebase page1
            + "{" // NatRulebase page2
            + "\"uid\":\"0\","
            + "\"objects-dictionary\":["
            + "{" // CpmiAnyObject
            + "\"uid\": \"101\","
            + "\"type\": \"Global\","
            + "\"name\": \"Original\""
            + "}" // CpmiAnyObject
            + "],"
            + "\"rulebase\":["
            + "{" // nat-rule
            + "\"type\":\"nat-rule\","
            + "\"auto-generated\":true,"
            + "\"uid\":\"11\","
            + "\"comments\":\"foo\","
            + "\"enabled\":true,"
            + "\"install-on\":[\"100\"],"
            + "\"method\":\"hide\","
            + "\"original-destination\":\"1\","
            + "\"original-service\":\"2\","
            + "\"original-source\":\"3\","
            + "\"rule-number\":2,"
            + "\"translated-destination\":\"4\","
            + "\"translated-service\":\"5\","
            + "\"translated-source\":\"6\""
            + "}" // nat-rule
            + "]," // rulebase
            + "\"from\": 2,"
            + "\"to\": 2"
            + "}" // NatRulebase page2
            + "]"; // show-nat-rulebase
    // NAT rulebase should be populated with merged rules and objects-dictionary from the two pages
    assertThat(
        readNatRulebase(
            pakij,
            DOMAIN_NAME,
            ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_NAT_RULEBASE, natRulebaseInput),
            pvcae,
            SERVER_NAME),
        equalTo(
            new NatRulebase(
                ImmutableMap.of(
                    UID_ANY, new CpmiAnyObject(UID_ANY),
                    UID_ORIG, new Original(UID_ORIG)),
                ImmutableList.of(
                    new NatRule(
                        true,
                        "foo",
                        true,
                        ImmutableList.of(Uid.of("100")),
                        NatMethod.HIDE,
                        Uid.of("1"),
                        Uid.of("2"),
                        Uid.of("3"),
                        1,
                        Uid.of("4"),
                        Uid.of("5"),
                        Uid.of("6"),
                        Uid.of("10")),
                    new NatRule(
                        true,
                        "foo",
                        true,
                        ImmutableList.of(Uid.of("100")),
                        NatMethod.HIDE,
                        Uid.of("1"),
                        Uid.of("2"),
                        Uid.of("3"),
                        2,
                        Uid.of("4"),
                        Uid.of("5"),
                        Uid.of("6"),
                        Uid.of("11"))),
                Uid.of("0"))));
  }

  /** A NatSection whose rules are split across two pages is merged back into one section. */
  @Test
  public void testMergeNatRulebasePagesChildSplitAcrossTwoPages() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    CpmiAnyObject anyObject = new CpmiAnyObject(UID_ANY);
    // rulebase contains section1 with two rules (rule1, rule2) & a rule at the top level (rule3)
    // The full rulebase is split across two pages for this test:
    // 1. rulebaseA contains part of section1 with only rule1
    // 2. rulebaseB contains the other part of section1 with rule2, and also the top-level rule3
    NatRule rule1 =
        new NatRule(
            true,
            "foo",
            true,
            ImmutableList.of(UID_ANY),
            NatMethod.HIDE,
            UID_ANY,
            UID_ANY,
            UID_ANY,
            1,
            UID_ANY,
            UID_ANY,
            UID_ANY,
            Uid.of("11"));
    NatSection section1a = new NatSection("section1", ImmutableList.of(rule1), Uid.of("1"));
    NatRulebase rulebaseA =
        new NatRulebase(
            ImmutableMap.of(UID_ANY, anyObject), ImmutableList.of(section1a), Uid.of("1000"));
    NatRule rule2 =
        new NatRule(
            true,
            "foo",
            true,
            ImmutableList.of(UID_ANY),
            NatMethod.HIDE,
            UID_ANY,
            UID_ANY,
            UID_ANY,
            2,
            UID_ANY,
            UID_ANY,
            UID_ANY,
            Uid.of("12"));
    NatRule rule3 =
        new NatRule(
            true,
            "foo",
            true,
            ImmutableList.of(UID_ANY),
            NatMethod.HIDE,
            UID_ANY,
            UID_ANY,
            UID_ANY,
            3,
            UID_ANY,
            UID_ANY,
            UID_ANY,
            Uid.of("13"));
    NatSection section1b = new NatSection("section1", ImmutableList.of(rule2), Uid.of("1"));
    NatRulebase rulebaseB =
        new NatRulebase(
            ImmutableMap.of(UID_ANY, anyObject),
            ImmutableList.of(section1b, rule3),
            Uid.of("1000"));
    NatSection section1 = new NatSection("section1", ImmutableList.of(rule1, rule2), Uid.of("1"));
    NatRulebase rulebaseCombined =
        new NatRulebase(
            ImmutableMap.of(UID_ANY, anyObject), ImmutableList.of(section1, rule3), Uid.of("1000"));
    assertThat(
        mergeNatRulebasePages(ImmutableList.of(rulebaseA, rulebaseB), pvcae),
        equalTo(rulebaseCombined));
  }

  /** Pages with different rulebase UIDs are not merged: only the first UID is kept. */
  @Test
  public void testReadNatRulebaseTwoUids() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Package pakij = testPackage(true);
    String natRulebaseInput =
        "[" // show-nat-rulebase
            + "{" // NatRulebase page1
            + "\"uid\":\"0\","
            + "\"objects-dictionary\":["
            + "{" // CpmiAnyObject
            + "\"uid\": \"100\","
            + "\"type\": \"CpmiAnyObject\","
            + "\"name\": \"Any\""
            + "}" // CpmiAnyObject
            + "],"
            + "\"rulebase\":["
            + "{" // nat-rule
            + "\"type\":\"nat-rule\","
            + "\"auto-generated\":true,"
            + "\"uid\":\"0\","
            + "\"comments\":\"foo\","
            + "\"enabled\":true,"
            + "\"install-on\":[\"100\"],"
            + "\"method\":\"hide\","
            + "\"original-destination\":\"1\","
            + "\"original-service\":\"2\","
            + "\"original-source\":\"3\","
            + "\"rule-number\":1,"
            + "\"translated-destination\":\"4\","
            + "\"translated-service\":\"5\","
            + "\"translated-source\":\"6\""
            + "}" // nat-rule
            + "]," // rulebase
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}," // NatRulebase page1
            + "{" // NatRulebase page2
            + "\"uid\":\"1\","
            + "\"objects-dictionary\":["
            + "{" // CpmiAnyObject
            + "\"uid\": \"101\","
            + "\"type\": \"Global\","
            + "\"name\": \"Original\""
            + "}" // CpmiAnyObject
            + "],"
            + "\"rulebase\":["
            + "{" // nat-rule
            + "\"type\":\"nat-rule\","
            + "\"auto-generated\":true,"
            + "\"uid\":\"0\","
            + "\"comments\":\"foo\","
            + "\"enabled\":true,"
            + "\"install-on\":[\"100\"],"
            + "\"method\":\"hide\","
            + "\"original-destination\":\"1\","
            + "\"original-service\":\"2\","
            + "\"original-source\":\"3\","
            + "\"rule-number\":2,"
            + "\"translated-destination\":\"4\","
            + "\"translated-service\":\"5\","
            + "\"translated-source\":\"6\""
            + "}" // nat-rule
            + "]," // rulebase
            + "\"from\": 2,"
            + "\"to\": 2"
            + "}" // NatRulebase page2
            + "]"; // show-nat-rulebase
    // NAT rulebase should be populated only with first page, since only first UID is kept
    assertThat(
        readNatRulebase(
            pakij,
            DOMAIN_NAME,
            ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_NAT_RULEBASE, natRulebaseInput),
            pvcae,
            SERVER_NAME),
        equalTo(
            new NatRulebase(
                ImmutableMap.of(UID_ANY, new CpmiAnyObject(UID_ANY)),
                ImmutableList.of(
                    new NatRule(
                        true,
                        "foo",
                        true,
                        ImmutableList.of(Uid.of("100")),
                        NatMethod.HIDE,
                        Uid.of("1"),
                        Uid.of("2"),
                        Uid.of("3"),
                        1,
                        Uid.of("4"),
                        Uid.of("5"),
                        Uid.of("6"),
                        Uid.of("0"))),
                Uid.of("0"))));
  }

  /** Merging a rule and a section with the same UID keeps the first and files a warning. */
  @Test
  public void testMergeNatRuleOrSectionWarning() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    NatRule rule1 =
        new NatRule(
            true,
            "foo",
            true,
            ImmutableList.of(Uid.of("100")),
            NatMethod.HIDE,
            Uid.of("1"),
            Uid.of("2"),
            Uid.of("3"),
            1,
            Uid.of("4"),
            Uid.of("5"),
            Uid.of("6"),
            Uid.of("0"));
    NatSection section1 = new NatSection("bar", ImmutableList.of(), Uid.of("0"));
    assertThat(mergeNatRuleOrSection(ImmutableList.of(rule1, section1), pvcae), equalTo(rule1));
    assertThat(
        pvcae.getWarnings().get(RELPATH_CHECKPOINT_MANAGEMENT_DIR).getRedFlagWarnings(),
        contains(
            hasText("Cannot merge NatRule pages (for uid 0), ignoring instances after the first")));
  }

  /** Pages of the same AccessLayer are merged by UID; distinct layers are kept separate. */
  @Test
  public void testMergeAccessLayers() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Uid uidAny = Uid.of("0");
    Uid uid1 = Uid.of("1");
    Uid uid2 = Uid.of("2");
    Uid uidTcp = Uid.of("3");
    Uid uidUdp = Uid.of("4");
    Uid uidRule1 = Uid.of("5");
    Uid uidRule2 = Uid.of("6");
    Uid uidRule3 = Uid.of("7");
    Uid uidBogus = Uid.of("999");
    String name1 = "1";
    String name2 = "2";
    CpmiAnyObject any = new CpmiAnyObject(uidAny);
    ServiceTcp tcp = new ServiceTcp("tcp", "1234", uidTcp);
    ServiceUdp udp = new ServiceUdp("udp", "1234", uidUdp);
    AccessRule rule1 =
        AccessRule.testBuilder(uidAny)
            .setName("rule1")
            .setUid(uidRule1)
            .setAction(uidBogus)
            .build();
    AccessRule rule2 =
        AccessRule.testBuilder(uidAny)
            .setName("rule2")
            .setUid(uidRule2)
            .setAction(uidBogus)
            .build();
    AccessRule rule3 =
        AccessRule.testBuilder(uidAny)
            .setName("rule3")
            .setUid(uidRule3)
            .setAction(uidBogus)
            .build();
    AccessLayer al1a =
        new AccessLayer(ImmutableMap.of(uidAny, any), ImmutableList.of(rule1, rule2), uid1, name1);
    AccessLayer al1b =
        new AccessLayer(ImmutableMap.of(uidAny, any), ImmutableList.of(rule3), uid1, name1);
    AccessLayer al1 =
        new AccessLayer(
            ImmutableMap.of(uidAny, any), ImmutableList.of(rule1, rule2, rule3), uid1, name1);
    AccessLayer al2a =
        new AccessLayer(
            ImmutableMap.of(uidAny, any, uidTcp, tcp), ImmutableList.of(rule1), uid2, name2);
    AccessLayer al2b =
        new AccessLayer(
            ImmutableMap.of(uidAny, any, uidUdp, udp), ImmutableList.of(rule2), uid2, name2);
    AccessLayer al2 =
        new AccessLayer(
            ImmutableMap.of(uidAny, any, uidTcp, tcp, uidUdp, udp),
            ImmutableList.of(rule1, rule2),
            uid2,
            name2);
    assertThat(
        mergeAccessLayers(ImmutableList.of(al1a, al1b, al2a, al2b), pvcae),
        equalTo(ImmutableList.of(al1, al2)));
  }

  /** Test merging AccessLayers when their children (AccessSections) are split across pages. */
  @Test
  public void testMergeAccessLayersAccessSectionSplit() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Uid uidAny = Uid.of("0");
    Uid uid2 = Uid.of("2");
    Uid uidTcp = Uid.of("3");
    Uid uidUdp = Uid.of("4");
    Uid uidRule1 = Uid.of("5");
    Uid uidRule2 = Uid.of("6");
    Uid uidRule3 = Uid.of("7");
    Uid uidSection1 = Uid.of("8");
    Uid uidBogus = Uid.of("999");
    String name2 = "2";
    CpmiAnyObject any = new CpmiAnyObject(uidAny);
    ServiceTcp tcp = new ServiceTcp("tcp", "1234", uidTcp);
    ServiceUdp udp = new ServiceUdp("udp", "1234", uidUdp);
    AccessRule rule1 =
        AccessRule.testBuilder(uidAny)
            .setName("rule1")
            .setUid(uidRule1)
            .setAction(uidBogus)
            .build();
    AccessRule rule2 =
        AccessRule.testBuilder(uidAny)
            .setName("rule2")
            .setUid(uidRule2)
            .setAction(uidBogus)
            .build();
    AccessRule rule3 =
        AccessRule.testBuilder(uidAny)
            .setName("rule3")
            .setUid(uidRule3)
            .setAction(uidBogus)
            .build();
    AccessLayer ala =
        new AccessLayer(
            ImmutableMap.of(uidAny, any, uidTcp, tcp),
            ImmutableList.of(new AccessSection("section1", ImmutableList.of(rule1), uidSection1)),
            uid2,
            name2);
    AccessLayer alb =
        new AccessLayer(
            ImmutableMap.of(uidAny, any, uidUdp, udp),
            ImmutableList.of(
                new AccessSection("section1", ImmutableList.of(rule2), uidSection1), rule3),
            uid2,
            name2);
    AccessLayer al =
        new AccessLayer(
            ImmutableMap.of(uidAny, any, uidTcp, tcp, uidUdp, udp),
            ImmutableList.of(
                new AccessSection("section1", ImmutableList.of(rule1, rule2), uidSection1), rule3),
            uid2,
            name2);
    assertThat(mergeAccessLayers(ImmutableList.of(ala, alb), pvcae), equalTo(ImmutableList.of(al)));
  }

  /** Duplicate AccessRule UIDs across pages keep only the first instance and file a warning. */
  @Test
  public void testMergeAccessLayersWarning() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    Uid uidAny = Uid.of("0");
    Uid uid1 = Uid.of("1");
    Uid uidRule1 = Uid.of("5");
    Uid uidBogus = Uid.of("999");
    String name1 = "1";
    CpmiAnyObject any = new CpmiAnyObject(uidAny);
    AccessRule rule1 =
        AccessRule.testBuilder(uidAny)
            .setName("rule1")
            .setUid(uidRule1)
            .setAction(uidBogus)
            .build();
    AccessLayer al1a =
        new AccessLayer(ImmutableMap.of(uidAny, any), ImmutableList.of(rule1), uid1, name1);
    AccessLayer al1b =
        new AccessLayer(ImmutableMap.of(uidAny, any), ImmutableList.of(rule1), uid1, name1);
    assertThat(
        mergeAccessLayers(ImmutableList.of(al1a, al1b), pvcae), equalTo(ImmutableList.of(al1a)));
    assertThat(
        pvcae.getWarnings().get(RELPATH_CHECKPOINT_MANAGEMENT_DIR).getRedFlagWarnings(),
        contains(
            hasText(
                "Cannot merge AccessRule pages (for uid 5), ignoring instances after the first")));
  }

  /** Convert JSON object text into ObjectPage JSON */
  private String wrapJsonObj(String obj) {
    return String.format("[{\"objects\":[%s]}]", obj);
  }

  /** Objects from all per-type show-* files are collected into one list. */
  @Test
  public void testBuildObjectsList() {
    String serviceGroupJson =
        "{\"type\": \"service-group\", \"name\": \"serviceGroup\", \"uid\": \"1\", \"members\":"
            + " [\"2\"]}";
    String icmpJson =
        "{\"type\": \"service-icmp\", \"name\": \"icmp\", \"uid\": \"2\", \"icmp-type\": 1,"
            + " \"icmp-code\": 2}";
    String tcpJson =
        "{\"type\": \"service-tcp\", \"name\": \"tcp\", \"uid\": \"3\", \"port\": \"22\"}";
    String udpJson =
        "{\"type\": \"service-udp\", \"name\": \"udp\", \"uid\": \"4\", \"port\": \"222\"}";
    String groupJson =
        "{\"type\": \"group\", \"name\": \"group\", \"uid\": \"5\", \"members\": [\"6\"]}";
    Map<String, String> fileMap =
        ImmutableMap.<String, String>builder()
            .put(RELPATH_CHECKPOINT_SHOW_GROUPS, wrapJsonObj(groupJson))
            .put(RELPATH_CHECKPOINT_SHOW_SERVICE_GROUPS, wrapJsonObj(serviceGroupJson))
            .put(RELPATH_CHECKPOINT_SHOW_SERVICES_ICMP, wrapJsonObj(icmpJson))
            .put(RELPATH_CHECKPOINT_SHOW_SERVICES_TCP, wrapJsonObj(tcpJson))
            .put(RELPATH_CHECKPOINT_SHOW_SERVICES_UDP, wrapJsonObj(udpJson))
            .build();
    Map<String, Map<String, Map<String, String>>> domainFileMap =
        ImmutableMap.of("server", ImmutableMap.of("domain", fileMap));
    assertThat(
        buildObjectsList(
            domainFileMap, "domain", "server", new ParseVendorConfigurationAnswerElement()),
        containsInAnyOrder(
            new Group("group", ImmutableList.of(Uid.of("6")), Uid.of("5")),
            new ServiceGroup("serviceGroup", ImmutableList.of(Uid.of("2")), Uid.of("1")),
            new ServiceIcmp("icmp", 1, 2, Uid.of("2")),
            new ServiceTcp("tcp", "22", Uid.of("3")),
            new ServiceUdp("udp", "222", Uid.of("4"))));
  }

  /** An empty show-gateways-and-servers file yields no gateways-and-servers. */
  @Test
  public void testReadGatewaysAndServersEmpty() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    String sgasInput = "[]"; // show-gateways-and-servers
    // GatewaysAndServers should be null since there are no pages.
    assertNull(
        readGatewaysAndServers(
            SERVER_NAME,
            DOMAIN_NAME,
            ImmutableMap.of(
                SERVER_NAME,
                ImmutableMap.of(
                    DOMAIN_NAME,
                    ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_GATEWAYS_AND_SERVERS, sgasInput))),
            pvcae));
  }

  /** A missing show-gateways-and-servers file yields no gateways-and-servers. */
  @Test
  public void testReadGatewaysAndServersMissingFile() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    // GatewaysAndServers should be null since the file is missing.
    assertNull(
        readGatewaysAndServers(
            SERVER_NAME,
            DOMAIN_NAME,
            ImmutableMap.of(SERVER_NAME, ImmutableMap.of(DOMAIN_NAME, ImmutableMap.of())),
            pvcae));
  }

  /** A single page is parsed into the corresponding {@link GatewaysAndServers}. */
  @Test
  public void testReadGatewaysAndServersSinglePage() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    String sgasInput =
        "[" // show-gateways-and-servers
            + "{" // show-gateways-and-servers page
            + "\"objects\":["
            + "{" // simple-gateway
            + "\"type\":\"simple-gateway\","
            + "\"uid\":\"102\","
            + "\"name\":\"gw1\","
            + "\"interfaces\": [],"
            + "\"ipv4-address\":\"1.1.1.1\","
            + "\"policy\":{"
            + "\"access-policy-installed\": false,"
            + "\"access-policy-name\": null,"
            + "\"threat-policy-installed\": false,"
            + "\"threat-policy-name\": null"
            + "}" // policy
            + "}" // simple-gateway
            + "]," // objects
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}" // show-gateways-and-servers page
            + "]"; // show-gateways-and-servers
    // GatewaysAndServers should be populated.
    assertThat(
        readGatewaysAndServers(
            SERVER_NAME,
            DOMAIN_NAME,
            ImmutableMap.of(
                SERVER_NAME,
                ImmutableMap.of(
                    DOMAIN_NAME,
                    ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_GATEWAYS_AND_SERVERS, sgasInput))),
            pvcae),
        equalTo(
            new GatewaysAndServers(
                ImmutableMap.of(
                    UID_GW1,
                    new SimpleGateway(
                        Ip.parse("1.1.1.1"),
                        "gw1",
                        ImmutableList.of(),
                        new GatewayOrServerPolicy(null, null),
                        UID_GW1)))));
  }

  /** Gateways from two pages are merged into a single {@link GatewaysAndServers}. */
  @Test
  public void testReadGatewaysAndServersTwoPages() {
    ParseVendorConfigurationAnswerElement pvcae = new ParseVendorConfigurationAnswerElement();
    String sgasInput =
        "[" // show-gateways-and-servers
            + "{" // show-gateways-and-servers page1
            + "\"objects\":["
            + "{" // simple-gateway
            + "\"type\":\"simple-gateway\","
            + "\"uid\":\"102\","
            + "\"name\":\"gw1\","
            + "\"interfaces\": [],"
            + "\"ipv4-address\":\"1.1.1.1\","
            + "\"policy\":{"
            + "\"access-policy-installed\": false,"
            + "\"access-policy-name\": null,"
            + "\"threat-policy-installed\": false,"
            + "\"threat-policy-name\": null"
            + "}" // policy
            + "}" // simple-gateway
            + "]," // objects
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}," // show-gateways-and-servers page1
            + "{" // show-gateways-and-servers page2
            + "\"objects\":["
            + "{" // simple-gateway
            + "\"type\":\"simple-gateway\","
            + "\"uid\":\"103\","
            + "\"name\":\"gw2\","
            + "\"interfaces\": [],"
            + "\"ipv4-address\":\"172.16.31.10\","
            + "\"policy\":{"
            + "\"access-policy-installed\": false,"
            + "\"access-policy-name\": null,"
            + "\"threat-policy-installed\": false,"
            + "\"threat-policy-name\": null"
            + "}" // policy
            + "}" // simple-gateway
            + "]," // objects
            + "\"from\": 1,"
            + "\"to\": 1"
            + "}" // show-gateways-and-servers page2
            + "]"; // show-gateways-and-servers
    // GatewaysAndServers should be populated with merged objects from the two pages
    assertThat(
        readGatewaysAndServers(
            SERVER_NAME,
            DOMAIN_NAME,
            ImmutableMap.of(
                SERVER_NAME,
                ImmutableMap.of(
                    DOMAIN_NAME,
                    ImmutableMap.of(RELPATH_CHECKPOINT_SHOW_GATEWAYS_AND_SERVERS, sgasInput))),
            pvcae),
        equalTo(
            new GatewaysAndServers(
                ImmutableMap.of(
                    UID_GW1,
                    new SimpleGateway(
                        Ip.parse("1.1.1.1"),
                        "gw1",
                        ImmutableList.of(),
                        new GatewayOrServerPolicy(null, null),
                        UID_GW1),
                    UID_GW2,
                    new SimpleGateway(
                        Ip.parse("172.16.31.10"),
                        "gw2",
                        ImmutableList.of(),
                        new GatewayOrServerPolicy(null, null),
                        UID_GW2)))));
  }
}
| 16,722 |
335 | <filename>S/Slap_adjective.json
{
"word": "Slap",
"definitions": [
"Lacking strength, energy, or discipline; ineffectual.",
"(of food) soft or runny."
],
"parts-of-speech": "Adjective"
} | 97 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_sal.hxx"
//------------------------------------------------------------------------
// include files
//------------------------------------------------------------------------
#include <osl_Module_Const.h>
#include "gtest/gtest.h"
using namespace osl;
using namespace rtl;
//------------------------------------------------------------------------
// helper functions and classes
//------------------------------------------------------------------------
/** print Boolean value.
*/
inline void printBool( sal_Bool bOk )
{
    printf("#printBool# " );
    // Same output as the ternary original, written as an explicit branch.
    if ( bOk == sal_True )
        printf("TRUE!\n" );
    else
        printf("FALSE!\n" );
}
/** print a UNI_CODE String.
*/
inline void printUString( const ::rtl::OUString & str )
{
    printf("#printUString_u# " );
    // Narrow to ASCII for printf; conversion merged into the declaration.
    rtl::OString aAscii = ::rtl::OUStringToOString( str, RTL_TEXTENCODING_ASCII_US );
    printf("%s\n", aAscii.getStr( ) );
}
/** get dll file URL.

    Locates the test DLL relative to this test binary: takes the URL of the
    running module, strips two path components, and appends "lib/" plus the
    platform-specific library name.
*/
inline ::rtl::OUString getDllURL( void )
{
#if ( defined WNT ) // lib in Unix and lib in Windows are not same in file name.
    ::rtl::OUString libPath( rtl::OUString::createFromAscii( "Module_DLL.dll" ) );
#else
    ::rtl::OUString libPath( rtl::OUString::createFromAscii( "libModule_DLL.so" ) );
#endif
    ::rtl::OUString dirPath, dllPath;
    // URL of the module containing this function, i.e. the test executable.
    osl::Module::getUrlFromAddress( ( void* ) &getDllURL, dirPath );
    // file:///aoo/main/sal/unx/bin/osl_Module
    dirPath = dirPath.copy( 0, dirPath.lastIndexOf('/'));
    // file:///aoo/main/sal/unx/bin
    dirPath = dirPath.copy( 0, dirPath.lastIndexOf('/') + 1);
    // file:///aoo/main/sal/unx/
    dirPath = dirPath + rtl::OUString::createFromAscii( "lib/" );
    // file:///aoo/main/sal/unx/lib/
    osl::FileBase::getAbsoluteFileURL( dirPath, libPath, dllPath );
    rtl::OString aOString = ::rtl::OUStringToOString (dllPath, RTL_TEXTENCODING_UTF8);
    printf("getDllURL() returning %s\n", aOString.getStr());
    return dllPath;
}
/** print a UNI_CODE file name.
*/
inline void printFileName( const ::rtl::OUString & str )
{
    printf("#printFileName_u# " );
    // Narrow to ASCII for printf; conversion merged into the declaration.
    rtl::OString aAscii = ::rtl::OUStringToOString( str, RTL_TEXTENCODING_ASCII_US );
    printf("%s\n", aAscii.getStr( ) );
}
// A pathname counts as a URL here iff it begins with the "file:///" scheme prefix.
inline sal_Bool isURL( const ::rtl::OUString pathname )
{
    ::rtl::OUString aPreURL( rtl::OUString::createFromAscii( "file:///" ) );
    if ( pathname.indexOf( aPreURL ) == 0 )
        return sal_True;
    return sal_False;
}
/** create a temp test directory using OUString name of full qualified URL or system path.

    Accepts either a file URL or a system path (converted to a URL first).
    An already-existing directory (E_EXIST) is not treated as a failure.
*/
inline void createTestDirectory( const ::rtl::OUString dirname )
{
    ::rtl::OUString aPathURL = dirname.copy( 0 );
    ::osl::FileBase::RC nError;
    if ( !isURL( dirname ) )
        ::osl::FileBase::getFileURLFromSystemPath( dirname, aPathURL ); //convert if not full qualified URL
    nError = ::osl::Directory::create( aPathURL );
    ASSERT_TRUE(( ::osl::FileBase::E_None == nError ) || ( nError == ::osl::FileBase::E_EXIST )) << "In createTestDirectory Function: creation: ";
}
/** delete a temp test directory using OUString name of full qualified URL or system path.

    Accepts either a file URL or a system path (converted to a URL first).
    A directory that is already gone (E_NOENT) is not treated as a failure.
*/
inline void deleteTestDirectory( const ::rtl::OUString dirname )
{
    ::rtl::OUString aPathURL = dirname.copy( 0 );
    ::osl::FileBase::RC nError;
    if ( !isURL( dirname ) )
        ::osl::FileBase::getFileURLFromSystemPath( dirname, aPathURL ); //convert if not full qualified URL
    ::osl::Directory testDir( aPathURL );
    if ( testDir.isOpen( ) == sal_True )
    {
        testDir.close( ); //close if still open.
    }
    nError = ::osl::Directory::remove( aPathURL );
    ASSERT_TRUE(( ::osl::FileBase::E_None == nError ) || ( nError == ::osl::FileBase::E_NOENT )) << "In deleteTestDirectory function: remove ";
}
// Check whether the file denoted by the given *file URL* exists.
// Returns sal_False for non-URL input, or when opening reports E_NOENT;
// any other open result (including success) is taken to mean the file exists.
//
// Bug fix: ::osl::File's constructor expects an absolute file URL, but the
// original converted the URL to a *system path* and constructed the File from
// that, so open() failed even for existing files (the load_002 test below
// notes "ifFileExist returned false but the file exist"). Open the URL directly.
inline sal_Bool ifFileExist( const ::rtl::OUString & str )
{
    if ( !isURL( str ) )
        return sal_False; // only file URLs are supported by this helper
    ::osl::File strFile( str ); // construct from the file URL itself
    ::osl::FileBase::RC nError = strFile.open( OpenFlag_Read );
    if ( ::osl::FileBase::E_NOENT == nError )
        return sal_False;
    else{
        strFile.close( );
        return sal_True;
    }
}
/** delete a temp test file using OUString name.

    Accepts either a file URL or a system path (converted to a URL first).
    The file is made writable before removal; a file that is already gone
    (E_NOENT) is not treated as a failure.
*/
inline void deleteTestFile( const ::rtl::OUString filename )
{
    ::rtl::OUString aPathURL = filename.copy( 0 );
    ::osl::FileBase::RC nError;
    if ( !isURL( filename ) )
        ::osl::FileBase::getFileURLFromSystemPath( filename, aPathURL ); //convert if not full qualified URL
    nError = ::osl::File::setAttributes( aPathURL, Attribute_GrpWrite| Attribute_OwnWrite| Attribute_OthWrite ); // if read-only, make writable first
    ASSERT_TRUE(( ::osl::FileBase::E_None == nError ) || ( ::osl::FileBase::E_NOENT == nError )) << "In deleteTestFile Function: set writtenable ";
    nError = ::osl::File::remove( aPathURL );
    ASSERT_TRUE(( ::osl::FileBase::E_None == nError ) || ( nError == ::osl::FileBase::E_NOENT )) << "In deleteTestFile Function: remove ";
}
//------------------------------------------------------------------------
// test code start here
//------------------------------------------------------------------------
namespace osl_Module
{
    /** class and member function that is available for module test :
        myFunc's address is used by getUrlFromAddress tests; its output is irrelevant.
    */
    class testClass
    {
    public:
        static void myFunc()
        {
            printf("#Sun Microsystem\n");
        };
    };
    /** testing the methods:
        Module();
        Module( const ::rtl::OUString& strModuleName, sal_Int32 nRtldMode = SAL_LOADMODULE_DEFAULT);
    */
    class ctors : public ::testing::Test
    {
    public:
        // Result flags shared by the TEST_F bodies below.
        sal_Bool bRes, bRes1;
    }; // class ctors
TEST_F(ctors, ctors_none)
{
::osl::Module aMod;
bRes = aMod.is();
ASSERT_TRUE( sal_False == bRes ) << "#test comment#: test constructor without parameter.";
}
TEST_F(ctors, ctors_name_mode)
{
::osl::Module aMod( getDllURL( ) );
bRes = aMod.is( );
aMod.unload( );
ASSERT_TRUE( sal_True == bRes ) << "#test comment#: test constructor with load action.";
}
    /** testing the methods:
        static sal_Bool getUrlFromAddress(void * addr, ::rtl::OUString & libraryUrl)
    */
    class getUrlFromAddress : public ::testing::Test
    {
    public:
        // Result flags shared by the TEST_F bodies below.
        sal_Bool bRes, bRes1;
    }; // class getUrlFromAddress
/* tester comments: another case is getFunctionSymbol_001*/
TEST_F(getUrlFromAddress, getUrlFromAddress_001 )
{
OUString aFileURL;
bRes = osl::Module::getUrlFromAddress( ( void* ) &::osl_Module::testClass::myFunc, aFileURL ) ;
if ( !( bRes ) )
{
ASSERT_TRUE(sal_False) << "Cannot locate current module.";
}
ASSERT_TRUE(sal_True == bRes && 0 < aFileURL.lastIndexOf('/')) << "#test comment#: test get Module URL from address.";
}
    TEST_F(getUrlFromAddress, getUrlFromAddress_002 )
    {
        // Load an external library, resolve one of its symbols, and check that
        // getUrlFromAddress maps the symbol's address back to that library's URL.
        ::osl::Module aMod( getDllURL( ) );
        FuncPtr pFunc = ( FuncPtr ) aMod.getSymbol( rtl::OUString::createFromAscii( "firstfunc" ) );
        OUString aFileURL;
        bRes = osl::Module::getUrlFromAddress( ( void* )pFunc, aFileURL );
        if ( !( bRes ) )
        {
            ASSERT_TRUE(sal_False) << "Cannot locate current module.";
        }
        aMod.unload( );
        ASSERT_TRUE( sal_True == bRes && 0 < aFileURL.lastIndexOf('/') && aFileURL.equalsIgnoreAsciiCase( getDllURL( ) ) )
            << "#test comment#: load an external library, get its function address and get its URL.";
    }
    /** testing the method:
        sal_Bool SAL_CALL load( const ::rtl::OUString& strModuleName,
                                sal_Int32 nRtldMode = SAL_LOADMODULE_DEFAULT)
    */
    class load : public ::testing::Test
    {
    public:
        // Result flags shared by the TEST_F bodies below.
        sal_Bool bRes, bRes1;
    }; // class load
TEST_F(load, load_001 )
{
::osl::Module aMod( getDllURL( ) );
::osl::Module aMod1;
aMod1.load( getDllURL( ) );
bRes = oslModule(aMod) == oslModule(aMod1);
aMod.unload( );
aMod1.unload( );
ASSERT_TRUE(sal_True == bRes) << "#test comment#: load function should do the same thing as constructor with library name.";
}
    // load lib which is under a CJK directory
    // Copies the test DLL into a freshly-created per-process directory whose name
    // contains CJK characters (aKname, defined elsewhere in this file), then checks
    // it can be loaded both via constructor and via load(). Unix-only (#ifdef UNX).
    TEST_F(load, load_002 )
    {
#ifdef UNX
        //Can not get a CJK directory already exist, so here create one. Perhaps reason is encoding problem.
        ::rtl::OUString aPidDirURL = rtl::OUString::createFromAscii( "file:///tmp/" ) + ::rtl::OUString::valueOf( ( long )getpid( ) );
        ::rtl::OUString aMyDirURL = aPidDirURL + aKname;
        createTestDirectory( aPidDirURL );
        createTestDirectory( aMyDirURL );
        ::rtl::OUString aDLLURL = aMyDirURL + rtl::OUString::createFromAscii( "/libModule_DLL.so" );
        //check if the lib exist.
        //FIXME: if assert condition is false, the case will return, so the directory will not be clean-up
        ASSERT_TRUE(ifFileExist( getDllURL( ) ) == sal_True) << "#Source file is not exist. please manually clean-up directory and file under /tmp";
        ::osl::FileBase::RC nError = ::osl::File::copy( getDllURL( ), aDLLURL );
        ASSERT_TRUE(nError == ::osl::FileBase::E_None) << "#copy failed. please manually clean-up directory and file under /tmp";
        //ifFileExist returned false but the file exist
        ASSERT_TRUE( ifFileExist( aDLLURL ) == sal_True )
            << "#This file is not exist, copy failed. please manually clean-up directory and file under /tmp";
        //test if can create a normal file
        ::rtl::OUString aFileURL = aMyDirURL + rtl::OUString::createFromAscii( "/test_file" );
        ::osl::File testFile( aFileURL );
        nError = testFile.open( OpenFlag_Create );
        ASSERT_TRUE(nError == ::osl::FileBase::E_None) << "#create failed. please manually clean-up directory and file under /tmp";
        ASSERT_TRUE( ifFileExist( aFileURL ) == sal_True )
            << "#This file is not exist, create failed. please manually clean-up directory and file under /tmp";
        //load the copied dll
        ::osl::Module aMod( aDLLURL );
        ::osl::Module aMod1;
        sal_Bool bOK = aMod1.load( aDLLURL );
        bRes = oslModule(aMod) == oslModule(aMod1);
        aMod.unload( );
        aMod1.unload( );
        deleteTestFile( aFileURL );
        deleteTestFile( aDLLURL );
        deleteTestDirectory( aMyDirURL );
        deleteTestDirectory( aPidDirURL );
        ASSERT_TRUE(sal_True == bRes && bOK == sal_True) << "#test comment#: load lib which is under a CJK directory.";
#endif
    }
/** testing the method:
void SAL_CALL unload()
*/
// Fixture for osl::Module::unload() tests; bRes/bRes1 collect check results.
class unload : public ::testing::Test
{
public:
sal_Bool bRes, bRes1;
}; // class unload
// After an explicit unload() the wrapped handle must be NULL, i.e. the
// object behaves exactly as if it had been destroyed.
TEST_F(unload, unload_001)
{
    ::osl::Module module( getDllURL( ) );
    module.unload( );
    bRes = ( oslModule(module) == NULL );
    ASSERT_TRUE(sal_True == bRes) << "#test comment#: unload function should do the same thing as destructor.";
}
/** testing the methods:
sal_Bool SAL_CALL is() const
*/
// Fixture for osl::Module::is() tests; bRes/bRes1 collect check results.
class is : public ::testing::Test
{
public:
sal_Bool bRes, bRes1;
}; // class is
// is() reports whether a library is currently loaded: false before load(),
// true afterwards.
TEST_F(is, is_001)
{
    ::osl::Module module;
    bRes = module.is( );        // nothing loaded yet
    module.load( getDllURL( ) );
    bRes1 = module.is( );       // now loaded
    module.unload( );
    ASSERT_TRUE(sal_False == bRes && sal_True == bRes1) << "#test comment#: test if a module is a loaded module.";
}
/** testing the methods:
void* SAL_CALL getSymbol( const ::rtl::OUString& strSymbolName)
*/
// Fixture for osl::Module::getSymbol() tests; bRes collects the check result.
class getSymbol : public ::testing::Test
{
public:
sal_Bool bRes;
}; // class getSymbol
// Load the test DLL, resolve the "firstfunc" symbol, and invoke it.
// firstfunc is expected to return sal_True (it is only called when the
// symbol lookup succeeded).
TEST_F(getSymbol, getSymbol_001)
{
    bRes = sal_False;
    ::osl::Module module( getDllURL( ) );
    FuncPtr entry = ( FuncPtr ) module.getSymbol( rtl::OUString::createFromAscii( "firstfunc" ) );
    if ( entry != NULL )
        bRes = entry( bRes );
    module.unload();
    ASSERT_TRUE(sal_True == bRes) << "#test comment#: load a dll and call one function in it.";
}
/** testing the methods:
operator oslModule() const
*/
// Fixture for operator oslModule() tests; bRes/bRes1 collect check results.
class optr_oslModule : public ::testing::Test
{
public:
sal_Bool bRes, bRes1;
}; // class optr_oslModule
// The conversion operator must expose a NULL handle while no library is
// loaded and a non-NULL handle once load() has succeeded.
TEST_F(optr_oslModule, optr_oslModule_001 )
{
    ::osl::Module module;
    bRes = ( (oslModule)module == NULL );    // unloaded -> NULL
    module.load( getDllURL( ) );
    bRes1 = ( (oslModule)module != NULL );   // loaded -> valid handle
    module.unload( );
    ASSERT_TRUE(sal_True == bRes && sal_True == bRes1)
        << "#test comment#: the m_Module of a Module instance will be NULL when is not loaded, it will not be NULL after loaded.";
}
// The handle obtained from operator oslModule() must be usable with the
// plain C API: resolve "firstfunc" via osl_getSymbol and call it.
TEST_F(optr_oslModule, optr_oslModule_002 )
{
    bRes = sal_False;
    ::osl::Module module( getDllURL( ) );
    ::rtl::OUString funcName(::rtl::OUString::createFromAscii( "firstfunc" ) );
    FuncPtr entry = ( FuncPtr ) osl_getSymbol( (oslModule)module, funcName.pData );
    if ( entry != NULL )
        bRes = entry( bRes );
    module.unload();
    ASSERT_TRUE(sal_True == bRes) << "#test comment#: use m_Module to call osl_getSymbol() function.";
}
/** testing the methods:
oslGenericFunction SAL_CALL getFunctionSymbol( const ::rtl::OUString& ustrFunctionSymbolName )
*/
// Fixture for osl::Module::getFunctionSymbol() tests; bRes/bRes1 collect
// check results.
class getFunctionSymbol : public ::testing::Test
{
public:
sal_Bool bRes, bRes1;
}; // class getFunctionSymbol
// Resolve "firstfunc" as a generic function pointer, then map the address
// back to the URL of the library that contains it; the URL must match the
// DLL we loaded.
TEST_F(getFunctionSymbol, getFunctionSymbol_001)
{
    ::osl::Module module( getDllURL( ) );
    oslGenericFunction entry = module.getFunctionSymbol( rtl::OUString::createFromAscii( "firstfunc" ) );
    ::rtl::OUString aLibraryURL;
    bRes = ::osl::Module::getUrlFromAddress( entry, aLibraryURL);
    module.unload();
    printFileName( aLibraryURL );
    ASSERT_TRUE(sal_True == bRes && aLibraryURL.equalsIgnoreAsciiCase( getDllURL() )) << "#test comment#: load a dll and get its function addr and get its URL.";
}
} // namespace osl_Module
// GoogleTest entry point: initialize the framework and run every
// registered test in this binary.
int main(int argc, char **argv)
{
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
| 5,867 |
890 | // Copyright (C) <2019> Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
#ifndef MsdkVideoCompositor_h
#define MsdkVideoCompositor_h
#ifdef ENABLE_MSDK
#include <vector>
#include <boost/scoped_ptr.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/thread/shared_mutex.hpp>
#include <logger.h>
#include <JobTimer.h>
#include <webrtc/system_wrappers/include/clock.h>
#include "VideoFrameMixer.h"
#include "VideoLayout.h"
#include "MediaFramePipeline.h"
#include "MsdkFrame.h"
#include "FrameConverter.h"
namespace mcu {
using namespace owt_base;
class MsdkVideoCompositor;
// Maps input indices to avatar image URLs and caches the decoded frames,
// so each image is loaded at most once (see m_frames keyed by URL).
class MsdkAvatarManager {
DECLARE_LOGGER();
public:
// size: number of input slots managed; allocator: MSDK frame allocator
// used when creating avatar frames.
MsdkAvatarManager(uint8_t size, boost::shared_ptr<mfxFrameAllocator> allocator);
~MsdkAvatarManager();
// Associate/disassociate an avatar image URL with input slot `index`.
bool setAvatar(uint8_t index, const std::string &url);
bool unsetAvatar(uint8_t index);
// Returns the (cached) frame for the avatar configured on `index`.
boost::shared_ptr<owt_base::MsdkFrame> getAvatarFrame(uint8_t index);
protected:
bool getImageSize(const std::string &url, uint32_t *pWidth, uint32_t *pHeight);
boost::shared_ptr<owt_base::MsdkFrame> loadImage(const std::string &url);
private:
uint8_t m_size;
boost::shared_ptr<mfxFrameAllocator> m_allocator;
// per-input avatar URL
std::map<uint8_t, std::string> m_inputs;
// per-URL decoded frame cache
std::map<std::string, boost::shared_ptr<owt_base::MsdkFrame>> m_frames;
// guards m_inputs/m_frames; presumably set/get run on different threads -- TODO confirm
boost::shared_mutex m_mutex;
};
// One input lane of the compositor: accepts raw frames via pushInput(),
// converts them to MsdkFrame as needed, and hands the latest frame to the
// mixer through popInput().
class MsdkInput {
DECLARE_LOGGER();
public:
MsdkInput(MsdkVideoCompositor *owner, boost::shared_ptr<mfxFrameAllocator> allocator);
~MsdkInput();
// Enable/disable this input; isActivate() reports the current state.
void activate();
void deActivate();
bool isActivate();
// Accept an incoming frame (may be a command frame, see processCmd).
void pushInput(const owt_base::Frame& frame);
// Fetch the most recent converted frame for mixing.
boost::shared_ptr<MsdkFrame> popInput();
protected:
// Lazily (re)create the software frame pool for the given dimensions.
bool initSwFramePool(int width, int height);
boost::shared_ptr<owt_base::MsdkFrame> getMsdkFrame(const uint32_t width, const uint32_t height);
bool processCmd(const owt_base::Frame& frame);
// Convert an arbitrary owt_base::Frame into an MsdkFrame via m_converter.
boost::shared_ptr<MsdkFrame> convert(const owt_base::Frame& frame);
private:
MsdkVideoCompositor *m_owner;
boost::shared_ptr<mfxFrameAllocator> m_allocator;
// latest frame received from pushInput
boost::shared_ptr<owt_base::MsdkFrame> m_msdkFrame;
boost::scoped_ptr<FrameConverter> m_converter;
bool m_active;
// frame currently held by the mixer, kept alive until replaced
boost::shared_ptr<owt_base::MsdkFrame> m_busyFrame;
// todo, dont flush
boost::scoped_ptr<MsdkFramePool> m_swFramePool;
// dimensions the pool was created for; pool is rebuilt on mismatch -- TODO confirm
int m_swFramePoolWidth;
int m_swFramePoolHeight;
boost::shared_mutex m_mutex;
};
// Wraps an Intel Media SDK VPP session configured for composition
// (mfxExtVPPComposite): mixes several input frames onto one root frame
// according to the current layout, size and background color.
class MsdkVpp
{
DECLARE_LOGGER();
// number of output frames kept in m_mixedFramePool
const uint8_t NumOfMixedFrames = 3;
public:
MsdkVpp(owt_base::VideoSize &size, owt_base::YUVColor &bgColor, const bool crop);
~MsdkVpp();
bool init(void);
// Apply a new root size / background color / layout; returns success.
bool update(owt_base::VideoSize &size, owt_base::YUVColor &bgColor, LayoutSolution &layout);
// Composite the given input frames into one mixed output frame.
boost::shared_ptr<owt_base::MsdkFrame> mix(
std::vector<boost::shared_ptr<owt_base::MsdkFrame>> &inputFrames);
protected:
void defaultParam(void);
void updateParam(void);
bool isValidParam(void);
bool allocateFrames();
void createVpp(void);
bool resetVpp(void);
// Adjust per-input composite rectangles when crop mode is off -- TODO confirm
void applyAspectRatio(std::vector<boost::shared_ptr<owt_base::MsdkFrame>> &inputFrames);
// Translate a layout Region into an mfxVPPCompInputStream rectangle.
void convertToCompInputStream(mfxVPPCompInputStream *inputStream, const owt_base::VideoSize& rootSize, const Region& region);
private:
MFXVideoSession *m_session;
boost::shared_ptr<mfxFrameAllocator> m_allocator;
MFXVideoVPP *m_vpp;
bool m_vppReady;
// VPP video/composite parameters
boost::scoped_ptr<mfxVideoParam> m_videoParam;
boost::scoped_ptr<mfxExtVPPComposite> m_extVppComp;
std::vector<mfxVPPCompInputStream> m_compInputStreams;
// per-input composite rectangles derived from the layout
std::vector<mfxVPPCompInputStream> m_msdkLayout;
// current configuration
owt_base::VideoSize m_size;
owt_base::YUVColor m_bgColor;
LayoutSolution m_layout;
bool m_crop;
// output frame pool plus default frames used for inactive inputs/root
boost::scoped_ptr<owt_base::MsdkFramePool> m_mixedFramePool;
boost::shared_ptr<owt_base::MsdkFrame> m_defaultInputFrame;
boost::shared_ptr<owt_base::MsdkFrame> m_defaultRootFrame;
};
// Timer-driven producer of mixed frames: on each onTimeout() tick it runs
// the VPP composition (via MsdkVpp) and delivers frames to the registered
// outputs, each with its own resolution/fps.
class MsdkFrameGenerator : public JobTimerListener
{
DECLARE_LOGGER();
// RTP video clock runs at 90 kHz, i.e. 90 ticks per millisecond
const uint32_t kMsToRtpTimestamp = 90;
// one registered output sink and its requested resolution/fps
struct Output_t {
uint32_t width;
uint32_t height;
uint32_t fps;
owt_base::FrameDestination *dest;
};
public:
MsdkFrameGenerator(
MsdkVideoCompositor *owner,
owt_base::VideoSize &size,
owt_base::YUVColor &bgColor,
const bool crop,
const uint32_t maxFps,
const uint32_t minFps);
~MsdkFrameGenerator();
// Record a new layout; applied lazily by reconfigureIfNeeded().
void updateLayoutSolution(LayoutSolution& solution);
// True if this generator can serve the requested resolution/fps.
bool isSupported(uint32_t width, uint32_t height, uint32_t fps);
bool addOutput(const uint32_t width, const uint32_t height, const uint32_t fps, owt_base::FrameDestination *dst);
bool removeOutput(owt_base::FrameDestination *dst);
// JobTimerListener callback: generate and dispatch one frame per tick.
void onTimeout() override;
protected:
boost::shared_ptr<owt_base::MsdkFrame> generateFrame();
boost::shared_ptr<owt_base::MsdkFrame> layout();
void reconfigureIfNeeded();
public:
const webrtc::Clock *m_clock;
MsdkVideoCompositor *m_owner;
uint32_t m_maxSupportedFps;
uint32_t m_minSupportedFps;
// tick counter used to subsample the max fps down to each output's fps -- TODO confirm
uint32_t m_counter;
uint32_t m_counterMax;
// outputs bucketed per fps tier -- TODO confirm indexing scheme
std::vector<std::list<Output_t>> m_outputs;
boost::shared_mutex m_outputMutex;
boost::scoped_ptr<MsdkVpp> m_msdkVpp;
// current configuration
owt_base::VideoSize m_size;
owt_base::YUVColor m_bgColor;
bool m_crop;
// pending reconfiguration (new layout picked up by reconfigureIfNeeded)
LayoutSolution m_layout;
LayoutSolution m_newLayout;
bool m_configureChanged;
boost::shared_mutex m_configMutex;
boost::scoped_ptr<JobTimer> m_jobTimer;
};
/**
* composite a sequence of frames into one frame based on current layout config,
* we may enable the video rotation based on VAD history.
*/
// Top-level compositor: owns one MsdkInput per input slot, an avatar
// manager for inactive inputs, and one MsdkFrameGenerator per fps tier.
// drawText/clearText are intentionally no-ops in the MSDK implementation.
class MsdkVideoCompositor : public VideoFrameCompositor {
DECLARE_LOGGER();
friend class MsdkInput;
friend class MsdkFrameGenerator;
public:
MsdkVideoCompositor(uint32_t maxInput, owt_base::VideoSize rootSize, owt_base::YUVColor bgColor, bool crop);
~MsdkVideoCompositor();
// Enable/disable an input slot.
bool activateInput(int input);
void deActivateInput(int input);
// Configure/remove the avatar image shown while `input` is inactive.
bool setAvatar(int input, const std::string& avatar);
bool unsetAvatar(int input);
// Feed a frame into input slot `input`.
void pushInput(int input, const owt_base::Frame& frame);
void updateRootSize(owt_base::VideoSize& rootSize);
void updateBackgroundColor(owt_base::YUVColor& bgColor);
void updateLayoutSolution(LayoutSolution& solution);
bool addOutput(const uint32_t width, const uint32_t height, const uint32_t framerateFPS, owt_base::FrameDestination *dst) override;
bool removeOutput(owt_base::FrameDestination *dst) override;
// Text overlay is not supported by this implementation.
void drawText(const std::string& textSpec) {}
void clearText() {}
protected:
void createAllocator();
// Returns the current frame (or avatar frame) for input `index`.
boost::shared_ptr<owt_base::MsdkFrame> getInputFrame(int index);
void flush(void);
private:
uint32_t m_maxInput;
MFXVideoSession *m_session;
boost::shared_ptr<mfxFrameAllocator> m_allocator;
std::vector<boost::shared_ptr<MsdkFrameGenerator>> m_generators;
std::vector<boost::shared_ptr<MsdkInput>> m_inputs;
boost::scoped_ptr<MsdkAvatarManager> m_avatarManager;
};
}
#endif /* ENABLE_MSDK */
#endif /* MsdkVideoCompositor_h*/
| 3,040 |
32,544 | <filename>core-java-modules/core-java-string-operations-3/src/test/java/com/baeldung/accentsanddiacriticsremoval/StringNormalizerUnitTest.java
package com.baeldung.accentsanddiacriticsremoval;
import static org.junit.Assert.assertFalse;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.text.Normalizer;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@code StringNormalizer}: removing accents/diacritics via
 * java.text.Normalizer (NFKD decomposition) and via Apache Commons Lang.
 * Characters such as ł, đ, ħ, œ have no canonical decomposition, so the
 * Normalizer-based approach leaves them unchanged while the Commons variant
 * modifies some of them.
 */
class StringNormalizerUnitTest {
    // NOTE(review): uses JUnit 4's org.junit.Assert.assertFalse inside a
    // Jupiter test class; works, but mixing the two APIs is inconsistent.
    @Test
    public void givenNotNormalizedString_whenIsNormalized_thenReturnFalse() {
        assertFalse(Normalizer.isNormalized("āăąēîïĩíĝġńñšŝśûûŷ", Normalizer.Form.NFKD));
    }
    @Test
    void givenStringWithDecomposableUnicodeCharacters_whenRemoveAccents_thenReturnASCIIString() {
        assertEquals("aaaeiiiiggnnsssuuy", StringNormalizer.removeAccents("āăąēîïĩíĝġńñšŝśûůŷ"));
    }
    @Test
    void givenStringWithDecomposableUnicodeCharacters_whenRemoveAccentsWithApacheCommons_thenReturnASCIIString() {
        assertEquals("aaaeiiiiggnnsssuuy", StringNormalizer.removeAccentsWithApacheCommons("āăąēîïĩíĝġńñšŝśûůŷ"));
    }
    // ł, đ, ħ, œ are non-decomposable: Normalizer leaves them untouched ...
    @Test
    void givenStringWithNondecomposableUnicodeCharacters_whenRemoveAccents_thenReturnOriginalString() {
        assertEquals("łđħœ", StringNormalizer.removeAccents("łđħœ"));
    }
    // ... whereas Apache Commons converts ł to l but keeps đ, ħ, œ.
    @Test
    void givenStringWithNondecomposableUnicodeCharacters_whenRemoveAccentsWithApacheCommons_thenReturnModifiedString() {
        assertEquals("lđħœ", StringNormalizer.removeAccentsWithApacheCommons("łđħœ"));
    }
    // Decomposable characters normalize to base letter + combining mark(s).
    @Test
    void givenStringWithDecomposableUnicodeCharacters_whenUnicodeValueOfNormalizedString_thenReturnUnicodeValue() {
        assertEquals("\\u0066 \\u0069", StringNormalizer.unicodeValueOfNormalizedString("fi"));
        assertEquals("\\u0061 \\u0304", StringNormalizer.unicodeValueOfNormalizedString("ā"));
        assertEquals("\\u0069 \\u0308", StringNormalizer.unicodeValueOfNormalizedString("ï"));
        assertEquals("\\u006e \\u0301", StringNormalizer.unicodeValueOfNormalizedString("ń"));
    }
    // Non-decomposable characters keep their single original code point.
    @Test
    void givenStringWithNonDecomposableUnicodeCharacters_whenUnicodeValueOfNormalizedString_thenReturnOriginalValue() {
        assertEquals("\\u0142", StringNormalizer.unicodeValueOfNormalizedString("ł"));
        assertEquals("\\u0127", StringNormalizer.unicodeValueOfNormalizedString("ħ"));
        assertEquals("\\u0111", StringNormalizer.unicodeValueOfNormalizedString("đ"));
    }
}
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.costmanagement.models;
import com.azure.core.util.Context;
import com.azure.resourcemanager.costmanagement.fluent.models.ViewInner;
import java.time.OffsetDateTime;
import java.util.List;
/** An immutable client-side representation of View. */
public interface View {
/**
* Gets the id property: Fully qualified resource Id for the resource.
*
* @return the id value.
*/
String id();
/**
* Gets the name property: The name of the resource.
*
* @return the name value.
*/
String name();
/**
* Gets the type property: The type of the resource.
*
* @return the type value.
*/
String type();
/**
* Gets the etag property: eTag of the resource. To handle concurrent update scenario, this field will be used to
* determine whether the user is updating the latest version or not.
*
* @return the etag value.
*/
String etag();
/**
* Gets the displayName property: User input name of the view. Required.
*
* @return the displayName value.
*/
String displayName();
/**
* Gets the scope property: Cost Management scope to save the view on. This includes
* 'subscriptions/{subscriptionId}' for subscription scope,
* 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for Department scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}' for
* EnrollmentAccount scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}' for
* BillingProfile scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}' for
* InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}' for Management Group
* scope, '/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
* ExternalBillingAccount scope, and
* '/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for ExternalSubscription
* scope.
*
* @return the scope value.
*/
String scope();
/**
* Gets the createdOn property: Date the user created this view.
*
* @return the createdOn value.
*/
OffsetDateTime createdOn();
/**
* Gets the modifiedOn property: Date when the user last modified this view.
*
* @return the modifiedOn value.
*/
OffsetDateTime modifiedOn();
/**
* Gets the dateRange property: Selected date range for viewing cost in.
*
* @return the dateRange value.
*/
String dateRange();
/**
* Gets the currency property: Selected currency.
*
* @return the currency value.
*/
String currency();
/**
* Gets the chart property: Chart type of the main view in Cost Analysis. Required.
*
* @return the chart value.
*/
ChartType chart();
/**
* Gets the accumulated property: Show costs accumulated over time.
*
* @return the accumulated value.
*/
AccumulatedType accumulated();
/**
* Gets the metric property: Metric to use when displaying costs.
*
* @return the metric value.
*/
MetricType metric();
/**
* Gets the kpis property: List of KPIs to show in Cost Analysis UI.
*
* @return the kpis value.
*/
List<KpiProperties> kpis();
/**
* Gets the pivots property: Configuration of 3 sub-views in the Cost Analysis UI.
*
* @return the pivots value.
*/
List<PivotProperties> pivots();
/**
* Gets the typePropertiesType property: The type of the report. Usage represents actual usage, forecast represents
* forecasted data and UsageAndForecast represents both usage and forecasted data. Actual usage and forecasted data
* can be differentiated based on dates.
*
* @return the typePropertiesType value.
*/
ReportType typePropertiesType();
/**
* Gets the timeframe property: The time frame for pulling data for the report. If custom, then a specific time
* period must be provided.
*
* @return the timeframe value.
*/
ReportTimeframeType timeframe();
/**
* Gets the timePeriod property: Has time period for pulling data for the report.
*
* @return the timePeriod value.
*/
ReportConfigTimePeriod timePeriod();
/**
* Gets the dataSet property: Has definition for data in this report config.
*
* @return the dataSet value.
*/
ReportConfigDataset dataSet();
/**
* Gets the includeMonetaryCommitment property: Include monetary commitment.
*
* @return the includeMonetaryCommitment value.
*/
Boolean includeMonetaryCommitment();
/**
* Gets the inner com.azure.resourcemanager.costmanagement.fluent.models.ViewInner object.
*
* @return the inner object.
*/
ViewInner innerModel();
/** The entirety of the View definition. */
interface Definition extends DefinitionStages.Blank, DefinitionStages.WithScopeStage, DefinitionStages.WithCreate {
}
/** The View definition stages. */
interface DefinitionStages {
/** The first stage of the View definition. */
interface Blank extends WithScopeStage {
}
/** The stage of the View definition allowing to specify parent resource. */
interface WithScopeStage {
/**
* Specifies scope.
*
* @param scope The scope associated with view operations. This includes 'subscriptions/{subscriptionId}'
* for subscription scope, 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for
* resourceGroup scope, 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing
* Account scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
* Department scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
* for EnrollmentAccount scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
* for BillingProfile scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
* for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}' for
* Management Group scope,
* 'providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
* External Billing Account scope and
* 'providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for External
* Subscription scope.
* @return the next definition stage.
*/
WithCreate withExistingScope(String scope);
}
/**
* The stage of the View definition which contains all the minimum required properties for the resource to be
* created, but also allows for any other optional properties to be specified.
*/
interface WithCreate
extends DefinitionStages.WithEtag,
DefinitionStages.WithDisplayName,
DefinitionStages.WithScope,
DefinitionStages.WithChart,
DefinitionStages.WithAccumulated,
DefinitionStages.WithMetric,
DefinitionStages.WithKpis,
DefinitionStages.WithPivots,
DefinitionStages.WithTypePropertiesType,
DefinitionStages.WithTimeframe,
DefinitionStages.WithTimePeriod,
DefinitionStages.WithDataSet {
/**
* Executes the create request.
*
* @return the created resource.
*/
View create();
/**
* Executes the create request.
*
* @param context The context to associate with this operation.
* @return the created resource.
*/
View create(Context context);
}
/** The stage of the View definition allowing to specify etag. */
interface WithEtag {
/**
* Specifies the etag property: eTag of the resource. To handle concurrent update scenario, this field will
* be used to determine whether the user is updating the latest version or not..
*
* @param etag eTag of the resource. To handle concurrent update scenario, this field will be used to
* determine whether the user is updating the latest version or not.
* @return the next definition stage.
*/
WithCreate withEtag(String etag);
}
/** The stage of the View definition allowing to specify displayName. */
interface WithDisplayName {
/**
* Specifies the displayName property: User input name of the view. Required..
*
* @param displayName User input name of the view. Required.
* @return the next definition stage.
*/
WithCreate withDisplayName(String displayName);
}
/** The stage of the View definition allowing to specify scope. */
interface WithScope {
/**
* Specifies the scope property: Cost Management scope to save the view on. This includes
* 'subscriptions/{subscriptionId}' for subscription scope,
* 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
* Department scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
* for EnrollmentAccount scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}' for
* BillingProfile scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}' for
* InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}' for
* Management Group scope,
* '/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
* ExternalBillingAccount scope, and
* '/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
* ExternalSubscription scope..
*
* @param scope Cost Management scope to save the view on. This includes 'subscriptions/{subscriptionId}'
* for subscription scope, 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for
* resourceGroup scope, 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing
* Account scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
* Department scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
* for EnrollmentAccount scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
* for BillingProfile scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
* for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}' for
* Management Group scope,
* '/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
* ExternalBillingAccount scope, and
* '/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
* ExternalSubscription scope.
* @return the next definition stage.
*/
WithCreate withScope(String scope);
}
/** The stage of the View definition allowing to specify chart. */
interface WithChart {
/**
* Specifies the chart property: Chart type of the main view in Cost Analysis. Required..
*
* @param chart Chart type of the main view in Cost Analysis. Required.
* @return the next definition stage.
*/
WithCreate withChart(ChartType chart);
}
/** The stage of the View definition allowing to specify accumulated. */
interface WithAccumulated {
/**
* Specifies the accumulated property: Show costs accumulated over time..
*
* @param accumulated Show costs accumulated over time.
* @return the next definition stage.
*/
WithCreate withAccumulated(AccumulatedType accumulated);
}
/** The stage of the View definition allowing to specify metric. */
interface WithMetric {
/**
* Specifies the metric property: Metric to use when displaying costs..
*
* @param metric Metric to use when displaying costs.
* @return the next definition stage.
*/
WithCreate withMetric(MetricType metric);
}
/** The stage of the View definition allowing to specify kpis. */
interface WithKpis {
/**
* Specifies the kpis property: List of KPIs to show in Cost Analysis UI..
*
* @param kpis List of KPIs to show in Cost Analysis UI.
* @return the next definition stage.
*/
WithCreate withKpis(List<KpiProperties> kpis);
}
/** The stage of the View definition allowing to specify pivots. */
interface WithPivots {
/**
* Specifies the pivots property: Configuration of 3 sub-views in the Cost Analysis UI..
*
* @param pivots Configuration of 3 sub-views in the Cost Analysis UI.
* @return the next definition stage.
*/
WithCreate withPivots(List<PivotProperties> pivots);
}
/** The stage of the View definition allowing to specify typePropertiesType. */
interface WithTypePropertiesType {
/**
* Specifies the typePropertiesType property: The type of the report. Usage represents actual usage,
* forecast represents forecasted data and UsageAndForecast represents both usage and forecasted data.
* Actual usage and forecasted data can be differentiated based on dates..
*
* @param typePropertiesType The type of the report. Usage represents actual usage, forecast represents
* forecasted data and UsageAndForecast represents both usage and forecasted data. Actual usage and
* forecasted data can be differentiated based on dates.
* @return the next definition stage.
*/
WithCreate withTypePropertiesType(ReportType typePropertiesType);
}
/** The stage of the View definition allowing to specify timeframe. */
interface WithTimeframe {
/**
* Specifies the timeframe property: The time frame for pulling data for the report. If custom, then a
* specific time period must be provided..
*
* @param timeframe The time frame for pulling data for the report. If custom, then a specific time period
* must be provided.
* @return the next definition stage.
*/
WithCreate withTimeframe(ReportTimeframeType timeframe);
}
/** The stage of the View definition allowing to specify timePeriod. */
interface WithTimePeriod {
/**
* Specifies the timePeriod property: Has time period for pulling data for the report..
*
* @param timePeriod Has time period for pulling data for the report.
* @return the next definition stage.
*/
WithCreate withTimePeriod(ReportConfigTimePeriod timePeriod);
}
/** The stage of the View definition allowing to specify dataSet. */
interface WithDataSet {
/**
* Specifies the dataSet property: Has definition for data in this report config..
*
* @param dataSet Has definition for data in this report config.
* @return the next definition stage.
*/
WithCreate withDataSet(ReportConfigDataset dataSet);
}
}
/**
* Begins update for the View resource.
*
* @return the stage of resource update.
*/
View.Update update();
/** The template for View update. */
interface Update
extends UpdateStages.WithEtag,
UpdateStages.WithDisplayName,
UpdateStages.WithScope,
UpdateStages.WithChart,
UpdateStages.WithAccumulated,
UpdateStages.WithMetric,
UpdateStages.WithKpis,
UpdateStages.WithPivots,
UpdateStages.WithTypePropertiesType,
UpdateStages.WithTimeframe,
UpdateStages.WithTimePeriod,
UpdateStages.WithDataSet {
/**
* Executes the update request.
*
* @return the updated resource.
*/
View apply();
/**
* Executes the update request.
*
* @param context The context to associate with this operation.
* @return the updated resource.
*/
View apply(Context context);
}
/** The View update stages. */
interface UpdateStages {
/** The stage of the View update allowing to specify etag. */
interface WithEtag {
/**
* Specifies the etag property: eTag of the resource. To handle concurrent update scenario, this field will
* be used to determine whether the user is updating the latest version or not..
*
* @param etag eTag of the resource. To handle concurrent update scenario, this field will be used to
* determine whether the user is updating the latest version or not.
* @return the next definition stage.
*/
Update withEtag(String etag);
}
/** The stage of the View update allowing to specify displayName. */
interface WithDisplayName {
/**
* Specifies the displayName property: User input name of the view. Required..
*
* @param displayName User input name of the view. Required.
* @return the next definition stage.
*/
Update withDisplayName(String displayName);
}
/** The stage of the View update allowing to specify scope. */
interface WithScope {
/**
* Specifies the scope property: Cost Management scope to save the view on. This includes
* 'subscriptions/{subscriptionId}' for subscription scope,
* 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for resourceGroup scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing Account scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
* Department scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
* for EnrollmentAccount scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}' for
* BillingProfile scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}' for
* InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}' for
* Management Group scope,
* '/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
* ExternalBillingAccount scope, and
* '/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
* ExternalSubscription scope..
*
* @param scope Cost Management scope to save the view on. This includes 'subscriptions/{subscriptionId}'
* for subscription scope, 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}' for
* resourceGroup scope, 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}' for Billing
* Account scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/departments/{departmentId}' for
* Department scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/enrollmentAccounts/{enrollmentAccountId}'
* for EnrollmentAccount scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/billingProfiles/{billingProfileId}'
* for BillingProfile scope,
* 'providers/Microsoft.Billing/billingAccounts/{billingAccountId}/invoiceSections/{invoiceSectionId}'
* for InvoiceSection scope, 'providers/Microsoft.Management/managementGroups/{managementGroupId}' for
* Management Group scope,
* '/providers/Microsoft.CostManagement/externalBillingAccounts/{externalBillingAccountName}' for
* ExternalBillingAccount scope, and
* '/providers/Microsoft.CostManagement/externalSubscriptions/{externalSubscriptionName}' for
* ExternalSubscription scope.
* @return the next definition stage.
*/
Update withScope(String scope);
}
/** The stage of the View update allowing to specify chart. */
interface WithChart {
/**
* Specifies the chart property: Chart type of the main view in Cost Analysis. Required..
*
* @param chart Chart type of the main view in Cost Analysis. Required.
* @return the next definition stage.
*/
Update withChart(ChartType chart);
}
/** The stage of the View update allowing to specify accumulated. */
interface WithAccumulated {
/**
* Specifies the accumulated property: Show costs accumulated over time..
*
* @param accumulated Show costs accumulated over time.
* @return the next definition stage.
*/
Update withAccumulated(AccumulatedType accumulated);
}
/** The stage of the View update allowing to specify metric. */
interface WithMetric {
/**
* Specifies the metric property: Metric to use when displaying costs..
*
* @param metric Metric to use when displaying costs.
* @return the next definition stage.
*/
Update withMetric(MetricType metric);
}
/** The stage of the View update allowing to specify kpis. */
interface WithKpis {
    /**
     * Specifies the kpis property: List of KPIs to show in Cost Analysis UI.
     *
     * @param kpis List of KPIs to show in Cost Analysis UI.
     * @return the next update stage.
     */
    Update withKpis(List<KpiProperties> kpis);
}
/** The stage of the View update allowing to specify pivots. */
interface WithPivots {
    /**
     * Specifies the pivots property: Configuration of 3 sub-views in the Cost Analysis UI.
     *
     * @param pivots Configuration of 3 sub-views in the Cost Analysis UI.
     * @return the next update stage.
     */
    Update withPivots(List<PivotProperties> pivots);
}
/** The stage of the View update allowing to specify typePropertiesType. */
interface WithTypePropertiesType {
    /**
     * Specifies the typePropertiesType property: The type of the report. Usage represents actual usage,
     * forecast represents forecasted data and UsageAndForecast represents both usage and forecasted data.
     * Actual usage and forecasted data can be differentiated based on dates.
     *
     * @param typePropertiesType The type of the report. Usage represents actual usage, forecast represents
     *     forecasted data and UsageAndForecast represents both usage and forecasted data. Actual usage and
     *     forecasted data can be differentiated based on dates.
     * @return the next update stage.
     */
    Update withTypePropertiesType(ReportType typePropertiesType);
}
/** The stage of the View update allowing to specify timeframe. */
interface WithTimeframe {
    /**
     * Specifies the timeframe property: The time frame for pulling data for the report. If custom, then a
     * specific time period must be provided.
     *
     * @param timeframe The time frame for pulling data for the report. If custom, then a specific time period
     *     must be provided.
     * @return the next update stage.
     */
    Update withTimeframe(ReportTimeframeType timeframe);
}
/** The stage of the View update allowing to specify timePeriod. */
interface WithTimePeriod {
    /**
     * Specifies the timePeriod property: Has time period for pulling data for the report.
     *
     * @param timePeriod Has time period for pulling data for the report.
     * @return the next update stage.
     */
    Update withTimePeriod(ReportConfigTimePeriod timePeriod);
}
/** The stage of the View update allowing to specify dataSet. */
interface WithDataSet {
    /**
     * Specifies the dataSet property: Has definition for data in this report config.
     *
     * @param dataSet Has definition for data in this report config.
     * @return the next update stage.
     */
    Update withDataSet(ReportConfigDataset dataSet);
}
}
/**
* Refreshes the resource to sync with Azure.
*
* @return the refreshed resource.
*/
View refresh();
/**
* Refreshes the resource to sync with Azure.
*
* @param context The context to associate with this operation.
* @return the refreshed resource.
*/
View refresh(Context context);
}
| 11,253 |
897 | import java.util.Scanner;
public class RemoveDuplicateElement {
/**
*
* Method 1 Using extra space
*
*/
// The function remove the duplicate element
// Function take two argument array 'a' of size 'n'.
static int DuplicateElement(int[] a, int n){
// check if array is empty or it has only 1 element
if (n==0 || n==1)
return n;
// Now we create an temporary array of size an
// it stores the unique element
int[] temp = new int[n];
int j=0;
// start traversing the array
for (int i=0; i<n-1; i++){
// If current element is not equal
// to next element then store that
// current element
if (a[i] != a[i+1]){
temp[j++] = a[i];
}
}
// Store the last element as whether
// it is unique or repeated, it hasn't
// stored previously
temp[j++] = a[n-1];
// modify original array
for (int i=0; i<j; i++){
a[i] = temp[i];
}
return j;
}
public static void main(String[] args) {
// taking the input from the user
Scanner sc = new Scanner(System.in);
System.out.print("Enter the length of the array : ");
int n = sc.nextInt();
int a[] = new int[n];
System.out.println("Enter the values for the array elements : ");
// taking the values for the array from the user
for(int i=0; i<n; i++)
{
a[i] = sc.nextInt();
}
n = DuplicateElement(a,n);
// print the updated array
for (int i=0; i<n; i++){
System.out.print(a[i] + " ");
}
}
}
/*
Example :
Input:
Enter the length of the array : 4
Enter the values for the array elements :
1 1 2 2
Output:
1 2
*/
| 901 |
11,356 | <filename>coremltools/converters/mil/frontend/tensorflow/tf_op_registry.py
# Copyright (c) 2020, Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can be
# found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
_TF_OPS_REGISTRY = {}
def register_tf_op(_func=None, tf_alias=None, override=False):
"""
Registration routine for TensorFlow operators
_func: (TF conversion function) [Default=None]
TF conversion function to register
tf_alias: (List of string) [Default=None]
All other TF operators that should also be mapped to
current conversion routine.
e.g. Sort aliased with SortV1, SortV2
All provided alias operators must not be registered previously.
override: (Boolean) [Default=False]
If True, overrides earlier registration i.e. specified
operator and alias will start pointing to current conversion
function.
Otherwise, duplicate registration will error out.
"""
def func_wrapper(func):
f_name = func.__name__
if not override and f_name in _TF_OPS_REGISTRY:
raise ValueError("TF op {} already registered.".format(f_name))
_TF_OPS_REGISTRY[f_name] = func
# If tf_alias is provided, then all the functions mentioned as aliased
# are mapped to current function
if tf_alias is not None:
for name in tf_alias:
if not override and name in _TF_OPS_REGISTRY:
msg = "TF op alias {} already registered."
raise ValueError(msg.format(name))
_TF_OPS_REGISTRY[name] = func
return func
if _func is None:
# decorator called without argument
return func_wrapper
return func_wrapper(_func)
| 708 |
5,169 | <filename>Specs/0/3/5/SwiftyRestKit/1.0.4/SwiftyRestKit.podspec.json<gh_stars>1000+
{
"name": "SwiftyRestKit",
"platforms": {
"ios": "12.0"
},
"summary": "An easy, organized, protocol-oriented way to do REST requests on iOS.",
"requires_arc": true,
"version": "1.0.4",
"description": "SwiftyRestKit is a simple, lightweight library, built using URLSession, to help make REST requests on iOS easier to deal with and more testable.",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"homepage": "https://github.com/rroca1982/SwiftyRestKit",
"source": {
"git": "https://github.com/rroca1982/SwiftyRestKit.git",
"tag": "1.0.4"
},
"source_files": "SwiftyRestKit/**/*.{swift}",
"swift_version": "4.2"
}
| 316 |
518 | <reponame>rajkumarpalani/PayPal-Java-SDK<gh_stars>100-1000
package com.paypal.api.payments;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Unit tests for {@code RelatedResources}: construction from the four
 * sub-transaction fixtures and non-empty JSON/string rendering.
 */
public class RelatedResourcesTestCase {

    // Shared fixtures built from the sibling test cases.
    public static final Sale SALE = SaleTestCase.createSale();
    public static final Authorization AUTHORIZATION = AuthorizationTestCase
            .createAuthorization();
    public static final Refund REFUND = RefundTestCase.createRefund();
    public static final Capture CAPTURE = CaptureTestCase.createCapture();

    /** Builds a RelatedResources populated with all four sub-transactions. */
    public static RelatedResources createRelatedResources() {
        RelatedResources subTransaction = new RelatedResources();
        subTransaction.setAuthorization(AUTHORIZATION);
        subTransaction.setCapture(CAPTURE);
        subTransaction.setRefund(REFUND);
        subTransaction.setSale(SALE);
        return subTransaction;
    }

    /** Each setter's value must round-trip through the matching getter. */
    @Test(groups = "unit")
    public void testConstruction() {
        RelatedResources subTransaction = createRelatedResources();
        Assert.assertEquals(subTransaction.getAuthorization().getId(),
                AuthorizationTestCase.ID);
        Assert.assertEquals(subTransaction.getSale().getId(), SaleTestCase.ID);
        Assert.assertEquals(subTransaction.getRefund().getId(),
                RefundTestCase.ID);
        Assert.assertEquals(subTransaction.getCapture().getId(),
                CaptureTestCase.ID);
    }

    /** Serialization to JSON must produce a non-empty string. */
    @Test(groups = "unit")
    public void testTOJSON() {
        RelatedResources subTransaction = createRelatedResources();
        Assert.assertEquals(subTransaction.toJSON().length() == 0, false);
    }

    /** toString() must produce a non-empty string. */
    @Test(groups = "unit")
    public void testTOString() {
        RelatedResources subTransaction = createRelatedResources();
        Assert.assertEquals(subTransaction.toString().length() == 0, false);
    }
}
| 508 |
471 | from datetime import datetime
from django.http import HttpRequest, HttpResponseBadRequest
from dimagi.utils.dates import DateSpan
from dimagi.utils.django.request import request_from_args_or_kwargs
from dimagi.utils.parsing import ISO_DATE_FORMAT
import six
def datespan_in_request(from_param="from", to_param="to",
                        format_string=ISO_DATE_FORMAT, default_days=30,
                        inclusive=True, default_function=None):
    """Decorator factory that attaches a validated ``DateSpan`` to the request.

    The wrapped view gets ``request.datespan`` set from the ``from_param`` /
    ``to_param`` values in the request's GET or POST data, falling back to a
    default span when neither is supplied. Unparseable dates short-circuit
    with an HTTP 400 response instead of calling the view.

    :param from_param: request parameter name holding the start date
    :param to_param: request parameter name holding the end date
    :param format_string: date format used to parse both parameters
    :param default_days: span length for the default; ignored when
        ``default_function`` is given
    :param inclusive: passed through to ``DateSpan.since`` for the default span
    :param default_function: callable returning the default ``DateSpan``;
        when ``None``, the last ``default_days`` days are used
    :return: a decorator suitable for Django view functions
    """
    from functools import wraps

    # You can pass in a function to say what the default should be; if you
    # don't, it will pull the value from the last default_days in.
    if default_function is None:
        default_function = lambda: DateSpan.since(default_days,
                                                  format=format_string,
                                                  inclusive=inclusive)

    def get_dates(f):
        if not hasattr(f, "__name__"):
            # Not a plain view function (no __name__ to preserve); return it
            # untouched, matching the original behavior.
            return f

        # functools.wraps preserves __name__/__doc__ like the original manual
        # copy, plus __module__, __qualname__ and __wrapped__.
        @wraps(f)
        def wrapped_func(*args, **kwargs):
            # Attempt to find the request object from all the argument
            # values, checking first the args and then the kwargs.
            req = request_from_args_or_kwargs(*args, **kwargs)
            if req:
                req_dict = req.POST if req.method == "POST" else req.GET

                def date_or_nothing(param):
                    date = req_dict.get(param, None)
                    return datetime.strptime(date, format_string) if date else None

                try:
                    startdate = date_or_nothing(from_param)
                    enddate = date_or_nothing(to_param)
                except ValueError as e:
                    # Malformed date input -> client error, not a 500.
                    return HttpResponseBadRequest(six.text_type(e))
                if startdate or enddate:
                    req.datespan = DateSpan(startdate, enddate, format_string)
                else:
                    # Neither bound supplied: use the default and flag it.
                    req.datespan = default_function()
                    req.datespan.is_default = True
            return f(*args, **kwargs)

        return wrapped_func

    return get_dates
| 1,170 |
4,126 |
#pragma once
// Declared only so it can be partially specialized
template <class Signature>
class cFunctionRef;

// NOTE(review): this header uses std::enable_if / std::is_same / std::decay /
// std::add_pointer / std::forward but includes neither <type_traits> nor
// <utility> itself — presumably they arrive via a global prefix header; confirm.

/** A light-weight, type-erased reference to a function object.

### Usage

`cFunctionRef` is used whenever you call a normal function with a lambda. e.g.
one of the `cWorld::DoWith` functions,

	m_World->DoWithChunkAt(BlockPos, [](cChunk & a_Chunk) -> bool
		{
			...
		}
	);

It looks like you're calling it with a lambda but that would require
`DoWithChunkAt` to be a template. The function is really being called with
`cFunctionRef<bool(cChunk&)>` which is constructed from the lambda via a
templated constructor. This gives you a generic pointer to the lambda which
doesn't depend on the type of the function object it references.

### Notes
 - This is similar to a `std::function` but doesn't copy the function object.
This means that mutable function objects will be modified for the caller but
would not be if using a `std::function` (See #3990 for implications of this).
 - A `cFunctionRef` has no empty state but is non-owning and so is safe to call as
long as the referred object is still alive. */
template <class Ret, class... Args>
class cFunctionRef<Ret(Args...)>
{
public:
	/** Construct from a function object. */
	template <class FunctionObject,
		typename std::enable_if<  // Don't disable the default copy constructor
			!std::is_same<typename std::decay<FunctionObject>::type, cFunctionRef>::value,
		int>::type = 0
	>
	cFunctionRef(FunctionObject && a_FunctionObject)
	{
		// Store an opaque reference to the object.
		m_CallableData = &a_FunctionObject;

		// Along with a function that knows how to call the object.
		m_CallFunction = &ObjectFunctionCaller<FunctionObject>;
	}

	/** Call the referenced function object */
	Ret operator () (Args... a_Args)
	{
		return m_CallFunction(m_CallableData, std::forward<Args>(a_Args)...);
	}

private:

	/** Function that performs the call. */
	template <class ObjectType>
	static Ret ObjectFunctionCaller(void * a_Callable, Args... a_Args)
	{
		// Convert opaque reference to the concrete type.
		using ObjectPtr = typename std::add_pointer<ObjectType>::type;
		auto & Object = *static_cast<ObjectPtr>(a_Callable);

		// Forward the call down to the object.
		return Object(std::forward<Args>(a_Args)...);
	}

	using cCallFunction = Ret(*)(void *, Args...);

	/** Type erased reference to a callable. */
	void * m_CallableData;

	/** Function that knows how to call the type erased reference. */
	cCallFunction m_CallFunction;
};
| 766 |
948 | /*
* Copyright (C) 2012 Texas Instruments Incorporated - http://www.ti.com/
* All rights reserved.
*
* Copyright (c) 2013, ADVANSEE - http://www.advansee.com/
* <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \addtogroup cc2538
* @{
*
* \defgroup cc2538-flash cc2538 flash memory
*
* Definitions for the cc2538 flash memory
* @{
*
* \file
* Header file for the flash memory definitions
*/
#ifndef FLASH_H_
#define FLASH_H_
#include "dev/cc2538-dev.h"
#include "cfs-coffee-arch.h"
#include <stdint.h>
/*---------------------------------------------------------------------------*/
/** \name Flash memory organization
* @{
*/
#define FLASH_PAGE_SIZE 2048
#define FLASH_WORD_SIZE 4
/** @} */
/*---------------------------------------------------------------------------*/
/** \name Flash lock bit page and CCA location
* @{
*/
#define FLASH_CCA_ADDR (CC2538_DEV_FLASH_ADDR + CC2538_DEV_FLASH_SIZE - \
FLASH_CCA_SIZE) /**< Address */
#define FLASH_CCA_SIZE 0x0000002C /**< Size in bytes */
/** @} */
/*---------------------------------------------------------------------------*/
/** \name Bootloader backdoor configuration bit fields
* @{
*/
#define FLASH_CCA_BOOTLDR_CFG_DISABLE 0xEFFFFFFF /**< Disable backdoor function */
#define FLASH_CCA_BOOTLDR_CFG_ENABLE 0xF0FFFFFF /**< Enable backdoor function */
#define FLASH_CCA_BOOTLDR_CFG_ACTIVE_HIGH 0x08000000 /**< Selected pin on pad A active high */
#define FLASH_CCA_BOOTLDR_CFG_PORT_A_PIN_M 0x07000000 /**< Selected pin on pad A mask */
#define FLASH_CCA_BOOTLDR_CFG_PORT_A_PIN_S 24 /**< Selected pin on pad A shift */
/** @} */
/*---------------------------------------------------------------------------*/
/** \name Image valid definitions
* @{
*/
#define FLASH_CCA_IMAGE_VALID 0x00000000 /**< Indicates valid image in flash */
/** @} */
/*---------------------------------------------------------------------------*/
/** \name Lock page / debug definitions
* @{
*/
#define FLASH_CCA_LOCKED 0 /**< Page or debug locked if bit == 0 */
#define FLASH_CCA_LOCK_DEBUG_BYTE 31 /**< Lock byte containing the debug lock bit */
#define FLASH_CCA_LOCK_DEBUG_BIT 7 /**< Debug lock bit position in the corresponding lock byte */
/** @} */
/*---------------------------------------------------------------------------*/
/** \name Firmware location in flash memory
* @{
*/
#ifdef FLASH_CONF_FW_ADDR
#define FLASH_FW_ADDR FLASH_CONF_FW_ADDR
#elif !defined(COFFEE_CONF_CUSTOM_PORT)
#define FLASH_FW_ADDR (COFFEE_START + COFFEE_SIZE)
#else
#define FLASH_FW_ADDR CC2538_DEV_FLASH_ADDR
#endif
#ifdef FLASH_CONF_FW_SIZE
#define FLASH_FW_SIZE FLASH_CONF_FW_SIZE
#else
#define FLASH_FW_SIZE (FLASH_CCA_ADDR - FLASH_FW_ADDR)
#endif
/** @} */
/*---------------------------------------------------------------------------*/
/** \name Flash lock bit page and CCA layout
 * @{
 */
/* NOTE: this struct must mirror the in-flash CCA layout at FLASH_CCA_ADDR
 * byte-for-byte (the byte ranges are noted per field); do not reorder or
 * repack it. */
typedef struct {
  uint32_t bootldr_cfg;        /**< Bootloader backdoor configuration (page bytes 2004 - 2007) */
  uint32_t image_valid;        /**< Image valid (page bytes 2008 - 2011) */
  const void *app_entry_point; /**< Flash vector table address (page bytes 2012 - 2015) */
  uint8_t lock[32];            /**< Page and debug lock bits (page bytes 2016 - 2047) */
} flash_cca_lock_page_t;
/** @} */
#endif /* FLASH_H_ */
/**
* @}
* @}
*/
| 1,624 |
419 | <filename>Code/Engine/Animation/Graph/Animation_RuntimeGraph_Controller.cpp
#include "Animation_RuntimeGraph_Controller.h"
#include "Engine/Animation/Components/Component_AnimationGraph.h"
//-------------------------------------------------------------------------
namespace KRG::Animation
{
    namespace Internal
    {
        // Both components are mandatory; the controller is only ever built
        // around an existing graph component and its animated mesh.
        GraphControllerBase::GraphControllerBase( AnimationGraphComponent* pGraphComponent, Render::SkeletalMeshComponent* pMeshComponent )
            : m_pGraphComponent( pGraphComponent )
            , m_pAnimatedMeshComponent( pMeshComponent )
        {
            KRG_ASSERT( m_pGraphComponent != nullptr && pMeshComponent != nullptr );
        }
    }

    //-------------------------------------------------------------------------

    GraphController::~GraphController()
    {
        // The controller owns its sub-graph controllers: destroy each one,
        // then drop the (now dangling) pointers.
        for ( auto pSubController : m_subGraphControllers )
        {
            KRG::Delete( pSubController );
        }
        m_subGraphControllers.clear();
    }

    void GraphController::PreGraphUpdate( Seconds deltaTime )
    {
        // Forward the pre-update tick to every sub-graph controller.
        for ( auto pSubController : m_subGraphControllers )
        {
            pSubController->PreGraphUpdate( deltaTime );
        }
    }

    void GraphController::PostGraphUpdate( Seconds deltaTime )
    {
        // Forward the post-update tick to every sub-graph controller.
        for ( auto pSubController : m_subGraphControllers )
        {
            pSubController->PostGraphUpdate( deltaTime );
        }
    }
}
615 | <filename>src/FieldIndexer.h
#pragma once
#include "LineIndexer.h"

#include <string>
#include <utility>
// A LineIndexer that indexes based on a separator and field number.
class FieldIndexer : public LineIndexer {
    std::string separator_;  // field separator, e.g. "," or "\t"
    int field_;              // index of the field to extract from each line

public:
    // Takes the separator by value and moves it into place; std::move is
    // qualified explicitly instead of relying on ADL to find it.
    FieldIndexer(std::string separator, int field)
        : separator_(std::move(separator)),
          field_(field) { }

    // Emits index entries for the configured field of `line` into `sink`.
    void index(IndexSink &sink, StringView line) override;
};
| 166 |
463 | """A test script the confuses pylint."""
# https://github.com/PyCQA/pylint/issues/2605
from dataclasses import dataclass, field
# Regression case: pylint must recognize `field(default_factory=list)` as a
# valid dataclass default (it used to flag the attribute access below).
@dataclass
class Test:
    """A test dataclass with a field, that has a default_factory."""

    test: list = field(default_factory=list)


TEST = Test()
TEST.test.append(1)  # mutate the factory-created list
print(TEST.test[0])  # expected output: 1
| 122 |
3,702 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package app.metatron.discovery.domain.datasource.ingestion.jdbc;
import java.util.List;
import java.util.Map;
import app.metatron.discovery.domain.dataconnection.DataConnection;
import app.metatron.discovery.domain.datasource.ingestion.IngestionInfo;
import app.metatron.discovery.domain.datasource.ingestion.file.FileFormat;
/**
 * Abstract base class holding the ingestion settings shared by all
 * JDBC-based data source ingestion types.
 *
 * Created by kyungtaak on 2017. 4. 30..
 */
public abstract class JdbcIngestionInfo implements IngestionInfo {

  /**
   * Connection details embedded directly in the ingestion info when the
   * JDBC connection is not managed internally.
   */
  DataConnection connection;

  /**
   * JDBC database (schema) name.
   */
  String database;

  /**
   * Form of the queried data: a query statement or a table.
   */
  DataType dataType;

  /**
   * Query statement or table name, depending on {@link DataType}.
   */
  String query;

  /**
   * File format definition holding the query results.
   */
  FileFormat format;

  /**
   * Roll-up
   */
  Boolean rollup;

  /**
   * Intervals
   */
  List<String> intervals;

  /**
   * Specify Tuning Configuration, override default Value
   */
  Map<String, Object> tuningOptions;

  /**
   * Fetch Size
   */
  Integer fetchSize = 200;

  /**
   * Max Limit
   */
  Integer maxLimit = 10000000;

  /**
   * JDBC Connection username
   */
  String connectionUsername;

  /**
   * JDBC Connection password
   */
  String connectionPassword;

  public JdbcIngestionInfo() {
  }

  public DataConnection getConnection() {
    return connection;
  }

  public void setConnection(DataConnection connection) {
    this.connection = connection;
  }

  public String getDatabase() {
    return database;
  }

  public void setDatabase(String database) {
    this.database = database;
  }

  public DataType getDataType() {
    return dataType;
  }

  public void setDataType(DataType dataType) {
    this.dataType = dataType;
  }

  public String getQuery() {
    return query;
  }

  public void setQuery(String query) {
    this.query = query;
  }

  @Override
  public FileFormat getFormat() {
    return format;
  }

  public void setFormat(FileFormat format) {
    this.format = format;
  }

  @Override
  public Boolean getRollup() {
    return rollup;
  }

  public void setRollup(Boolean rollup) {
    this.rollup = rollup;
  }

  @Override
  public List<String> getIntervals() {
    return intervals;
  }

  public void setIntervals(List<String> intervals) {
    this.intervals = intervals;
  }

  @Override
  public Map<String, Object> getTuningOptions() {
    return tuningOptions;
  }

  public void setTuningOptions(Map<String, Object> tuningOptions) {
    this.tuningOptions = tuningOptions;
  }

  public Integer getFetchSize() {
    return fetchSize;
  }

  public void setFetchSize(Integer fetchSize) {
    this.fetchSize = fetchSize;
  }

  public Integer getMaxLimit() {
    return maxLimit;
  }

  public void setMaxLimit(Integer maxLimit) {
    this.maxLimit = maxLimit;
  }

  public String getConnectionUsername() {
    return connectionUsername;
  }

  public void setConnectionUsername(String connectionUsername) {
    this.connectionUsername = connectionUsername;
  }

  public String getConnectionPassword() {
    return connectionPassword;
  }

  public void setConnectionPassword(String connectionPassword) {
    this.connectionPassword = connectionPassword;
  }

  /** Kind of source object the ingestion reads from. */
  public enum DataType {
    TABLE, QUERY
  }
}
| 1,421 |
410 | """
Unit tests for the relation module.
This module wasn't matching TensorFlow performance so it's tested pretty heavily.
"""
import torch
from allennlp.common.testing import ModelTestCase
# Needed to get the test framework to see the dataset readers and models.
from dygie import models
from dygie import data
class TestRelation(ModelTestCase):

    def setUp(self):
        # Load a small model config and SciERC article fixture.
        super(TestRelation, self).setUp()
        self.config_file = "tests/fixtures/dygie_test.jsonnet"
        self.data_file = "tests/fixtures/scierc_article.json"
        self.set_up_model(self.config_file, self.data_file)

    def test_decode(self):
        # Decode should keep only the first `num_spans_to_keep` spans per
        # batch entry and map predicted label indices (>= 0) to label strings.
        def convert(x):
            return self.model.vocab.get_token_from_index(x, namespace="relation_labels")

        top_spans = torch.tensor([[[0, 2], [1, 3], [1, 3]],
                                  [[1, 6], [2, 4], [3, 8]],
                                  [[0, 1], [0, 1], [0, 1]]])
        # -1 entries mean "no relation predicted" for that span pair.
        predicted_relations = torch.tensor([[[-1, -1, 1],
                                             [1, -1, -1],
                                             [-1, 0, -1]],
                                            [[-1, -1, -1],
                                             [1, -1, 2],
                                             [-1, -1, 4]],
                                            [[1, 1, 2],
                                             [1, 3, 2],
                                             [-1, 2, 1]]])
        num_spans_to_keep = torch.tensor([2, 3, 0])
        predict_dict = {"top_spans": top_spans,
                        "predicted_relations": predicted_relations,
                        "num_spans_to_keep": num_spans_to_keep}
        decoded = self.model._relation.decode(predict_dict)
        # Third entry keeps zero spans, so its dict must be empty.
        expected = [{((1, 3), (0, 2)): convert(1)},
                    {((2, 4), (1, 6)): convert(1),
                     ((2, 4), (3, 8)): convert(2),
                     ((3, 8), (3, 8)): convert(4)},
                    {}]
        assert expected == decoded["decoded_relations_dict"]

    def test_compute_span_pair_embeddings(self):
        # A pair embedding is [emb1; emb2; emb1 * emb2] for each span pair.
        top_span_embeddings = torch.randn([3, 51, 1160])  # Make up random embeddings.
        embeddings = self.model._relation._compute_span_pair_embeddings(top_span_embeddings)
        batch_ix = 1
        ix1 = 22
        ix2 = 43
        emb1 = top_span_embeddings[batch_ix, ix1]
        emb2 = top_span_embeddings[batch_ix, ix2]
        emb_prod = emb1 * emb2
        emb = torch.cat([emb1, emb2, emb_prod])
        assert torch.allclose(emb, embeddings[batch_ix, ix1, ix2])

    def test_compute_relation_scores(self):
        # Recompute one pair's score by hand: feedforward + scorer on the pair
        # embedding, plus both spans' mention scores, with a prepended zero
        # logit for the null label.
        self.model.eval()  # Need eval on in order to reproduce.
        relation = self.model._relation
        pairwise_embeddings = torch.randn(3, 46, 46, 3480, requires_grad=True)
        top_span_mention_scores = torch.randn(3, 46, 1, requires_grad=True)
        scores = relation._compute_relation_scores(pairwise_embeddings, top_span_mention_scores)
        batch_ix = 0
        ix1 = 31
        ix2 = 4
        score = relation._relation_scorer(
            relation._relation_feedforward(pairwise_embeddings[batch_ix, ix1, ix2].unsqueeze(0)))
        score += top_span_mention_scores[batch_ix, ix1] + top_span_mention_scores[batch_ix, ix2]
        score = torch.cat([torch.tensor([0.0]), score.squeeze()])
        assert torch.allclose(scores[batch_ix, ix1, ix2], score)

    def test_get_pruned_gold_relations(self):
        # Getting the pruned gold labels should add one to the input relation
        # labels, then set all the masked entries to -1.
        relation_labels = torch.tensor([[[-1, -1, 2, 3],
                                         [1, -1, -1, 0],
                                         [-1, 3, -1, 1],
                                         [0, -1, -1, -1]],
                                        [[0, 2, 1, 2],
                                         [-1, -1, -1, -1],
                                         [3, 0, -1, -1],
                                         [-1, 0, 1, -1]]])
        top_span_indices = torch.tensor([[0, 1, 3],
                                         [0, 2, 2]])
        top_span_masks = torch.tensor([[1, 1, 1],
                                       [1, 1, 0]]).unsqueeze(-1)
        labels = self.model._relation._get_pruned_gold_relations(
            relation_labels, top_span_indices, top_span_masks)
        expected_labels = torch.tensor([[[0, 0, 4],
                                         [2, 0, 1],
                                         [1, 0, 0]],
                                        [[1, 2, -1],
                                         [4, 0, -1],
                                         [-1, -1, -1]]])
        assert torch.equal(labels, expected_labels)

    def test_cross_entropy_ignore_index(self):
        # Make sure that the cross entropy loss is ignoring entries whose gold
        # label is -1, which corresponds to masked-out entries.
        relation_scores = torch.randn(2, 3, 3, self.model._relation._n_labels + 1)
        gold_relations = torch.tensor([[[0, 0, 4],
                                        [2, 0, 1],
                                        [1, 0, 0]],
                                       [[1, 2, -1],
                                        [4, 0, -1],
                                        [-1, -1, -1]]])
        # Calculate the loss with a loop over entries (reference computation).
        total_loss = torch.tensor([0.0])
        for fold in [0, 1]:
            for i in range(3):
                for j in range(3):
                    scores_entry = relation_scores[fold, i, j].unsqueeze(0)
                    gold_entry = gold_relations[fold, i, j].unsqueeze(0)
                    if gold_entry >= 0:
                        loss_entry = self.model._relation._loss(scores_entry, gold_entry)
                        total_loss += loss_entry
        model_loss = self.model._relation._get_cross_entropy_loss(relation_scores, gold_relations)
        assert torch.allclose(total_loss, model_loss)
| 3,484 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _IMAPIMP_HXX_
#define _IMAPIMP_HXX_
// ---------------
// - IMapOwnData -
// ---------------
// Private implementation data for SvxIMapDlg.
// NOTE(review): the aUpdate* members appear to stage data for the next
// dialog update; confirm semantics at the call sites in the dialog code.
class IMapOwnData
{
private:

    SvxIMapDlg*         pIMap;  // owning dialog; only stored at construction in this header

public:

    Timer               aTimer;
    Timer               aTbxTimer;
    Graphic             aUpdateGraphic;
    ImageMap            aUpdateImageMap;
    TargetList          aUpdateTargetList;
    void*               pUpdateEditingObject;  // opaque object passed through on update
    sal_Bool            bExecState;            // execute state flag

                        IMapOwnData( SvxIMapDlg* pIMapDlg ) : pIMap ( pIMapDlg ) {}
};
#endif // _IMAPIMP_HXX
| 425 |
5,279 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.schemas.utils;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import org.apache.beam.vendor.bytebuddy.v1_11_0.net.bytebuddy.description.type.TypeDescription.ForLoadedType;
import org.apache.beam.vendor.bytebuddy.v1_11_0.net.bytebuddy.implementation.bytecode.StackManipulation;
import org.apache.beam.vendor.bytebuddy.v1_11_0.net.bytebuddy.implementation.bytecode.StackManipulation.Compound;
import org.apache.beam.vendor.bytebuddy.v1_11_0.net.bytebuddy.implementation.bytecode.assign.TypeCasting;
import org.apache.beam.vendor.bytebuddy.v1_11_0.net.bytebuddy.implementation.bytecode.member.MethodVariableAccess;
/** This class allows managing local variables in a ByteBuddy-generated function. */
/** This class allows managing local variables in a ByteBuddy-generated function. */
class ByteBuddyLocalVariableManager {
  // Next free local-variable slot; slots below this are arguments or
  // already-created variables.
  private int nextLocalVariableIndex;

  // Initialize with the number of arguments to the function (including the this parameter if
  // applicable).
  public ByteBuddyLocalVariableManager(int numFunctionArgs) {
    nextLocalVariableIndex = numFunctionArgs;
  }

  // Create a new variable. Returns its slot index.
  public int createVariable() {
    return nextLocalVariableIndex++;
  }

  // Returns a StackManipulation to read a variable.
  public StackManipulation readVariable(int variableIndex) {
    checkArgument(variableIndex < nextLocalVariableIndex);
    return MethodVariableAccess.REFERENCE.loadFrom(variableIndex);
  }

  // Returns a StackManipulation to read a variable, casting to the specified type.
  StackManipulation readVariable(int variableIndex, Class<?> type) {
    return new Compound(readVariable(variableIndex), TypeCasting.to(new ForLoadedType(type)));
  }

  // Returns a StackManipulation to write a variable.
  StackManipulation writeVariable(int variableIndex) {
    checkArgument(variableIndex < nextLocalVariableIndex);
    return MethodVariableAccess.REFERENCE.storeAt(variableIndex);
  }

  // Returns a StackManipulation to copy a variable.
  StackManipulation copy(int sourceVariableIndex, int destVariableIndex) {
    return new Compound(readVariable(sourceVariableIndex), writeVariable(destVariableIndex));
  }

  // Returns a class that can be used to backup and restore a variable, using a newly-created temp
  // variable.
  BackupLocalVariable backupVariable(int variableToBackup) {
    return new BackupLocalVariable(variableToBackup);
  }

  // Gets the total num variables in the function. Should be used when returning the Size parameter.
  int getTotalNumVariables() {
    return nextLocalVariableIndex;
  }

  // Pairs a variable with a freshly allocated temp slot so its value can be
  // saved before a clobbering operation and restored afterwards.
  public class BackupLocalVariable {
    private final int variableToBackup; // Variable to save.
    private final int tempVariable; // Temp variable we are saving in.

    public BackupLocalVariable(int variableToBackup) {
      this.variableToBackup = variableToBackup;
      this.tempVariable = createVariable();
    }

    public StackManipulation backup() {
      return copy(variableToBackup, tempVariable);
    }

    public StackManipulation restore() {
      return copy(tempVariable, variableToBackup);
    }
  }
}
| 1,115 |
678 | /**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/PhotoLibrary.framework/PhotoLibrary
*/
#import <PhotoLibrary/PhotoLibrary-Structs.h>
#import <PhotoLibrary/XXUnknownSuperclass.h>
#import <PhotoLibrary/PLStackViewDataSource.h>
@class PLWStackedImageCache;
// Generated header (class-dump-z); the trailing "// 0x..." comments are the
// symbol addresses recovered from the PhotoLibrary binary, and the "// 4 = 0x4"
// comment is the instance-variable offset — none of it is hand-written source.
// NOTE(review): this class conforms to PLStackViewDataSource and holds a
// PLWStackedImageCache, so it appears to render and persist the stacked album
// poster image by feeding a stack view itself — inferred from selector names
// only; confirm against the binary before relying on these semantics.
@interface PLWStackedImageCacheCreator : XXUnknownSuperclass <PLStackViewDataSource> {
	PLWStackedImageCache *_stackedImageCache;	// 4 = 0x4
}
- (NSObject *)_albumForStack:(id)stack;	// 0xa3795
- (void)stackView:(id)view invalidateCachedImagesForIndex:(int)index;	// 0xa3791
- (id)stackView:(id)view collapsedIndexesForCount:(unsigned)count;	// 0xa3765
- (void)stackView:(id)view saveStackedImage:(id)image options:(id)options;	// 0xa36e5
- (id)stackedImageForStackView:(id)stackView options:(id *)options;	// 0xa3691
- (int)stackViewPosterItemIndex:(id)index;	// 0xa3661
- (id)stackView:(id)view textBadgeStringForImageAtIndex:(int)index;	// 0xa3615
- (id)stackView:(id)view itemViewAtIndex:(int)index loadImagesSynchronously:(BOOL)synchronously;	// 0xa33a9
- (int)stackViewItemCount:(id)count;	// 0xa3381
- (void)createStackedImageForAlbum:(id)album;	// 0xa30e9
- (void)dealloc;	// 0xa30bd
- (id)init;	// 0xa3045
@end
| 449 |
777 | <reponame>google-ar/chromium
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "services/video_capture/service_impl.h"
#include "base/memory/ptr_util.h"
#include "base/message_loop/message_loop.h"
#include "media/capture/video/fake_video_capture_device_factory.h"
#include "media/capture/video/video_capture_buffer_pool.h"
#include "media/capture/video/video_capture_buffer_tracker.h"
#include "media/capture/video/video_capture_jpeg_decoder.h"
#include "services/service_manager/public/cpp/interface_registry.h"
#include "services/video_capture/device_factory_media_to_mojo_adapter.h"
namespace {

// TODO(chfremer): Replace with an actual decoder factory.
// https://crbug.com/584797
// Stub: returning nullptr disables accelerated jpeg decoding in the device
// adapter until a real decoder factory is wired up.
std::unique_ptr<media::VideoCaptureJpegDecoder> CreateJpegDecoder() {
  return nullptr;
}

}  // anonymous namespace
namespace video_capture {

ServiceImpl::ServiceImpl() = default;

ServiceImpl::~ServiceImpl() = default;

// service_manager::Service: advertise the mojom::Service interface to every
// connecting client.
bool ServiceImpl::OnConnect(const service_manager::ServiceInfo& remote_info,
                            service_manager::InterfaceRegistry* registry) {
  registry->AddInterface<mojom::Service>(this);
  return true;
}

// InterfaceFactory<mojom::Service>: bind an incoming request to this single
// instance (one binding per connected client).
void ServiceImpl::Create(const service_manager::Identity& remote_identity,
                         mojom::ServiceRequest request) {
  service_bindings_.AddBinding(this, std::move(request));
}

// mojom::Service: hand out the platform device factory, creating it on first
// use.
void ServiceImpl::ConnectToDeviceFactory(mojom::DeviceFactoryRequest request) {
  LazyInitializeDeviceFactory();
  factory_bindings_.AddBinding(device_factory_.get(), std::move(request));
}

// mojom::Service: hand out the fake (test-only) device factory, creating it
// on first use.
void ServiceImpl::ConnectToFakeDeviceFactory(
    mojom::DeviceFactoryRequest request) {
  LazyInitializeFakeDeviceFactory();
  fake_factory_bindings_.AddBinding(fake_device_factory_.get(),
                                    std::move(request));
}

void ServiceImpl::LazyInitializeDeviceFactory() {
  if (device_factory_)
    return;
  // Create the platform-specific device factory.
  // Task runner does not seem to actually be used.
  std::unique_ptr<media::VideoCaptureDeviceFactory> media_device_factory =
      media::VideoCaptureDeviceFactory::CreateFactory(
          base::MessageLoop::current()->task_runner());
  // Wrap the media-layer factory in the mojo adapter; jpeg decoding is
  // currently stubbed out (see CreateJpegDecoder above).
  device_factory_ = base::MakeUnique<DeviceFactoryMediaToMojoAdapter>(
      std::move(media_device_factory), base::Bind(CreateJpegDecoder));
}

void ServiceImpl::LazyInitializeFakeDeviceFactory() {
  if (fake_device_factory_)
    return;
  fake_device_factory_ = base::MakeUnique<DeviceFactoryMediaToMojoAdapter>(
      base::MakeUnique<media::FakeVideoCaptureDeviceFactory>(),
      base::Bind(&CreateJpegDecoder));
}

}  // namespace video_capture
| 935 |
1,144 | <gh_stars>1000+
package de.metas.document.archive.interceptor;
import org.adempiere.service.IClientDAO;
import org.compiere.model.I_AD_Client;
import org.compiere.model.MClient;
import org.compiere.model.ModelValidationEngine;
import org.compiere.model.ModelValidator;
import org.compiere.model.PO;
import de.metas.document.archive.api.IDocOutboundProducerService;
import de.metas.document.archive.async.spi.impl.DocOutboundWorkpackageProcessor;
import de.metas.document.archive.model.I_C_Doc_Outbound_Config;
import de.metas.document.archive.spi.impl.AbstractDocOutboundProducer;
import de.metas.document.engine.IDocument;
import de.metas.document.engine.IDocumentBL;
import de.metas.util.Check;
import de.metas.util.Services;
import lombok.NonNull;
/**
* Intercepter which listens to a a table specified in {@link I_C_Doc_Outbound_Config} and enqueues the documents to {@link DocOutboundWorkpackageProcessor}.
*
* @author tsa
*
*/
/* package */class DocOutboundProducerValidator extends AbstractDocOutboundProducer implements ModelValidator
{
	/** Standard "Processed" column name, looked up by index in {@link #isJustProcessed(PO, int)}. */
	private static final String COLUMNNAME_Processed = "Processed";

	private final ModelValidationEngine modelValidationEngine;

	/** AD_Client_ID this validator is registered for; -1 means "all clients". */
	private int m_AD_Client_ID = -1;

	public DocOutboundProducerValidator(@NonNull final ModelValidationEngine modelValidationEngine, final I_C_Doc_Outbound_Config config)
	{
		super(config);
		this.modelValidationEngine = modelValidationEngine;
	}

	@Override
	public void init(final IDocOutboundProducerService producerService)
	{
		// Detect AD_Client to be used for registering
		final I_C_Doc_Outbound_Config config = getC_Doc_Outbound_Config();
		final int adClientId = config.getAD_Client_ID();
		if (adClientId > 0)
		{
			// Config is client-specific: register only for that client.
			final I_AD_Client client = Services.get(IClientDAO.class).getById(adClientId);
			modelValidationEngine.addModelValidator(this, client);
		}
		else
		{
			// register for all clients
			modelValidationEngine.addModelValidator(this);
		}
	}

	// NOTE: keep in sync with initialize method
	@Override
	public void destroy(final IDocOutboundProducerService producerService)
	{
		// Unregister from model validation engine.
		// Both listener kinds are removed; only one was actually added in
		// initialize(), but removing a non-registered listener is harmless.
		final String tableName = getTableName();
		modelValidationEngine.removeModelChange(tableName, this);
		modelValidationEngine.removeDocValidate(tableName, this);
	}

	// NOTE: keep in sync with destroy method
	@Override
	public void initialize(final ModelValidationEngine engine, final MClient client)
	{
		if (client != null)
		{
			m_AD_Client_ID = client.getAD_Client_ID();
		}

		// Documents are intercepted via docValidate(), plain records via modelChange().
		final String tableName = getTableName();
		if (isDocument())
		{
			engine.addDocValidate(tableName, this);
		}
		else
		{
			engine.addModelChange(tableName, this);
		}
	}

	@Override
	public int getAD_Client_ID()
	{
		return m_AD_Client_ID;
	}

	@Override
	public String login(final int AD_Org_ID, final int AD_Role_ID, final int AD_User_ID)
	{
		return null; // nothing
	}

	@Override
	public String modelChange(PO po, int type) throws Exception
	{
		if (type == TYPE_AFTER_NEW || type == TYPE_AFTER_CHANGE)
		{
			if (isDocument())
			{
				if (!acceptDocument(po))
				{
					return null;
				}
				// If the document was just reversed/voided, void its outbound record too.
				if (po.is_ValueChanged(IDocument.COLUMNNAME_DocStatus) && Services.get(IDocumentBL.class).isDocumentReversedOrVoided(po))
				{
					voidDocOutbound(po);
				}
			}
			// Enqueue an outbound record the moment the record turns "Processed".
			if (isJustProcessed(po, type))
			{
				createDocOutbound(po);
			}
		}
		return null;
	}

	@Override
	public String docValidate(@NonNull final PO po, final int timing)
	{
		Check.assume(isDocument(), "PO '{}' is a document", po);

		if (!acceptDocument(po))
		{
			return null;
		}

		// Create the outbound record on completion, but skip reversal documents
		// (the reversal is handled by voiding the original's outbound record below).
		if (timing == ModelValidator.TIMING_AFTER_COMPLETE
				&& !Services.get(IDocumentBL.class).isReversalDocument(po))
		{
			createDocOutbound(po);
		}
		if (timing == ModelValidator.TIMING_AFTER_VOID
				|| timing == ModelValidator.TIMING_AFTER_REVERSEACCRUAL
				|| timing == ModelValidator.TIMING_AFTER_REVERSECORRECT)
		{
			voidDocOutbound(po);
		}
		return null;
	}

	/**
	 *
	 * @param po
	 * @param changeType
	 * @return true if the given PO was just processed
	 */
	private boolean isJustProcessed(final PO po, final int changeType)
	{
		final boolean isNew = changeType == ModelValidator.TYPE_BEFORE_NEW || changeType == ModelValidator.TYPE_AFTER_NEW;
		final int idxProcessed = po.get_ColumnIndex(DocOutboundProducerValidator.COLUMNNAME_Processed);
		// NOTE(review): get_ColumnIndex conventionally returns -1 when the column is
		// missing; ">= 0" may be intended here in case index 0 is a valid column — confirm.
		final boolean processedColumnAvailable = idxProcessed > 0;
		final boolean processed = processedColumnAvailable ? po.get_ValueAsBoolean(idxProcessed) : true;
		if (processedColumnAvailable)
		{
			if (isNew)
			{
				// Freshly created record: "just processed" iff created already processed.
				return processed;
			}
			else if (po.is_ValueChanged(idxProcessed))
			{
				// Processed flag flipped in this change: report its new value.
				return processed;
			}
			else
			{
				return false;
			}
		}
		else
		// Processed column is not available
		{
			// If is not available, we always consider the record as processed right after it was created
			// This condition was introduced because we need to archive/print records which does not have such a column (e.g. letters)
			return isNew;
		}
	}
}
| 1,793 |
568 | import unittest
from vdebug.opts import Options,OptionsError
class OptionsTest(unittest.TestCase):
    """Tests for the vdebug ``Options`` class-level option store."""

    def tearDown(self):
        # Reset the class-level singleton so each test starts uninitialised.
        Options.instance = None

    def test_has_instance(self):
        Options.set({1:"hello", 2:"world"})
        self.assertIsInstance(Options.inst(), Options)

    def test_get_option(self):
        Options.set({'foo':"hello",'bar':"world"})
        self.assertEqual("hello", Options.get('foo'))

    def test_get_option_for_print(self):
        # Empty values are rendered as "<empty>" for display purposes.
        Options.set({'foo':"", 'bar':"world"})
        self.assertEqual("<empty>", Options.get_for_print('foo'))
        self.assertEqual("world", Options.get_for_print('bar'))

    def test_get_option_as_type(self):
        # A type passed as second argument coerces the stored string value.
        Options.set({'foo':"1", 'bar':"2"})
        opt = Options.get('foo', int)
        self.assertIsInstance(opt, int)
        self.assertEqual(1, opt)

    def test_overwrite(self):
        Options.set({'foo':"hello", 'bar':"world"})
        Options.overwrite('foo', "hi")
        self.assertEqual("hi", Options.get('foo'))

    def test_option_is_not_set(self):
        Options.set({'foo':"", 'bar':"2"})
        self.assertFalse(Options.isset("monkey"))

    def test_option_is_not_valid(self):
        # An option stored as the empty string counts as "not set".
        Options.set({'foo':"", 'bar':"2"})
        self.assertFalse(Options.isset("foo"))

    def test_option_isset(self):
        Options.set({'foo':"", 'bar':"2"})
        self.assertTrue(Options.isset("bar"))

    def test_uninit_raises_error(self):
        # Querying before Options.set() has been called raises OptionsError.
        self.assertRaises(OptionsError, Options.isset,'something')

    def test_get_raises_error(self):
        Options.set({'foo':"1", 'bar':"2"})
        self.assertRaises(OptionsError, Options.get,'something')
| 724 |
348 | {"nom":"Esparros","circ":"1ère circonscription","dpt":"Hautes-Pyrénées","inscrits":167,"abs":81,"votants":86,"blancs":5,"nuls":3,"exp":78,"res":[{"nuance":"FI","nom":"<NAME>","voix":45},{"nuance":"REM","nom":"<NAME>","voix":33}]} | 93 |
441 | import unittest
import grpc
from _service import Service, ErroringHandler, ExceptionErroringHandler
from _tracer import Tracer, SpanRelationship
from grpc_opentracing import open_tracing_client_interceptor, open_tracing_server_interceptor
import opentracing
class OpenTracingTest(unittest.TestCase):
    """Test that tracers create the correct spans when RPC calls are invoked."""

    def setUp(self):
        self._tracer = Tracer()
        self._service = Service([open_tracing_client_interceptor(self._tracer)],
                                [open_tracing_server_interceptor(self._tracer)])

    def _assert_client_server_spans(self):
        """Assert that exactly a client span and a server span were recorded,
        with the server span a CHILD_OF the client span.

        Extracted helper: every test below previously repeated these ten
        lines verbatim; the assertions and their order are unchanged.
        """
        span0 = self._tracer.get_span(0)
        self.assertIsNotNone(span0)
        self.assertEqual(span0.get_tag('span.kind'), 'client')
        span1 = self._tracer.get_span(1)
        self.assertIsNotNone(span1)
        self.assertEqual(span1.get_tag('span.kind'), 'server')
        self.assertEqual(
            self._tracer.get_relationship(0, 1),
            opentracing.ReferenceType.CHILD_OF)

    def testUnaryUnaryOpenTracing(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        response = multi_callable(request)
        self.assertEqual(response, expected_response)
        self._assert_client_server_spans()

    def testUnaryUnaryOpenTracingFuture(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        future = multi_callable.future(request)
        response = future.result()
        self.assertEqual(response, expected_response)
        self._assert_client_server_spans()

    def testUnaryUnaryOpenTracingWithCall(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        response, call = multi_callable.with_call(request)
        self.assertEqual(response, expected_response)
        self.assertIs(grpc.StatusCode.OK, call.code())
        self._assert_client_server_spans()

    def testUnaryStreamOpenTracing(self):
        multi_callable = self._service.unary_stream_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_stream(request,
                                                                      None)
        response = multi_callable(request)
        self.assertEqual(list(response), list(expected_response))
        self._assert_client_server_spans()

    def testStreamUnaryOpenTracing(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        response = multi_callable(iter(requests))
        self.assertEqual(response, expected_response)
        self._assert_client_server_spans()

    def testStreamUnaryOpenTracingWithCall(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        response, call = multi_callable.with_call(iter(requests))
        self.assertEqual(response, expected_response)
        self.assertIs(grpc.StatusCode.OK, call.code())
        self._assert_client_server_spans()

    def testStreamUnaryOpenTracingFuture(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        result = multi_callable.future(iter(requests))
        response = result.result()
        self.assertEqual(response, expected_response)
        self._assert_client_server_spans()

    def testStreamStreamOpenTracing(self):
        multi_callable = self._service.stream_stream_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_stream(
            iter(requests), None)
        response = multi_callable(iter(requests))
        self.assertEqual(list(response), list(expected_response))
        self._assert_client_server_spans()
class OpenTracingInteroperabilityClientTest(unittest.TestCase):
    """Test that a traced client can interoperate with a non-trace server."""

    def setUp(self):
        self._tracer = Tracer()
        # Only the client side is intercepted; the server is untraced.
        self._service = Service([open_tracing_client_interceptor(self._tracer)],
                                [])

    def _assert_client_span_only(self):
        """Assert only the client-side span was recorded (no server span).

        Extracted helper: every test below previously repeated these five
        lines verbatim; the assertions and their order are unchanged.
        """
        span0 = self._tracer.get_span(0)
        self.assertIsNotNone(span0)
        self.assertEqual(span0.get_tag('span.kind'), 'client')
        span1 = self._tracer.get_span(1)
        self.assertIsNone(span1)

    def testUnaryUnaryOpenTracing(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        response = multi_callable(request)
        self.assertEqual(response, expected_response)
        self._assert_client_span_only()

    def testUnaryUnaryOpenTracingWithCall(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        response, call = multi_callable.with_call(request)
        self.assertEqual(response, expected_response)
        self.assertIs(grpc.StatusCode.OK, call.code())
        self._assert_client_span_only()

    def testUnaryStreamOpenTracing(self):
        multi_callable = self._service.unary_stream_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_stream(request,
                                                                      None)
        response = multi_callable(request)
        self.assertEqual(list(response), list(expected_response))
        self._assert_client_span_only()

    def testStreamUnaryOpenTracing(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        response = multi_callable(iter(requests))
        self.assertEqual(response, expected_response)
        self._assert_client_span_only()

    def testStreamUnaryOpenTracingWithCall(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        response, call = multi_callable.with_call(iter(requests))
        self.assertEqual(response, expected_response)
        self.assertIs(grpc.StatusCode.OK, call.code())
        self._assert_client_span_only()

    def testStreamStreamOpenTracing(self):
        multi_callable = self._service.stream_stream_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_stream(
            iter(requests), None)
        response = multi_callable(iter(requests))
        self.assertEqual(list(response), list(expected_response))
        self._assert_client_span_only()
class OpenTracingMetadataTest(unittest.TestCase):
    """Test that open-tracing doesn't interfere with passing metadata through the
    RPC.
    """

    def setUp(self):
        self._tracer = Tracer()
        self._service = Service([open_tracing_client_interceptor(self._tracer)],
                                [open_tracing_server_interceptor(self._tracer)])

    def testInvocationMetadata(self):
        # Client-supplied invocation metadata must reach the server handler
        # unchanged even though the tracing interceptor injects its own keys.
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        multi_callable(request, None, (('abc', '123'),))
        self.assertIn(('abc', '123'), self._service.handler.invocation_metadata)

    def testTrailingMetadata(self):
        # Server-supplied trailing metadata must make it back to the client.
        self._service.handler.trailing_metadata = (('abc', '123'),)
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        future = multi_callable.future(request, None, (('abc', '123'),))
        self.assertIn(('abc', '123'), future.trailing_metadata())
class OpenTracingInteroperabilityServerTest(unittest.TestCase):
    """Test that a traced server can interoperate with a non-trace client."""

    def setUp(self):
        self._tracer = Tracer()
        # Only the server side is intercepted; the client is untraced.
        self._service = Service([],
                                [open_tracing_server_interceptor(self._tracer)])

    def _assert_server_span_only(self):
        """Assert only the server-side span was recorded (no client span).

        Extracted helper: every test below previously repeated these five
        lines verbatim; the assertions and their order are unchanged.
        """
        span0 = self._tracer.get_span(0)
        self.assertIsNotNone(span0)
        self.assertEqual(span0.get_tag('span.kind'), 'server')
        span1 = self._tracer.get_span(1)
        self.assertIsNone(span1)

    def testUnaryUnaryOpenTracing(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        response = multi_callable(request)
        self.assertEqual(response, expected_response)
        self._assert_server_span_only()

    def testUnaryUnaryOpenTracingWithCall(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_unary(request,
                                                                     None)
        response, call = multi_callable.with_call(request)
        self.assertEqual(response, expected_response)
        self.assertIs(grpc.StatusCode.OK, call.code())
        self._assert_server_span_only()

    def testUnaryStreamOpenTracing(self):
        multi_callable = self._service.unary_stream_multi_callable
        request = b'\x01'
        expected_response = self._service.handler.handle_unary_stream(request,
                                                                      None)
        response = multi_callable(request)
        self.assertEqual(list(response), list(expected_response))
        self._assert_server_span_only()

    def testStreamUnaryOpenTracing(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        response = multi_callable(iter(requests))
        self.assertEqual(response, expected_response)
        self._assert_server_span_only()

    def testStreamUnaryOpenTracingWithCall(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_unary(
            iter(requests), None)
        response, call = multi_callable.with_call(iter(requests))
        self.assertEqual(response, expected_response)
        self.assertIs(grpc.StatusCode.OK, call.code())
        self._assert_server_span_only()

    def testStreamStreamOpenTracing(self):
        multi_callable = self._service.stream_stream_multi_callable
        requests = [b'\x01', b'\x02']
        expected_response = self._service.handler.handle_stream_stream(
            iter(requests), None)
        response = multi_callable(iter(requests))
        self.assertEqual(list(response), list(expected_response))
        self._assert_server_span_only()
class OpenTracingErroringTest(unittest.TestCase):
    """Test that tracer spans set the error tag when erroring RPC are invoked."""

    def setUp(self):
        self._tracer = Tracer()
        self._service = Service([open_tracing_client_interceptor(self._tracer)],
                                [open_tracing_server_interceptor(self._tracer)],
                                ErroringHandler())

    def _assert_error_spans(self):
        """Assert both the client span and the server span carry the error tag.

        Extracted helper: every test below previously repeated these six
        lines verbatim; the assertions and their order are unchanged.
        """
        span0 = self._tracer.get_span(0)
        self.assertIsNotNone(span0)
        self.assertTrue(span0.get_tag('error'))
        span1 = self._tracer.get_span(1)
        self.assertIsNotNone(span1)
        self.assertTrue(span1.get_tag('error'))

    def testUnaryUnaryOpenTracing(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        self.assertRaises(grpc.RpcError, multi_callable, request)
        self._assert_error_spans()

    def testUnaryUnaryOpenTracingWithCall(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        self.assertRaises(grpc.RpcError, multi_callable.with_call, request)
        self._assert_error_spans()

    def testUnaryStreamOpenTracing(self):
        multi_callable = self._service.unary_stream_multi_callable
        request = b'\x01'
        response = multi_callable(request)
        # The error surfaces when the response stream is consumed.
        self.assertRaises(grpc.RpcError, list, response)
        self._assert_error_spans()

    def testStreamUnaryOpenTracing(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        self.assertRaises(grpc.RpcError, multi_callable, iter(requests))
        self._assert_error_spans()

    def testStreamUnaryOpenTracingWithCall(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        self.assertRaises(grpc.RpcError, multi_callable.with_call,
                          iter(requests))
        self._assert_error_spans()

    def testStreamStreamOpenTracing(self):
        multi_callable = self._service.stream_stream_multi_callable
        requests = [b'\x01', b'\x02']
        response = multi_callable(iter(requests))
        # The error surfaces when the response stream is consumed.
        self.assertRaises(grpc.RpcError, list, response)
        self._assert_error_spans()
class OpenTracingExceptionErroringTest(unittest.TestCase):
    """Test that tracer spans set the error tag when exception erroring RPC are
    invoked.
    """

    def setUp(self):
        self._tracer = Tracer()
        self._service = Service([open_tracing_client_interceptor(self._tracer)],
                                [open_tracing_server_interceptor(self._tracer)],
                                ExceptionErroringHandler())

    def _assert_error_spans(self):
        """Assert both the client span and the server span carry the error tag.

        Extracted helper: every test below previously repeated these six
        lines verbatim; the assertions and their order are unchanged.
        """
        span0 = self._tracer.get_span(0)
        self.assertIsNotNone(span0)
        self.assertTrue(span0.get_tag('error'))
        span1 = self._tracer.get_span(1)
        self.assertIsNotNone(span1)
        self.assertTrue(span1.get_tag('error'))

    def testUnaryUnaryOpenTracing(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        self.assertRaises(grpc.RpcError, multi_callable, request)
        self._assert_error_spans()

    def testUnaryUnaryOpenTracingWithCall(self):
        multi_callable = self._service.unary_unary_multi_callable
        request = b'\x01'
        self.assertRaises(grpc.RpcError, multi_callable.with_call, request)
        self._assert_error_spans()

    def testUnaryStreamOpenTracing(self):
        multi_callable = self._service.unary_stream_multi_callable
        request = b'\x01'
        response = multi_callable(request)
        # The error surfaces when the response stream is consumed.
        self.assertRaises(grpc.RpcError, list, response)
        self._assert_error_spans()

    def testStreamUnaryOpenTracing(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        self.assertRaises(grpc.RpcError, multi_callable, iter(requests))
        self._assert_error_spans()

    def testStreamUnaryOpenTracingWithCall(self):
        multi_callable = self._service.stream_unary_multi_callable
        requests = [b'\x01', b'\x02']
        self.assertRaises(grpc.RpcError, multi_callable.with_call,
                          iter(requests))
        self._assert_error_spans()

    def testStreamStreamOpenTracing(self):
        multi_callable = self._service.stream_stream_multi_callable
        requests = [b'\x01', b'\x02']
        response = multi_callable(iter(requests))
        # The error surfaces when the response stream is consumed.
        self.assertRaises(grpc.RpcError, list, response)
        self._assert_error_spans()
| 10,533 |
1,711 |
// ===============================================================================
// May be included multiple times - resets structure packing to the defaults
// for all supported compilers. Reverts the changes made by #include <pushpack1.h>
//
// Currently this works on the following compilers:
// MSVC 7,8,9
// GCC
// BORLAND (complains about 'pack state changed but not reverted', but works)
// ===============================================================================
#ifndef AI_PUSHPACK_IS_DEFINED
# error pushpack1.h must be included after poppack1.h
#endif
// reset packing to the original value
#if defined(_MSC_VER) || defined(__BORLANDC__) || defined (__BCPLUSPLUS__)
# pragma pack( pop )
#endif
#undef PACK_STRUCT
#undef AI_PUSHPACK_IS_DEFINED
| 236 |
318 | // __BEGIN_LICENSE__
// Copyright (c) 2006-2013, United States Government as represented by the
// Administrator of the National Aeronautics and Space Administration. All
// rights reserved.
//
// The NASA Vision Workbench is licensed under the Apache License,
// Version 2.0 (the "License"); you may not use this file except in
// compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// __END_LICENSE__
/// \file Features.h
/// This file contains some useful macros and definitions so they don't get
/// scattered everywhere.
#ifndef __VW_CORE_FEATURES_H__
#define __VW_CORE_FEATURES_H__

#include <vw/config.h>

// Portable wrappers for GCC-style function attributes: each macro expands to
// the attribute only when vw/config.h detected compiler support, and to
// nothing otherwise, so annotated declarations stay valid everywhere.
#if defined(VW_COMPILER_HAS_ATTRIBUTE_DEPRECATED)
#define VW_DEPRECATED __attribute__((deprecated))
#else
#define VW_DEPRECATED
#endif

#if defined(VW_COMPILER_HAS_ATTRIBUTE_NORETURN)
#define VW_NORETURN __attribute__((noreturn))
#else
#define VW_NORETURN
#endif

#if defined(VW_COMPILER_HAS_ATTRIBUTE_WARN_UNUSED_RESULT)
#define VW_WARN_UNUSED __attribute__((warn_unused_result))
#else
#define VW_WARN_UNUSED
#endif

// Empty exception specification, applied only in exception-enabled builds.
// NOTE(review): VW_IF_EXCEPTIONS is presumably defined in vw/config.h -- confirm.
#define VW_NOTHROW VW_IF_EXCEPTIONS(throw())

/// The master compile-time debugging level flag. The default value
/// for VW_DEBUG_LEVEL is guessed based on whether or not NDEBUG
/// is defined if the user has not specified it explicitly.
#ifndef VW_DEBUG_LEVEL
#ifdef NDEBUG
#define VW_DEBUG_LEVEL 0
#else
#define VW_DEBUG_LEVEL 1
#endif
#endif

/// A quick macro for selectively disabling code in non-debug builds.
#if VW_DEBUG_LEVEL == 0
#define VW_DEBUG(x)
#else
#define VW_DEBUG(x) x
#endif

#endif  // __VW_CORE_FEATURES_H__
| 638 |
1,403 | /*
* CsVertexFormat.cpp
*
* This file is part of the "LLGL" project (Copyright (c) 2015-2019 by <NAME>)
* See "LICENSE.txt" for license information.
*/
#include "CsVertexFormat.h"
#include "CsHelper.h"
#include <LLGL/VertexAttribute.h>
#include <algorithm>
namespace SharpLLGL
{
/*
* VertexAttribute class
*/
VertexAttribute::VertexAttribute()
{
Name = "";
Format = SharpLLGL::Format::Undefined;
Location = 0;
SemanticIndex = 0;
Slot = 0;
Offset = 0;
Stride = 0;
SystemValue = SharpLLGL::SystemValue::Undefined;
InstanceDivisor = 0;
}
VertexAttribute::VertexAttribute(String^ name, SharpLLGL::Format format, unsigned int location)
{
Name = name;
Format = format;
Location = location;
SemanticIndex = 0;
SystemValue = SharpLLGL::SystemValue::Undefined;
Slot = 0;
Offset = 0;
Stride = 0;
InstanceDivisor = 0;
}
VertexAttribute::VertexAttribute(String^ name, SharpLLGL::Format format, unsigned int location, unsigned int instanceDivisor)
{
Name = name;
Format = format;
Location = location;
SemanticIndex = 0;
SystemValue = SharpLLGL::SystemValue::Undefined;
Slot = 0;
Offset = 0;
Stride = 0;
InstanceDivisor = instanceDivisor;
}
VertexAttribute::VertexAttribute(String^ name, SharpLLGL::Format format, unsigned int location, unsigned int instanceDivisor, SharpLLGL::SystemValue systemValue)
{
    /* BUGFIX: this overload was an empty TODO stub, leaving all fields
       uninitialized. Initialize them like the sibling constructors, and
       forward the caller-supplied system value. */
    Name            = name;
    Format          = format;
    Location        = location;
    SemanticIndex   = 0;
    SystemValue     = systemValue;
    Slot            = 0;
    Offset          = 0;
    Stride          = 0;
    InstanceDivisor = instanceDivisor;
}
VertexAttribute::VertexAttribute(String^ name, unsigned int semanticIndex, SharpLLGL::Format format, unsigned int location, unsigned int instanceDivisor)
{
    /* Identification, including an explicit HLSL-style semantic index. */
    Name            = name;
    SemanticIndex   = semanticIndex;
    Format          = format;
    Location        = location;
    InstanceDivisor = instanceDivisor;
    /* Layout fields are left at zero for VertexFormat to fill in. */
    SystemValue     = SharpLLGL::SystemValue::Undefined;
    Slot            = 0;
    Offset          = 0;
    Stride          = 0;
}
VertexAttribute::VertexAttribute(String^ name, SharpLLGL::Format format, unsigned int location, unsigned int offset, unsigned int stride, unsigned int slot, unsigned int instanceDivisor)
{
    /* BUGFIX: this overload was an empty TODO stub, leaving all fields
       uninitialized. Initialize identification fields like the sibling
       constructors and take the explicit layout values from the caller. */
    Name            = name;
    Format          = format;
    Location        = location;
    SemanticIndex   = 0;
    SystemValue     = SharpLLGL::SystemValue::Undefined;
    Slot            = slot;
    Offset          = offset;
    Stride          = stride;
    InstanceDivisor = instanceDivisor;
}
VertexAttribute::VertexAttribute(String^ semanticName, unsigned int semanticIndex, SharpLLGL::Format format, unsigned int location, unsigned int offset, unsigned int stride, unsigned int slot, unsigned int instanceDivisor)
{
    /* BUGFIX: this overload was an empty TODO stub, leaving all fields
       uninitialized. This is the fully explicit constructor: semantic
       name/index plus a complete caller-supplied layout. */
    Name            = semanticName;
    SemanticIndex   = semanticIndex;
    Format          = format;
    Location        = location;
    SystemValue     = SharpLLGL::SystemValue::Undefined;
    Slot            = slot;
    Offset          = offset;
    Stride          = stride;
    InstanceDivisor = instanceDivisor;
}
unsigned int VertexAttribute::Size::get()
{
    /* Only formats flagged as usable for vertex data have a meaningful
       per-element byte size; all others report zero. */
    const auto& attribs = LLGL::GetFormatAttribs(static_cast<LLGL::Format>(Format));
    if ((attribs.flags & LLGL::FormatFlags::SupportsVertex) == 0)
        return 0;
    return attribs.bitSize / 8;
};
/*
* VertexFormat class
*/
VertexFormat::VertexFormat()
{
    /* Start with an empty attribute list; entries are added via AppendAttribute. */
    Attributes = gcnew List<VertexAttribute^>();
}
void VertexFormat::AppendAttribute(VertexAttribute^ attrib)
{
    /* Place the new attribute at the current end of the vertex layout:
       after every append all attributes share the same stride, which
       equals the next free offset. */
    const int count = Attributes->Count;
    if (count > 0)
        attrib->Offset = Attributes[count - 1]->Stride;

    Attributes->Add(attrib);

    /* Recompute the common stride as the furthest end of any attribute,
       then propagate it to the whole list. */
    unsigned int newStride = 0;
    for each (VertexAttribute^ attr in Attributes)
        newStride = std::max(newStride, attr->Offset + attr->Size);
    for each (VertexAttribute^ attr in Attributes)
        attr->Stride = newStride;
}
} // /namespace SharpLLGL
// ================================================================================
| 1,400 |
568 | <reponame>Qihoo360/logkafka<filename>src/logkafka/task_conf.h
///////////////////////////////////////////////////////////////////////////
//
// logkafka - Collect logs and send lines to Apache Kafka v0.8+
//
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2015 Qihoo 360 Technology Co., Ltd. All rights reserved.
//
// Licensed under the MIT License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://opensource.org/licenses/MIT
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////
#ifndef LOGKAFKA_TASK_CONF_H_
#define LOGKAFKA_TASK_CONF_H_
#include <ostream>
#include <queue>
#include <string>
#include "base/tools.h"
#include "logkafka/config.h"
#include "easylogging/easylogging++.h"
using namespace std;
namespace logkafka {
struct LogConf {
    /* log file path, example: /usr/local/apache2/logs/access_log.%Y%m%d */
    string log_path;

    /* if false, we will discard previous messages
     * when logkafka goes up after it crash or stopped.
     * Otherwise it will send all the messages */
    bool follow_last;

    /* max lines of messages to be sent */
    int batchsize;

    /* if true, start collecting from the beginning of the file */
    bool read_from_head;

    /* character that terminates one log line */
    char line_delimiter;

    /* if true, strip the delimiter before the line is sent */
    bool remove_delimiter;

    LogConf()
    {/*{{{*/
        log_path = "";
        follow_last = true;
        batchsize = 200;
        read_from_head = true;
        line_delimiter = '\n';
        remove_delimiter = true;
    }/*}}}*/

    bool operator==(const LogConf& hs) const
    {/*{{{*/
        /* BUGFIX: read_from_head was previously omitted from the
         * comparison, so two configs differing only in that flag
         * compared equal and config changes could go undetected. */
        return (log_path == hs.log_path) &&
            (follow_last == hs.follow_last) &&
            (batchsize == hs.batchsize) &&
            (read_from_head == hs.read_from_head) &&
            (line_delimiter == hs.line_delimiter) &&
            (remove_delimiter == hs.remove_delimiter);
    };/*}}}*/

    bool operator!=(const LogConf& hs) const
    {/*{{{*/
        return !operator==(hs);
    };/*}}}*/

    friend ostream& operator << (ostream& os, const LogConf& lc)
    {/*{{{*/
        /* BUGFIX: field separators were missing, so labels and values
         * ran together in the output. */
        os << "log path: " << lc.log_path
           << ", follow last: " << lc.follow_last
           << ", batchsize: " << lc.batchsize
           << ", read from head: " << lc.read_from_head
           << ", line delimiter: " << lc.line_delimiter
           << ", remove delimiter: " << lc.remove_delimiter;
        return os;
    }/*}}}*/

    /* A config is legal iff its path pattern is legal. */
    bool isLegal()
    {/*{{{*/
        return isPathPatternLegal();
    }/*}}}*/

    /* Wildcard '*' is not supported in log paths. */
    bool isPathPatternLegal()
    {/*{{{*/
        std::size_t found = log_path.find("*");
        if (found != std::string::npos)
            return false;
        return true;
    }/*}}}*/
};
struct FilterConf {
    /* regex pattern used to filter collected lines; empty disables filtering */
    string regex_filter_pattern;

    FilterConf(): regex_filter_pattern("") {}

    bool operator==(const FilterConf& rhs) const
    {/*{{{*/
        return regex_filter_pattern == rhs.regex_filter_pattern;
    };/*}}}*/

    bool operator!=(const FilterConf& rhs) const
    {/*{{{*/
        return !(*this == rhs);
    };/*}}}*/

    friend ostream& operator << (ostream& os, const FilterConf& fc)
    {/*{{{*/
        os << "regex filter pattern: " << fc.regex_filter_pattern;
        return os;
    }/*}}}*/
};
struct KafkaTopicConf {
    /* comma-separated broker list */
    string brokers;
    /* destination topic name */
    string topic;
    /* "none", or a codec name understood by the kafka producer */
    string compression_codec;
    /* producer acknowledgement level */
    int required_acks;
    /* partitioning key; empty means unkeyed */
    string key;
    /* fixed partition; -1 lets the producer choose */
    int partition;
    /* 0 means no message timeout */
    int message_timeout_ms;

    KafkaTopicConf()
    {/*{{{*/
        brokers = "";
        topic = "";
        compression_codec = "none";
        required_acks = 1;
        key = "";
        partition = -1;
        message_timeout_ms = 0;
    }/*}}}*/

    bool operator==(const KafkaTopicConf& hs) const
    {/*{{{*/
        return (brokers == hs.brokers) &&
            (topic == hs.topic) &&
            (compression_codec == hs.compression_codec) &&
            (required_acks == hs.required_acks) &&
            (key == hs.key) &&
            (partition == hs.partition) &&
            (message_timeout_ms == hs.message_timeout_ms);
    };/*}}}*/

    bool operator!=(const KafkaTopicConf& hs) const
    {/*{{{*/
        return !operator==(hs);
    };/*}}}*/

    friend ostream& operator << (ostream& os, const KafkaTopicConf& ktc)
    {/*{{{*/
        /* BUGFIX: this operator previously printed nothing, silently
         * dropping the kafka section from any config dump. */
        os << "brokers: " << ktc.brokers
           << ", topic: " << ktc.topic
           << ", compression codec: " << ktc.compression_codec
           << ", required acks: " << ktc.required_acks
           << ", key: " << ktc.key
           << ", partition: " << ktc.partition
           << ", message timeout ms: " << ktc.message_timeout_ms;
        return os;
    }/*}}}*/

    /* No structural constraints on kafka settings at this level. */
    bool isLegal()
    {/*{{{*/
        return true;
    }/*}}}*/
};
struct TaskConf
{
    /* if false, the log file will not be collected */
    bool valid;

    LogConf log_conf;
    KafkaTopicConf kafka_topic_conf;
    FilterConf filter_conf;

    /* BUGFIX: `valid` was never initialized, so a default-constructed
     * TaskConf had an indeterminate enabled flag. */
    TaskConf(): valid(false) {}

    bool operator==(const TaskConf& hs) const
    {/*{{{*/
        return (valid == hs.valid) &&
            (log_conf == hs.log_conf) &&
            (kafka_topic_conf == hs.kafka_topic_conf) &&
            (filter_conf == hs.filter_conf);
    };/*}}}*/

    friend ostream& operator << (ostream& os, const TaskConf& tc)
    {/*{{{*/
        /* BUGFIX: field separators were missing, so labels and values
         * ran together in the output. */
        os << "valid: " << tc.valid
           << ", log conf: " << tc.log_conf
           << ", kafka topic conf: " << tc.kafka_topic_conf
           << ", filter conf: " << tc.filter_conf;
        return os;
    }/*}}}*/

    /* Legal iff both the log and kafka sections are legal;
     * filter_conf has no legality constraints. */
    bool isLegal()
    {/*{{{*/
        return log_conf.isLegal() && kafka_topic_conf.isLegal();
    }/*}}}*/
};
struct TaskStat
{
    /* true until the path queue is populated for the first time */
    bool first_update_paths;
    /* pending file paths to collect, oldest first */
    queue<string> paths;

    TaskStat(): first_update_paths(true) {}

    /* Return the oldest pending path, or "" when none is queued. */
    string getPath()
    { /*{{{*/
        if (paths.empty())
            return "";
        return paths.front();
    };/*}}}*/
};
struct Task
{
    Task(unsigned long path_queue_max_size): path_queue_max_size(path_queue_max_size) {};

    /* upper bound on the number of rotated paths kept pending */
    unsigned long path_queue_max_size;

    TaskConf conf;
    TaskStat stat;

    /* Collection always works on the oldest pending path. */
    string getPath() { return getFirstPath(); };
    string getFirstPath() { return stat.getPath(); };
    void delFirstPath() { stat.paths.pop(); };

    /* Queue a (possibly rotated) path for collection.
     * Returns false when the path duplicates the newest pending entry
     * or the queue is already full; true when it was enqueued. */
    bool addPath(string path)
    { /*{{{*/
        if (!stat.paths.empty()) {
            if (path == stat.paths.back())
                return false;
            if (stat.paths.size() >= path_queue_max_size) {
                LERROR << "Fail to add path " << path
                       << ", path queue size >= " << path_queue_max_size;
                return false;
            }
        }
        stat.paths.push(path);
        LINFO << "Add path " << path;
        return true;
    };/*}}}*/

    bool hasPath() { return !stat.paths.empty(); };
    bool getEnabled() { return conf.valid; };
    ino_t getInode() { return ::getInode(getPath().c_str()); };
};
} // namespace logkafka
#endif // LOGKAFKA_TASK_CONF_H_
| 3,086 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/lens/region_search/lens_region_search_controller.h"
#include "base/feature_list.h"
#include "base/test/metrics/histogram_tester.h"
#include "chrome/browser/lens/metrics/lens_metrics.h"
#include "chrome/browser/ui/views/frame/browser_view.h"
#include "chrome/browser/ui/views/frame/test_with_browser_view.h"
#include "components/lens/lens_features.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/image/image_skia.h"
#include "ui/gfx/image/image_unittest_util.h"
namespace lens {
class LensRegionSearchControllerTest : public TestWithBrowserView {
public:
void SetUp() override {
base::test::ScopedFeatureList features;
features.InitWithFeatures({features::kLensRegionSearch}, {});
TestWithBrowserView::SetUp();
// Create an active web contents.
AddTab(browser_view()->browser(), GURL("about:blank"));
controller_ = std::make_unique<LensRegionSearchController>(
browser_view()->GetActiveWebContents(), browser_view()->browser());
}
protected:
std::unique_ptr<LensRegionSearchController> controller_;
};
TEST_F(LensRegionSearchControllerTest, LensRegionSearchEmptyImage) {
  base::HistogramTester histogram_tester;

  // An explicitly empty image with empty bounds must be recorded as a
  // region-capture error.
  image_editor::ScreenshotCaptureResult result;
  result.screen_bounds = gfx::Rect();
  result.image = gfx::Image();
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::ERROR_CAPTURING_REGION, 1);
}
TEST_F(LensRegionSearchControllerTest, LensRegionSearchEmptyCaptureResults) {
  base::HistogramTester histogram_tester;

  // A default-constructed capture result must also be recorded as a
  // region-capture error.
  image_editor::ScreenshotCaptureResult result;
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::ERROR_CAPTURING_REGION, 1);
}
TEST_F(LensRegionSearchControllerTest, UndefinedAspectRatioTest) {
  // A region with zero height has no meaningful aspect ratio.
  EXPECT_EQ(LensRegionSearchController::GetAspectRatioFromSize(/*height=*/0,
                                                               /*width=*/100),
            LensRegionSearchAspectRatio::UNDEFINED);
}
TEST_F(LensRegionSearchControllerTest, SquareAspectRatioTest) {
  base::HistogramTester histogram_tester;

  // A 100x100 selection inside a 1000x1000 viewport is classified SQUARE.
  image_editor::ScreenshotCaptureResult result;
  result.image = gfx::test::CreateImage(/*width=*/100, /*height=*/100);
  result.screen_bounds = gfx::Rect(/*width=*/1000, /*height=*/1000);
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::SUCCESS, 1);
  histogram_tester.ExpectUniqueSample(
      kLensRegionSearchRegionAspectRatioHistogramName,
      LensRegionSearchAspectRatio::SQUARE, 1);
  histogram_tester.ExpectTotalCount(
      kLensRegionSearchRegionViewportProportionHistogramName, 1);
}
TEST_F(LensRegionSearchControllerTest, WideAspectRatioTest) {
  base::HistogramTester histogram_tester;

  // A 170x100 selection inside a 1000x1000 viewport is classified WIDE.
  image_editor::ScreenshotCaptureResult result;
  result.image = gfx::test::CreateImage(/*width=*/170, /*height=*/100);
  result.screen_bounds = gfx::Rect(/*width=*/1000, /*height=*/1000);
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::SUCCESS, 1);
  histogram_tester.ExpectUniqueSample(
      kLensRegionSearchRegionAspectRatioHistogramName,
      LensRegionSearchAspectRatio::WIDE, 1);
  histogram_tester.ExpectTotalCount(
      kLensRegionSearchRegionViewportProportionHistogramName, 1);
}
TEST_F(LensRegionSearchControllerTest, VeryWideAspectRatioTest) {
  base::HistogramTester histogram_tester;

  // A 10000x100 selection inside a 10000x10000 viewport is VERY_WIDE.
  image_editor::ScreenshotCaptureResult result;
  result.image = gfx::test::CreateImage(/*width=*/10000, /*height=*/100);
  result.screen_bounds = gfx::Rect(/*width=*/10000, /*height=*/10000);
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::SUCCESS, 1);
  histogram_tester.ExpectUniqueSample(
      kLensRegionSearchRegionAspectRatioHistogramName,
      LensRegionSearchAspectRatio::VERY_WIDE, 1);
  histogram_tester.ExpectTotalCount(
      kLensRegionSearchRegionViewportProportionHistogramName, 1);
}
TEST_F(LensRegionSearchControllerTest, TallAspectRatioTest) {
  base::HistogramTester histogram_tester;

  // A 100x170 selection inside a 1000x1000 viewport is classified TALL.
  image_editor::ScreenshotCaptureResult result;
  result.image = gfx::test::CreateImage(/*width=*/100, /*height=*/170);
  result.screen_bounds = gfx::Rect(/*width=*/1000, /*height=*/1000);
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::SUCCESS, 1);
  histogram_tester.ExpectUniqueSample(
      kLensRegionSearchRegionAspectRatioHistogramName,
      LensRegionSearchAspectRatio::TALL, 1);
  histogram_tester.ExpectTotalCount(
      kLensRegionSearchRegionViewportProportionHistogramName, 1);
}
TEST_F(LensRegionSearchControllerTest, VeryTallAspectRatioTest) {
  base::HistogramTester histogram_tester;

  // A 100x10000 selection inside a 10000x10000 viewport is VERY_TALL.
  image_editor::ScreenshotCaptureResult result;
  result.image = gfx::test::CreateImage(/*width=*/100, /*height=*/10000);
  result.screen_bounds = gfx::Rect(/*width=*/10000, /*height=*/10000);
  controller_->OnCaptureCompleted(result);

  histogram_tester.ExpectUniqueSample(
      lens::kLensRegionSearchCaptureResultHistogramName,
      lens::LensRegionSearchCaptureResult::SUCCESS, 1);
  histogram_tester.ExpectUniqueSample(
      kLensRegionSearchRegionAspectRatioHistogramName,
      LensRegionSearchAspectRatio::VERY_TALL, 1);
  histogram_tester.ExpectTotalCount(
      kLensRegionSearchRegionViewportProportionHistogramName, 1);
}
TEST_F(LensRegionSearchControllerTest, AccurateViewportProportionTest) {
  // A 100x100 region inside a 1000x1000 viewport occupies 1% of its area.
  EXPECT_EQ(LensRegionSearchController::CalculateViewportProportionFromAreas(
                /*screen_height=*/1000, /*screen_width=*/1000,
                /*image_width=*/100, /*image_height=*/100),
            1);
}
TEST_F(LensRegionSearchControllerTest, UndefinedViewportProportionTest) {
  // A zero-area viewport cannot yield a proportion; -1 signals "undefined".
  EXPECT_EQ(LensRegionSearchController::CalculateViewportProportionFromAreas(
                /*screen_height=*/0, /*screen_width=*/0,
                /*image_width=*/100, /*image_height=*/100),
            -1);
}
} // namespace lens
| 2,467 |
2,221 | <reponame>avinashk07/flume
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.sink.kudu;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.FlumeException;
import org.apache.kudu.client.Insert;
import org.apache.kudu.client.KuduTable;
import org.apache.kudu.client.Operation;
import org.apache.kudu.client.PartialRow;
import org.apache.kudu.client.Upsert;
/**
* A simple serializer that generates one {@link Insert} or {@link Upsert}
* per {@link Event} by writing the event body into a BINARY column. The pair
* (key column name, key column value) should be a header in the {@link Event};
* the column name is configurable but the column type must be STRING. Multiple
* key columns are not supported.
*
* <p><strong>Simple Keyed Kudu Operations Producer configuration parameters</strong>
*
* <table cellpadding=3 cellspacing=0 border=1
* summary="Simple Keyed Kudu Operations Producer configuration parameters">
* <tr>
* <th>Property Name</th>
* <th>Default</th>
* <th>Required?</th>
* <th>Description</th>
* </tr>
* <tr>
* <td>producer.payloadColumn</td>
* <td>payload</td>
* <td>No</td>
* <td>The name of the BINARY column to write the Flume event body to.</td>
* </tr>
* <tr>
* <td>producer.keyColumn</td>
* <td>key</td>
* <td>No</td>
* <td>The name of the STRING key column of the target Kudu table.</td>
* </tr>
* <tr>
* <td>producer.operation</td>
* <td>upsert</td>
* <td>No</td>
* <td>The operation used to write events to Kudu. Supported operations
* are 'insert' and 'upsert'</td>
* </tr>
* </table>
*/
public class SimpleKeyedKuduOperationsProducer implements KuduOperationsProducer {
  public static final String PAYLOAD_COLUMN_PROP = "payloadColumn";
  public static final String PAYLOAD_COLUMN_DEFAULT = "payload";
  public static final String KEY_COLUMN_PROP = "keyColumn";
  public static final String KEY_COLUMN_DEFAULT = "key";
  public static final String OPERATION_PROP = "operation";
  public static final String OPERATION_DEFAULT = "upsert";

  // Target table; supplied by the sink via initialize().
  private KuduTable table;
  // Name of the BINARY column that receives the event body.
  private String payloadColumn;
  // Name of the STRING key column; its value is read from the event headers.
  private String keyColumn;
  // Normalized (lower-case) operation name: "insert" or "upsert".
  private String operation = "";

  public SimpleKeyedKuduOperationsProducer(){
  }

  /**
   * Reads column names and the operation type from the agent configuration.
   * IMPROVEMENT: the operation name is now validated here, so a bad agent
   * configuration fails fast at startup instead of failing on every event.
   */
  @Override
  public void configure(Context context) {
    payloadColumn = context.getString(PAYLOAD_COLUMN_PROP, PAYLOAD_COLUMN_DEFAULT);
    keyColumn = context.getString(KEY_COLUMN_PROP, KEY_COLUMN_DEFAULT);
    operation = context.getString(OPERATION_PROP, OPERATION_DEFAULT)
        .toLowerCase(Locale.ENGLISH);
    if (!"insert".equals(operation) && !"upsert".equals(operation)) {
      throw new FlumeException(
          String.format("Unexpected operation %s", operation));
    }
  }

  @Override
  public void initialize(KuduTable table) {
    this.table = table;
  }

  /**
   * Converts one Flume event into a single keyed Kudu operation.
   *
   * @param event event whose headers must carry the key column value and
   *              whose body becomes the payload column value
   * @return a singleton list with the insert/upsert operation
   * @throws FlumeException if the key header is missing or the operation
   *                        cannot be built
   */
  @Override
  public List<Operation> getOperations(Event event) throws FlumeException {
    String key = event.getHeaders().get(keyColumn);
    if (key == null) {
      throw new FlumeException(
          String.format("No value provided for key column %s", keyColumn));
    }
    try {
      Operation op;
      switch (operation) {
        case "upsert":
          op = table.newUpsert();
          break;
        case "insert":
          op = table.newInsert();
          break;
        default:
          // Unreachable after configure()'s validation; kept as a defensive guard.
          throw new FlumeException(
              String.format("Unexpected operation %s", operation));
      }
      PartialRow row = op.getRow();
      row.addString(keyColumn, key);
      row.addBinary(payloadColumn, event.getBody());
      return Collections.singletonList(op);
    } catch (Exception e) {
      throw new FlumeException("Failed to create Kudu Operation object", e);
    }
  }

  @Override
  public void close() {
    // Nothing to release: the table/client lifecycle is owned by the sink.
  }
}
| 1,528 |
372 | <reponame>mjhopkins/google-api-java-client-services<filename>clients/google-api-services-file/v1beta1/1.27.0/com/google/api/services/file/v1beta1/model/GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.file.v1beta1.model;
/**
* Node information for custom per-node SLO implementations. SSA does not support per-node SLO, but
* producers can populate per-node information in SloMetadata for custom precomputations. SSA
* Eligibility Exporter will emit per-node metric based on this information.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Filestore API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata extends com.google.api.client.json.GenericJson {

  // NOTE: generated model class (see the file header) — prefer regenerating
  // from the API discovery document over editing by hand.

  /**
   * By default node is eligible if instance is eligible. But individual node might be excluded from
   * SLO by adding entry here. For semantic see SloMetadata.exclusions. If both instance and node
   * level exclusions are present for time period, the node level's reason will be reported by
   * Eligibility Exporter.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<GoogleCloudSaasacceleratorManagementProvidersV1SloExclusion> exclusions;

  /**
   * The id of the node. This should be equal to SaasInstanceNode.node_id.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nodeId;

  /**
   * By default node is eligible if instance is eligible. But individual node might be excluded from
   * SLO by adding entry here. For semantic see SloMetadata.exclusions. If both instance and node
   * level exclusions are present for time period, the node level's reason will be reported by
   * Eligibility Exporter.
   * @return value or {@code null} for none
   */
  public java.util.List<GoogleCloudSaasacceleratorManagementProvidersV1SloExclusion> getExclusions() {
    return exclusions;
  }

  /**
   * By default node is eligible if instance is eligible. But individual node might be excluded from
   * SLO by adding entry here. For semantic see SloMetadata.exclusions. If both instance and node
   * level exclusions are present for time period, the node level's reason will be reported by
   * Eligibility Exporter.
   * @param exclusions exclusions or {@code null} for none
   */
  public GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata setExclusions(java.util.List<GoogleCloudSaasacceleratorManagementProvidersV1SloExclusion> exclusions) {
    this.exclusions = exclusions;
    return this;
  }

  /**
   * The id of the node. This should be equal to SaasInstanceNode.node_id.
   * @return value or {@code null} for none
   */
  public java.lang.String getNodeId() {
    return nodeId;
  }

  /**
   * The id of the node. This should be equal to SaasInstanceNode.node_id.
   * @param nodeId nodeId or {@code null} for none
   */
  public GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata setNodeId(java.lang.String nodeId) {
    this.nodeId = nodeId;
    return this;
  }

  // Covariant overrides so fluent chains keep the concrete model type.
  @Override
  public GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata set(String fieldName, Object value) {
    return (GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata) super.set(fieldName, value);
  }

  @Override
  public GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata clone() {
    return (GoogleCloudSaasacceleratorManagementProvidersV1NodeSloMetadata) super.clone();
  }

}
| 1,361 |
1,844 | <gh_stars>1000+
/*
* Copyright 2010-2013 Ning, Inc.
* Copyright 2014-2018 Groupon, Inc
* Copyright 2014-2018 The Billing Project, LLC
*
* The Billing Project licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.beatrix.integration.overdue;
import java.math.BigDecimal;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.killbill.billing.ErrorCode;
import org.killbill.billing.api.FlakyRetryAnalyzer;
import org.killbill.billing.api.TestApiListener.NextEvent;
import org.killbill.billing.beatrix.integration.BeatrixIntegrationModule;
import org.killbill.billing.beatrix.util.InvoiceChecker.ExpectedInvoiceItemCheck;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.PlanPhaseSpecifier;
import org.killbill.billing.catalog.api.PriceListSet;
import org.killbill.billing.catalog.api.ProductCategory;
import org.killbill.billing.entitlement.api.BlockingApiException;
import org.killbill.billing.entitlement.api.DefaultEntitlement;
import org.killbill.billing.entitlement.api.DefaultEntitlementSpecifier;
import org.killbill.billing.entitlement.api.Entitlement;
import org.killbill.billing.entitlement.api.EntitlementApiException;
import org.killbill.billing.invoice.api.Invoice;
import org.killbill.billing.invoice.api.InvoiceItem;
import org.killbill.billing.invoice.api.InvoiceItemType;
import org.killbill.billing.invoice.api.InvoicePayment;
import org.killbill.billing.invoice.model.CreditAdjInvoiceItem;
import org.killbill.billing.invoice.model.ExternalChargeInvoiceItem;
import org.killbill.billing.overdue.config.DefaultOverdueConfig;
import org.killbill.billing.overdue.wrapper.OverdueWrapper;
import org.killbill.billing.payment.api.Payment;
import org.killbill.billing.payment.api.PluginProperty;
import org.killbill.xmlloader.XMLLoader;
import org.testng.Assert;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.io.Resources;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
// For all the tests, we set the the property org.killbill.payment.retry.days=8,8,8,8,8,8,8,8 so that Payment retry logic does not end with an ABORTED state
// preventing final instant payment to succeed.
//
// The tests are difficult to follow because there are actually two tracks of retry in logic:
// - The payment retries
// - The overdue notifications
//
// Flaky, see https://github.com/killbill/killbill/issues/782
public class TestOverdueIntegration extends TestOverdueBase {
    @Override
    public String getOverdueConfig() {
        // Overdue state ladder keyed off the age of the earliest unpaid invoice
        // (states are listed most-severe first):
        //   OD1 at 30+ days: blocks plan changes, entitlement stays enabled;
        //   OD2 at 40+ days: blocks changes AND disables the entitlement;
        //   OD3 at 50+ days: same blocking as OD2, with no auto re-evaluation
        //                    interval of its own.
        final String configXml = "<overdueConfig>" +
                                 "   <accountOverdueStates>" +
                                 "       <initialReevaluationInterval>" +
                                 "           <unit>DAYS</unit><number>30</number>" +
                                 "       </initialReevaluationInterval>" +
                                 "       <state name=\"OD3\">" +
                                 "           <condition>" +
                                 "               <timeSinceEarliestUnpaidInvoiceEqualsOrExceeds>" +
                                 "                   <unit>DAYS</unit><number>50</number>" +
                                 "               </timeSinceEarliestUnpaidInvoiceEqualsOrExceeds>" +
                                 "           </condition>" +
                                 "           <externalMessage>Reached OD3</externalMessage>" +
                                 "           <blockChanges>true</blockChanges>" +
                                 "           <disableEntitlementAndChangesBlocked>true</disableEntitlementAndChangesBlocked>" +
                                 "       </state>" +
                                 "       <state name=\"OD2\">" +
                                 "           <condition>" +
                                 "               <timeSinceEarliestUnpaidInvoiceEqualsOrExceeds>" +
                                 "                   <unit>DAYS</unit><number>40</number>" +
                                 "               </timeSinceEarliestUnpaidInvoiceEqualsOrExceeds>" +
                                 "           </condition>" +
                                 "           <externalMessage>Reached OD2</externalMessage>" +
                                 "           <blockChanges>true</blockChanges>" +
                                 "           <disableEntitlementAndChangesBlocked>true</disableEntitlementAndChangesBlocked>" +
                                 "           <autoReevaluationInterval>" +
                                 "               <unit>DAYS</unit><number>10</number>" +
                                 "           </autoReevaluationInterval>" +
                                 "       </state>" +
                                 "       <state name=\"OD1\">" +
                                 "           <condition>" +
                                 "               <timeSinceEarliestUnpaidInvoiceEqualsOrExceeds>" +
                                 "                   <unit>DAYS</unit><number>30</number>" +
                                 "               </timeSinceEarliestUnpaidInvoiceEqualsOrExceeds>" +
                                 "           </condition>" +
                                 "           <externalMessage>Reached OD1</externalMessage>" +
                                 "           <blockChanges>true</blockChanges>" +
                                 "           <disableEntitlementAndChangesBlocked>false</disableEntitlementAndChangesBlocked>" +
                                 "           <autoReevaluationInterval>" +
                                 "               <unit>DAYS</unit><number>10</number>" +
                                 "           </autoReevaluationInterval>" +
                                 "       </state>" +
                                 "   </accountOverdueStates>" +
                                 "</overdueConfig>";
        return configXml;
    }
    @Test(groups = "slow", description = "Test overdue stages and return to clear prior to CTD", retryAnalyzer = FlakyRetryAnalyzer.class)
    public void testOverdueStages1() throws Exception {
        // Scenario: all payments fail, so the account walks the full overdue
        // ladder (CLEAR -> OD1 -> OD2 -> OD3); payments are then re-enabled and
        // all unpaid invoices are paid, clearing the overdue state before CTD.
        // Payment retries happen every 8 days (see the class-level note about
        // org.killbill.payment.retry.days=8,8,...).
        // 2012-05-01T00:03:42.000Z
        clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
        setupAccount();

        // Set next invoice to fail and create subscription
        paymentPlugin.makeAllInvoicesFailWithError(true);
        final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
        bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);

        invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
        invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);

        // 2012-05-31 => DAY 30 have to get out of trial {I0, P0}
        addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);

        invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
        invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);

        // Three 8-day payment retries; still fewer than 30 days unpaid, so the
        // account stays clear.
        // 2012-06-08 => Retry P0
        addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState(OverdueWrapper.CLEAR_STATE_NAME);

        // 2012-06-16 => Retry P0
        addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState(OverdueWrapper.CLEAR_STATE_NAME);

        // 2012-06-24 => Retry P0
        addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState(OverdueWrapper.CLEAR_STATE_NAME);

        // 30 days since the earliest unpaid invoice => OD1 (blocks changes,
        // entitlement remains active per the config).
        // 2012-06-30 => P1
        addDaysAndCheckForCompletion(6, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState("OD1");
        checkChangePlanWithOverdueState(baseEntitlement, true, true);
        invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
        invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);

        // 2012-07-02 => Retry P0
        addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState("OD1");

        // 2012-07-08 => Retry P1
        addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState("OD1");

        // 2012-07-10 => Retry P0
        //
        // This is the first stage that will block the billing (and entitlement).
        //
        addDaysAndCheckForCompletion(2, NextEvent.BLOCK, NextEvent.TAG, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState("OD2");

        // 2012-07-16 => Retry P1
        addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState("OD2");

        // 2012-07-18 => Retry P0
        addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
        checkODState("OD2");

        // 50 days unpaid => terminal overdue stage OD3.
        // 2012-07-20
        addDaysAndCheckForCompletion(2, NextEvent.BLOCK);
        checkODState("OD3");

        // Re-enable payments and pay everything: overdue clears, and the
        // blocked period (07-10 -> 07-20) is repaired with a credit.
        allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(false);

        invoiceChecker.checkInvoice(account.getId(), 3, callContext,
                                    new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
        invoiceChecker.checkInvoice(account.getId(), 4, callContext,
                                    new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 20), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-80.63")),
                                    new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("80.63")));

        // Add 11 days to generate next invoice. We verify that we indeed have a notification for nextBillingDate
        addDaysAndCheckForCompletion(11, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);

        invoiceChecker.checkInvoice(account.getId(), 5, callContext,
                                    new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2012, 8, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")),
                                    new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2012, 7, 31), InvoiceItemType.CBA_ADJ, new BigDecimal("-80.63")));
        invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 8, 31), callContext);

        // Verify the account balance is now 0
        assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.ZERO), 0);
    }
@Test(groups = "slow", description = "Test overdue stages and return to clear on CTD", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStages2() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial {I0, P0}
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// 2012-06-08 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-16 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-24 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-30 => P1
addDaysAndCheckForCompletion(6, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// 2012-07-02 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-08 => Retry P1
addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-10 => Retry P0
//
// This is the first stage that will block the billing (and entitlement).
//
addDaysAndCheckForCompletion(2, NextEvent.BLOCK, NextEvent.TAG, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-16 => Retry P1
addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-18 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-20
addDaysAndCheckForCompletion(2, NextEvent.BLOCK);
checkODState("OD3");
// 2012-07-24 => Retry P1
addDaysAndCheckForCompletion(4, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// 2012-07-26 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// 2012-07-31 => No NEW INVOICE because OD2 -> still blocked
addDaysAndCheckForCompletion(5);
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Make sure the 'invoice-service:next-billing-date-queue' gets processed before we continue and since we are in AUTO_INVOICING_OFF
// no event (NULL_INVOICE) will be generated and so we can't synchronize on any event, and we need to add a small amount of sleep
Thread.sleep(1000);
allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(true);
invoiceChecker.checkInvoice(account.getId(), 3, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
// New invoice for the partial period
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2012, 8, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 31), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-169.32")));
// Move one month ahead, and check if we get the next invoice
addDaysAndCheckForCompletion(31, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 5, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 8, 31), new LocalDate(2012, 9, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
// Verify the account balance is now 0
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.ZERO), 0);
}
@Test(groups = "slow", description = "Test overdue stages and return to clear after CTD", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStages3() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial {I0, P0}
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// 2012-06-08 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-16 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-24 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-30
addDaysAndCheckForCompletion(6, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// 2012-07-02 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-08 => Retry P1
addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-10 => Retry P0
//
// This is the first stage that will block the billing (and entitlement).
//
addDaysAndCheckForCompletion(2, NextEvent.BLOCK, NextEvent.TAG, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-16 => Retry P1
addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-18 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-20
addDaysAndCheckForCompletion(2, NextEvent.BLOCK);
checkODState("OD3");
// 2012-07-24 => Retry P1
addDaysAndCheckForCompletion(4, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// 2012-07-26 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// 2012-07-31 => No NEW INVOICE because OD2 -> still blocked
addDaysAndCheckForCompletion(5);
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// 2012-08-01 => Retry P1
addDaysAndCheckForCompletion(1, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(true);
invoiceChecker.checkInvoice(account.getId(), 3, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
// New invoice for the partial period
new ExpectedInvoiceItemCheck(new LocalDate(2012, 8, 1), new LocalDate(2012, 8, 31), InvoiceItemType.RECURRING, new BigDecimal("241.89")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 31), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-169.32")));
// Move one month ahead, and check if we get the next invoice
addDaysAndCheckForCompletion(30, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 5, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 8, 31), new LocalDate(2012, 9, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
// Verify the account balance is now 0
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.ZERO), 0);
}
//
// This test is similar to the previous one, except that instead of moving the clock at the end
// to verify that we get the next invoice, we carry out an immediate change of plan.
//
@Test(groups = "slow", description = "Test overdue stages and follow with an immediate change of plan", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStagesFollowedWithImmediateChange1() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial {I0, P0}
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// 2012-06-08 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-16 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-24 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-30
addDaysAndCheckForCompletion(6, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// 2012-07-02 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-08 => Retry P1
addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-10 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.BLOCK, NextEvent.TAG, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-16 => Retry P1
addDaysAndCheckForCompletion(6, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-18 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-20
addDaysAndCheckForCompletion(2, NextEvent.BLOCK);
checkODState("OD3");
allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(false);
invoiceChecker.checkInvoice(account.getId(), 3, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 20), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-80.63")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("80.63")));
// Do an upgrade now
checkChangePlanWithOverdueState(baseEntitlement, false, true);
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 20), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-80.63")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("80.63")));
invoiceChecker.checkInvoice(account.getId(), 5, callContext,
// Item for the upgraded recurring plan
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("212.89")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 31), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-88.69")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("-80.63")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Verify the account balance is now 0
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.ZERO), 0);
}
@Test(groups = "slow", description = "Test overdue stages and follow with an immediate change of plan and use of credit", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStagesFollowedWithImmediateChange2() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, BillingPeriod.ANNUAL, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial {I0, P0}
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2013, 5, 31), InvoiceItemType.RECURRING, new BigDecimal("2399.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2013, 5, 31), callContext);
// 2012-06-08 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-16 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-24 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-30 => OD1
addDaysAndCheckForCompletion(6, NextEvent.BLOCK);
checkODState("OD1");
// 2012-07-02 => Retry P0
addDaysAndCheckForCompletion(2, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
// 2012-07-10 => Retry P0 & transition to OD2
addDaysAndCheckForCompletion(8, NextEvent.BLOCK, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.TAG);
checkODState("OD2");
// 2012-07-18 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD2");
// 2012-07-20 => OD3
addDaysAndCheckForCompletion(2, NextEvent.BLOCK);
checkODState("OD3");
allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(false);
invoiceChecker.checkInvoice(account.getId(), 2, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2013, 5, 31), InvoiceItemType.RECURRING, new BigDecimal("2399.95")));
invoiceChecker.checkInvoice(account.getId(), 3, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 20), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-65.75")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2013, 5, 31), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-1998.86")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("2064.61")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2013, 5, 31), callContext);
// Move to 2012-07-31 and make a change of plan
addDaysAndCheckForCompletion(11);
checkChangePlanWithOverdueState(baseEntitlement, false, false);
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2012, 8, 31), InvoiceItemType.RECURRING, new BigDecimal("599.95")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2012, 7, 31), InvoiceItemType.CBA_ADJ, new BigDecimal("-599.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 8, 31), callContext);
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(new BigDecimal("-1464.66")), 0);
}
@Test(groups = "slow", description = "Test overdue stages with missing payment method", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStateIfNoPaymentMethod() throws Exception {
// This test is similar to the previous one - but there is no default payment method on the account, so there
// won't be any payment retry
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Make sure the account doesn't have any payment method
accountInternalApi.removePaymentMethod(account.getId(), internalCallContext);
// Create subscription
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial before first payment. An invoice payment error, one for each invoice, should be on the bus (because there is no payment method)
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-15 => DAY 45 - 15 days after invoice
addDaysAndCheckForCompletion(15);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-30 => DAY 65 - 30 days after invoice
addDaysAndCheckForCompletion(15, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Now we should be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// 2012-07-07 => DAY 67 - 37 days after invoice
addDaysAndCheckForCompletion(2);
// Should still be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// 2012-07-10 => DAY 75 - 40 days after invoice
addDaysAndCheckForCompletion(8, NextEvent.BLOCK, NextEvent.TAG);
// Should now be in OD2
checkODState("OD2");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// 2012-07-20 => DAY 85 - 50 days after invoice
addDaysAndCheckForCompletion(10, NextEvent.BLOCK);
// Should now be in OD3
checkODState("OD3");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// Add a payment method and set it as default
paymentApi.addPaymentMethod(account, UUID.randomUUID().toString(), BeatrixIntegrationModule.NON_OSGI_PLUGIN_NAME, true, paymentMethodPlugin, PLUGIN_PROPERTIES, callContext);
allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(false);
invoiceChecker.checkInvoice(account.getId(), 3, callContext,
// Item for the upgraded recurring plan
new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
// Item for the blocked period
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 20), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-80.63")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("80.63")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
checkChangePlanWithOverdueState(baseEntitlement, false, true);
invoiceChecker.checkInvoice(account.getId(), 4, callContext,
// Item for the blocked period
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 10), new LocalDate(2012, 7, 20), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-80.63")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("80.63")));
invoiceChecker.checkInvoice(account.getId(), 5, callContext,
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("212.89")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 31), InvoiceItemType.REPAIR_ADJ, new BigDecimal("-88.69")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 20), new LocalDate(2012, 7, 20), InvoiceItemType.CBA_ADJ, new BigDecimal("-80.63")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.ZERO), 0);
}
@Test(groups = "slow", description = "Test overdue for draft external charge", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testShouldNotBeInOverdueAfterDraftExternalCharge() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Create a subscription without failing payments
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-06 => Create an external charge on a new invoice
addDaysAndCheckForCompletion(5);
final InvoiceItem externalCharge = new ExternalChargeInvoiceItem(null, account.getId(), bundle.getId(), "For overdue", new LocalDate(2012, 5, 6), new LocalDate(2012, 6, 6), BigDecimal.TEN, Currency.USD, null);
invoiceUserApi.insertExternalCharges(account.getId(), clock.getUTCToday(), ImmutableList.<InvoiceItem>of(externalCharge), false, null, callContext).get(0);
assertListenerStatus();
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 6), new LocalDate(2012, 6, 6), InvoiceItemType.EXTERNAL_CHARGE, BigDecimal.TEN));
// 2012-05-31 => DAY 30 have to get out of trial before first payment
addDaysAndCheckForCompletion(25, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// Should still be in clear state - the invoice for the bundle has been paid, but not the invoice with the external charge (because it is in draft mode)
// We refresh overdue just to be safe, see below
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-06 => Past 30 days since the external charge
addDaysAndCheckForCompletion(6);
// We should still be clear
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
Assert.assertEquals(invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).size(), 0);
}
@Test(groups = "slow", description = "Test overdue after refund with no adjustment", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testShouldBeInOverdueAfterRefundWithoutAdjustment() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Create subscription and don't fail payments
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial before first payment
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-15 => DAY 45 - 15 days after invoice
addDaysAndCheckForCompletion(15);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-07-05 => DAY 65 - 35 days after invoice
addDaysAndCheckForCompletion(20, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// Now, refund the second (first non-zero dollar) invoice
final Payment payment = paymentApi.getPayment(invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext).get(1).getPayments().get(0).getPaymentId(), false, false, PLUGIN_PROPERTIES, callContext);
refundPaymentAndCheckForCompletion(account, payment, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
// We should now be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
}
@Test(groups = "slow", description = "Test overdue after chargeback", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testShouldBeInOverdueAfterChargeback() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Create subscription and don't fail payments
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial before first payment
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-15 => DAY 45 - 15 days after invoice
addDaysAndCheckForCompletion(15);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-07-05 => DAY 65 - 35 days after invoice
addDaysAndCheckForCompletion(20, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// Now, create a chargeback for the second (first non-zero dollar) invoice
final InvoicePayment invoicePayment = invoicePaymentApi.getInvoicePayments(invoiceUserApi.getInvoicesByAccount(account.getId(), false, false, callContext).get(1).getPayments().get(0).getPaymentId(), callContext).get(0);
Payment payment = paymentApi.getPayment(invoicePayment.getPaymentId(), false, false, ImmutableList.<PluginProperty>of(), callContext);
payment = createChargeBackAndCheckForCompletion(account, payment, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
// We should now be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// Reverse the chargeback
createChargeBackReversalAndCheckForCompletion(account, payment, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.BLOCK);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
}
@Test(groups = "slow", description = "Test overdue clear after external payment", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStateShouldClearAfterExternalPayment() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial before first payment
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-15 => DAY 45 - 15 days after invoice
addDaysAndCheckForCompletion(15, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-07-05 => DAY 65 - 35 days after invoice
addDaysAndCheckForCompletion(20, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Now we should be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// We have two unpaid non-zero dollar invoices at this point
// Pay the first one via an external payment - we should then be 5 days apart from the second invoice
// (which is the earliest unpaid one) and hence come back to a clear state (see configuration)
paymentPlugin.makeAllInvoicesFailWithError(false);
final Invoice firstNonZeroInvoice = invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).iterator().next();
createExternalPaymentAndCheckForCompletion(account, firstNonZeroInvoice, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
// We should be clear now
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
}
@Test(groups = "slow", description = "Test overdue clear after item adjustment", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStateShouldClearAfterCreditOrInvoiceItemAdjustment() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// DAY 30 have to get out of trial before first payment
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// DAY 45 - 15 days after invoice
addDaysAndCheckForCompletion(15, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// DAY 65 - 35 days after invoice
addDaysAndCheckForCompletion(20, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// Now we should be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
// We have two unpaid non-zero dollar invoices at this point
// Adjust the first (and only) item of the first invoice - we should then be 5 days apart from the second invoice
// (which is the earliest unpaid one) and hence come back to a clear state (see configuration)
final Invoice firstNonZeroInvoice = invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).iterator().next();
fullyAdjustInvoiceItemAndCheckForCompletion(account, firstNonZeroInvoice, 1, NextEvent.BLOCK, NextEvent.INVOICE_ADJUSTMENT);
// We should be clear now
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
invoiceChecker.checkInvoice(account.getId(), 2,
callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")),
new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 5, 31), InvoiceItemType.ITEM_ADJ, new BigDecimal("-249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
// DAY 70 - 10 days after second invoice
addDaysAndCheckForCompletion(5);
// We should still be clear
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// DAY 80 - 20 days after second invoice
addDaysAndCheckForCompletion(10, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// We should still be clear
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// DAY 95 - 35 days after second invoice
addDaysAndCheckForCompletion(15, NextEvent.BLOCK, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// We should now be in OD1
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
invoiceChecker.checkInvoice(account.getId(), 4, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 7, 31), new LocalDate(2012, 8, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
// Fully adjust all invoices
final List<Invoice> invoicesToAdjust = getUnpaidInvoicesOrderFromRecent();
for (int i = 0; i < invoicesToAdjust.size(); i++) {
if (i == invoicesToAdjust.size() - 1) {
fullyAdjustInvoiceAndCheckForCompletion(account, invoicesToAdjust.get(i), NextEvent.BLOCK, NextEvent.INVOICE_ADJUSTMENT);
} else {
fullyAdjustInvoiceAndCheckForCompletion(account, invoicesToAdjust.get(i), NextEvent.INVOICE_ADJUSTMENT);
}
}
// We should be cleared again
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
}
@Test(groups = "slow", description = "Test overdue state with number of unpaid invoices condition", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStateWithNumberOfUnpaidInvoicesCondition() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
final DefaultOverdueConfig config = XMLLoader.getObjectFromString(Resources.getResource("org/killbill/billing/beatrix/overdueWithNumberOfUnpaidInvoicesCondition.xml").toExternalForm(), DefaultOverdueConfig.class);
overdueConfigCache.loadDefaultOverdueConfig(config);
setupAccount();
paymentPlugin.makeAllInvoicesFailWithError(true);
createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE,
term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
// 2012-05-31 => DAY 30 have to get out of trial before first payment
addMonthsAndCheckForCompletion(1, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Verify that number of unpaid invoices is 1
Assert.assertEquals(invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).size(), 1);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.BLOCK, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Verify that number of unpaid invoices is 2
Assert.assertEquals(invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).size(), 2);
// Now we should be in OD1
checkODState("OD1");
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.BLOCK, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Verify that number of unpaid invoices is 3
Assert.assertEquals(invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).size(), 3);
// Now we should be in OD2
checkODState("OD2");
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Verify that number of unpaid invoices is 4
Assert.assertEquals(invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).size(), 4);
// We should still be in OD2
checkODState("OD2");
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.TAG, NextEvent.BLOCK);
// Verify that number of unpaid invoices is 5
Assert.assertEquals(invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext).size(), 5);
// Now we should be in OD3
checkODState("OD3");
// Get all unpaid invoices and pay them to clear the overdue state
paymentPlugin.makeAllInvoicesFailWithError(false);
List<Invoice> unpaidInvoices = getUnpaidInvoicesOrderFromRecent();
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(0), NextEvent.BLOCK, NextEvent.TAG, NextEvent.NULL_INVOICE, NextEvent.NULL_INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(1), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(2), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(3), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(4), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
// We should be clear now
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
}
@Test(groups = "slow", description = "Test overdue state with total unpaid invoice balance condition", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueStateWithTotalUnpaidInvoiceBalanceCondition() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
final DefaultOverdueConfig config = XMLLoader.getObjectFromString(Resources.getResource("org/killbill/billing/beatrix/overdueWithTotalUnpaidInvoiceBalanceCondition.xml").toExternalForm(), DefaultOverdueConfig.class);
overdueConfigCache.loadDefaultOverdueConfig(config);
setupAccount();
paymentPlugin.makeAllInvoicesFailWithError(true);
createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE,
term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
// 2012-05-31 => DAY 30 have to get out of trial before first payment
addMonthsAndCheckForCompletion(1, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Amount balance should be USD 249.95
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.valueOf(249.95)), 0);
// Should still be in clear state
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.BLOCK, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Amount balance should be USD 499.90
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.valueOf(499.90)), 0);
// Now we should be in OD1
checkODState("OD1");
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.BLOCK, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Amount balance should be USD 749.85
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.valueOf(749.85)), 0);
// Now we should be in OD2
checkODState("OD2");
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
// Amount balance should be USD 999.80
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.valueOf(999.80)), 0);
// We should still be in OD2
checkODState("OD2");
// Add 1 month
addMonthsAndCheckForCompletion(1, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR,
NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR, NextEvent.TAG, NextEvent.BLOCK);
// Amount balance should be USD 1249.75
assertEquals(invoiceUserApi.getAccountBalance(account.getId(), callContext).compareTo(BigDecimal.valueOf(1249.75)), 0);
// Now we should be in OD3
checkODState("OD3");
// Get all unpaid invoices and pay them to clear the overdue state
paymentPlugin.makeAllInvoicesFailWithError(false);
List<Invoice> unpaidInvoices = getUnpaidInvoicesOrderFromRecent();
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(0), NextEvent.BLOCK, NextEvent.TAG, NextEvent.NULL_INVOICE, NextEvent.NULL_INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(1), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(2), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(3), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.BLOCK);
createPaymentAndCheckForCompletion(account, unpaidInvoices.get(4), NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
// We should be clear now
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
}
@Test(groups = "slow", description = "Test clearing balance with credit also clears overdue state", retryAnalyzer = FlakyRetryAnalyzer.class)
public void testOverdueClearWithCredit() throws Exception {
// 2012-05-01T00:03:42.000Z
clock.setTime(new DateTime(2012, 5, 1, 0, 3, 42, 0));
setupAccount();
// Set next invoice to fail and create subscription
paymentPlugin.makeAllInvoicesFailWithError(true);
final DefaultEntitlement baseEntitlement = createBaseEntitlementAndCheckForCompletion(account.getId(), "externalKey", productName, ProductCategory.BASE, term, NextEvent.CREATE, NextEvent.BLOCK, NextEvent.INVOICE);
bundle = subscriptionApi.getSubscriptionBundle(baseEntitlement.getBundleId(), callContext);
invoiceChecker.checkInvoice(account.getId(), 1, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 1), null, InvoiceItemType.FIXED, new BigDecimal("0")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 5, 1), callContext);
// 2012-05-31 => DAY 30 have to get out of trial {I0, P0}
addDaysAndCheckForCompletion(30, NextEvent.PHASE, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
invoiceChecker.checkInvoice(account.getId(), 2, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 5, 31), new LocalDate(2012, 6, 30), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 6, 30), callContext);
// 2012-06-08 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-16 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-24 => Retry P0
addDaysAndCheckForCompletion(8, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
// 2012-06-30 => P1
addDaysAndCheckForCompletion(6, NextEvent.BLOCK, NextEvent.INVOICE, NextEvent.PAYMENT_ERROR, NextEvent.INVOICE_PAYMENT_ERROR);
checkODState("OD1");
checkChangePlanWithOverdueState(baseEntitlement, true, true);
invoiceChecker.checkInvoice(account.getId(), 3, callContext, new ExpectedInvoiceItemCheck(new LocalDate(2012, 6, 30), new LocalDate(2012, 7, 31), InvoiceItemType.RECURRING, new BigDecimal("249.95")));
invoiceChecker.checkChargedThroughDate(baseEntitlement.getId(), new LocalDate(2012, 7, 31), callContext);
final BigDecimal accountBalance = invoiceUserApi.getAccountBalance(account.getId(), callContext);
// We rebalance CBA on the last 2 unpaid invoices and therefore we expect 2 INVOICE_ADJUSTMENT events
busHandler.pushExpectedEvents(NextEvent.INVOICE, NextEvent.INVOICE_ADJUSTMENT, NextEvent.INVOICE_ADJUSTMENT, NextEvent.BLOCK);
final InvoiceItem inputCredit = new CreditAdjInvoiceItem(null, account.getId(), new LocalDate(2012, 06, 30), "credit invoice", accountBalance, account.getCurrency(), null);
invoiceUserApi.insertCredits(account.getId(), new LocalDate(2012, 06, 30), ImmutableList.of(inputCredit), true, null, callContext);
assertListenerStatus();
}
/**
 * Re-enables payments in the mock plugin, then pays every unpaid invoice, most recent
 * first (see {@link #getUnpaidInvoicesOrderFromRecent}), and finally verifies the
 * account is back in the clear overdue state.
 *
 * @param extraPayment whether paying the last (oldest) invoice is expected to trigger
 *                     an additional payment for the invoice generated on unblocking
 */
private void allowPaymentsAndResetOverdueToClearByPayingAllUnpaidInvoices(final boolean extraPayment) {
// Reset plugin so payments should now succeed
paymentPlugin.makeAllInvoicesFailWithError(false);
//
// We now pay all unpaid invoices.
//
// Upon paying the last invoice, the overdue system will clear the state and notify invoice that it should re-generate a new invoice
// for the part that was unblocked, which explains why on the last payment we expect an additional invoice (and payment if needed).
//
final List<Invoice> sortedInvoices = getUnpaidInvoicesOrderFromRecent();
// NOTE(review): the countdown starts at the full list size but is only decremented for
// positive-balance invoices, so the "last invoice" branch assumes every listed unpaid
// invoice has a positive balance — confirm if zero-balance invoices can appear here.
int remainingUnpaidInvoices = sortedInvoices.size();
for (final Invoice invoice : sortedInvoices) {
if (invoice.getBalance().compareTo(BigDecimal.ZERO) > 0) {
remainingUnpaidInvoices--;
if (remainingUnpaidInvoices > 0) {
createPaymentAndCheckForCompletion(account, invoice, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
} else {
// Last payment: overdue clears (BLOCK/TAG) and a catch-up invoice is generated
if (extraPayment) {
createPaymentAndCheckForCompletion(account, invoice, NextEvent.BLOCK, NextEvent.TAG, NextEvent.NULL_INVOICE, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
} else {
createPaymentAndCheckForCompletion(account, invoice, NextEvent.BLOCK, NextEvent.TAG, NextEvent.NULL_INVOICE, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
}
}
}
}
checkODState(OverdueWrapper.CLEAR_STATE_NAME);
}
/**
 * Returns the account's unpaid invoices sorted by invoice date, most recent first.
 * Paying in this order keeps the overdue state stable until the oldest (last)
 * invoice is finally paid.
 */
private List<Invoice> getUnpaidInvoicesOrderFromRecent() {
    final Collection<Invoice> unpaid = invoiceUserApi.getUnpaidInvoicesByAccountId(account.getId(), null, clock.getUTCToday(), callContext);
    final List<Invoice> byMostRecent = new LinkedList<Invoice>(unpaid);
    // Descending invoice-date order: compare right against left.
    final Comparator<Invoice> mostRecentFirst = new Comparator<Invoice>() {
        @Override
        public int compare(final Invoice left, final Invoice right) {
            return right.getInvoiceDate().compareTo(left.getInvoiceDate());
        }
    };
    Collections.sort(byMostRecent, mostRecentFirst);
    return byMostRecent;
}
/**
 * Attempts a plan change on the given entitlement and asserts the outcome.
 *
 * @param entitlement     the entitlement to change
 * @param shouldFail      when true, the change is expected to be rejected because the
 *                        account is overdue (blocked) or the subscription is inactive
 * @param expectedPayment when the change succeeds, whether a payment event is expected
 *                        alongside the new invoice
 */
private void checkChangePlanWithOverdueState(final Entitlement entitlement, final boolean shouldFail, final boolean expectedPayment) {
    if (shouldFail) {
        // Overdue/blocked: the change must raise, either via the blocking layer or
        // because the subscription is no longer active.
        try {
            final PlanPhaseSpecifier spec = new PlanPhaseSpecifier("Pistol", term, PriceListSet.DEFAULT_PRICELIST_NAME);
            entitlement.changePlan(new DefaultEntitlementSpecifier(spec), ImmutableList.<PluginProperty>of(), callContext);
        } catch (EntitlementApiException e) {
            assertTrue(e.getCause() instanceof BlockingApiException || e.getCode() == ErrorCode.SUB_CHANGE_NON_ACTIVE.getCode(),
                       String.format("Cause is %s, message is %s", e.getCause(), e.getMessage()));
        }
        return;
    }
    // Upgrade - we don't expect a payment here due to the scenario (the account will have enough CBA)
    if (expectedPayment) {
        changeEntitlementAndCheckForCompletion(entitlement, "Assault-Rifle", BillingPeriod.MONTHLY, null, NextEvent.CHANGE, NextEvent.INVOICE, NextEvent.PAYMENT, NextEvent.INVOICE_PAYMENT);
    } else {
        changeEntitlementAndCheckForCompletion(entitlement, "Assault-Rifle", BillingPeriod.MONTHLY, null, NextEvent.CHANGE, NextEvent.INVOICE);
    }
}
}
| 31,078 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include <vespamalloc/malloc/globalpool.hpp>
#include <vespamalloc/malloc/memblockboundscheck_d.h>
namespace vespamalloc {

// Explicit template instantiation: compiles the allocation-pool template for the
// bounds-checking memory-block type into this translation unit.
template class AllocPoolT<MemBlockBoundsCheck>;

}
| 93 |
5,267 | package com.my.sorted.resources;
import io.swagger.v3.jaxrs2.resources.model.Pet;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import java.util.ArrayList;
@Path("/sorted")
public class SortedThing {
@Operation(operationId = "foo")
@GET
@Path("/pet")
public Pet foo() {
return null;
}
@Operation(operationId = "bar")
@GET
@Path("/pet")
public Pet bar() {
return null;
}
}
| 231 |
556 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.plc4x.simulator.server.s7;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import org.apache.plc4x.java.s7.readwrite.S7Driver;
import org.apache.plc4x.java.s7.readwrite.TPKTPacket;
import org.apache.plc4x.java.s7.readwrite.io.TPKTPacketIO;
import org.apache.plc4x.java.spi.configuration.Configuration;
import org.apache.plc4x.java.spi.connection.GeneratedProtocolMessageCodec;
import org.apache.plc4x.java.spi.connection.SingleProtocolStackConfigurer;
import org.apache.plc4x.simulator.exceptions.SimulatorExcepiton;
import org.apache.plc4x.simulator.model.Context;
import org.apache.plc4x.simulator.server.ServerModule;
import org.apache.plc4x.simulator.server.s7.protocol.S7Step7ServerAdapter;
import static org.apache.plc4x.java.spi.configuration.ConfigurationFactory.configure;
/**
 * Simulator server module that listens on the standard ISO-on-TCP port and speaks the
 * S7/STEP7 protocol via a Netty pipeline (TPKT codec + {@link S7Step7ServerAdapter}).
 */
public class S7ServerModule implements ServerModule {

    /** Standard ISO-on-TCP port used by S7 PLCs. */
    private static final int ISO_ON_TCP_PORT = 102;

    // Netty boss group (accepts connections) and worker group (handles channels).
    private EventLoopGroup loopGroup;
    private EventLoopGroup workerGroup;
    // Shared simulator state, handed to each per-connection protocol adapter.
    private Context context;

    @Override
    public String getName() {
        return "S7-STEP7";
    }

    @Override
    public void setContext(Context context) {
        this.context = context;
    }

    /**
     * Starts the Netty server and binds it to {@link #ISO_ON_TCP_PORT}.
     * Idempotent: a second call while running is a no-op.
     *
     * @throws SimulatorExcepiton if the bind is interrupted
     */
    @Override
    public void start() throws SimulatorExcepiton {
        // Already started: nothing to do.
        if (loopGroup != null) {
            return;
        }
        // NOTE: the original code created an unused local `new S7Driver()` here; it was
        // never referenced (only the static inner classes below are used) and has been
        // removed as a dead allocation.
        try {
            loopGroup = new NioEventLoopGroup();
            workerGroup = new NioEventLoopGroup();
            ServerBootstrap bootstrap = new ServerBootstrap();
            bootstrap.group(loopGroup, workerGroup)
                .channel(NioServerSocketChannel.class)
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    public void initChannel(SocketChannel channel) {
                        ChannelPipeline pipeline = channel.pipeline();
                        // Decode/encode TPKT frames, using the driver's length estimator
                        // and corrupt-package cleaner for resynchronization.
                        pipeline.addLast(new GeneratedProtocolMessageCodec<>(TPKTPacket.class, new TPKTPacketIO(), true, null,
                            new S7Driver.ByteLengthEstimator(),
                            new S7Driver.CorruptPackageCleaner()));
                        pipeline.addLast(new S7Step7ServerAdapter(context));
                    }
                }).option(ChannelOption.SO_BACKLOG, 128)
                .childOption(ChannelOption.SO_KEEPALIVE, true);
            bootstrap.bind(ISO_ON_TCP_PORT).sync();
        } catch (InterruptedException e) {
            // Preserve the interrupt flag before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new SimulatorExcepiton(e);
        }
    }

    /**
     * Shuts down both event-loop groups. Safe to call when never started.
     * NOTE(review): the groups are not reset to null, so start() after stop() is a
     * no-op — confirm whether restart is ever required for this module.
     */
    @Override
    public void stop() {
        if (workerGroup == null) {
            return;
        }
        workerGroup.shutdownGracefully();
        loopGroup.shutdownGracefully();
    }
}
| 1,511 |
331 | <filename>src/main/java/org/yx/http/start/JettyHandlerSupplier.java
/**
* Copyright (C) 2016 - 2030 youtongluan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yx.http.start;
import java.util.Objects;
import java.util.function.Supplier;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.server.session.SessionHandler;
import org.yx.conf.AppInfo;
/**
 * Pluggable factories for the Jetty handlers used by the embedded web server.
 * Each supplier has a sensible default and can be overridden before server start.
 */
public class JettyHandlerSupplier {

    private static Supplier<GzipHandler> gzipHandlerSupplier = () -> {
        GzipHandler h = new GzipHandler();
        // Jetty does not gzip POST responses by default; include them explicitly.
        h.addIncludedMethods("POST");
        h.setMinGzipSize(AppInfo.getInt("sumk.webserver.gzip.minsize", 1000));
        return h;
    };

    private static Supplier<ResourceHandler> resourceHandlerSupplier = () -> {
        ResourceHandler handler = new ResourceHandler();
        String welcomes = AppInfo.get("sumk.webserver.resource.welcomes");
        if (welcomes != null && welcomes.length() > 0) {
            // Normalize full-width commas ('，', U+FF0C) to ASCII commas before splitting.
            // BUGFIX: the original called welcomes.replace(',', ',') — a no-op replacing a
            // comma with itself, almost certainly a mangled '，' literal.
            handler.setWelcomeFiles(welcomes.replace('\uFF0C', ',').split(","));
        }
        return handler;
    };

    private static Supplier<SessionHandler> sessionHandlerSupplier = SessionHandler::new;

    public static Supplier<GzipHandler> gzipHandlerSupplier() {
        return gzipHandlerSupplier;
    }

    /** Overrides the gzip handler factory; rejects null. */
    public static void setGzipHandlerSupplier(Supplier<GzipHandler> h) {
        JettyHandlerSupplier.gzipHandlerSupplier = Objects.requireNonNull(h);
    }

    public static Supplier<ResourceHandler> resourceHandlerSupplier() {
        return resourceHandlerSupplier;
    }

    /** Overrides the static-resource handler factory; rejects null. */
    public static void setResourceHandlerSupplier(Supplier<ResourceHandler> h) {
        JettyHandlerSupplier.resourceHandlerSupplier = Objects.requireNonNull(h);
    }

    public static Supplier<SessionHandler> sessionHandlerSupplier() {
        return sessionHandlerSupplier;
    }

    /** Overrides the session handler factory; rejects null. */
    public static void setSessionHandlerSupplier(Supplier<SessionHandler> h) {
        JettyHandlerSupplier.sessionHandlerSupplier = Objects.requireNonNull(h);
    }
}
| 739 |
1,609 | package com.mossle.cms.web.sys;
import java.util.List;
import javax.annotation.Resource;
import com.mossle.api.auth.CurrentUserHolder;
import com.mossle.client.open.OpenAppDTO;
import com.mossle.client.open.OpenClient;
import com.mossle.client.open.SysDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
/**
 * CMS "sys" pages: routes the current user to one of the apps ("sys") they can see,
 * as resolved through the open-platform client.
 */
@Controller
@RequestMapping("cms/sys")
public class CmsSysController {
    private static Logger logger = LoggerFactory
            .getLogger(CmsSysController.class);

    private OpenClient openClient;
    private CurrentUserHolder currentUserHolder;

    /**
     * Entry point: redirects to the first app visible to the current user, or renders
     * the bare index page when the user has no apps.
     */
    @RequestMapping("")
    public String index(Model model) throws Exception {
        logger.debug("index");

        String userId = currentUserHolder.getUserId();
        List<OpenAppDTO> userApps = openClient.findUserApps(userId);
        if (userApps.isEmpty()) {
            // No app available for this user: fall back to the plain index view.
            return "cms/sys/index";
        }

        // Jump straight into the first (default) app.
        OpenAppDTO defaultApp = userApps.get(0);
        return "redirect:/cms/sys/" + defaultApp.getCode() + "/index.do";
    }

    /** Per-app landing page; simply forwards to the list view. */
    @RequestMapping("{sysCode}/index")
    public String sysIndex(@PathVariable("sysCode") String sysCode, Model model)
            throws Exception {
        return "redirect:/cms/sys/" + sysCode + "/list.do";
    }

    /** Looks up the app ("sys") descriptor and renders its list view. */
    @RequestMapping("{sysCode}/list")
    public String list(@PathVariable("sysCode") String sysCode, Model model)
            throws Exception {
        SysDTO sys = openClient.findSys(sysCode);
        model.addAttribute("sysCode", sysCode);
        model.addAttribute("sysDto", sys);
        return "cms/sys/list";
    }

    // ~ injected collaborators ===============================================

    @Resource
    public void setOpenClient(OpenClient openClient) {
        this.openClient = openClient;
    }

    @Resource
    public void setCurrentUserHolder(CurrentUserHolder currentUserHolder) {
        this.currentUserHolder = currentUserHolder;
    }
}
| 872 |
10,351 | # pylint: disable=redefined-outer-name
from unittest.mock import MagicMock
import pytest
import posixpath
import ftplib
from ftplib import FTP
from mlflow.store.artifact.artifact_repository_registry import get_artifact_repository
from mlflow.store.artifact.ftp_artifact_repo import FTPArtifactRepository
@pytest.fixture
def ftp_mock():
    """Provide a MagicMock that follows the spec of ``ftplib.FTP``."""
    mock_client = MagicMock(autospec=FTP)
    return mock_client
def test_artifact_uri_factory():
    """The registry should map ftp:// URIs to FTPArtifactRepository."""
    uri = "ftp://user:pass@test_ftp:123/some/path"
    assert isinstance(get_artifact_repository(uri), FTPArtifactRepository)
def test_list_artifacts_empty(ftp_mock):
    """An empty FTP directory listing yields an empty artifact list."""
    repo = FTPArtifactRepository("ftp://test_ftp/some/path")

    # Route the context-managed FTP client to the shared mock.
    enter_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client = MagicMock(return_value=MagicMock(__enter__=enter_mock))

    ftp_mock.nlst = MagicMock(return_value=[])

    assert repo.list_artifacts() == []
    ftp_mock.nlst.assert_called_once_with("/some/path")
def test_list_artifacts(ftp_mock):
    """Listing the artifact root reports files (with size) and directories."""
    artifact_root_path = "/experiment_id/run_id/"
    repo = FTPArtifactRepository("ftp://test_ftp" + artifact_root_path)
    # Route the context-managed FTP client to the shared mock.
    repo.get_ftp_client = MagicMock()
    call_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client.return_value = MagicMock(__enter__=call_mock)
    # mocked file structure
    # |- file
    # |- model
    #     |- model.pb
    file_path = "file"
    file_size = 678
    dir_path = "model"
    # cwd raising error_perm marks an entry as a plain file; None means it is
    # a directory — the side_effect order matches the cwd calls made by
    # list_artifacts for [root, "file", "model"] (TODO confirm against impl).
    ftp_mock.cwd = MagicMock(side_effect=[None, ftplib.error_perm, None])
    ftp_mock.nlst = MagicMock(return_value=[file_path, dir_path])
    ftp_mock.size = MagicMock(return_value=file_size)
    artifacts = repo.list_artifacts(path=None)
    ftp_mock.nlst.assert_called_once_with(artifact_root_path)
    # size is queried only for the file entry, never for the directory
    ftp_mock.size.assert_called_once_with(artifact_root_path + file_path)
    assert len(artifacts) == 2
    assert artifacts[0].path == file_path
    assert artifacts[0].is_dir is False
    assert artifacts[0].file_size == file_size
    assert artifacts[1].path == dir_path
    assert artifacts[1].is_dir is True
    assert artifacts[1].file_size is None
def test_list_artifacts_when_ftp_nlst_returns_absolute_paths(ftp_mock):
    """Some FTP servers return absolute paths from NLST; listing must still
    report paths relative to the artifact root."""
    artifact_root_path = "/experiment_id/run_id/"
    repo = FTPArtifactRepository("ftp://test_ftp" + artifact_root_path)
    # Route the context-managed FTP client to the shared mock.
    repo.get_ftp_client = MagicMock()
    call_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client.return_value = MagicMock(__enter__=call_mock)
    # mocked file structure
    # |- file
    # |- model
    #     |- model.pb
    file_path = "file"
    dir_path = "model"
    file_size = 678
    # cwd raising error_perm marks an entry as a plain file (see
    # test_list_artifacts for the assumed call order).
    ftp_mock.cwd = MagicMock(side_effect=[None, ftplib.error_perm, None])
    # NLST returns absolute paths here, unlike the sibling test above.
    ftp_mock.nlst = MagicMock(
        return_value=[
            posixpath.join(artifact_root_path, file_path),
            posixpath.join(artifact_root_path, dir_path),
        ]
    )
    ftp_mock.size = MagicMock(return_value=file_size)
    artifacts = repo.list_artifacts(path=None)
    ftp_mock.nlst.assert_called_once_with(artifact_root_path)
    ftp_mock.size.assert_called_once_with(artifact_root_path + file_path)
    # Reported paths are relative despite the absolute NLST output.
    assert len(artifacts) == 2
    assert artifacts[0].path == file_path
    assert artifacts[0].is_dir is False
    assert artifacts[0].file_size == file_size
    assert artifacts[1].path == dir_path
    assert artifacts[1].is_dir is True
    assert artifacts[1].file_size is None
def test_list_artifacts_with_subdir(ftp_mock):
    """Listing a sub directory prefixes each entry with the directory name.

    Note: this test previously constructed the repository with an
    ``sftp://test_sftp`` URI — an apparent copy-paste from the SFTP suite.
    This module tests ``FTPArtifactRepository``, so use ``ftp://test_ftp``
    like every sibling test; only the path component affects the assertions.
    """
    artifact_root_path = "/experiment_id/run_id/"
    repo = FTPArtifactRepository("ftp://test_ftp" + artifact_root_path)
    # Route the context-managed FTP client to the shared mock.
    repo.get_ftp_client = MagicMock()
    call_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client.return_value = MagicMock(__enter__=call_mock)
    # mocked file structure
    # |- model
    #     |- model.pb
    #     |- variables
    dir_name = "model"
    # list artifacts at sub directory level
    file_path = "model.pb"
    file_size = 345
    subdir_name = "variables"
    ftp_mock.nlst = MagicMock(return_value=[file_path, subdir_name])
    # cwd raising error_perm marks "model.pb" as a plain file.
    ftp_mock.cwd = MagicMock(side_effect=[None, ftplib.error_perm, None])
    ftp_mock.size = MagicMock(return_value=file_size)
    artifacts = repo.list_artifacts(path=dir_name)
    ftp_mock.nlst.assert_called_once_with(artifact_root_path + dir_name)
    ftp_mock.size.assert_called_once_with(artifact_root_path + dir_name + "/" + file_path)
    assert len(artifacts) == 2
    assert artifacts[0].path == dir_name + "/" + file_path
    assert artifacts[0].is_dir is False
    assert artifacts[0].file_size == file_size
    assert artifacts[1].path == dir_name + "/" + subdir_name
    assert artifacts[1].is_dir is True
    assert artifacts[1].file_size is None
def test_log_artifact(ftp_mock, tmpdir):
    """log_artifact must create the missing remote dir, cwd into it and STOR the file."""
    repo = FTPArtifactRepository("ftp://test_ftp/some/path")

    enter_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client = MagicMock(return_value=MagicMock(__enter__=enter_mock))

    data_dir = tmpdir.mkdir("data")
    local_file = data_dir.join("test.txt")
    local_file.write("hello world!")
    local_path = local_file.strpath

    # First cwd fails (remote directory absent), so the repo must mkd it first.
    ftp_mock.cwd = MagicMock(side_effect=[ftplib.error_perm, None])

    repo.log_artifact(local_path)

    ftp_mock.mkd.assert_called_once_with("/some/path")
    ftp_mock.cwd.assert_called_with("/some/path")
    ftp_mock.storbinary.assert_called_once()
    stor_command = ftp_mock.storbinary.call_args_list[0][0][0]
    assert stor_command == "STOR test.txt"
def test_log_artifact_multiple_calls(ftp_mock, tmpdir):
    """Remote directories are created only on first use; later uploads to the
    same destination skip the mkd step."""
    repo = FTPArtifactRepository("ftp://test_ftp/some/path")
    repo.get_ftp_client = MagicMock()
    call_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client.return_value = MagicMock(__enter__=call_mock)
    d = tmpdir.mkdir("data")
    file1 = d.join("test1.txt")
    file1.write("hello world!")
    fpath1 = d + "/test1.txt"
    fpath1 = fpath1.strpath
    file2 = d.join("test2.txt")
    file2.write("hello world!")
    fpath2 = d + "/test2.txt"
    fpath2 = fpath2.strpath
    # cwd outcomes across the three uploads below: each error_perm forces a
    # mkd; a plain None means the directory already exists.
    ftp_mock.cwd = MagicMock(
        side_effect=[ftplib.error_perm, None, ftplib.error_perm, None, None, None]
    )
    # 1st upload: root dir absent -> mkd + cwd + STOR
    repo.log_artifact(fpath1)
    ftp_mock.mkd.assert_called_once_with("/some/path")
    ftp_mock.cwd.assert_called_with("/some/path")
    ftp_mock.storbinary.assert_called()
    assert ftp_mock.storbinary.call_args_list[0][0][0] == "STOR test1.txt"
    ftp_mock.reset_mock()
    # 2nd upload into a new subdir: subdir absent -> mkd + cwd + STOR
    repo.log_artifact(fpath1, "subdir")
    ftp_mock.mkd.assert_called_once_with("/some/path/subdir")
    ftp_mock.cwd.assert_called_with("/some/path/subdir")
    ftp_mock.storbinary.assert_called()
    assert ftp_mock.storbinary.call_args_list[0][0][0] == "STOR test1.txt"
    ftp_mock.reset_mock()
    # 3rd upload to the (now existing) root: no mkd expected
    repo.log_artifact(fpath2)
    ftp_mock.mkd.assert_not_called()
    ftp_mock.cwd.assert_called_with("/some/path")
    ftp_mock.storbinary.assert_called()
    assert ftp_mock.storbinary.call_args_list[0][0][0] == "STOR test2.txt"
def __posixpath_parents(pathname, root):
    """Return the chain of parent directories of ``pathname``, outermost last,
    stopping at (and including) ``root`` or the filesystem root ``/``."""
    root = posixpath.normpath(root)
    parents = [posixpath.dirname(pathname)]
    while parents[-1] not in ("/", root):
        parents.append(posixpath.dirname(parents[-1]))
    return parents
@pytest.mark.parametrize("artifact_path", [None, "dir", "dir1/dir2"])
def test_log_artifacts(artifact_path, ftp_mock, tmpdir):
    """Uploading a local tree recreates its directory structure (including
    empty dirs) on the FTP server and stores every file."""
    # Setup FTP mock.
    dest_path_root = "/some/path"
    repo = FTPArtifactRepository("ftp://test_ftp" + dest_path_root)
    repo.get_ftp_client = MagicMock()
    call_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client.return_value = MagicMock(__enter__=call_mock)
    # Simulated remote filesystem state shared by the mock callbacks below.
    dirs_created = set([dest_path_root])
    files_created = set()
    cwd_history = ["/"]
    def mkd_mock(pathname):
        # mkd succeeds only when the parent directory already exists,
        # mirroring real FTP semantics.
        abs_pathname = posixpath.join(cwd_history[-1], pathname)
        if posixpath.dirname(abs_pathname) not in dirs_created:
            raise ftplib.error_perm
        dirs_created.add(abs_pathname)
    ftp_mock.mkd = MagicMock(side_effect=mkd_mock)
    def cwd_mock(pathname):
        # cwd fails for paths that were never created; otherwise records
        # the new working directory.
        abs_pathname = posixpath.join(cwd_history[-1], pathname)
        if abs_pathname not in dirs_created:
            raise ftplib.error_perm
        cwd_history.append(abs_pathname)
    ftp_mock.cwd = MagicMock(side_effect=cwd_mock)
    def storbinary_mock(cmd, _):
        # STOR must receive a bare filename relative to the current dir.
        head, basename = cmd.split(" ", 1)
        assert head == "STOR"
        assert "/" not in basename
        files_created.add(posixpath.join(cwd_history[-1], basename))
    ftp_mock.storbinary = MagicMock(side_effect=storbinary_mock)
    # Test: local tree with files, a nested dir, and two empty dirs.
    subd = tmpdir.mkdir("data").mkdir("subdir")
    subd.join("a.txt").write("A")
    subd.join("b.txt").write("B")
    subd.join("c.txt").write("C")
    subd.mkdir("empty1")
    subsubd = subd.mkdir("subsubdir")
    subsubd.join("aa.txt").write("AA")
    subsubd.join("bb.txt").write("BB")
    subsubd.join("cc.txt").write("CC")
    subsubd.mkdir("empty2")
    dest_path = (
        dest_path_root if artifact_path is None else posixpath.join(dest_path_root, artifact_path)
    )
    dirs_expected = set(
        [
            dest_path,
            posixpath.join(dest_path, "empty1"),
            posixpath.join(dest_path, "subsubdir"),
            posixpath.join(dest_path, "subsubdir", "empty2"),
        ]
    )
    files_expected = set(
        [
            posixpath.join(dest_path, "a.txt"),
            posixpath.join(dest_path, "b.txt"),
            posixpath.join(dest_path, "c.txt"),
            posixpath.join(dest_path, "subsubdir/aa.txt"),
            posixpath.join(dest_path, "subsubdir/bb.txt"),
            posixpath.join(dest_path, "subsubdir/cc.txt"),
        ]
    )
    # Intermediate directories of artifact_path must be created too.
    for dirs_expected_i in dirs_expected.copy():
        if dirs_expected_i != dest_path_root:
            dirs_expected |= set(__posixpath_parents(dirs_expected_i, root=dest_path_root))
    repo.log_artifacts(subd.strpath, artifact_path)
    assert dirs_created == dirs_expected
    assert files_created == files_expected
def test_download_artifacts_single(ftp_mock):
    """Downloading a single file issues one RETR with the absolute remote path."""
    repo = FTPArtifactRepository("ftp://test_ftp/some/path")

    enter_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client = MagicMock(return_value=MagicMock(__enter__=enter_mock))

    # cwd raising error_perm marks the remote path as a plain file.
    ftp_mock.cwd = MagicMock(side_effect=ftplib.error_perm)

    repo.download_artifacts("test.txt")

    ftp_mock.retrbinary.assert_called_once()
    retr_command = ftp_mock.retrbinary.call_args_list[0][0][0]
    assert retr_command == "RETR /some/path/test.txt"
def test_download_artifacts(ftp_mock):
    """Downloading a directory walks it recursively and RETRs every file,
    traversing empty directories without error."""
    artifact_root_path = "/some/path"
    repo = FTPArtifactRepository("ftp://test_ftp" + artifact_root_path)
    repo.get_ftp_client = MagicMock()
    call_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client.return_value = MagicMock(__enter__=call_mock)
    # mocked file structure
    # |- model
    #     |- model.pb
    #     |- empty_dir
    #     |- variables
    #         |- test.txt
    dir_path = posixpath.join(artifact_root_path, "model")
    # list artifacts at sub directory level
    model_file_path_sub = "model.pb"
    model_file_path_full = posixpath.join(dir_path, model_file_path_sub)
    empty_dir_name = "empty_dir"
    empty_dir_path = posixpath.join(dir_path, empty_dir_name)
    subdir_name = "variables"
    subdir_path_full = posixpath.join(dir_path, subdir_name)
    subfile_name = "test.txt"
    subfile_path_full = posixpath.join(artifact_root_path, subdir_path_full, subfile_name)
    # Which remote paths behave as directories (cwd succeeds) vs files.
    is_dir_mapping = {
        dir_path: True,
        empty_dir_path: True,
        model_file_path_full: False,
        subdir_path_full: True,
        subfile_path_full: False,
    }
    # Every cwd probe the recursive download is expected to make
    # (duplicates included — some paths are probed more than once).
    is_dir_call_args = [
        dir_path,
        model_file_path_full,
        empty_dir_path,
        subdir_path_full,
        model_file_path_full,
        subdir_path_full,
        subfile_path_full,
        subfile_path_full,
    ]
    def cwd_side_effect(call_arg):
        if not is_dir_mapping[call_arg]:
            raise ftplib.error_perm
    ftp_mock.cwd = MagicMock(side_effect=cwd_side_effect)
    def nlst_side_effect(call_arg):
        # NLST is only valid on directories; fail loudly otherwise.
        if call_arg == dir_path:
            return [model_file_path_sub, subdir_name, empty_dir_name]
        elif call_arg == subdir_path_full:
            return [subfile_name]
        elif call_arg == empty_dir_path:
            return []
        else:
            raise Exception("should never call nlst for non-directories {}".format(call_arg))
    ftp_mock.nlst = MagicMock(side_effect=nlst_side_effect)
    repo.download_artifacts("model")
    cwd_call_args = [arg_entry[0][0] for arg_entry in ftp_mock.cwd.call_args_list]
    assert set(cwd_call_args) == set(is_dir_call_args)
    # One NLST per directory: model, variables, empty_dir.
    assert ftp_mock.nlst.call_count == 3
    assert ftp_mock.retrbinary.call_args_list[0][0][0] == "RETR " + model_file_path_full
    assert ftp_mock.retrbinary.call_args_list[1][0][0] == "RETR " + subfile_path_full
def test_log_artifact_reuse_ftp_client(ftp_mock, tmpdir):
    """Each log_artifact call obtains its own FTP client (one per call)."""
    repo = FTPArtifactRepository("ftp://test_ftp/some/path")

    enter_mock = MagicMock(return_value=ftp_mock)
    repo.get_ftp_client = MagicMock(return_value=MagicMock(__enter__=enter_mock))

    artifact = tmpdir.mkdir("data").join("test.txt")
    artifact.write("hello world!")

    for destination in (None, "subdir1/subdir2", "subdir3"):
        repo.log_artifact(artifact.strpath, destination)

    assert repo.get_ftp_client.call_count == 3
| 5,923 |
682 | /**CFile****************************************************************
FileName [fraHot.c]
SystemName [ABC: Logic synthesis and verification system.]
PackageName [New FRAIG package.]
Synopsis [Computing and using one-hotness conditions.]
Author [<NAME>]
Affiliation [UC Berkeley]
Date [Ver. 1.0. Started - June 30, 2007.]
Revision [$Id: fraHot.c,v 1.00 2007/06/30 00:00:00 alanmi Exp $]
***********************************************************************/
#include "fra.h"
ABC_NAMESPACE_IMPL_START
////////////////////////////////////////////////////////////////////////
/// DECLARATIONS ///
////////////////////////////////////////////////////////////////////////
/* Map a register index to a 1-based signed literal (negative when complemented). */
static inline int Fra_RegToLit( int n, int c )  { if ( c ) return -n-1; return n+1; }
/* Recover the register index from a signed literal produced by Fra_RegToLit(). */
static inline int Fra_LitReg( int n )           { if ( n > 0 ) return n-1; return -n-1; }
/* Return 1 when the literal is complemented (negative), 0 otherwise. */
static inline int Fra_LitSign( int n )          { return n < 0; }
////////////////////////////////////////////////////////////////////////
/// FUNCTION DEFINITIONS ///
////////////////////////////////////////////////////////////////////////
/**Function*************************************************************

  Synopsis    [Returns 1 if simulation info is composed of all zeros.]

  Description [Scans the node's simulation words past the prefix and
  fails as soon as a nonzero word is found.]

  SideEffects []

  SeeAlso     []

***********************************************************************/
int Fra_OneHotNodeIsConst( Fra_Sml_t * pSeq, Aig_Obj_t * pObj )
{
    unsigned * pInfo = Fra_ObjSim( pSeq, pObj->Id );
    int w;
    for ( w = pSeq->nWordsPref; w < pSeq->nWordsTotal; w++ )
        if ( pInfo[w] != 0 )
            return 0;
    return 1;
}
/**Function*************************************************************

  Synopsis    [Returns 1 if simulation infos are equal.]

  Description [Compares the two nodes' simulation words past the prefix
  word-by-word.]

  SideEffects []

  SeeAlso     []

***********************************************************************/
int Fra_OneHotNodesAreEqual( Fra_Sml_t * pSeq, Aig_Obj_t * pObj0, Aig_Obj_t * pObj1 )
{
    unsigned * pInfo0 = Fra_ObjSim( pSeq, pObj0->Id );
    unsigned * pInfo1 = Fra_ObjSim( pSeq, pObj1->Id );
    int w;
    for ( w = pSeq->nWordsPref; w < pSeq->nWordsTotal; w++ )
        if ( pInfo0[w] != pInfo1[w] )
            return 0;
    return 1;
}
/**Function*************************************************************

  Synopsis    [Returns 1 if implication holds.]

  Description [Checks whether the two-literal clause over the given nodes
  (with polarities selected by fCompl1/fCompl2) holds on every sampled
  simulation pattern.  At least one literal must be complemented; the
  both-positive combination is not used by this package.]

  SideEffects []

  SeeAlso     []

***********************************************************************/
int Fra_OneHotNodesAreClause( Fra_Sml_t * pSeq, Aig_Obj_t * pObj1, Aig_Obj_t * pObj2, int fCompl1, int fCompl2 )
{
    unsigned * pSim1 = Fra_ObjSim( pSeq, pObj1->Id );
    unsigned * pSim2 = Fra_ObjSim( pSeq, pObj2->Id );
    unsigned Word1, Word2;
    int w;
    if ( !fCompl1 && !fCompl2 )
    {
        assert( 0 );
        return 1;
    }
    for ( w = pSeq->nWordsPref; w < pSeq->nWordsTotal; w++ )
    {
        // a literal is false exactly when its selected polarity bit is set here
        Word1 = fCompl1 ? pSim1[w] : ~pSim1[w];
        Word2 = fCompl2 ? pSim2[w] : ~pSim2[w];
        // a pattern falsifying both literals falsifies the clause
        if ( Word1 & Word2 )
            return 0;
    }
    return 1;
}
/**Function*************************************************************

  Synopsis    [Computes one-hot implications.]

  Description [Scans every ordered pair of register outputs of the AIG in
  pSim and records, as literal pairs in the returned vector, those
  two-literal clauses (!r1 + !r2), (r1 + !r2), (!r1 + r2) that hold on the
  simulation info.  Registers with constant-zero info and pairs with
  identical info are skipped as uninformative.]

  SideEffects []

  SeeAlso     []

***********************************************************************/
Vec_Int_t * Fra_OneHotCompute( Fra_Man_t * p, Fra_Sml_t * pSim )
{
    int fSkipConstEqu = 1;  // skip const-0 registers and pairwise-equal registers
    Vec_Int_t * vOneHots;
    Aig_Obj_t * pObj1, * pObj2;
    int i, k;
    int nTruePis = Aig_ManCiNum(pSim->pAig) - Aig_ManRegNum(pSim->pAig);
    assert( pSim->pAig == p->pManAig );
    vOneHots = Vec_IntAlloc( 100 );
    Aig_ManForEachLoSeq( pSim->pAig, pObj1, i )
    {
        if ( fSkipConstEqu && Fra_OneHotNodeIsConst(pSim, pObj1) )
            continue;
        assert( i-nTruePis >= 0 );
//        Aig_ManForEachLoSeq( pSim->pAig, pObj2, k )
//        Vec_PtrForEachEntryStart( Aig_Obj_t *, pSim->pAig->vPis, pObj2, k, Aig_ManCiNum(p)-Aig_ManRegNum(p) )
        // only consider the pairs (i, k) with k > i to avoid duplicates
        Vec_PtrForEachEntryStart( Aig_Obj_t *, pSim->pAig->vCis, pObj2, k, i+1 )
        {
            if ( fSkipConstEqu && Fra_OneHotNodeIsConst(pSim, pObj2) )
                continue;
            if ( fSkipConstEqu && Fra_OneHotNodesAreEqual( pSim, pObj1, pObj2 ) )
                continue;
            assert( k-nTruePis >= 0 );
            // literals are stored register-relative (CI index minus true PIs)
            if ( Fra_OneHotNodesAreClause( pSim, pObj1, pObj2, 1, 1 ) )
            {
                Vec_IntPush( vOneHots, Fra_RegToLit(i-nTruePis, 1) );
                Vec_IntPush( vOneHots, Fra_RegToLit(k-nTruePis, 1) );
                continue;
            }
            if ( Fra_OneHotNodesAreClause( pSim, pObj1, pObj2, 0, 1 ) )
            {
                Vec_IntPush( vOneHots, Fra_RegToLit(i-nTruePis, 0) );
                Vec_IntPush( vOneHots, Fra_RegToLit(k-nTruePis, 1) );
                continue;
            }
            if ( Fra_OneHotNodesAreClause( pSim, pObj1, pObj2, 1, 0 ) )
            {
                Vec_IntPush( vOneHots, Fra_RegToLit(i-nTruePis, 1) );
                Vec_IntPush( vOneHots, Fra_RegToLit(k-nTruePis, 0) );
                continue;
            }
        }
    }
    return vOneHots;
}
/**Function*************************************************************

  Synopsis    [Assumes one-hot implications in the SAT solver.]

  Description [Adds each recorded two-literal clause over the register
  inputs of the fraiged manager as a hard clause of the SAT solver.
  Works only for one-frame unrolling (nFramesK == 1).]

  SideEffects [May delete and null out p->pSat when a clause makes the
  solver trivially unsatisfiable.]

  SeeAlso     []

***********************************************************************/
void Fra_OneHotAssume( Fra_Man_t * p, Vec_Int_t * vOneHots )
{
    Aig_Obj_t * pObj1, * pObj2;
    int i, Out1, Out2, pLits[2];
    int nPiNum = Aig_ManCiNum(p->pManFraig) - Aig_ManRegNum(p->pManFraig);
    assert( p->pPars->nFramesK == 1 ); // add to only one frame
    for ( i = 0; i < Vec_IntSize(vOneHots); i += 2 )
    {
        Out1 = Vec_IntEntry( vOneHots, i );
        Out2 = Vec_IntEntry( vOneHots, i+1 );
        if ( Out1 == 0 && Out2 == 0 )
            continue;  // pair was invalidated by refinement
        pObj1 = Aig_ManCi( p->pManFraig, nPiNum + Fra_LitReg(Out1) );
        pObj2 = Aig_ManCi( p->pManFraig, nPiNum + Fra_LitReg(Out2) );
        pLits[0] = toLitCond( Fra_ObjSatNum(pObj1), Fra_LitSign(Out1) );
        pLits[1] = toLitCond( Fra_ObjSatNum(pObj2), Fra_LitSign(Out2) );
        // add constraint to solver
        if ( !sat_solver_addclause( p->pSat, pLits, pLits + 2 ) )
        {
            printf( "Fra_OneHotAssume(): Adding clause makes SAT solver unsat.\n" );
            sat_solver_delete( p->pSat );
            p->pSat = NULL;
            return;
        }
    }
}
/**Function*************************************************************

  Synopsis    [Checks one-hot implications.]

  Description [Proves each surviving clause on the register outputs of
  the fraiged manager.  A disproved clause marks the class refinement
  flag and triggers resimulation, which is expected to invalidate the
  pair (zero it out) in vOneHots.]

  SideEffects [Sets p->pCla->fRefinement; may resimulate.]

  SeeAlso     []

***********************************************************************/
void Fra_OneHotCheck( Fra_Man_t * p, Vec_Int_t * vOneHots )
{
    Aig_Obj_t * pObj1, * pObj2;
    int RetValue, i, Out1, Out2;
    int nTruePos = Aig_ManCoNum(p->pManFraig) - Aig_ManRegNum(p->pManFraig);
    for ( i = 0; i < Vec_IntSize(vOneHots); i += 2 )
    {
        Out1 = Vec_IntEntry( vOneHots, i );
        Out2 = Vec_IntEntry( vOneHots, i+1 );
        if ( Out1 == 0 && Out2 == 0 )
            continue;  // pair was invalidated by refinement
        pObj1 = Aig_ManCo( p->pManFraig, nTruePos + Fra_LitReg(Out1) );
        pObj2 = Aig_ManCo( p->pManFraig, nTruePos + Fra_LitReg(Out2) );
        RetValue = Fra_NodesAreClause( p, pObj1, pObj2, Fra_LitSign(Out1), Fra_LitSign(Out2) );
        if ( RetValue != 1 )
        {
            p->pCla->fRefinement = 1;
            // RetValue == 0 means a counterexample was found; resimulation
            // should zero out the disproved pair in vOneHots
            if ( RetValue == 0 )
                Fra_SmlResimulate( p );
            if ( Vec_IntEntry(vOneHots, i) != 0 )
                printf( "Fra_OneHotCheck(): Clause is not refined!\n" );
            assert( Vec_IntEntry(vOneHots, i) == 0 );
        }
    }
}
/**Function*************************************************************

  Synopsis    [Removes those implications that no longer hold.]

  Description [Re-checks every surviving clause against the current
  simulation info and zeroes out pairs that fail.
  Returns 1 if refinement has happened.]

  SideEffects [Overwrites invalidated pairs in vOneHots with zeros.]

  SeeAlso     []

***********************************************************************/
int Fra_OneHotRefineUsingCex( Fra_Man_t * p, Vec_Int_t * vOneHots )
{
    Aig_Obj_t * pObj1, * pObj2;
    int i, Out1, Out2, RetValue = 0;
    int nPiNum = Aig_ManCiNum(p->pManAig) - Aig_ManRegNum(p->pManAig);
    assert( p->pSml->pAig == p->pManAig );
    for ( i = 0; i < Vec_IntSize(vOneHots); i += 2 )
    {
        Out1 = Vec_IntEntry( vOneHots, i );
        Out2 = Vec_IntEntry( vOneHots, i+1 );
        if ( Out1 == 0 && Out2 == 0 )
            continue;  // already invalidated
        // get the corresponding nodes
        pObj1 = Aig_ManCi( p->pManAig, nPiNum + Fra_LitReg(Out1) );
        pObj2 = Aig_ManCi( p->pManAig, nPiNum + Fra_LitReg(Out2) );
        // check if implication holds using this simulation info
        if ( !Fra_OneHotNodesAreClause( p->pSml, pObj1, pObj2, Fra_LitSign(Out1), Fra_LitSign(Out2) ) )
        {
            Vec_IntWriteEntry( vOneHots, i, 0 );
            Vec_IntWriteEntry( vOneHots, i+1, 0 );
            RetValue = 1;
        }
    }
    return RetValue;
}
/**Function*************************************************************

  Synopsis    [Counts the surviving one-hot clauses.]

  Description [Invalidated pairs are stored as two zero entries and are
  not counted.  (The previous banner text was copy-pasted from the
  refinement routine above.)]

  SideEffects []

  SeeAlso     []

***********************************************************************/
int Fra_OneHotCount( Fra_Man_t * p, Vec_Int_t * vOneHots )
{
    int i, Counter = 0;
    for ( i = 0; i < Vec_IntSize(vOneHots); i += 2 )
        if ( Vec_IntEntry(vOneHots, i) != 0 || Vec_IntEntry(vOneHots, i+1) != 0 )
            Counter++;
    return Counter;
}
/**Function*************************************************************

  Synopsis    [Estimates the coverage of state space by clauses.]

  Description [Draws random register valuations, marks every pattern
  excluded by at least one surviving clause, and prints the fraction of
  patterns that remain as care states.]

  SideEffects [Prints statistics to stdout.]

  SeeAlso     []

***********************************************************************/
void Fra_OneHotEstimateCoverage( Fra_Man_t * p, Vec_Int_t * vOneHots )
{
    int nSimWords = (1<<14);
    int nRegs = Aig_ManRegNum(p->pManAig);
    Vec_Ptr_t * vSimInfo;
    unsigned * pSim1, * pSim2, * pSimTot;
    int i, w, Out1, Out2, nCovered, Counter = 0;
    abctime clk = Abc_Clock();
    // generate random sim-info at register outputs
    // (one extra row accumulates the union of excluded patterns)
    vSimInfo = Vec_PtrAllocSimInfo( nRegs + 1, nSimWords );
//    srand( 0xAABBAABB );
    Aig_ManRandom(1);
    for ( i = 0; i < nRegs; i++ )
    {
        pSim1 = (unsigned *)Vec_PtrEntry( vSimInfo, i );
        for ( w = 0; w < nSimWords; w++ )
            pSim1[w] = Fra_ObjRandomSim();
    }
    pSimTot = (unsigned *)Vec_PtrEntry( vSimInfo, nRegs );
    // collect simulation info
    memset( pSimTot, 0, sizeof(unsigned) * nSimWords );
    for ( i = 0; i < Vec_IntSize(vOneHots); i += 2 )
    {
        Out1 = Vec_IntEntry( vOneHots, i );
        Out2 = Vec_IntEntry( vOneHots, i+1 );
        if ( Out1 == 0 && Out2 == 0 )
            continue;  // pair was invalidated by refinement
//printf( "(%c%d,%c%d) ",
//Fra_LitSign(Out1)? '-': '+', Fra_LitReg(Out1),
//Fra_LitSign(Out2)? '-': '+', Fra_LitReg(Out2) );
        Counter++;
        pSim1 = (unsigned *)Vec_PtrEntry( vSimInfo, Fra_LitReg(Out1) );
        pSim2 = (unsigned *)Vec_PtrEntry( vSimInfo, Fra_LitReg(Out2) );
        // mark the patterns where the clause is falsified (both literals 0)
        if ( Fra_LitSign(Out1) && Fra_LitSign(Out2) )
            for ( w = 0; w < nSimWords; w++ )
                pSimTot[w] |= pSim1[w] & pSim2[w];
        else if ( Fra_LitSign(Out1) )
            for ( w = 0; w < nSimWords; w++ )
                pSimTot[w] |= pSim1[w] & ~pSim2[w];
        else if ( Fra_LitSign(Out2) )
            for ( w = 0; w < nSimWords; w++ )
                pSimTot[w] |= ~pSim1[w] & pSim2[w];
        else
            assert( 0 );
    }
//printf( "\n" );
    // count the total number of patterns contained in the don't-care
    nCovered = 0;
    for ( w = 0; w < nSimWords; w++ )
        nCovered += Aig_WordCountOnes( pSimTot[w] );
    Vec_PtrFree( vSimInfo );
    // print the result
    printf( "Care states ratio = %f. ", 1.0 * (nSimWords * 32 - nCovered) / (nSimWords * 32) );
    printf( "(%d out of %d patterns) ", nSimWords * 32 - nCovered, nSimWords * 32 );
    ABC_PRT( "Time", Abc_Clock() - clk );
}
/**Function*************************************************************

  Synopsis    [Creates one-hotness EXDC.]

  Description [Builds a new AIG with one CI per CI of the original
  manager and one CO per surviving clause; each output is the OR of the
  two clause literals over the corresponding register inputs.]

  SideEffects []

  SeeAlso     []

***********************************************************************/
Aig_Man_t * Fra_OneHotCreateExdc( Fra_Man_t * p, Vec_Int_t * vOneHots )
{
    Aig_Man_t * pNew;
    Aig_Obj_t * pObj1, * pObj2, * pObj;
    int i, Out1, Out2, nTruePis;
    pNew = Aig_ManStart( Vec_IntSize(vOneHots)/2 );
//    for ( i = 0; i < Aig_ManRegNum(p->pManAig); i++ )
//        Aig_ObjCreateCi(pNew);
    Aig_ManForEachCi( p->pManAig, pObj, i )
        Aig_ObjCreateCi(pNew);
    nTruePis = Aig_ManCiNum(p->pManAig) - Aig_ManRegNum(p->pManAig);
    for ( i = 0; i < Vec_IntSize(vOneHots); i += 2 )
    {
        Out1 = Vec_IntEntry( vOneHots, i );
        Out2 = Vec_IntEntry( vOneHots, i+1 );
        if ( Out1 == 0 && Out2 == 0 )
            continue;  // pair was invalidated by refinement
        pObj1 = Aig_ManCi( pNew, nTruePis + Fra_LitReg(Out1) );
        pObj2 = Aig_ManCi( pNew, nTruePis + Fra_LitReg(Out2) );
        pObj1 = Aig_NotCond( pObj1, Fra_LitSign(Out1) );
        pObj2 = Aig_NotCond( pObj2, Fra_LitSign(Out2) );
        pObj = Aig_Or( pNew, pObj1, pObj2 );
        Aig_ObjCreateCo( pNew, pObj );
    }
    Aig_ManCleanup(pNew);
//    printf( "Created AIG with %d nodes and %d outputs.\n", Aig_ManNodeNum(pNew), Aig_ManCoNum(pNew) );
    return pNew;
}
/**Function*************************************************************

  Synopsis    [Assumes one-hot implications in the SAT solver.]

  Description [For each one-hot group, adds to the solver the pairwise
  clauses (!a + !b), i.e. no two signals of the group are 1 together.]

  SideEffects [May delete and null out p->pSat when a clause makes the
  solver trivially unsatisfiable.]

  SeeAlso     []

***********************************************************************/
void Fra_OneHotAddKnownConstraint( Fra_Man_t * p, Vec_Ptr_t * vOnehots )
{
    Vec_Int_t * vGroup;
    Aig_Obj_t * pObj1, * pObj2;
    int k, i, j, Out1, Out2, pLits[2];
    //
    // these constraints should be added to different timeframes!
    // (also note that PIs follow first - then registers)
    //
    Vec_PtrForEachEntry( Vec_Int_t *, vOnehots, vGroup, k )
    {
        // iterate over all unordered pairs within the group
        Vec_IntForEachEntry( vGroup, Out1, i )
        Vec_IntForEachEntryStart( vGroup, Out2, j, i+1 )
        {
            pObj1 = Aig_ManCi( p->pManFraig, Out1 );
            pObj2 = Aig_ManCi( p->pManFraig, Out2 );
            pLits[0] = toLitCond( Fra_ObjSatNum(pObj1), 1 );
            pLits[1] = toLitCond( Fra_ObjSatNum(pObj2), 1 );
            // add constraint to solver
            if ( !sat_solver_addclause( p->pSat, pLits, pLits + 2 ) )
            {
                printf( "Fra_OneHotAddKnownConstraint(): Adding clause makes SAT solver unsat.\n" );
                sat_solver_delete( p->pSat );
                p->pSat = NULL;
                return;
            }
        }
    }
}
////////////////////////////////////////////////////////////////////////
/// END OF FILE ///
////////////////////////////////////////////////////////////////////////
ABC_NAMESPACE_IMPL_END
| 7,289 |
409 | // ==========================================================================
// SeqAn - The Library for Sequence Analysis
// ==========================================================================
// Copyright (c) 2006-2018, <NAME>, FU Berlin
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of Knut Reinert or the FU Berlin nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
// OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
// ==========================================================================
// Author: <NAME> <[email protected]>
// <NAME> <[email protected]>
// ==========================================================================
// Tests for SIMD vectors.
// ==========================================================================
#ifndef SEQAN_CORE_TESTS_SIMD_TEST_SIMD_VECTOR_H_
#define SEQAN_CORE_TESTS_SIMD_TEST_SIMD_VECTOR_H_
#include <seqan/simd.h>
#include <random>
#include <seqan/sequence.h>
#include <seqan/misc/bit_twiddling.h>
#if defined(SEQAN_SIMD_ENABLED)
namespace seqan {
// Transposes a randomly filled ROWS x COLS matrix of SIMD vectors via
// transpose<ROWS>() and verifies every element against the expected
// position in the transposed layout.
template <int ROWS, typename TVector>
inline void test_matrix_transpose()
{
    typedef typename Value<TVector>::Type TValue;
    typedef TVector TMatrix[LENGTH<TVector>::VALUE];
    const int COLS = LENGTH<TVector>::VALUE;

    // Fill a flat buffer with random values of the vector's scalar type.
    String<TValue> random;
    resize(random, ROWS * COLS);

    std::mt19937 rng;
    // http://stackoverflow.com/questions/31460733/why-arent-stduniform-int-distributionuint8-t-and-stduniform-int-distri
    std::uniform_int_distribution<uint64_t> pdf(0, MaxValue<TValue>::VALUE);

    for (unsigned i = 0; i < length(random); ++i)
        random[i] = static_cast<TValue>(pdf(rng));

    // Load the buffer row-major into the SIMD matrix.
    TMatrix tmp;
    for (int i = 0; i < ROWS; ++i)
        for (int j = 0; j < COLS; ++j)
            tmp[i][j] = random[i * COLS + j];

//     for(int i=0;i<ROWS;++i)
//         print(std::cout, tmp[i]) << std::endl;

    transpose<ROWS>(tmp);

//     std::cout << std::endl;
//     std::cout << std::endl;
//     for(int i=0;i<DIM;++i)
//         print(std::cout, tmp[i]) << std::endl;

#if defined(__x86_64__) || defined(__amd64__)
    _mm_empty();  // Fixes icpc warning #13203: No EMMS instruction before call to function
#endif  // defined(__x86_64__) || defined(__amd64__)

    // After transposition element (i, j) must equal source element (j, i).
    for (int i = 0; i < ROWS; ++i)
        for (int j = 0; j < COLS; ++j)
            SEQAN_ASSERT_EQ(tmp[i][j], random[j * ROWS + i]);
}
// Fill two vectors with deterministic test patterns:
// a[i] = 3 * (i - 1) and b[i] = length - i.
template <typename TSimdVector>
void fillVectors(TSimdVector & a, TSimdVector & b)
{
    using namespace seqan;

    constexpr auto vectorLength = LENGTH<TSimdVector>::VALUE;
    for (auto pos = 0; pos < vectorLength; ++pos)
    {
        a[pos] = (pos - 1) * 3;
        b[pos] = vectorLength - pos;
    }
}
// Write the descending index sequence length-1, ..., 1, 0 into idx.
template <typename TSimdVector, typename TSize>
void reverseIndexSequence(TSimdVector & idx, TSize length)
{
    for (auto pos = 0; pos < length; ++pos)
    {
        // note: umesimd swizzle interface has no idx[pos] = value; support,
        // so fall back to assignValue.
        assignValue(idx, pos, length - pos - 1);
    }
}
// Returns the scalar "true" value of the mask vector type corresponding to
// TSimdVector: -1 cast to the mask's value type (all bits set).
template <typename TSimdVector>
constexpr auto trueValue()
{
    using TSimdMaskVector = typename SimdMaskVector<TSimdVector>::Type;
    using TValue = typename Value<TSimdMaskVector>::Type;
    return static_cast<TValue>(-1);
}
} // namespace seqan
// ----------------------------------------------------------------------------
// Configuration of typed tests for simd vectors.
// ----------------------------------------------------------------------------
// Typed-test fixture exposing the SIMD vector type under test together with
// its scalar value type and element count.
template <typename TSimdVector_>
class SimdVectorTestCommon : public seqan::Test
{
public:
    using TValue = typename seqan::Value<TSimdVector_>::Type;
    constexpr static auto const LENGTH = seqan::LENGTH<TSimdVector_>::VALUE;
    using TSimdVector = TSimdVector_;
};
// Typed-test fixture for the gather tests; structurally identical to
// SimdVectorTestCommon but registered as a separate test case.
template <typename TSimdVector_>
class SimdVectorTestGather : public seqan::Test
{
public:
    using TValue = typename seqan::Value<TSimdVector_>::Type;
    constexpr static auto const LENGTH = seqan::LENGTH<TSimdVector_>::VALUE;
    using TSimdVector = TSimdVector_;
};
// List of all SIMD vector types the typed tests are instantiated with:
// the 128-bit vectors always, plus the 256-bit and 512-bit variants when
// SEQAN_SIZEOF_MAX_VECTOR says the platform supports them.
typedef
    seqan::TagList<seqan::SimdVector<int8_t, 16>::Type,
    seqan::TagList<seqan::SimdVector<int16_t, 8>::Type,
    seqan::TagList<seqan::SimdVector<int32_t, 4>::Type,
    seqan::TagList<seqan::SimdVector<int64_t, 2>::Type,
    seqan::TagList<seqan::SimdVector<uint8_t, 16>::Type,
    seqan::TagList<seqan::SimdVector<uint16_t, 8>::Type,
    seqan::TagList<seqan::SimdVector<uint32_t, 4>::Type,
    seqan::TagList<seqan::SimdVector<uint64_t, 2>::Type
#if SEQAN_SIZEOF_MAX_VECTOR >= 32
    , // Extension of the list above
    seqan::TagList<seqan::SimdVector<int8_t, 32>::Type,
    seqan::TagList<seqan::SimdVector<int16_t, 16>::Type,
    seqan::TagList<seqan::SimdVector<int32_t, 8>::Type,
    seqan::TagList<seqan::SimdVector<int64_t, 4>::Type,
    seqan::TagList<seqan::SimdVector<uint8_t, 32>::Type,
    seqan::TagList<seqan::SimdVector<uint16_t, 16>::Type,
    seqan::TagList<seqan::SimdVector<uint32_t, 8>::Type,
    seqan::TagList<seqan::SimdVector<uint64_t, 4>::Type
#if SEQAN_SIZEOF_MAX_VECTOR >= 64
    , // Extension of the list above
    seqan::TagList<seqan::SimdVector<int8_t, 64>::Type,
    seqan::TagList<seqan::SimdVector<int16_t, 32>::Type,
    seqan::TagList<seqan::SimdVector<int32_t, 16>::Type,
    seqan::TagList<seqan::SimdVector<int64_t, 8>::Type,
    seqan::TagList<seqan::SimdVector<uint8_t, 64>::Type,
    seqan::TagList<seqan::SimdVector<uint16_t, 32>::Type,
    seqan::TagList<seqan::SimdVector<uint32_t, 16>::Type,
    seqan::TagList<seqan::SimdVector<uint64_t, 8>::Type
    > > > > > > > >
#endif
    > > > > > > > >
#endif
    > > > > > > > >
    SimdVectorCommonCommonTypes;

// Register both typed test cases over the full type list.
SEQAN_TYPED_TEST_CASE(SimdVectorTestCommon, SimdVectorCommonCommonTypes);
SEQAN_TYPED_TEST_CASE(SimdVectorTestGather, SimdVectorCommonCommonTypes);
// Checks that the SimdVector<scalar, length> metafunction maps to the expected
// named vector typedefs for 128-, 256- and 512-bit registers, that LENGTH
// reports the element count, and that brace-init plus operator[] work.
SEQAN_DEFINE_TEST(test_simd_types)
{
    using namespace seqan;
    // SimdVector16Char
    static_assert(std::is_same<SimdVector<int8_t, 16>::Type, SimdVector16SChar>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int16_t, 8>::Type, SimdVector8Short>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int32_t, 4>::Type, SimdVector4Int>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int64_t, 2>::Type, SimdVector2Int64>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint8_t, 16>::Type, SimdVector16UChar>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint16_t, 8>::Type, SimdVector8UShort>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint32_t, 4>::Type, SimdVector4UInt>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint64_t, 2>::Type, SimdVector2UInt64>::value, "should be the same type");
    static_assert(LENGTH<SimdVector4UInt>::VALUE == 4, "128bit register fits 4 int's");
    static_assert(LENGTH<SimdVector<uint32_t, 4>::Type>::VALUE == 4, "128bit register fits 4 int's");
    // Runtime check: element-wise initialization and subscripting.
    SimdVector<uint32_t, 4>::Type a128 = {0, 1, 2, 3};
    for (uint32_t i = 0; i < 4; ++i) {
        // std::cout << "DEBUG: " << i << ": " << a128[i] << " = " << i << std::endl;
        SEQAN_ASSERT_EQ(a128[i], i);
    }
    // SimdVector32Char
    #if SEQAN_SIZEOF_MAX_VECTOR >= 32
    static_assert(std::is_same<SimdVector<int8_t, 32>::Type, SimdVector32SChar>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int16_t, 16>::Type, SimdVector16Short>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int32_t, 8>::Type, SimdVector8Int>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int64_t, 4>::Type, SimdVector4Int64>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint8_t, 32>::Type, SimdVector32UChar>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint16_t, 16>::Type, SimdVector16UShort>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint32_t, 8>::Type, SimdVector8UInt>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint64_t, 4>::Type, SimdVector4UInt64>::value, "should be the same type");
    static_assert(LENGTH<SimdVector8UInt>::VALUE == 8, "256bit register fits 8 int's");
    static_assert(LENGTH<SimdVector<uint32_t, 8>::Type>::VALUE == 8, "256bit register fits 8 int's");
    SimdVector<uint32_t, 8>::Type a256 = {0, 1, 2, 3, 4, 5, 6, 7};
    for (uint32_t i = 0; i < 8; ++i) {
        // std::cout << "DEBUG: " << i << ": " << a256[i] << " = " << i << std::endl;
        SEQAN_ASSERT_EQ(a256[i], i);
    }
    #endif
    #if SEQAN_SIZEOF_MAX_VECTOR >= 64
    static_assert(std::is_same<SimdVector<int8_t, 64>::Type, SimdVector64SChar>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int16_t, 32>::Type, SimdVector32Short>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int32_t, 16>::Type, SimdVector16Int>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<int64_t, 8>::Type, SimdVector8Int64>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint8_t, 64>::Type, SimdVector64UChar>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint16_t, 32>::Type, SimdVector32UShort>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint32_t, 16>::Type, SimdVector16UInt>::value, "should be the same type");
    static_assert(std::is_same<SimdVector<uint64_t, 8>::Type, SimdVector8UInt64>::value, "should be the same type");
    static_assert(LENGTH<SimdVector16UInt>::VALUE == 16, "512bit register fits 16 int's");
    static_assert(LENGTH<SimdVector<uint32_t, 16>::Type>::VALUE == 16, "512bit register fits 16 int's");
    SimdVector<uint32_t, 16>::Type a512 = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
    for (uint32_t i = 0; i < 16; ++i) {
        // std::cout << "DEBUG: " << i << ": " << a512[i] << " = " << i << std::endl;
        SEQAN_ASSERT_EQ(a512[i], i);
    }
    #endif
}
// Round-trip check: Value<> and LENGTH<> applied to the vector type must
// reconstruct the very same SimdVector type.
SEQAN_TYPED_TEST(SimdVectorTestCommon, MetaFunctions)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    // NOTE(marehr): explicit namespace is necessary for msvc 2015:
    // error C2039: 'VALUE': is not a member of '`global namespace''
    constexpr auto length = seqan::LENGTH<TSimdVector>::VALUE;
    using TValue = typename Value<TSimdVector>::Type;
    typedef typename SimdVector<TValue, length>::Type TSimdVectorNew;
    bool sameType = IsSameType<TSimdVector, TSimdVectorNew>::VALUE;
    SEQAN_ASSERT(sameType);
}
// The vector object must occupy at least (and on non-MSVC exactly) as many
// bytes as its elements.
SEQAN_TYPED_TEST(SimdVectorTestCommon, SizeOf)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u};
    // only on windows are the values unequal
    SEQAN_ASSERT_GEQ(sizeof(a), sizeof(TValue) * length);
    // on linux we assume that the sizes are equal
#ifndef STDLIB_VS
    SEQAN_ASSERT_EQ(sizeof(a), sizeof(TValue) * length);
#endif
}
// operator[] must yield exactly the scalar value type of the vector.
SEQAN_TYPED_TEST(SimdVectorTestCommon, SubscriptType)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    TValue c = a[0];
    bool sameType = IsSameType<TValue, decltype(c)>::VALUE;
    SEQAN_ASSERT(sameType);
}
// clearVector() must zero every element of a previously filled vector.
SEQAN_TYPED_TEST(SimdVectorTestCommon, ClearVector)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    auto zero = static_cast<TValue>(0);
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    clearVector(a);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)a[i] << " = " << 0 << std::endl;
        SEQAN_ASSERT_EQ(a[i], zero);
    }
}
// createVector(x) must broadcast the scalar x into every lane.
SEQAN_TYPED_TEST(SimdVectorTestCommon, CreateVector)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    auto scalar = static_cast<TValue>(23);
    auto a = createVector<TSimdVector>(scalar);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)a[i] << " = " << 23 << std::endl;
        SEQAN_ASSERT_EQ(a[i], scalar);
    }
}
// fillVector with a single constant must behave like a broadcast.
SEQAN_TYPED_TEST(SimdVectorTestCommon, FillVectorConstant)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u};
    fillVector(a, 5);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)a[i] << " = " << i << std::endl;
        SEQAN_ASSERT_EQ(a[i], static_cast<TValue>(5));
    }
}
// Helper that expands an index_sequence into a variadic fillVector call,
// i.e. fillVector(a, 0, 1, 2, ..., N-1).
template <typename TSimdVector, std::size_t... index >
inline void
call_fill_vector(TSimdVector & a, std::index_sequence<index...>)
{
    seqan::fillVector(a, index...);
}
// fillVector with one argument per lane must store each value in its lane.
SEQAN_TYPED_TEST(SimdVectorTestCommon, FillVector)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u};
    // calls seqan::fillVector(a, 0, 1, 2, 3, ..., length-1);
    call_fill_vector(a, std::make_index_sequence<length>{});
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)a[i] << " = " << i << std::endl;
        SEQAN_ASSERT_EQ(a[i], static_cast<TValue>(i));
    }
}
// cmpEq must produce the mask's true value exactly in lanes where a == b.
// Lane 1 is forced equal so every instantiation has at least one match.
// (fillVectors presumably seeds a[i] = -3 + i*3 and b[i] = length - i, which
// the reference values below mirror — see its definition earlier in the file.)
SEQAN_TYPED_TEST(SimdVectorTestCommon, CmpEqual)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TSimdMaskVector = typename SimdMaskVector<TSimdVector>::Type;
    using TValue = typename TestFixture::TValue;
    using TBoolValue = decltype(trueValue<TSimdVector>());
    constexpr auto length = TestFixture::LENGTH;
    TBoolValue false_ = 0,
               true_ = trueValue<TSimdVector>();
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    // There is never a match for the most instantiations of this test.
    a[1] = 23;
    b[1] = 23;
    TSimdMaskVector c = cmpEq(a, b);
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = (i == 1) ? 23 : (-3 + i * 3);
        TValue b_i = (i == 1) ? 23 : (length - i);
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " == " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] == b[i] ? true_ : false_);
        SEQAN_ASSERT_EQ(c[i], a_i == b_i ? true_ : false_);
    }
}
// cmpGt must produce the mask's true value exactly in lanes where a > b.
SEQAN_TYPED_TEST(SimdVectorTestCommon, CmpGt)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TSimdMaskVector = typename SimdMaskVector<TSimdVector>::Type;
    using TValue = typename TestFixture::TValue;
    using TBoolValue = decltype(trueValue<TSimdVector>());
    constexpr auto length = TestFixture::LENGTH;
    TBoolValue false_ = 0,
               true_ = trueValue<TSimdVector>();
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    TSimdMaskVector c = cmpGt(a, b);
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " > " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] > b[i] ? true_ : false_);
        SEQAN_ASSERT_EQ(c[i], a_i > b_i ? true_ : false_);
    }
}
// max must select the lane-wise maximum, matching std::max per lane.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Max)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = max(a, b);
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = max (" << (int)a[i] << ", " << (int)b[i] << ")" << std::endl;
        SEQAN_ASSERT_EQ(c[i], std::max(a[i], b[i]));
        SEQAN_ASSERT_EQ(c[i], std::max(a_i, b_i));
    }
}
// min must select the lane-wise minimum, matching std::min per lane.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Min)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = min(a, b);
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = min (" << (int)a[i] << ", " << (int)b[i] << ")" << std::endl;
        SEQAN_ASSERT_EQ(c[i], std::min(a[i], b[i]));
        SEQAN_ASSERT_EQ(c[i], std::min(a_i, b_i));
    }
}
// operator| must compute the lane-wise bitwise OR.
SEQAN_TYPED_TEST(SimdVectorTestCommon, BitwiseOr)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = a | b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " | " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] | b[i]);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i | b_i));
    }
}
// operator|= must behave like the binary OR applied in place.
SEQAN_TYPED_TEST(SimdVectorTestCommon, BitwiseOrAssign)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u}, c{0u};
    fillVectors(a, b);
    c = a;
    c |= b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " | " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] | b[i]);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i | b_i));
    }
}
// operator& must compute the lane-wise bitwise AND.
SEQAN_TYPED_TEST(SimdVectorTestCommon, BitwiseAnd)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = a & b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " & " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] & b[i]);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i & b_i));
    }
}
// operator&= must behave like the binary AND applied in place.
SEQAN_TYPED_TEST(SimdVectorTestCommon, BitwiseAndAssign)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u}, c{0u};
    fillVectors(a, b);
    c = a;
    c &= b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " & " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] & b[i]);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i & b_i));
    }
}
// operator~ must compute the lane-wise bitwise complement.
SEQAN_TYPED_TEST(SimdVectorTestCommon, BitwiseNot)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = ~a;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = ~" << (int)a[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(~a[i]));
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(~a_i));
    }
}
// operator+ must compute the lane-wise sum (with the scalar type's wrap-around
// semantics, hence the casts in the reference values).
SEQAN_TYPED_TEST(SimdVectorTestCommon, Addition)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = a + b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " + " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a[i] + b[i]));
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i + b_i));
    }
}
// operator- must compute the lane-wise difference.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Subtraction)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = a - b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " - " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a[i] - b[i]));
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i - b_i));
    }
}
// operator* must compute the lane-wise product.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Multiplication)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = a * b;
    for (size_t i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " * " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a[i] * b[i]));
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i * b_i));
    }
}
// operator/ must compute the lane-wise quotient; b[i] = length - i is always
// >= 1 within the loop, so no division by zero can occur.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Division)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = a / b;
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " / " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] / b[i]);
        SEQAN_ASSERT_EQ(c[i], a_i / b_i);
    }
}
// andNot(a, b) must compute (~a) & b per lane.
SEQAN_TYPED_TEST(SimdVectorTestCommon, BitwiseAndNot)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = andNot(a, b);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = (~" << (int)a[i] << ") & " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(~a[i] & b[i]));
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(~(-3 + i * 3) & (length - i)));
    }
}
// shiftRightLogical must shift every lane right by the given bit count.
SEQAN_TYPED_TEST(SimdVectorTestCommon, ShiftRightLogical)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    // ensure that a >= 0, because (-3) >> 2 has undefined behavior according to
    // C++ 11 standard.
    a = a + createVector<TSimdVector>(3);
    auto c = shiftRightLogical(a, 2);
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = i * 3;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " >> " << (int)2 << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] >> 2);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a_i >> 2));
    }
}
// blend(b, a, mask) must select a[i] where the mask is true and b[i]
// elsewhere; here the mask is cmpGt(a, b), i.e. lane-wise max via blend.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Blend)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    auto c = blend(b, a, cmpGt(a, b));
    for (auto i = 0; i < length; ++i)
    {
        TValue a_i = -3 + i * 3, b_i = length - i;
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << " > " << (int)b[i] << " ? " << (int)a[i] << " : " << (int)b[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i] > b[i] ? (TValue)a[i] : (TValue)b[i]);
        SEQAN_ASSERT_EQ(c[i], a_i > b_i ? a_i : b_i);
    }
}
// storeu must write all lanes to an (unaligned) scalar array in order.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Storeu)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, b{0u};
    fillVectors(a, b);
    TValue c[length];
    storeu(c, a);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i]);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(-3 + i * 3));
    }
}
// load must read a full vector back from a suitably aligned scalar array
// (round-trip through storeu).
SEQAN_TYPED_TEST(SimdVectorTestCommon, Load)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, c{0u};
    fillVectors(a, c);
    // load requires vector alignment, hence alignas on the buffer.
    alignas(TSimdVector) TValue b[length];
    storeu(b, a);
    c = load<TSimdVector>(b);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[i]);
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(-3 + i * 3));
    }
}
// gather must pick b[idx[i]] per lane; with a reversed index sequence the
// result is the reversal of a.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Gather)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    using TValue = typename TestFixture::TValue;
    constexpr auto length = TestFixture::LENGTH;
    TSimdVector a{0u}, idx{0u};
    fillVectors(a, idx);
    reverseIndexSequence(idx, length);
    TValue b[length];
    storeu(b, a);
    auto c = gather(b, idx);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[idx[i]] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[idx[i]]);
        SEQAN_ASSERT_EQ(c[i], a[length - i - 1]);
    }
}
// shuffleVector with a constant index of 1 must broadcast lane 1 everywhere.
SEQAN_TYPED_TEST(SimdVectorTestCommon, ShuffleConstant1)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    constexpr auto length = TestFixture::LENGTH;
    typedef typename SimdSwizzleVector<TSimdVector>::Type TSimdSwizzleVector;
    TSimdVector a{0u}, b{0u};
    auto idx = createVector<TSimdSwizzleVector>(1);
    fillVectors(a, b);
    auto c = shuffleVector(a, idx);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[idx[i]] << ", idx: " << (int)idx[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[idx[i]]);
        SEQAN_ASSERT_EQ(c[i], a[1]);
    }
}
// shuffleVector with a constant index of length-2 must broadcast that lane
// (exercises an index near the upper bound).
SEQAN_TYPED_TEST(SimdVectorTestCommon, ShuffleConstant2)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    constexpr auto length = TestFixture::LENGTH;
    typedef typename SimdSwizzleVector<TSimdVector>::Type TSimdSwizzleVector;
    TSimdVector a{0u}, b{0u};
    auto idx = createVector<TSimdSwizzleVector>(length - 2);
    fillVectors(a, b);
    auto c = shuffleVector(a, idx);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[idx[i]] << ", idx: " << (int)idx[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[idx[i]]);
        SEQAN_ASSERT_EQ(c[i], a[length-2]);
    }
}
// shuffleVector with a reversed index sequence must reverse the vector.
SEQAN_TYPED_TEST(SimdVectorTestCommon, Shuffle)
{
    using namespace seqan;
    using TSimdVector = typename TestFixture::TSimdVector;
    constexpr auto length = TestFixture::LENGTH;
    typedef typename SimdSwizzleVector<TSimdVector>::Type TSimdSwizzleVector;
    TSimdVector a{0u}, b{0u};
    TSimdSwizzleVector idx{0u};
    fillVectors(a, b);
    reverseIndexSequence(idx, length);
    auto c = shuffleVector(a, idx);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (int)c[i] << " = " << (int)a[idx[i]] << ", idx: " << (int)idx[i] << std::endl;
        SEQAN_ASSERT_EQ(c[i], a[idx[i]]);
        SEQAN_ASSERT_EQ(c[i], a[length - i - 1]);
    }
}
// Shared driver for the gather tests below: fills an array of TArrayValue
// (twice the vector length, values (i-1)*3), gathers with a reversed index
// sequence and checks each lane against the array, cast to the vector's
// scalar type. TArrayValue varies per test to cover all source widths.
template <typename TSimdVector, typename TValue, typename TArrayValue>
inline void test_gather_array()
{
    using namespace seqan;
    constexpr auto length = LENGTH<TSimdVector>::VALUE;
    TSimdVector idx{0u};
    reverseIndexSequence(idx, length);
    TArrayValue a[2*length];
    // fill gather array
    for (auto i = 0; i < 2*length; ++i)
    {
        a[i] = (i-1)*3;
    }
    auto c = gather(a, idx);
    for (auto i = 0; i < length; ++i)
    {
        // std::cout << "DEBUG: " << i << " / " << length << ": " << (TValue)c[i] << " = " << (TValue)a[idx[i]] << std::endl;
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a[idx[i]]));
        SEQAN_ASSERT_EQ(c[i], static_cast<TValue>(a[length - i - 1]));
    }
}
// gather from an int8_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, CharArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        int8_t
    >();
}
// gather from an int16_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, ShortArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        int16_t
    >();
}
// gather from an int32_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, IntArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        int32_t
    >();
}
// gather from an int64_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, LongArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        int64_t
    >();
}
// gather from a uint8_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, UCharArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        uint8_t
    >();
}
// gather from a uint16_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, UShortArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        uint16_t
    >();
}
// gather from a uint32_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, UIntArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        uint32_t
    >();
}
// gather from a uint64_t source array.
SEQAN_TYPED_TEST(SimdVectorTestGather, ULongArray)
{
    test_gather_array<
        typename TestFixture::TSimdVector,
        typename TestFixture::TValue,
        uint64_t
    >();
}
// Matrix-transpose tests; gated on the instruction sets the respective
// register widths require (SSE4.1 for 8x8/16x16, AVX2 for 32x32).
#ifdef __SSE4_1__
SEQAN_DEFINE_TEST(test_simd_transpose_8x8)
{
    seqan::test_matrix_transpose<8, seqan::SimdVector<unsigned char, 8>::Type>();
}
SEQAN_DEFINE_TEST(test_simd_transpose_16x16)
{
    seqan::test_matrix_transpose<16, seqan::SimdVector<unsigned char, 16>::Type>();
}
#endif  // #ifdef __SSE4_1__
#ifdef __AVX2__
SEQAN_DEFINE_TEST(test_simd_transpose_32x32)
{
    seqan::test_matrix_transpose<32, seqan::SimdVector<unsigned char, 32>::Type >();
}
#endif  // #ifdef __AVX2__
#endif // SEQAN_SIMD_ENABLED
#endif // #ifndef SEQAN_CORE_TESTS_SIMD_TEST_SIMD_VECTOR_H_
| 15,228 |
999 | //
// Copyright (C) 2004-2008 <NAME>, <NAME>
// Copyright (C) 2017 <NAME>.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#define SOCI_SOURCE
#include "soci/error.h"
#include "soci-mktime.h"
#include <climits>
#include <cstdlib>
#include <ctime>
namespace // anonymous
{
// helper function for parsing decimal data (for std::tm)
//
// Parses one decimal field at p1 via std::strtol. On success p2 points at the
// first non-digit character, p1 is advanced one character past it (skipping
// the separator for the next call), and the value is returned as int.
// Throws soci_error if no digits were consumed, the value is negative, or it
// exceeds INT_MAX.
// NOTE(review): when *p2 is the terminating NUL, p1 ends up one past it; the
// pointer is never dereferenced afterwards by the current caller, but new
// callers must check *p2 before calling again. Also, on platforms where
// sizeof(long) == sizeof(int) the INT_MAX check can never fire (strtol
// saturates at LONG_MAX) — harmless, but worth knowing.
int parse10(char const * & p1, char * & p2)
{
    long v = std::strtol(p1, &p2, 10);
    if (p2 != p1)
    {
        if (v < 0)
            throw soci::soci_error("Negative date/time field component.");
        if (v > INT_MAX)
            throw soci::soci_error("Out of range date/time field component.");
        p1 = p2 + 1;
        // Cast is safe due to check above.
        return static_cast<int>(v);
    }
    else
    {
        throw soci::soci_error("Cannot parse date/time field component.");
    }
}
} // namespace anonymous
// Parses a date/time string into std::tm. Accepted shapes (disambiguated by
// the separator after the first field and by whether a space follows the
// third field):
//   "YYYY-MM-DD hh:mm:ss"  -> full timestamp
//   "YYYY-MM-DD"           -> date only, time left at 00:00:00
//   "hh:mm:ss"             -> time only, date left at 1900-01-01
// Any unparsable field makes parse10 throw soci_error.
void soci::details::parse_std_tm(char const * buf, std::tm & t)
{
    char const * p1 = buf;
    char * p2;
    char separator;
    int a, b, c;
    int year = 1900, month = 1, day = 1;
    int hour = 0, minute = 0, second = 0;
    // Read the first three fields; remember the separator after the first
    // one to tell a date ('-') from a time-of-day (':') later.
    a = parse10(p1, p2);
    separator = *p2;
    b = parse10(p1, p2);
    c = parse10(p1, p2);
    if (*p2 == ' ')
    {
        // there are more elements to parse
        // - assume that what was already parsed is a date part
        // and that the remaining elements describe the time of day
        year = a;
        month = b;
        day = c;
        hour = parse10(p1, p2);
        minute = parse10(p1, p2);
        second = parse10(p1, p2);
    }
    else
    {
        // only three values have been parsed
        if (separator == '-')
        {
            // assume the date value was read
            // (leave the time of day as 00:00:00)
            year = a;
            month = b;
            day = c;
        }
        else
        {
            // assume the time of day was read
            // (leave the date part as 1900-01-01)
            hour = a;
            minute = b;
            second = c;
        }
    }
    // Normalize the six components into the std::tm structure.
    mktime_from_ymdhms(t, year, month, day, hour, minute, second);
}
| 1,047 |
1,091 | /*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.codec.impl;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.codec.CodecContext;
import org.onosproject.codec.JsonCodec;
import org.onosproject.net.pi.model.PiActionModel;
import org.onosproject.net.pi.model.PiActionParamModel;
/**
* Codec for PiActionModel.
*/
public class PiActionModelCodec extends JsonCodec<PiActionModel> {

    private static final String NAME = "name";
    private static final String PARAMS = "params";

    /**
     * Encodes the given action model as a JSON object with its name and an
     * array of its encoded parameter models.
     */
    @Override
    public ObjectNode encode(PiActionModel action, CodecContext context) {
        final ObjectNode node = context.mapper().createObjectNode();
        node.put(NAME, action.id().toString());
        final ArrayNode paramArray = node.putArray(PARAMS);
        // Delegate each parameter to the registered PiActionParamModel codec.
        for (PiActionParamModel param : action.params()) {
            paramArray.add(context.encode(param, PiActionParamModel.class));
        }
        return node;
    }
}
| 520 |
466 | <gh_stars>100-1000
package com.gykj.zhumulangma.home.fragment;
import android.Manifest;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.ViewModelProvider;
import com.alibaba.android.arouter.facade.Postcard;
import com.alibaba.android.arouter.facade.annotation.Route;
import com.blankj.utilcode.util.BarUtils;
import com.blankj.utilcode.util.CollectionUtils;
import com.gykj.zhumulangma.common.Constants;
import com.gykj.zhumulangma.common.adapter.TFragmentStateAdapter;
import com.gykj.zhumulangma.common.adapter.TabNavigatorAdapter;
import com.gykj.zhumulangma.common.event.KeyCode;
import com.gykj.zhumulangma.common.extra.ViewPagerHelper;
import com.gykj.zhumulangma.common.mvvm.view.BaseMvvmFragment;
import com.gykj.zhumulangma.common.util.RouteHelper;
import com.gykj.zhumulangma.common.util.ToastUtil;
import com.gykj.zhumulangma.home.R;
import com.gykj.zhumulangma.home.bean.TabBean;
import com.gykj.zhumulangma.home.databinding.HomeFragmentMainBinding;
import com.gykj.zhumulangma.home.mvvm.ViewModelFactory;
import com.gykj.zhumulangma.home.mvvm.viewmodel.MainHomeViewModel;
import com.jakewharton.rxbinding3.view.RxView;
import com.sunfusheng.marqueeview.MarqueeView;
import com.tbruyelle.rxpermissions2.RxPermissions;
import com.wuhenzhizao.titlebar.statusbar.StatusBarUtils;
import com.ximalaya.ting.android.opensdk.model.word.HotWord;
import net.lucode.hackware.magicindicator.buildins.commonnavigator.CommonNavigator;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* Author: Thomas.
* <br/>Date: 2019/9/10 8:23
* <br/>Email: <EMAIL>
* <br/>Description:首页
*/
@Route(path = Constants.Router.Home.F_MAIN)
public class MainHomeFragment extends BaseMvvmFragment<HomeFragmentMainBinding, MainHomeViewModel>
implements View.OnClickListener, MarqueeView.OnItemClickListener {
public MainHomeFragment() {
}
@Override
public int onBindLayout() {
return R.layout.home_fragment_main;
}
@Override
protected boolean enableSwipeBack() {
return false;
}
@Override
public void initView() {
if (StatusBarUtils.supportTransparentStatusBar()) {
mBinding.clTitlebar.setPadding(0, BarUtils.getStatusBarHeight(), 0, 0);
}
}
@Override
public void initListener() {
super.initListener();
RxView.clicks(mBinding.llSearch)
.doOnSubscribe(this)
.throttleFirst(1, TimeUnit.SECONDS)
.subscribe(unit -> {
Postcard postcard = mRouter.build(Constants.Router.Home.F_SEARCH);
if (!CollectionUtils.isEmpty(mBinding.marqueeView.getMessages())) {
postcard.withString(KeyCode.Home.HOTWORD, (String) mBinding.marqueeView.getMessages()
.get(mBinding.marqueeView.getPosition()));
}
RouteHelper.navigateTo(postcard);
});
mBinding.ivDownload.setOnClickListener(this);
mBinding.ivHistory.setOnClickListener(this);
RxView.clicks(mBinding.ivMessage)
.doOnSubscribe(this)
.throttleFirst(1, TimeUnit.SECONDS)
.subscribe(unit -> RouteHelper.navigateTo(Constants.Router.User.F_MESSAGE));
mBinding.marqueeView.setOnItemClickListener(this);
}
@Override
public void initData() {
mViewModel.init();
}
@Override
protected void onRevisible() {
super.onRevisible();
if (CollectionUtils.isEmpty(mBinding.marqueeView.getMessages())) {
mViewModel.getHotWords();
}else {
mBinding.marqueeView.startFlipping();
}
}
@Override
public boolean enableSimplebar() {
return false;
}
@Override
public void onClick(View v) {
int id = v.getId();
if (id == R.id.iv_download) {
new RxPermissions(this).requestEach(new String[]{Manifest.permission.CAMERA})
.subscribe(permission -> {
if (permission.granted) {
RouteHelper.navigateTo(Constants.Router.Discover.F_SCAN);
} else {
ToastUtil.showToast("请允许应用使用相机权限");
}
});
} else if (id == R.id.iv_history) {
RouteHelper.navigateTo(Constants.Router.Listen.F_HISTORY);
}
}
@Override
public ViewModelProvider.Factory onBindViewModelFactory() {
return ViewModelFactory.getInstance(mApplication);
}
@Override
public void initViewObservable() {
    // Hot-word list arrived: extract the search words and feed them into the
    // scrolling marquee view.
    mViewModel.getHotWordsEvent().observe(this, hotWords -> {
        List<String> words = new ArrayList<>(hotWords.size());
        for (HotWord word : hotWords) {
            words.add(word.getSearchword());
        }
        mBinding.marqueeView.startWithList(words);
    });
    // Tab metadata arrived: build one ColumnFragment per tab, install them in
    // the pager, and wire the MagicIndicator to track page changes.
    mViewModel.getTabsEvent().observe(this, tabBeans -> {
        List<String> titles = new ArrayList<>(tabBeans.size());
        List<Fragment> fragments = new ArrayList<>();
        for (TabBean tabBean : tabBeans) {
            // Each tab gets its own fragment, parameterized via arguments.
            ColumnFragment columnFragment = new ColumnFragment();
            Bundle bundle = new Bundle();
            bundle.putParcelable(KeyCode.Home.TAB, tabBean);
            columnFragment.setArguments(bundle);
            titles.add(tabBean.getCatName());
            fragments.add(columnFragment);
        }
        TFragmentStateAdapter adapter = new TFragmentStateAdapter(MainHomeFragment.this, fragments);
        // Keep every page instantiated so tabs are not rebuilt when switching.
        mBinding.viewpager.setOffscreenPageLimit(fragments.size());
        mBinding.viewpager.setAdapter(adapter);
        final CommonNavigator commonNavigator = new CommonNavigator(mActivity);
        commonNavigator.setAdapter(new TabNavigatorAdapter(titles, mBinding.viewpager, 50));
        // commonNavigator.setAdjustMode(true);
        mBinding.magicIndicator.setNavigator(commonNavigator);
        ViewPagerHelper.bind(mBinding.magicIndicator, mBinding.viewpager);
    });
}
@Override
public void onItemClick(int position, TextView textView) {
    // Tapping a marquee entry opens the search page pre-filled with that hot word.
    RouteHelper.navigateTo(mRouter.build(Constants.Router.Home.F_SEARCH)
            .withString(KeyCode.Home.HOTWORD, (String) mBinding.marqueeView.getMessages().get(position)));
}
@Override
public void onPause() {
    super.onPause();
    // Stop the marquee animation while the fragment is in the background;
    // onRevisible() restarts it.
    if (mBinding.marqueeView != null) {
        mBinding.marqueeView.stopFlipping();
    }
}
@Override
protected boolean enableLazy() {
    // Disable lazy loading: initialize this fragment immediately rather than
    // on first display.
    return false;
}
}
| 3,096 |
3,442 | /*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.plugin.branding;
import java.lang.reflect.*;
import java.util.*;
import net.java.sip.communicator.service.gui.*;
import net.java.sip.communicator.service.resources.*;
import net.java.sip.communicator.util.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.resources.*;
import org.osgi.framework.*;
/**
* Branding bundle activator.
*/
public class BrandingActivator
    extends AbstractServiceDependentActivator
{
    /** Logger for this activator. */
    private final Logger logger = Logger.getLogger(BrandingActivator.class);

    /** OSGi bundle context, stashed so the static accessors below can use it. */
    private static BundleContext bundleContext;

    /** Cached resource service; resolved lazily in {@link #getResources()}. */
    private static ResourceManagementService resourcesService;

    /**
     * Setting context to the activator, as soon as we have one.
     *
     * @param context the context to set.
     */
    @Override
    public void setBundleContext(BundleContext context)
    {
        bundleContext = context;
    }

    /**
     * This activator depends on UIService.
     * @return the class name of uiService.
     */
    @Override
    public Class<?> getDependentServiceClass()
    {
        return UIService.class;
    }

    /**
     * The dependent service is available and the bundle will start.
     * @param dependentService the UIService this activator is waiting.
     */
    @Override
    public void start(Object dependentService)
    {
        // register the about dialog menu entry
        registerMenuEntry((UIService) dependentService);
    }

    /**
     * No explicit teardown is needed: services registered in start() are
     * unregistered by the OSGi framework when the bundle stops.
     */
    @Override
    public void stop(BundleContext context) throws Exception
    {
    }

    /**
     * Register the about menu entry. On Mac OS X the native application menu
     * is tried first; if that is not applicable or fails, the entry goes into
     * the regular Help menus instead.
     *
     * @param uiService the UI service used to decide menu placement.
     */
    private void registerMenuEntry(UIService uiService)
    {
        if ((uiService == null)
            || !uiService.useMacOSXScreenMenuBar()
            || !registerMenuEntryMacOSX(uiService))
        {
            registerMenuEntryNonMacOSX(uiService);
        }
    }

    /**
     * Hooks the About entry into the native Mac OS X application menu.
     * Reflection is used to avoid a hard compile-time dependency on the
     * Mac-only MacOSXAboutRegistration class.
     *
     * @param uiService not used by the reflective registration itself.
     * @return true if the Mac-specific registration reported success.
     */
    private boolean registerMenuEntryMacOSX(UIService uiService)
    {
        try
        {
            Class<?> clazz =
                Class
                    .forName("net.java.sip.communicator.plugin.branding.MacOSXAboutRegistration");
            Method method = clazz.getMethod("run", (Class<?>[]) null);
            Object result = method.invoke(null, (Object[]) null);

            if (result instanceof Boolean)
            {
                return ((Boolean) result).booleanValue();
            }
        }
        catch (Exception ex)
        {
            logger.error("Failed to register Mac OS X-specific About handling.",
                ex);
        }
        // Fall through here means the Mac path did not succeed; the caller
        // falls back to the cross-platform registration.
        return false;
    }

    /**
     * Registers the About window plugin component in both the main Help menu
     * and the chat window Help menu.
     *
     * @param uiService unused here; kept for symmetry with the Mac OS X path.
     */
    private void registerMenuEntryNonMacOSX(UIService uiService)
    {
        // Register the about window plugin component in the main help menu.
        Hashtable<String, String> helpMenuFilter
            = new Hashtable<String, String>();
        helpMenuFilter.put( Container.CONTAINER_ID,
                            Container.CONTAINER_HELP_MENU.getID());

        bundleContext.registerService(
            PluginComponentFactory.class.getName(),
            new PluginComponentFactory(
                Container.CONTAINER_HELP_MENU)
            {
                @Override
                protected PluginComponent getPluginInstance()
                {
                    return new AboutWindowPluginComponent(getContainer(), this);
                }
            },
            helpMenuFilter);

        if (logger.isInfoEnabled())
            logger.info("ABOUT WINDOW ... [REGISTERED]");

        // Register the about window plugin component in the chat help menu.
        Hashtable<String, String> chatHelpMenuFilter
            = new Hashtable<String, String>();
        chatHelpMenuFilter.put( Container.CONTAINER_ID,
                                Container.CONTAINER_CHAT_HELP_MENU.getID());

        bundleContext.registerService(
            PluginComponentFactory.class.getName(),
            new PluginComponentFactory(
                Container.CONTAINER_CHAT_HELP_MENU)
            {
                @Override
                protected PluginComponent getPluginInstance()
                {
                    return new AboutWindowPluginComponent(getContainer(), this);
                }
            },
            chatHelpMenuFilter);

        if (logger.isInfoEnabled())
            logger.info("CHAT ABOUT WINDOW ... [REGISTERED]");
    }

    /**
     * @return the bundle context stored by {@link #setBundleContext}.
     */
    static BundleContext getBundleContext()
    {
        return bundleContext;
    }

    /**
     * Returns the <tt>ResourceManagementService</tt>.
     *
     * @return the <tt>ResourceManagementService</tt>.
     */
    public static ResourceManagementService getResources()
    {
        // Lazily resolved; the lookup is idempotent, so a concurrent first
        // call racing here is benign.
        if (resourcesService == null)
            resourcesService
                = ResourceManagementServiceUtils.getService(bundleContext);
        return resourcesService;
    }
}
| 2,272 |
844 | <gh_stars>100-1000
{
"github-username": "tanujajoshi1",
"favourite-emoji": "💚",
"favourite-music": "https://soundcloud.com/luiz-alves-625547605/sparkle-kimi-no-na-wa-your-name-ost" ,
"favourite-color": "#000000"
} | 109 |
1,673 | <filename>include/cbm_petscii_charmap.h
/*****************************************************************************/
/* */
/* cbm_petscii_charmap.h */
/* */
/* CBM system standard string mapping (ISO-8859-1 -> PetSCII) */
/* */
/* */
/* 2019-03-10, <NAME> */
/* */
/* This software is provided "as-is", without any expressed or implied */
/* warranty. In no event will the authors be held liable for any damages */
/* arising from the use of this software. */
/* */
/* Permission is granted to anyone to use this software for any purpose, */
/* including commercial applications, and to alter it and redistribute it */
/* freely, subject to the following restrictions: */
/* */
/* 1. The origin of this software must not be misrepresented; you must not */
/* claim that you wrote the original software. If you use this software */
/* in a product, an acknowledgment in the product documentation would be */
/* appreciated, but is not required. */
/* 2. Altered source versions must be plainly marked as such, and must not */
/* be misrepresented as being the original software. */
/* 3. This notice must not be removed or altered from any source */
/* distribution. */
/* */
/*****************************************************************************/
/* No include guard here! Multiple use in one file might be intentional. */
#pragma warn (remap-zero, push, off)
#pragma charmap (0x00, 0x00)
#pragma charmap (0x01, 0x01)
#pragma charmap (0x02, 0x02)
#pragma charmap (0x03, 0x03)
#pragma charmap (0x04, 0x04)
#pragma charmap (0x05, 0x05)
#pragma charmap (0x06, 0x06)
#pragma charmap (0x07, 0x07)
#pragma charmap (0x08, 0x14)
#pragma charmap (0x09, 0x09)
#pragma charmap (0x0A, 0x0D)
#pragma charmap (0x0B, 0x11)
#pragma charmap (0x0C, 0x93)
#pragma charmap (0x0D, 0x0A)
#pragma charmap (0x0E, 0x0E)
#pragma charmap (0x0F, 0x0F)
#pragma charmap (0x10, 0x10)
#pragma charmap (0x11, 0x0B)
#pragma charmap (0x12, 0x12)
#pragma charmap (0x13, 0x13)
#pragma charmap (0x14, 0x08)
#pragma charmap (0x15, 0x15)
#pragma charmap (0x16, 0x16)
#pragma charmap (0x17, 0x17)
#pragma charmap (0x18, 0x18)
#pragma charmap (0x19, 0x19)
#pragma charmap (0x1A, 0x1A)
#pragma charmap (0x1B, 0x1B)
#pragma charmap (0x1C, 0x1C)
#pragma charmap (0x1D, 0x1D)
#pragma charmap (0x1E, 0x1E)
#pragma charmap (0x1F, 0x1F)
#pragma charmap (0x20, 0x20)
#pragma charmap (0x21, 0x21)
#pragma charmap (0x22, 0x22)
#pragma charmap (0x23, 0x23)
#pragma charmap (0x24, 0x24)
#pragma charmap (0x25, 0x25)
#pragma charmap (0x26, 0x26)
#pragma charmap (0x27, 0x27)
#pragma charmap (0x28, 0x28)
#pragma charmap (0x29, 0x29)
#pragma charmap (0x2A, 0x2A)
#pragma charmap (0x2B, 0x2B)
#pragma charmap (0x2C, 0x2C)
#pragma charmap (0x2D, 0x2D)
#pragma charmap (0x2E, 0x2E)
#pragma charmap (0x2F, 0x2F)
#pragma charmap (0x30, 0x30)
#pragma charmap (0x31, 0x31)
#pragma charmap (0x32, 0x32)
#pragma charmap (0x33, 0x33)
#pragma charmap (0x34, 0x34)
#pragma charmap (0x35, 0x35)
#pragma charmap (0x36, 0x36)
#pragma charmap (0x37, 0x37)
#pragma charmap (0x38, 0x38)
#pragma charmap (0x39, 0x39)
#pragma charmap (0x3A, 0x3A)
#pragma charmap (0x3B, 0x3B)
#pragma charmap (0x3C, 0x3C)
#pragma charmap (0x3D, 0x3D)
#pragma charmap (0x3E, 0x3E)
#pragma charmap (0x3F, 0x3F)
#pragma charmap (0x40, 0x40)
#pragma charmap (0x41, 0xC1)
#pragma charmap (0x42, 0xC2)
#pragma charmap (0x43, 0xC3)
#pragma charmap (0x44, 0xC4)
#pragma charmap (0x45, 0xC5)
#pragma charmap (0x46, 0xC6)
#pragma charmap (0x47, 0xC7)
#pragma charmap (0x48, 0xC8)
#pragma charmap (0x49, 0xC9)
#pragma charmap (0x4A, 0xCA)
#pragma charmap (0x4B, 0xCB)
#pragma charmap (0x4C, 0xCC)
#pragma charmap (0x4D, 0xCD)
#pragma charmap (0x4E, 0xCE)
#pragma charmap (0x4F, 0xCF)
#pragma charmap (0x50, 0xD0)
#pragma charmap (0x51, 0xD1)
#pragma charmap (0x52, 0xD2)
#pragma charmap (0x53, 0xD3)
#pragma charmap (0x54, 0xD4)
#pragma charmap (0x55, 0xD5)
#pragma charmap (0x56, 0xD6)
#pragma charmap (0x57, 0xD7)
#pragma charmap (0x58, 0xD8)
#pragma charmap (0x59, 0xD9)
#pragma charmap (0x5A, 0xDA)
#pragma charmap (0x5B, 0x5B)
#pragma charmap (0x5C, 0xBF)
#pragma charmap (0x5D, 0x5D)
#pragma charmap (0x5E, 0x5E)
#pragma charmap (0x5F, 0xA4)
#pragma charmap (0x60, 0xAD)
#pragma charmap (0x61, 0x41)
#pragma charmap (0x62, 0x42)
#pragma charmap (0x63, 0x43)
#pragma charmap (0x64, 0x44)
#pragma charmap (0x65, 0x45)
#pragma charmap (0x66, 0x46)
#pragma charmap (0x67, 0x47)
#pragma charmap (0x68, 0x48)
#pragma charmap (0x69, 0x49)
#pragma charmap (0x6A, 0x4A)
#pragma charmap (0x6B, 0x4B)
#pragma charmap (0x6C, 0x4C)
#pragma charmap (0x6D, 0x4D)
#pragma charmap (0x6E, 0x4E)
#pragma charmap (0x6F, 0x4F)
#pragma charmap (0x70, 0x50)
#pragma charmap (0x71, 0x51)
#pragma charmap (0x72, 0x52)
#pragma charmap (0x73, 0x53)
#pragma charmap (0x74, 0x54)
#pragma charmap (0x75, 0x55)
#pragma charmap (0x76, 0x56)
#pragma charmap (0x77, 0x57)
#pragma charmap (0x78, 0x58)
#pragma charmap (0x79, 0x59)
#pragma charmap (0x7A, 0x5A)
#pragma charmap (0x7B, 0xB3)
#pragma charmap (0x7C, 0xDD)
#pragma charmap (0x7D, 0xAB)
#pragma charmap (0x7E, 0xB1)
#pragma charmap (0x7F, 0xDF)
#pragma charmap (0x80, 0x80)
#pragma charmap (0x81, 0x81)
#pragma charmap (0x82, 0x82)
#pragma charmap (0x83, 0x83)
#pragma charmap (0x84, 0x84)
#pragma charmap (0x85, 0x85)
#pragma charmap (0x86, 0x86)
#pragma charmap (0x87, 0x87)
#pragma charmap (0x88, 0x88)
#pragma charmap (0x89, 0x89)
#pragma charmap (0x8A, 0x8A)
#pragma charmap (0x8B, 0x8B)
#pragma charmap (0x8C, 0x8C)
#pragma charmap (0x8D, 0x8D)
#pragma charmap (0x8E, 0x8E)
#pragma charmap (0x8F, 0x8F)
#pragma charmap (0x90, 0x90)
#pragma charmap (0x91, 0x91)
#pragma charmap (0x92, 0x92)
#pragma charmap (0x93, 0x0C)
#pragma charmap (0x94, 0x94)
#pragma charmap (0x95, 0x95)
#pragma charmap (0x96, 0x96)
#pragma charmap (0x97, 0x97)
#pragma charmap (0x98, 0x98)
#pragma charmap (0x99, 0x99)
#pragma charmap (0x9A, 0x9A)
#pragma charmap (0x9B, 0x9B)
#pragma charmap (0x9C, 0x9C)
#pragma charmap (0x9D, 0x9D)
#pragma charmap (0x9E, 0x9E)
#pragma charmap (0x9F, 0x9F)
#pragma charmap (0xA0, 0xA0)
#pragma charmap (0xA1, 0xA1)
#pragma charmap (0xA2, 0xA2)
#pragma charmap (0xA3, 0xA3)
#pragma charmap (0xA4, 0xA4)
#pragma charmap (0xA5, 0xA5)
#pragma charmap (0xA6, 0xA6)
#pragma charmap (0xA7, 0xA7)
#pragma charmap (0xA8, 0xA8)
#pragma charmap (0xA9, 0xA9)
#pragma charmap (0xAA, 0xAA)
#pragma charmap (0xAB, 0xAB)
#pragma charmap (0xAC, 0xAC)
#pragma charmap (0xAD, 0xAD)
#pragma charmap (0xAE, 0xAE)
#pragma charmap (0xAF, 0xAF)
#pragma charmap (0xB0, 0xB0)
#pragma charmap (0xB1, 0xB1)
#pragma charmap (0xB2, 0xB2)
#pragma charmap (0xB3, 0xB3)
#pragma charmap (0xB4, 0xB4)
#pragma charmap (0xB5, 0xB5)
#pragma charmap (0xB6, 0xB6)
#pragma charmap (0xB7, 0xB7)
#pragma charmap (0xB8, 0xB8)
#pragma charmap (0xB9, 0xB9)
#pragma charmap (0xBA, 0xBA)
#pragma charmap (0xBB, 0xBB)
#pragma charmap (0xBC, 0xBC)
#pragma charmap (0xBD, 0xBD)
#pragma charmap (0xBE, 0xBE)
#pragma charmap (0xBF, 0xBF)
#pragma charmap (0xC0, 0x60)
#pragma charmap (0xC1, 0x61)
#pragma charmap (0xC2, 0x62)
#pragma charmap (0xC3, 0x63)
#pragma charmap (0xC4, 0x64)
#pragma charmap (0xC5, 0x65)
#pragma charmap (0xC6, 0x66)
#pragma charmap (0xC7, 0x67)
#pragma charmap (0xC8, 0x68)
#pragma charmap (0xC9, 0x69)
#pragma charmap (0xCA, 0x6A)
#pragma charmap (0xCB, 0x6B)
#pragma charmap (0xCC, 0x6C)
#pragma charmap (0xCD, 0x6D)
#pragma charmap (0xCE, 0x6E)
#pragma charmap (0xCF, 0x6F)
#pragma charmap (0xD0, 0x70)
#pragma charmap (0xD1, 0x71)
#pragma charmap (0xD2, 0x72)
#pragma charmap (0xD3, 0x73)
#pragma charmap (0xD4, 0x74)
#pragma charmap (0xD5, 0x75)
#pragma charmap (0xD6, 0x76)
#pragma charmap (0xD7, 0x77)
#pragma charmap (0xD8, 0x78)
#pragma charmap (0xD9, 0x79)
#pragma charmap (0xDA, 0x7A)
#pragma charmap (0xDB, 0x7B)
#pragma charmap (0xDC, 0x7C)
#pragma charmap (0xDD, 0x7D)
#pragma charmap (0xDE, 0x7E)
#pragma charmap (0xDF, 0x7F)
#pragma charmap (0xE0, 0xE0)
#pragma charmap (0xE1, 0xE1)
#pragma charmap (0xE2, 0xE2)
#pragma charmap (0xE3, 0xE3)
#pragma charmap (0xE4, 0xE4)
#pragma charmap (0xE5, 0xE5)
#pragma charmap (0xE6, 0xE6)
#pragma charmap (0xE7, 0xE7)
#pragma charmap (0xE8, 0xE8)
#pragma charmap (0xE9, 0xE9)
#pragma charmap (0xEA, 0xEA)
#pragma charmap (0xEB, 0xEB)
#pragma charmap (0xEC, 0xEC)
#pragma charmap (0xED, 0xED)
#pragma charmap (0xEE, 0xEE)
#pragma charmap (0xEF, 0xEF)
#pragma charmap (0xF0, 0xF0)
#pragma charmap (0xF1, 0xF1)
#pragma charmap (0xF2, 0xF2)
#pragma charmap (0xF3, 0xF3)
#pragma charmap (0xF4, 0xF4)
#pragma charmap (0xF5, 0xF5)
#pragma charmap (0xF6, 0xF6)
#pragma charmap (0xF7, 0xF7)
#pragma charmap (0xF8, 0xF8)
#pragma charmap (0xF9, 0xF9)
#pragma charmap (0xFA, 0xFA)
#pragma charmap (0xFB, 0xFB)
#pragma charmap (0xFC, 0xFC)
#pragma charmap (0xFD, 0xFD)
#pragma charmap (0xFE, 0xFE)
#pragma charmap (0xFF, 0xFF)
#pragma warn (remap-zero, pop)
| 5,638 |
5,964 | <reponame>domenic/mojo<gh_stars>1000+
/*
* Copyright (C) 2012 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "DataLog.h"
#if OS(POSIX)
#include <pthread.h>
#include <unistd.h>
#endif
#define DATA_LOG_TO_FILE 0
// Uncomment to force logging to the given file regardless of what the environment variable says. Note that
// we will append ".<pid>.txt" where <pid> is the PID.
// This path won't work on Windows, make sure to change to something like C:\\Users\\<more path>\\log.txt.
#define DATA_LOG_FILENAME "/tmp/WTFLog"
namespace WTF {
#if USE(PTHREADS)
// One-shot guard so the shared log stream is initialized exactly once
// across threads.
static pthread_once_t initializeLogFileOnceKey = PTHREAD_ONCE_INIT;
#endif

// Shared log sink; set once by initializeLogFileOnce() and never torn down.
static FilePrintStream* file;
/*
 * One-time initialization of the shared log stream.
 *
 * When DATA_LOG_TO_FILE is enabled, the target path comes from
 * DATA_LOG_FILENAME (if defined) or the WTF_DATA_LOG_FILENAME environment
 * variable, with ".<pid>.txt" appended so concurrent processes do not
 * clobber each other's logs.  On any failure (or when file logging is
 * disabled) the stream falls back to stderr.
 */
static void initializeLogFileOnce()
{
#if DATA_LOG_TO_FILE
#ifdef DATA_LOG_FILENAME
    const char* filename = DATA_LOG_FILENAME;
#else
    const char* filename = getenv("WTF_DATA_LOG_FILENAME");
#endif

    // Only format the path once we know the base name is non-null: getenv()
    // returns NULL when the variable is unset, and passing NULL to a %s
    // conversion is undefined behavior.
    if (filename) {
        char actualFilename[1024];
        snprintf(actualFilename, sizeof(actualFilename), "%s.%d.txt", filename, getpid());
        file = FilePrintStream::open(actualFilename, "w").leakPtr();
        if (!file)
            fprintf(stderr, "Warning: Could not open log file %s for writing.\n", actualFilename);
    }
#endif // DATA_LOG_TO_FILE

    // Fall back to stderr; Borrow means the stream is not closed on teardown.
    if (!file)
        file = new FilePrintStream(stderr, FilePrintStream::Borrow);

    setvbuf(file->file(), 0, _IONBF, 0); // Prefer unbuffered output, so that we get a full log upon crash or deadlock.
}
/*
 * Idempotently initialize the log stream.  With pthreads the one-shot guard
 * is pthread_once; otherwise a plain null check is used.
 * NOTE(review): the non-pthreads path is not thread-safe — confirm those
 * configurations are single-threaded.
 */
static void initializeLogFile()
{
#if USE(PTHREADS)
    pthread_once(&initializeLogFileOnceKey, initializeLogFileOnce);
#else
    if (!file)
        initializeLogFileOnce();
#endif
}
// Return the shared log stream, initializing it on first use.
FilePrintStream& dataFile()
{
    initializeLogFile();
    return *file;
}
// vprintf-style logging to the shared data log.
void dataLogFV(const char* format, va_list argList)
{
    dataFile().vprintf(format, argList);
}
// printf-style logging to the shared data log.
void dataLogF(const char* format, ...)
{
    va_list argList;
    va_start(argList, format);
    dataLogFV(format, argList);
    va_end(argList);
}
// Log a plain string; the "%s" guards against format specifiers inside str.
void dataLogFString(const char* str)
{
    dataFile().printf("%s", str);
}
} // namespace WTF
| 1,135 |
1,568 | <reponame>KirilBangachev/Python
"""
this algorithm tries to find the pattern from every position of
the mainString if pattern is found from position i it add it to
the answer and does the same for position i+1
Complexity : O(n*m)
n=length of main string
m=length of pattern string
"""
def naivePatternSearch(mainString, pattern):
    """Return the list of all start indices where ``pattern`` occurs in ``mainString``.

    Naive sliding-window search: every alignment of the pattern is compared
    against the text, so the worst-case cost is O(n*m) for
    n = len(mainString), m = len(pattern).

    An empty pattern matches at every alignment (indices 0..len(mainString)),
    matching the behavior of the original character-by-character loop.
    """
    pat_len = len(pattern)
    # A slice comparison replaces the manual inner loop with flag variable;
    # it is equivalent (and short-circuits in C) for every alignment.
    return [start
            for start in range(len(mainString) - pat_len + 1)
            if mainString[start:start + pat_len] == pattern]
# Demo run: report every index at which the pattern occurs in the sample text.
mainString="ABAAABCDBBABCDDEBCABC"
pattern="ABC"
position=naivePatternSearch(mainString,pattern)
print("Pattern found in position ")
for x in position:
    print(x)
528 | <reponame>jiosec/tdameritrade
from tdameritrade.orders.models import base
def test_base_order_json():
    """Attributes set ad hoc on a BaseOrder must not leak into its JSON output."""
    order = base.BaseOrder()
    order.some_field = 123
    assert order.json() == "{}"
| 77 |
776 | <reponame>Diffblue-benchmarks/actframework<filename>legacy-testapp/src/main/java/testapp/endpoint/ghissues/GH352.java<gh_stars>100-1000
package testapp.endpoint.ghissues;
import act.controller.annotation.TemplateContext;
import act.controller.annotation.UrlContext;
import act.view.ViewManager;
import org.osgl.mvc.annotation.Before;
import org.osgl.mvc.annotation.GetAction;
/**
 * Verify fix to #352 and #424
 */
@UrlContext("352")
@TemplateContext("352")
public class GH352 extends GithubIssueBase {

    /**
     * Seeds the render argument consumed by every template in this endpoint.
     */
    @Before
    public void setupRenderArgs() {
        context.renderArg("who", "Act");
    }

    /** Renders an inline (string-literal) template. */
    @GetAction("inline")
    public String testInline(ViewManager viewManager) {
        return viewManager.getTemplate("Hello @who").render(context);
    }

    /** Renders a template addressed by its absolute path. */
    @GetAction
    public String test(ViewManager viewManager) {
        return viewManager.getTemplate("/gh/352/test.html").render(context);
    }

    /** Renders the same template via a path relative to the template context. */
    @GetAction("relative")
    public String testRelativePath(ViewManager viewManager) {
        return viewManager.getTemplate("test").render(context);
    }
}
| 368 |
1,204 | <filename>hunter-transform/src/main/java/com/quinn/hunter/transform/RunVariant.java<gh_stars>1000+
package com.quinn.hunter.transform;
/**
 * Created by Quinn on 10/05/2018
 */
public enum RunVariant {
    // Selects when the transform runs: only for DEBUG or RELEASE builds, for
    // every variant (ALWAYS), or not at all (NEVER).
    // NOTE(review): presumed from the constant names — confirm against the
    // code that consumes RunVariant.
    DEBUG, RELEASE, ALWAYS, NEVER
}
| 83 |
898 | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* #define LOG_NDEBUG 0 */
#define LOG_TAG "audio_utils_format"
#include "audio/android/cutils/log.h"
#include "audio/android/audio_utils/include/audio_utils/primitives.h"
#include "audio/android/audio_utils/include/audio_utils/format.h"
#include "audio/android/audio.h"
/*
 * Copy and convert `count` audio samples from `src` (encoded as
 * `src_format`) into `dst` (encoded as `dst_format`).
 *
 * Same-format copies are a plain memcpy; cross-format copies dispatch to
 * the per-pair conversion routines from audio_utils primitives.  Any
 * format pair without an explicit case below is treated as a programming
 * error and aborts via LOG_ALWAYS_FATAL.
 */
void memcpy_by_audio_format(void *dst, audio_format_t dst_format,
        const void *src, audio_format_t src_format, size_t count)
{
    /* default cases for error falls through to fatal log below. */
    if (dst_format == src_format) {
        switch (dst_format) {
        case AUDIO_FORMAT_PCM_16_BIT:
        case AUDIO_FORMAT_PCM_FLOAT:
        case AUDIO_FORMAT_PCM_8_BIT:
        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
        case AUDIO_FORMAT_PCM_32_BIT:
        case AUDIO_FORMAT_PCM_8_24_BIT:
            /* Identical encodings: byte count = samples * bytes-per-sample. */
            memcpy(dst, src, count * audio_bytes_per_sample(dst_format));
            return;
        default:
            break;
        }
    }
    /* Cross-format conversion, dispatched on destination then source. */
    switch (dst_format) {
    case AUDIO_FORMAT_PCM_16_BIT:
        switch (src_format) {
        case AUDIO_FORMAT_PCM_FLOAT:
            memcpy_to_i16_from_float((int16_t*)dst, (float*)src, count);
            return;
        case AUDIO_FORMAT_PCM_8_BIT:
            memcpy_to_i16_from_u8((int16_t*)dst, (uint8_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
            memcpy_to_i16_from_p24((int16_t*)dst, (uint8_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_32_BIT:
            memcpy_to_i16_from_i32((int16_t*)dst, (int32_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_8_24_BIT:
            memcpy_to_i16_from_q8_23((int16_t*)dst, (int32_t*)src, count);
            return;
        default:
            break;
        }
        break;
    case AUDIO_FORMAT_PCM_FLOAT:
        switch (src_format) {
        case AUDIO_FORMAT_PCM_16_BIT:
            memcpy_to_float_from_i16((float*)dst, (int16_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_8_BIT:
            memcpy_to_float_from_u8((float*)dst, (uint8_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_24_BIT_PACKED:
            memcpy_to_float_from_p24((float*)dst, (uint8_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_32_BIT:
            memcpy_to_float_from_i32((float*)dst, (int32_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_8_24_BIT:
            memcpy_to_float_from_q8_23((float*)dst, (int32_t*)src, count);
            return;
        default:
            break;
        }
        break;
    case AUDIO_FORMAT_PCM_8_BIT:
        switch (src_format) {
        case AUDIO_FORMAT_PCM_16_BIT:
            memcpy_to_u8_from_i16((uint8_t*)dst, (int16_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_FLOAT:
            memcpy_to_u8_from_float((uint8_t*)dst, (float*)src, count);
            return;
        default:
            break;
        }
        break;
    case AUDIO_FORMAT_PCM_24_BIT_PACKED:
        switch (src_format) {
        case AUDIO_FORMAT_PCM_16_BIT:
            memcpy_to_p24_from_i16((uint8_t*)dst, (int16_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_FLOAT:
            memcpy_to_p24_from_float((uint8_t*)dst, (float*)src, count);
            return;
        default:
            break;
        }
        break;
    case AUDIO_FORMAT_PCM_32_BIT:
        switch (src_format) {
        case AUDIO_FORMAT_PCM_16_BIT:
            memcpy_to_i32_from_i16((int32_t*)dst, (int16_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_FLOAT:
            memcpy_to_i32_from_float((int32_t*)dst, (float*)src, count);
            return;
        default:
            break;
        }
        break;
    case AUDIO_FORMAT_PCM_8_24_BIT:
        switch (src_format) {
        case AUDIO_FORMAT_PCM_16_BIT:
            memcpy_to_q8_23_from_i16((int32_t*)dst, (int16_t*)src, count);
            return;
        case AUDIO_FORMAT_PCM_FLOAT:
            memcpy_to_q8_23_from_float_with_clamp((int32_t*)dst, (float*)src, count);
            return;
        case AUDIO_FORMAT_PCM_24_BIT_PACKED: {
            memcpy_to_q8_23_from_p24((int32_t *)dst, (uint8_t *)src, count);
            return;
        }
        default:
            break;
        }
        break;
    default:
        break;
    }
    LOG_ALWAYS_FATAL("invalid src format %#x for dst format %#x",
            src_format, dst_format);
}
/*
 * Build the channel index array for remapping between two channel masks
 * (presumably consumed by memcpy_by_index_array — confirm in format.h).
 *
 * Each mask's representation (positional vs. index) selects the matching
 * initializer.  Returns that initializer's result, or 0 when either mask
 * uses an unsupported representation.
 */
size_t memcpy_by_index_array_initialization_from_channel_mask(int8_t *idxary, size_t arysize,
        audio_channel_mask_t dst_channel_mask, audio_channel_mask_t src_channel_mask)
{
    const audio_channel_representation_t src_representation =
            audio_channel_mask_get_representation(src_channel_mask);
    const audio_channel_representation_t dst_representation =
            audio_channel_mask_get_representation(dst_channel_mask);
    const uint32_t src_bits = audio_channel_mask_get_bits(src_channel_mask);
    const uint32_t dst_bits = audio_channel_mask_get_bits(dst_channel_mask);

    /* Four representation pairs; anything else is unsupported (returns 0). */
    switch (src_representation) {
    case AUDIO_CHANNEL_REPRESENTATION_POSITION:
        switch (dst_representation) {
        case AUDIO_CHANNEL_REPRESENTATION_POSITION:
            return memcpy_by_index_array_initialization(idxary, arysize,
                    dst_bits, src_bits);
        case AUDIO_CHANNEL_REPRESENTATION_INDEX:
            return memcpy_by_index_array_initialization_dst_index(idxary, arysize,
                    dst_bits, src_bits);
        default:
            return 0;
        }
        break;
    case AUDIO_CHANNEL_REPRESENTATION_INDEX:
        switch (dst_representation) {
        case AUDIO_CHANNEL_REPRESENTATION_POSITION:
            return memcpy_by_index_array_initialization_src_index(idxary, arysize,
                    dst_bits, src_bits);
        case AUDIO_CHANNEL_REPRESENTATION_INDEX:
            return memcpy_by_index_array_initialization(idxary, arysize,
                    dst_bits, src_bits);
        default:
            return 0;
        }
        break;
    default:
        return 0;
    }
}
| 3,433 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.