max_stars_count (int64, 301 to 224k) | text (string, lengths 6 to 1.05M) | token_count (int64, 3 to 727k) |
---|---|---|
5,169 | <gh_stars>1000+
{
"name": "VitalSignsHandler",
"version": "0.0.1",
"summary": "VitalSignsHandler is an iOS framework for handling ECG or PPG data.",
"description": "This framework was developed by the Higher School of Information Technology and Information Systems of Kazan Federal University.",
"homepage": "https://github.com/MariMiMari/VitalSignsHandler",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/MariMiMari/VitalSignsHandler.git",
"tag": "0.0.1"
},
"platforms": {
"ios": "8.0"
},
"libraries": "c++",
"source_files": "VitalSignsHandler/Classes/**/**",
"requires_arc": true,
"xcconfig": {
"CLANG_CXX_LANGUAGE_STANDARD": "c++11",
"CLANG_CXX_LIBRARY": "libc++"
},
"preserve_paths": "Example/Pods/Target Support Files/VitalSignsHandler/VitalSignsHandler.modulemap",
"module_map": "Example/Pods/Target Support Files/VitalSignsHandler/VitalSignsHandler.modulemap",
"public_header_files": "VitalSignsHandler/Classes/**/*.h"
}
| 438 |
429 | <reponame>nipunsadvilkar/pysbd
import pytest
from pysbd.languages import LANGUAGE_CODES, Language
def test_lang_code2instance_mapping():
for code, language_module in LANGUAGE_CODES.items():
assert Language.get_language_code(code) == language_module
def test_exception_on_no_lang_code_provided():
with pytest.raises(ValueError) as e:
Language.get_language_code('')
assert "Provide valid language ID i.e. ISO code." in str(e.value)
def test_exception_on_unsupported_lang_code_provided():
with pytest.raises(ValueError) as e:
Language.get_language_code('elvish')
assert "Provide valid language ID i.e. ISO code." in str(e.value)
| 257 |
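The tests above pin down pysbd's code-to-module mapping and the error it raises for unknown codes. A minimal usage sketch (assuming pysbd is installed and that "en" is one of the keys in LANGUAGE_CODES, which the first test iterates over) could look like this:

```python
# Hedged sketch of the API exercised by the tests above; "en" is an assumed key.
from pysbd.languages import LANGUAGE_CODES, Language

english_module = Language.get_language_code("en")
assert english_module == LANGUAGE_CODES["en"]

try:
    Language.get_language_code("elvish")   # unsupported code
except ValueError as err:
    print(err)                             # mentions "Provide valid language ID i.e. ISO code."
```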
789 | <filename>lsm/lsm.cpp
#include "lsm.h"
#include <iostream>
#include <string>
#include <variant>
#include <vector>
#include <fstream>
#include <limits>
#include <bitset>
#include <filesystem>
#include "MurmurHash3.h"
#include <cassert>
void BloomFilter::insert_key(int32_t k) {
uint32_t seed = 1;
uint32_t key_hash;
MurmurHash3_x86_32(&k, sizeof k, seed, &key_hash);
auto last_d = key_hash % 10;
bset[last_d] = 1;
}
char BloomFilter::contains_key(int32_t k) {
uint32_t seed = 1;
uint32_t key_hash;
MurmurHash3_x86_32(&k, sizeof k, seed, &key_hash);
auto last_d = key_hash % 10;
return bset[last_d];
}
int Level::size() {
return files.size();
}
void Level::add_file(File f) {
files.push_back(f);
}
std::string Level::next_file_path() {
return level_directory / std::to_string(files.size());
}
File Level::write_to_file(std::map<int32_t, maybe_value> m, std::string filename) {
std::ofstream ostrm(filename, std::ios::binary);
BloomFilter bloomFilter;
int min = std::numeric_limits<int>::max();
int max = std::numeric_limits<int>::lowest();
for (auto const&x : m) {
bloomFilter.insert_key(x.first);
ostrm.write(reinterpret_cast<const char*>(&x.first), sizeof x.first);
ostrm.write(reinterpret_cast<const char*>(&x.second.v), sizeof x.second.v);
ostrm.write(reinterpret_cast<const char*>(&x.second.is_deleted), sizeof x.second.is_deleted);
if (x.first > max) {max = x.first;}
if (x.first < min) {min = x.first;}
}
if (!ostrm) {
std::cerr << "could not write to file successfully" << "\n";
exit(5);
}
return File(filename, bloomFilter, min, max);
}
std::map<int32_t, maybe_value> Level::read_from_file(std::string filename) {
std::map<int32_t, maybe_value> new_m;
read_to_map(filename, new_m);
return new_m;
}
void Level::read_to_map(std::string filename, std::map<int32_t, maybe_value> &m) {
if (auto istrm = std::ifstream(filename, std::ios::binary)) {
int32_t k;
while (istrm.read(reinterpret_cast<char *>(&k), sizeof k)) {
int32_t v;
char d;
istrm.read(reinterpret_cast<char *>(&v), sizeof v);
istrm.read(reinterpret_cast<char *>(&d), sizeof d);
m.insert_or_assign(k, maybe_value(v,d));
}
} else {
std::cerr << "could not open " << filename << "\n";
exit(3);
}
std::cerr << "read file " << filename << " to map" << "\n";
}
File Level::merge(File older_f, File newer_f) {
std::map<int32_t, maybe_value> m;
read_to_map(older_f.filename, m);
read_to_map(newer_f.filename, m);
std::cerr << "merge files " << older_f.filename << " and " << newer_f.filename << " to new file " << "\n";
return write_to_file(m, next_file_path());
}
void Level::add(std::map<int32_t, maybe_value> m) {
File f = write_to_file(m, next_file_path());
add_file(f);
}
void LSM::add_to_level(std::map<int32_t, maybe_value> m, size_t l_index) {
Level& level = get_level(l_index);
if (level.size() + 1 == level.max_size) {
assert(level.max_size == 2);
File merged_f = level.merge(level.files.back(),
level.write_to_file(m, level.next_file_path()));
std::map<int32_t, maybe_value> merged_m;
level.read_to_map(merged_f.filename, merged_m);
add_to_level(merged_m, l_index + 1);
std::filesystem::path file_path = get_level(l_index).files.back().filename;
get_level(l_index).files.pop_back();
std::filesystem::remove(file_path);
} else {
assert(level.size() < level.max_size);
level.add(m);
}
}
Level& LSM::get_level(size_t index) {
if (index > levels.size()) {
std::cerr << "skipped a level!?";
exit(1);
} else if (index == levels.size()) {
levels.push_back(Level(index, directory));
return levels.back();
} else {
return levels[index];
}
}
void LSM::put(int32_t k, int32_t v, char deleted) {
if (m.size() >= 4) {
add_to_level(m, 0);
m.clear();
}
m.insert_or_assign(k,maybe_value(v, deleted));
}
std::optional<int32_t> LSM::get(int32_t k) {
auto it = m.find(k);
if (it != m.end()) {
if(!it->second.is_deleted) {
return it->second.v;
} else {
return std::nullopt;
}
} else {
for (auto it = levels.rbegin(); it != levels.rend(); ++it) {
auto& level = *it;
for (auto j = level.files.rbegin(); j != level.files.rend(); ++j) {
auto& file = *j;
if (file.bloomFilter.contains_key(k) && k >= file.min && k <= file.max) {
std::map <int32_t, maybe_value> file_map = level.read_from_file(file.filename);
auto it_m = file_map.find(k);
if (it_m != file_map.end()) {
if (!it_m->second.is_deleted) {
return it_m->second.v;
} else {
return std::nullopt;
}
}
}
}
}
}
return std::nullopt;
}
std::vector<std::pair<int32_t, int32_t>> LSM::range(int32_t l, int32_t r) {
std::vector<std::pair<int32_t, int32_t>> res;
std::map<int32_t,int32_t> res_map;
for (auto &level : levels) {
for (auto &file : level.files) {
if (r >= file.min && l <= file.max) {
std::map <int32_t, maybe_value> file_map = level.read_from_file(file.filename);
auto it_ml = file_map.lower_bound(l);
auto it_mu = file_map.lower_bound(r);
for (auto it = it_ml; it != it_mu; ++it) {
if (!it->second.is_deleted) {
res_map.insert_or_assign(it->first, it->second.v);
} else {
res_map.erase(it->first);
}
}
}
}
}
auto it_l = m.lower_bound(l);
auto it_u = m.lower_bound(r);
for (auto it=it_l; it!=it_u; ++it) {
if (!it->second.is_deleted) {
res_map.insert_or_assign(it->first, it->second.v);
} else {
res_map.erase(it->first);
}
}
auto rit_l = res_map.lower_bound(l);
auto rit_u = res_map.lower_bound(r);
for (auto it=rit_l; it!=rit_u; ++it) {
res.push_back(std::make_pair(it->first, it->second));
}
return res;
}
void LSM::del(int32_t k) {
put(k, 0, 1);
}
void LSM::dump_map() {
for (auto const&x : m) {
std::cout << x.first << "\n";
}
}
| 2,751 |
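In the LSM::get path above, a file is only read from disk when both its Bloom filter and its key fence (min/max) admit the key. The sketch below restates that gating idea in Python; it is illustrative only, with Python's built-in hash standing in for MurmurHash3 and a 10-slot list mirroring the hard-coded `% 10` bitset:

```python
# Illustrative sketch of the BloomFilter/File checks in the C++ snippet above,
# not a binding to that code.
class TinyBloom:
    def __init__(self, nbits: int = 10):
        self.bits = [0] * nbits                      # C++ version uses a std::bitset

    def insert(self, key: int) -> None:
        self.bits[hash(key) % len(self.bits)] = 1    # C++ uses MurmurHash3_x86_32, then % 10

    def might_contain(self, key: int) -> bool:
        return bool(self.bits[hash(key) % len(self.bits)])

def should_read_file(key: int, bloom: TinyBloom, key_min: int, key_max: int) -> bool:
    # Pay the disk read only if the filter *and* the fence pointers allow it.
    return bloom.might_contain(key) and key_min <= key <= key_max

bf = TinyBloom()
bf.insert(42)
print(should_read_file(42, bf, 0, 100))   # True: slot set and key inside the fence
print(should_read_file(7, bf, 0, 100))    # False unless 7 collides with 42 in the 10-slot filter
```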
832 | <gh_stars>100-1000
package com.litesuits.http.impl.apache;
import com.litesuits.http.data.Consts;
import com.litesuits.http.exception.HttpClientException;
import com.litesuits.http.impl.apache.entity.FileEntity;
import com.litesuits.http.impl.apache.entity.InputStreamEntity;
import com.litesuits.http.impl.apache.entity.MultipartEntity;
import com.litesuits.http.request.AbstractRequest;
import com.litesuits.http.request.content.*;
import com.litesuits.http.request.content.multi.MultipartBody;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
/**
* help us to build {@link org.apache.http.HttpEntity}
*
* @author MaTianyu
* 2014-1-18 1:41:41 AM
*/
public class EntityBuilder {
public static HttpEntity build(AbstractRequest req) throws HttpClientException {
try {
HttpBody body = req.getHttpBody();
if (body != null) {
req.addHeader(Consts.CONTENT_TYPE, body.getContentType());
if (body instanceof StringBody) {
// StringBody JsonBody UrlEncodedFormBody
StringBody b = (StringBody) body;
return new StringEntity(b.getString(), b.getCharset());
} else if (body instanceof ByteArrayBody) {
// ByteArrayBody SerializableBody
ByteArrayBody b = (ByteArrayBody) body;
return new ByteArrayEntity(b.getBytes());
} else if (body instanceof InputStreamBody) {
InputStreamBody b = (InputStreamBody) body;
return new InputStreamEntity(b.getInstream(), b.getInstream().available(), req);
} else if (body instanceof FileBody) {
FileBody b = (FileBody) body;
return new FileEntity(b.getFile(), b.getContentType(), req);
} else if (body instanceof MultipartBody) {
return new MultipartEntity((MultipartBody) body);
} else {
throw new RuntimeException("Unpredictable Entity Body (illegal entity)");
}
}
} catch (Exception e) {
throw new HttpClientException(e);
}
return null;
}
}
| 1,032 |
8,851 | from django.db import transaction
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.urls import reverse
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django.views.generic.edit import BaseCreateView, BaseDeleteView, BaseUpdateView
from django.views.generic.list import BaseListView
from wagtail.admin import messages
from wagtail.admin.ui.tables import Table, TitleColumn
from wagtail.core.log_actions import log
from .base import WagtailAdminTemplateMixin
from .permissions import PermissionCheckedMixin
class IndexView(PermissionCheckedMixin, WagtailAdminTemplateMixin, BaseListView):
model = None
index_url_name = None
add_url_name = None
edit_url_name = None
template_name = 'wagtailadmin/generic/index.html'
context_object_name = None
any_permission_required = ['add', 'change', 'delete']
page_kwarg = 'p'
default_ordering = None
def get(self, request, *args, **kwargs):
if not hasattr(self, 'columns'):
self.columns = self.get_columns()
return super().get(request, *args, **kwargs)
def get_columns(self):
try:
return self.columns
except AttributeError:
return [
TitleColumn(
'name', label=gettext_lazy("Name"), accessor=str, get_url=lambda obj: self.get_edit_url(obj)
),
]
def get_index_url(self):
if self.index_url_name:
return reverse(self.index_url_name)
def get_edit_url(self, instance):
if self.edit_url_name:
return reverse(self.edit_url_name, args=(instance.pk,))
def get_valid_orderings(self):
orderings = []
for col in self.columns:
if col.sort_key:
orderings.append(col.sort_key)
orderings.append('-%s' % col.sort_key)
return orderings
def get_ordering(self):
ordering = self.request.GET.get('ordering', self.default_ordering)
if ordering not in self.get_valid_orderings():
ordering = self.default_ordering
return ordering
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
index_url = self.get_index_url()
table = Table(
self.columns, context['object_list'], base_url=index_url, ordering=self.get_ordering()
)
context['can_add'] = (
self.permission_policy is None
or self.permission_policy.user_has_permission(self.request.user, 'add')
)
context['table'] = table
context['media'] = table.media
context['index_url'] = index_url
context['is_paginated'] = bool(self.paginate_by)
return context
class CreateView(PermissionCheckedMixin, WagtailAdminTemplateMixin, BaseCreateView):
model = None
form_class = None
index_url_name = None
add_url_name = None
edit_url_name = None
template_name = 'wagtailadmin/generic/create.html'
permission_required = 'add'
success_message = None
error_message = None
def get_add_url(self):
return reverse(self.add_url_name)
def get_success_url(self):
return reverse(self.index_url_name)
def get_success_message(self, instance):
if self.success_message is None:
return None
return self.success_message.format(instance)
def get_error_message(self):
if self.error_message is None:
return None
return self.error_message
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['action_url'] = self.get_add_url()
return context
def save_instance(self):
"""
Called after the form is successfully validated - saves the object to the db
and returns the new object. Override this to implement custom save logic.
"""
return self.form.save()
def form_valid(self, form):
self.form = form
with transaction.atomic():
self.object = self.save_instance()
log(instance=self.object, action='wagtail.create')
success_message = self.get_success_message(self.object)
if success_message is not None:
messages.success(self.request, success_message, buttons=[
messages.button(reverse(self.edit_url_name, args=(self.object.id,)), _('Edit'))
])
return redirect(self.get_success_url())
def form_invalid(self, form):
self.form = form
error_message = self.get_error_message()
if error_message is not None:
messages.error(self.request, error_message)
return super().form_invalid(form)
class EditView(PermissionCheckedMixin, WagtailAdminTemplateMixin, BaseUpdateView):
model = None
form_class = None
index_url_name = None
edit_url_name = None
delete_url_name = None
page_title = gettext_lazy("Editing")
context_object_name = None
template_name = 'wagtailadmin/generic/edit.html'
permission_required = 'change'
delete_item_label = gettext_lazy("Delete")
success_message = None
error_message = None
def get_object(self, queryset=None):
if 'pk' not in self.kwargs:
self.kwargs['pk'] = self.args[0]
return super().get_object(queryset)
def get_page_subtitle(self):
return str(self.object)
def get_edit_url(self):
return reverse(self.edit_url_name, args=(self.object.id,))
def get_delete_url(self):
return reverse(self.delete_url_name, args=(self.object.id,))
def get_success_url(self):
return reverse(self.index_url_name)
def save_instance(self):
"""
Called after the form is successfully validated - saves the object to the db.
Override this to implement custom save logic.
"""
return self.form.save()
def get_success_message(self):
if self.success_message is None:
return None
return self.success_message.format(self.object)
def get_error_message(self):
if self.error_message is None:
return None
return self.error_message
def form_valid(self, form):
self.form = form
with transaction.atomic():
self.object = self.save_instance()
log(instance=self.object, action='wagtail.edit')
success_message = self.get_success_message()
if success_message is not None:
messages.success(self.request, success_message, buttons=[
messages.button(reverse(self.edit_url_name, args=(self.object.id,)), _('Edit'))
])
return redirect(self.get_success_url())
def form_invalid(self, form):
self.form = form
error_message = self.get_error_message()
if error_message is not None:
messages.error(self.request, error_message)
return super().form_invalid(form)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['action_url'] = self.get_edit_url()
context['delete_url'] = self.get_delete_url()
context['delete_item_label'] = self.delete_item_label
context['can_delete'] = (
self.permission_policy is None
or self.permission_policy.user_has_permission(self.request.user, 'delete')
)
return context
class DeleteView(PermissionCheckedMixin, WagtailAdminTemplateMixin, BaseDeleteView):
model = None
index_url_name = None
delete_url_name = None
template_name = 'wagtailadmin/generic/confirm_delete.html'
context_object_name = None
permission_required = 'delete'
success_message = None
def get_object(self, queryset=None):
if 'pk' not in self.kwargs:
self.kwargs['pk'] = self.args[0]
return super().get_object(queryset)
def get_success_url(self):
return reverse(self.index_url_name)
def get_page_subtitle(self):
return str(self.object)
def get_delete_url(self):
return reverse(self.delete_url_name, args=(self.object.id,))
def get_success_message(self):
if self.success_message is None:
return None
return self.success_message.format(self.object)
def delete(self, request, *args, **kwargs):
self.object = self.get_object()
success_url = self.get_success_url()
with transaction.atomic():
log(instance=self.object, action='wagtail.delete')
self.object.delete()
messages.success(request, self.get_success_message())
return HttpResponseRedirect(success_url)
| 3,627 |
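These generic views are designed to be subclassed with a model plus a handful of URL names; the get_*_url helpers simply reverse() whatever is configured. A hypothetical wiring is shown below; the Shirt model, ShirtForm, and the "shirts:*" URL names are placeholders invented for illustration, and the snippet only runs inside a configured Django/Wagtail project:

```python
# Hypothetical subclasses of the generic views above; Shirt, ShirtForm and the
# URL names are placeholders, not part of Wagtail.
class ShirtIndexView(IndexView):
    model = Shirt
    index_url_name = "shirts:index"
    add_url_name = "shirts:add"
    edit_url_name = "shirts:edit"

class ShirtCreateView(CreateView):
    model = Shirt
    form_class = ShirtForm                      # a ModelForm for Shirt
    index_url_name = "shirts:index"
    add_url_name = "shirts:add"
    edit_url_name = "shirts:edit"
    success_message = "Shirt '{0}' created."    # formatted with the new instance in get_success_message()
```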
5,678 | """Test the datasets loader.
Skipped if datasets is not already downloaded to data_home.
"""
# Authors: <NAME> <<EMAIL>>
# <NAME>
# License: MIT
import pytest
from imblearn.datasets import fetch_datasets
from sklearn.utils._testing import SkipTest
DATASET_SHAPE = {
"ecoli": (336, 7),
"optical_digits": (5620, 64),
"satimage": (6435, 36),
"pen_digits": (10992, 16),
"abalone": (4177, 10),
"sick_euthyroid": (3163, 42),
"spectrometer": (531, 93),
"car_eval_34": (1728, 21),
"isolet": (7797, 617),
"us_crime": (1994, 100),
"yeast_ml8": (2417, 103),
"scene": (2407, 294),
"libras_move": (360, 90),
"thyroid_sick": (3772, 52),
"coil_2000": (9822, 85),
"arrhythmia": (452, 278),
"solar_flare_m0": (1389, 32),
"oil": (937, 49),
"car_eval_4": (1728, 21),
"wine_quality": (4898, 11),
"letter_img": (20000, 16),
"yeast_me2": (1484, 8),
"webpage": (34780, 300),
"ozone_level": (2536, 72),
"mammography": (11183, 6),
"protein_homo": (145751, 74),
"abalone_19": (4177, 10),
}
def fetch(*args, **kwargs):
return fetch_datasets(*args, download_if_missing=True, **kwargs)
@pytest.mark.xfail
def test_fetch():
try:
datasets1 = fetch(shuffle=True, random_state=42)
except IOError:
raise SkipTest("Zenodo dataset can not be loaded.")
datasets2 = fetch(shuffle=True, random_state=37)
for k in DATASET_SHAPE.keys():
X1, X2 = datasets1[k].data, datasets2[k].data
assert DATASET_SHAPE[k] == X1.shape
assert X1.shape == X2.shape
y1, y2 = datasets1[k].target, datasets2[k].target
assert (X1.shape[0],) == y1.shape
assert (X1.shape[0],) == y2.shape
def test_fetch_filter():
try:
datasets1 = fetch(filter_data=tuple([1]), shuffle=True, random_state=42)
except IOError:
raise SkipTest("Zenodo dataset can not be loaded.")
datasets2 = fetch(filter_data=tuple(["ecoli"]), shuffle=True, random_state=37)
X1, X2 = datasets1["ecoli"].data, datasets2["ecoli"].data
assert DATASET_SHAPE["ecoli"] == X1.shape
assert X1.shape == X2.shape
assert X1.sum() == pytest.approx(X2.sum())
y1, y2 = datasets1["ecoli"].target, datasets2["ecoli"].target
assert (X1.shape[0],) == y1.shape
assert (X1.shape[0],) == y2.shape
@pytest.mark.parametrize(
"filter_data, err_msg",
[
(("rnf",), "is not a dataset available"),
((-1,), "dataset with the ID="),
((100,), "dataset with the ID="),
((1.00,), "value in the tuple"),
],
)
def test_fetch_error(filter_data, err_msg):
with pytest.raises(ValueError, match=err_msg):
fetch_datasets(filter_data=filter_data)
| 1,248 |
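The DATASET_SHAPE table above doubles as a reference for what fetch_datasets should return. A short usage sketch (assuming imbalanced-learn is installed and the Zenodo archive is reachable, or already cached in data_home) would be:

```python
# Sketch of the loader exercised by the tests above; a network download or a warm
# cache is assumed.
from imblearn.datasets import fetch_datasets

datasets = fetch_datasets(filter_data=("ecoli",), download_if_missing=True)
bunch = datasets["ecoli"]
print(bunch.data.shape, bunch.target.shape)   # expected (336, 7) and (336,), per DATASET_SHAPE
```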
3,266 |
package org.antlr.gunit.swingui.model;
import javax.swing.JComponent;
import javax.swing.JLabel;
/**
*
* @author scai
*/
public class TestCaseInputFile implements ITestCaseInput {
private String fileName;
public TestCaseInputFile(String file) {
this.fileName = file;
}
public String getLabel() {
return "FILE:" + fileName;
}
public void setScript(String script) {
this.fileName = script;
}
@Override
public String toString() {
return fileName;
}
public String getScript() {
return this.fileName;
}
} | 236 |
852 | <reponame>ckamtsikis/cmssw
#ifndef _BTAGGER_SIGNEDIMPACTPARAMETER3D_H_
#define _BTAGGER_SIGNEDIMPACTPARAMETER3D_H_
#include "TrackingTools/TrajectoryState/interface/TrajectoryStateOnSurface.h"
#include "DataFormats/GeometryCommonDetAlgo/interface/Measurement1D.h"
#include "TrackingTools/TransientTrack/interface/TransientTrack.h"
#include "DataFormats/VertexReco/interface/Vertex.h"
#include "DataFormats/GeometryVector/interface/GlobalVector.h"
#include <utility>
/** Threedimensional track impact parameter signed according to the jet
* direction
*/
class SignedImpactParameter3D {
public:
// construct
SignedImpactParameter3D(){};
std::pair<bool, Measurement1D> apply(const reco::TransientTrack &,
const GlobalVector &direction,
const reco::Vertex &vertex) const;
int id() const { return 2; }
/**
Return a pair:
first is the decay length
second is the distance of the track from jet axis
*/
static std::pair<double, Measurement1D> distanceWithJetAxis(const reco::TransientTrack &transientTrack,
const GlobalVector &direction,
const reco::Vertex &vertex);
private:
static GlobalVector distance(const TrajectoryStateOnSurface &, const reco::Vertex &, const GlobalVector &);
static TrajectoryStateOnSurface closestApproachToJet(const FreeTrajectoryState &,
const reco::Vertex &,
const GlobalVector &,
const MagneticField *);
};
#endif
| 811 |
500 | from flask import redirect, url_for
def get_scope_redirect(type):
redirects = {
"scope": url_for("admin.scope"),
"blacklist": url_for("admin.blacklist"),
"default": url_for("admin.admin"),
}
return redirect(redirects.get(type, redirects["default"]))
| 111 |
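The helper's core is a dictionary lookup with a fallback; stripped of Flask, the pattern reduces to the plain-Python demonstration below (the URL strings stand in for whatever url_for() would produce):

```python
# Plain-dict demonstration of the fallback used by get_scope_redirect above;
# the URL strings are placeholders for url_for() results.
redirects = {
    "scope": "/admin/scope",
    "blacklist": "/admin/blacklist",
    "default": "/admin",
}
print(redirects.get("blacklist", redirects["default"]))   # "/admin/blacklist"
print(redirects.get("unknown", redirects["default"]))     # falls back to "/admin"
```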
517 | /*
* Copyright (c) 2010. All rights reserved.
*/
package ro.isdc.wro.manager.factory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import javax.servlet.FilterConfig;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import ro.isdc.wro.WroRuntimeException;
import ro.isdc.wro.cache.CacheStrategy;
import ro.isdc.wro.cache.ConfigurableCacheStrategy;
import ro.isdc.wro.cache.impl.MemoryCacheStrategy;
import ro.isdc.wro.config.Context;
import ro.isdc.wro.config.support.ContextPropagatingCallable;
import ro.isdc.wro.manager.WroManager;
import ro.isdc.wro.model.factory.ConfigurableModelFactory;
import ro.isdc.wro.model.factory.WroModelFactory;
import ro.isdc.wro.model.factory.XmlModelFactory;
import ro.isdc.wro.model.resource.locator.ClasspathUriLocator;
import ro.isdc.wro.model.resource.locator.ServletContextUriLocator;
import ro.isdc.wro.model.resource.locator.UriLocator;
import ro.isdc.wro.model.resource.locator.UrlUriLocator;
import ro.isdc.wro.model.resource.locator.factory.ConfigurableLocatorFactory;
import ro.isdc.wro.model.resource.processor.ResourcePostProcessor;
import ro.isdc.wro.model.resource.processor.ResourcePreProcessor;
import ro.isdc.wro.model.resource.processor.decorator.ExtensionsAwareProcessorDecorator;
import ro.isdc.wro.model.resource.processor.decorator.ProcessorDecorator;
import ro.isdc.wro.model.resource.processor.factory.ConfigurableProcessorsFactory;
import ro.isdc.wro.model.resource.processor.factory.ProcessorsFactory;
import ro.isdc.wro.model.resource.processor.impl.css.CssImportPreProcessor;
import ro.isdc.wro.model.resource.processor.impl.css.CssMinProcessor;
import ro.isdc.wro.model.resource.processor.impl.css.CssVariablesProcessor;
import ro.isdc.wro.model.resource.processor.impl.js.JSMinProcessor;
import ro.isdc.wro.model.resource.support.AbstractConfigurableMultipleStrategy;
import ro.isdc.wro.model.resource.support.hash.ConfigurableHashStrategy;
import ro.isdc.wro.model.resource.support.hash.HashStrategy;
import ro.isdc.wro.model.resource.support.hash.MD5HashStrategy;
import ro.isdc.wro.model.resource.support.naming.ConfigurableNamingStrategy;
import ro.isdc.wro.model.resource.support.naming.FolderHashEncoderNamingStrategy;
import ro.isdc.wro.model.resource.support.naming.NamingStrategy;
import ro.isdc.wro.model.resource.support.naming.TimestampNamingStrategy;
import ro.isdc.wro.util.AbstractDecorator;
import ro.isdc.wro.util.WroTestUtils;
import ro.isdc.wro.util.WroUtil;
/**
* TestConfigurableWroManagerFactory.
*
* @author <NAME>
* @created Created on Jan 5, 2010
*/
public class TestConfigurableWroManagerFactory {
private ConfigurableWroManagerFactory victim;
@Mock
private FilterConfig mockFilterConfig;
private ConfigurableLocatorFactory uriLocatorFactory;
@Mock
private ServletContext mockServletContext;
private ProcessorsFactory processorsFactory;
@Mock
private HttpServletRequest mockRequest;
@Mock
private HttpServletResponse mockResponse;
private Properties configProperties;
@BeforeClass
public static void onBeforeClass() {
assertEquals(0, Context.countActive());
}
@AfterClass
public static void onAfterClass() {
assertEquals(0, Context.countActive());
}
@Before
public void setUp() {
MockitoAnnotations.initMocks(this);
// init context
Context.set(Context.webContext(mockRequest, mockResponse, mockFilterConfig));
Mockito.when(mockFilterConfig.getServletContext()).thenReturn(mockServletContext);
victim = new ConfigurableWroManagerFactory();
configProperties = new Properties();
victim.setConfigProperties(configProperties);
}
/**
* Creates the manager and initialize processors with locators used for assetions.
*/
private void createManager() {
// create one instance for test
final WroManager manager = victim.create();
processorsFactory = manager.getProcessorsFactory();
uriLocatorFactory = (ConfigurableLocatorFactory) AbstractDecorator.getOriginalDecoratedObject(manager.getUriLocatorFactory());
}
/**
* When no uri locators are set, the default factory is used.
*/
@Test
public void shouldHaveNoLocatorsWhenNoUriLocatorsParamSet() {
createManager();
assertTrue(uriLocatorFactory.getConfiguredStrategies().isEmpty());
}
@Test
public void shouldHaveNoLocatorsWhenNoLocatorsInitParamSet() {
createManager();
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableLocatorFactory.PARAM_URI_LOCATORS)).thenReturn("");
assertTrue(uriLocatorFactory.getConfiguredStrategies().isEmpty());
}
@Test
public void shouldLoadUriLocatorsFromConfigurationFile() {
configProperties.setProperty(ConfigurableLocatorFactory.PARAM_URI_LOCATORS, "servletContext");
createManager();
assertEquals(1, uriLocatorFactory.getConfiguredStrategies().size());
assertSame(ServletContextUriLocator.class, uriLocatorFactory.getConfiguredStrategies().iterator().next().getClass());
}
@Test
public void shouldLoadUriLocatorsFromFilterConfigRatherThanFromConfigProperties() {
configProperties.setProperty(ConfigurableLocatorFactory.PARAM_URI_LOCATORS, "servletContext");
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableLocatorFactory.PARAM_URI_LOCATORS)).thenReturn(
"classpath, servletContext");
createManager();
assertEquals(2, uriLocatorFactory.getConfiguredStrategies().size());
final Iterator<UriLocator> locatorsIterator = uriLocatorFactory.getConfiguredStrategies().iterator();
assertSame(ClasspathUriLocator.class, locatorsIterator.next().getClass());
assertSame(ServletContextUriLocator.class, locatorsIterator.next().getClass());
}
@Test(expected = WroRuntimeException.class)
public void cannotUseInvalidUriLocatorsSet() {
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableLocatorFactory.PARAM_URI_LOCATORS)).thenReturn(
"INVALID1,INVALID2");
createManager();
uriLocatorFactory.getConfiguredStrategies();
}
@Test
public void shouldHaveCorrectLocatorsSet() {
configureValidUriLocators(mockFilterConfig);
createManager();
assertEquals(3, uriLocatorFactory.getConfiguredStrategies().size());
}
/**
* @param filterConfig
*/
private void configureValidUriLocators(final FilterConfig filterConfig) {
Mockito.when(filterConfig.getInitParameter(ConfigurableLocatorFactory.PARAM_URI_LOCATORS)).thenReturn(
ConfigurableLocatorFactory.createItemsAsString(ServletContextUriLocator.ALIAS, UrlUriLocator.ALIAS,
ClasspathUriLocator.ALIAS));
}
@Test
public void testProcessorsExecutionOrder() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS)).thenReturn(
AbstractConfigurableMultipleStrategy.createItemsAsString(JSMinProcessor.ALIAS, CssImportPreProcessor.ALIAS,
CssVariablesProcessor.ALIAS));
final List<ResourcePreProcessor> list = (List<ResourcePreProcessor>) processorsFactory.getPreProcessors();
assertEquals(JSMinProcessor.class, list.get(0).getClass());
assertEquals(CssImportPreProcessor.class, list.get(1).getClass());
assertEquals(CssVariablesProcessor.class, list.get(2).getClass());
}
@Test
public void testWithEmptyPreProcessors() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS)).thenReturn("");
assertTrue(processorsFactory.getPreProcessors().isEmpty());
}
@Test(expected = WroRuntimeException.class)
public void cannotUseInvalidPreProcessorsSet() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS)).thenReturn(
"INVALID1,INVALID2");
processorsFactory.getPreProcessors();
}
@Test
public void testWhenValidPreProcessorsSet() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS)).thenReturn(
"cssUrlRewriting");
assertEquals(1, processorsFactory.getPreProcessors().size());
}
@Test
public void testWithEmptyPostProcessors() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS)).thenReturn("");
assertTrue(processorsFactory.getPostProcessors().isEmpty());
}
@Test(expected = WroRuntimeException.class)
public void cannotUseInvalidPostProcessorsSet() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS)).thenReturn(
"INVALID1,INVALID2");
processorsFactory.getPostProcessors();
}
@Test
public void testWhenValidPostProcessorsSet() {
createManager();
configureValidUriLocators(mockFilterConfig);
Mockito.when(mockFilterConfig.getInitParameter(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS)).thenReturn(
"cssMinJawr, jsMin, cssVariables");
assertEquals(3, processorsFactory.getPostProcessors().size());
}
@Test
public void testConfigPropertiesWithValidPreProcessor() {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS, "cssMin");
victim.setConfigProperties(configProperties);
createManager();
final Collection<ResourcePreProcessor> list = processorsFactory.getPreProcessors();
assertEquals(1, list.size());
assertEquals(CssMinProcessor.class, list.iterator().next().getClass());
}
@Test
public void testConfigPropertiesWithValidPostProcessor() {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS, "jsMin");
victim.setConfigProperties(configProperties);
createManager();
assertEquals(1, processorsFactory.getPostProcessors().size());
assertEquals(JSMinProcessor.class,
((ProcessorDecorator) processorsFactory.getPostProcessors().iterator().next()).getDecoratedObject().getClass());
}
@Test
public void testConfigPropertiesWithMultipleValidPostProcessor() {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS, "jsMin, cssMin");
victim.setConfigProperties(configProperties);
createManager();
assertEquals(2, processorsFactory.getPostProcessors().size());
assertEquals(JSMinProcessor.class,
((ProcessorDecorator) processorsFactory.getPostProcessors().iterator().next()).getDecoratedObject().getClass());
}
@Test(expected = WroRuntimeException.class)
public void testConfigPropertiesWithInvalidPreProcessor() {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS, "INVALID");
victim.setConfigProperties(configProperties);
createManager();
processorsFactory.getPreProcessors();
}
@Test
public void shouldUseExtensionAwareProcessorWhenProcessorNameContainsDotCharacter() {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS, "jsMin.js");
victim.setConfigProperties(configProperties);
createManager();
assertEquals(1, processorsFactory.getPreProcessors().size());
assertTrue(processorsFactory.getPreProcessors().iterator().next() instanceof ExtensionsAwareProcessorDecorator);
}
@Test(expected = WroRuntimeException.class)
public void testConfigPropertiesWithInvalidPostProcessor() {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS, "INVALID");
victim.setConfigProperties(configProperties);
createManager();
processorsFactory.getPostProcessors();
}
@Test(expected = WroRuntimeException.class)
public void cannotConfigureInvalidNamingStrategy()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableNamingStrategy.KEY, "INVALID");
victim.setConfigProperties(configProperties);
victim.create().getNamingStrategy().rename("name", WroUtil.EMPTY_STREAM);
}
@Test
public void shouldUseConfiguredNamingStrategy()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableNamingStrategy.KEY, TimestampNamingStrategy.ALIAS);
victim.setConfigProperties(configProperties);
final NamingStrategy actual = ((ConfigurableNamingStrategy) victim.create().getNamingStrategy()).getConfiguredStrategy();
assertEquals(TimestampNamingStrategy.class, actual.getClass());
}
@Test(expected = WroRuntimeException.class)
public void cannotConfigureInvalidHashStrategy()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableHashStrategy.KEY, "INVALID");
victim.setConfigProperties(configProperties);
victim.create().getHashStrategy().getHash(WroUtil.EMPTY_STREAM);
}
@Test
public void shouldUseConfiguredHashStrategy()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableHashStrategy.KEY, MD5HashStrategy.ALIAS);
victim.setConfigProperties(configProperties);
final HashStrategy actual = ((ConfigurableHashStrategy) victim.create().getHashStrategy()).getConfiguredStrategy();
assertEquals(MD5HashStrategy.class, actual.getClass());
}
@Test(expected = WroRuntimeException.class)
public void cannotConfigureInvalidCacheStrategy()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableCacheStrategy.KEY, "INVALID");
victim.setConfigProperties(configProperties);
victim.create().getCacheStrategy().clear();
}
@Test
public void shouldUseConfiguredCacheStrategy()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableCacheStrategy.KEY, MemoryCacheStrategy.ALIAS);
victim.setConfigProperties(configProperties);
final CacheStrategy<?, ?> actual = ((ConfigurableCacheStrategy) AbstractDecorator.getOriginalDecoratedObject(victim.create().getCacheStrategy())).getConfiguredStrategy();
assertEquals(MemoryCacheStrategy.class, actual.getClass());
}
@Test(expected = WroRuntimeException.class)
public void cannotConfigureInvalidRequestHandler()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableCacheStrategy.KEY, "INVALID");
victim.setConfigProperties(configProperties);
victim.create().getCacheStrategy().clear();
}
@Test
public void shouldUseConfiguredRequestHandler()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableCacheStrategy.KEY, MemoryCacheStrategy.ALIAS);
victim.setConfigProperties(configProperties);
final CacheStrategy<?, ?> actual = ((ConfigurableCacheStrategy) AbstractDecorator.getOriginalDecoratedObject(victim.create().getCacheStrategy())).getConfiguredStrategy();
assertEquals(MemoryCacheStrategy.class, actual.getClass());
}
@Test
public void shouldUseConfiguredModelFactory()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableModelFactory.KEY, XmlModelFactory.ALIAS);
victim.setConfigProperties(configProperties);
final WroModelFactory actual = ((ConfigurableModelFactory) AbstractDecorator.getOriginalDecoratedObject(victim.create().getModelFactory())).getConfiguredStrategy();
assertEquals(XmlModelFactory.class, actual.getClass());
}
@Test(expected = WroRuntimeException.class)
public void cannotUseInvalidConfiguredModelFactory()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableModelFactory.KEY, "invalid");
victim.setConfigProperties(configProperties);
((ConfigurableModelFactory) AbstractDecorator.getOriginalDecoratedObject(victim.create().getModelFactory())).getConfiguredStrategy();
}
@Test
public void shouldConsiderContributeMethodsWhenManagerFactoryIsExtended() {
final String alias = "contributed";
victim = new ConfigurableWroManagerFactory() {
@Override
protected void contributePreProcessors(final Map<String, ResourcePreProcessor> map) {
map.put(alias, new JSMinProcessor());
}
@Override
protected void contributePostProcessors(final Map<String, ResourcePostProcessor> map) {
map.put(alias, new JSMinProcessor());
}
@Override
protected void contributeLocators(final Map<String, UriLocator> map) {
map.put(alias, new UrlUriLocator());
}
};
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_PRE_PROCESSORS, alias);
configProperties.setProperty(ConfigurableProcessorsFactory.PARAM_POST_PROCESSORS, alias);
configProperties.setProperty(ConfigurableLocatorFactory.PARAM_URI_LOCATORS, alias);
victim.setConfigProperties(configProperties);
final WroManager manager = victim.create();
assertFalse(manager.getProcessorsFactory().getPostProcessors().isEmpty());
assertFalse(manager.getProcessorsFactory().getPreProcessors().isEmpty());
}
@Test
public void shouldApplyNamingStrategyConcurrently()
throws Exception {
final Properties configProperties = new Properties();
configProperties.setProperty(ConfigurableNamingStrategy.KEY, FolderHashEncoderNamingStrategy.ALIAS);
victim.setConfigProperties(configProperties);
WroTestUtils.runConcurrently(ContextPropagatingCallable.decorate(new Callable<Void>() {
public Void call()
throws Exception {
victim.create().getNamingStrategy().rename("", new ByteArrayInputStream("".getBytes()));
return null;
}
}));
}
@After
public void tearDown() {
Context.unset();
}
}
| 6,089 |
2,151 | <reponame>zipated/src
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/capture/video/chromeos/camera_device_context.h"
namespace media {
CameraDeviceContext::CameraDeviceContext(
std::unique_ptr<VideoCaptureDevice::Client> client)
: state_(State::kStopped),
sensor_orientation_(0),
screen_rotation_(0),
client_(std::move(client)) {
DCHECK(client_);
DETACH_FROM_SEQUENCE(sequence_checker_);
}
CameraDeviceContext::~CameraDeviceContext() = default;
void CameraDeviceContext::SetState(State state) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
state_ = state;
if (state_ == State::kCapturing) {
client_->OnStarted();
}
}
CameraDeviceContext::State CameraDeviceContext::GetState() {
return state_;
}
void CameraDeviceContext::SetErrorState(const base::Location& from_here,
const std::string& reason) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
state_ = State::kError;
LOG(ERROR) << reason;
client_->OnError(from_here, reason);
}
void CameraDeviceContext::LogToClient(std::string message) {
client_->OnLog(message);
}
void CameraDeviceContext::SubmitCapturedData(
gfx::GpuMemoryBuffer* buffer,
const VideoCaptureFormat& frame_format,
base::TimeTicks reference_time,
base::TimeDelta timestamp) {
client_->OnIncomingCapturedGfxBuffer(buffer, frame_format,
GetCameraFrameOrientation(),
reference_time, timestamp);
}
void CameraDeviceContext::SetSensorOrientation(int sensor_orientation) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(sensor_orientation >= 0 && sensor_orientation < 360 &&
sensor_orientation % 90 == 0);
sensor_orientation_ = sensor_orientation;
}
void CameraDeviceContext::SetScreenRotation(int screen_rotation) {
DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
DCHECK(screen_rotation >= 0 && screen_rotation < 360 &&
screen_rotation % 90 == 0);
screen_rotation_ = screen_rotation;
}
int CameraDeviceContext::GetCameraFrameOrientation() {
return (sensor_orientation_ + screen_rotation_) % 360;
}
} // namespace media
| 870 |
1,433 | /*
* Copyright (c) 2016, 2017, 2018, 2019 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.fabric.mixin.object.builder;
import java.util.Random;
import java.util.stream.Stream;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import net.minecraft.entity.Entity;
import net.minecraft.item.ItemStack;
import net.minecraft.util.registry.DefaultedRegistry;
import net.minecraft.village.TradeOffer;
@Mixin(targets = "net/minecraft/village/TradeOffers$TypeAwareBuyForOneEmeraldFactory")
public abstract class TypeAwareTradeMixin {
/**
* Vanilla will check the "VillagerType -> Item" map in the stream and throw an exception for villager types not specified in the map.
* This breaks any and all custom villager types.
* We want to prevent this default logic so modded villager types will work.
* So we return an empty stream so an exception is never thrown.
*/
@Redirect(method = "<init>", at = @At(value = "INVOKE", target = "Lnet/minecraft/util/registry/DefaultedRegistry;stream()Ljava/util/stream/Stream;"))
private <T> Stream<T> disableVanillaCheck(DefaultedRegistry<T> registry) {
return Stream.empty();
}
/**
* To prevent "item" -> "air" trades, if the result of a type aware trade is air, make sure no offer is created.
*/
@Inject(method = "create(Lnet/minecraft/entity/Entity;Ljava/util/Random;)Lnet/minecraft/village/TradeOffer;", at = @At(value = "NEW", target = "net/minecraft/village/TradeOffer"), locals = LocalCapture.CAPTURE_FAILEXCEPTION, cancellable = true)
private void failOnNullItem(Entity entity, Random random, CallbackInfoReturnable<TradeOffer> cir, ItemStack buyingItem) {
if (buyingItem.isEmpty()) { // Will return true for an "empty" item stack that had null passed in the ctor
cir.setReturnValue(null); // Return null to prevent creation of empty trades
}
}
}
| 805 |
692 | import torch
import torch.nn as nn
import torch.nn.functional as F
import os
import math
try: # CUDA kernel
assert not ('FORCE_NATIVE' in os.environ and os.environ['FORCE_NATIVE']) # add FORCE_NATIVE in env to force native
from cuda_op.fused_act import FusedLeakyReLU, fused_leaky_relu
from cuda_op.upfirdn2d import upfirdn2d
except Exception as e:
print(e)
print(' # Using native op...')
from cuda_op.op_native import FusedLeakyReLU, fused_leaky_relu, upfirdn2d
__all__ = ['PixelNorm', 'EqualConv2d', 'EqualLinear', 'ModulatedConv2d', 'StyledConv', 'ConvLayer', 'ResBlock',
'ConstantInput', 'ToRGB']
class PixelNorm(nn.Module):
def __init__(self):
super().__init__()
def forward(self, x):
return x * torch.rsqrt(torch.mean(x ** 2, dim=1, keepdim=True) + 1e-8)
class ConstantInput(nn.Module):
def __init__(self, channel, size=4):
super().__init__()
self.input = nn.Parameter(torch.randn(1, channel, size, size))
def forward(self, batch):
out = self.input.repeat(batch, 1, 1, 1)
if hasattr(self, 'first_k_oup') and self.first_k_oup is not None: # support dynamic channel
assert self.first_k_oup <= out.shape[1]
return out[:, :self.first_k_oup]
else:
return out
class NoiseInjection(nn.Module):
def __init__(self):
super().__init__()
self.weight = nn.Parameter(torch.zeros(1))
def forward(self, image, noise=None):
if noise is None:
batch, _, height, width = image.shape
noise = image.new_empty(batch, 1, height, width).normal_() # random noise
return image + self.weight * noise
def make_kernel(k):
k = torch.tensor(k, dtype=torch.float32)
if k.ndim == 1:
k = k[None, :] * k[:, None]
k /= k.sum()
k = torch.flip(k, [0, 1]) # move from runtime to here
return k
class Upsample(nn.Module):
def __init__(self, kernel, factor=2):
super().__init__()
self.factor = factor
kernel = make_kernel(kernel) * (factor ** 2)
self.register_buffer('kernel', kernel)
p = kernel.shape[0] - factor
pad0 = (p + 1) // 2 + factor - 1
pad1 = p // 2
self.pad = (pad0, pad1)
def forward(self, x):
out = upfirdn2d(x, self.kernel, up=self.factor, down=1, pad=self.pad)
return out
class Blur(nn.Module):
def __init__(self, kernel, pad, upsample_factor=1):
super().__init__()
kernel = make_kernel(kernel)
if upsample_factor > 1:
kernel = kernel * (upsample_factor ** 2)
self.register_buffer('kernel', kernel)
self.pad = pad
def forward(self, x):
out = upfirdn2d(x, self.kernel, pad=self.pad)
return out
class EqualConv2d(nn.Module):
def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, bias=True):
super().__init__()
self.weight = nn.Parameter(torch.randn(out_channel, in_channel, kernel_size, kernel_size))
self.scale = 1 / math.sqrt(in_channel * kernel_size ** 2)
self.stride = stride
self.padding = padding
if bias:
self.bias = nn.Parameter(torch.zeros(out_channel))
else:
self.bias = None
def forward(self, x):
in_channel = x.shape[1]
weight = self.weight
if hasattr(self, 'first_k_oup') and self.first_k_oup is not None:
weight = weight[:self.first_k_oup]
weight = weight[:, :in_channel].contiguous() # index sub channels for inference
out = F.conv2d(
x,
weight * self.scale,
bias=self.bias,
stride=self.stride,
padding=self.padding,
)
return out
def __repr__(self):
return (
f'{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]},'
f' {self.weight.shape[2]}, stride={self.stride}, padding={self.padding})'
)
class EqualLinear(nn.Module):
def __init__(
self, in_dim, out_dim, bias=True, bias_init=0, lr_mul=1., activation=None
):
super().__init__()
self.weight = nn.Parameter(torch.randn(out_dim, in_dim).div_(lr_mul))
if bias:
self.bias = nn.Parameter(torch.zeros(out_dim).fill_(bias_init))
else:
self.bias = None
self.activation = activation
self.scale = (1 / math.sqrt(in_dim)) * lr_mul
self.lr_mul = lr_mul
def forward(self, x):
if self.activation:
out = F.linear(x, self.weight * self.scale)
if self.activation == 'lrelu':
out = fused_leaky_relu(out, self.bias * self.lr_mul)
else:
raise NotImplementedError
else:
out = F.linear(x, self.weight * self.scale, bias=self.bias * self.lr_mul)
return out
def __repr__(self):
return (
f'{self.__class__.__name__}({self.weight.shape[1]}, {self.weight.shape[0]})'
)
class ModulatedConv2d(nn.Module):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
style_dim,
demodulate=True,
upsample=False,
downsample=False,
blur_kernel=(1, 3, 3, 1),
):
super().__init__()
self.eps = 1e-8
self.kernel_size = kernel_size
self.in_channel = in_channel
self.out_channel = out_channel
self.upsample = upsample # if true, use deconvolution
self.downsample = downsample
assert not downsample, 'Downsample is not implemented yet!'
self.modulation = EqualLinear(style_dim, in_channel, bias_init=1)
self.demodulate = demodulate
if upsample:
factor = 2
p = (len(blur_kernel) - factor) - (kernel_size - 1)
self.blur = Blur(blur_kernel, pad=((p + 1) // 2 + factor - 1, p // 2 + 1), upsample_factor=factor)
self.scale = 1 / math.sqrt(in_channel * kernel_size ** 2)
self.padding = kernel_size // 2
self.weight = nn.Parameter(torch.randn(1, out_channel, in_channel, kernel_size, kernel_size))
def __repr__(self):
return (
f'{self.__class__.__name__}({self.in_channel}, {self.out_channel}, {self.kernel_size}, '
f'upsample={self.upsample}, downsample={self.downsample})'
)
def forward(self, x, style):
batch, in_channel, height, width = x.shape
style = self.modulation(style)
style = style.view(batch, 1, -1, 1, 1)
# process weight for dynamic channel
first_k_oup = self.first_k_oup if hasattr(self, 'first_k_oup') and self.first_k_oup is not None \
else self.weight.shape[1]
assert first_k_oup <= self.weight.shape[1]
weight = self.weight
weight = weight[:, :first_k_oup, :in_channel].contiguous() # index sub channels for inference
# modulate weight
weight = self.scale * weight * style[:, :, :in_channel]
# demodulate weight
if self.demodulate:
weight = weight * torch.rsqrt(weight.pow(2).sum([2, 3, 4], keepdim=True) + self.eps)
if self.upsample:
x = x.view(1, batch * in_channel, height, width)
weight = weight.transpose(1, 2)
weight = weight.reshape(weight.shape[0] * weight.shape[1], weight.shape[2], weight.shape[3],
weight.shape[4])
out = F.conv_transpose2d(x, weight, padding=0, stride=2, groups=batch)
out = out.view(batch, -1, out.shape[-2], out.shape[-1])
out = self.blur(out)
else:
x = x.contiguous().view(1, batch * in_channel, height, width)
weight = weight.view(weight.shape[0] * weight.shape[1], weight.shape[2], weight.shape[3], weight.shape[4])
out = F.conv2d(x, weight, padding=self.padding, groups=batch)
out = out.view(batch, -1, out.shape[-2], out.shape[-1])
return out
class StyledConv(nn.Module):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
style_dim,
upsample=False,
blur_kernel=(1, 3, 3, 1),
demodulate=True,
activation='lrelu',
):
super().__init__()
self.conv = ModulatedConv2d(
in_channel,
out_channel,
kernel_size,
style_dim,
upsample=upsample,
blur_kernel=blur_kernel,
demodulate=demodulate,
)
self.noise = NoiseInjection()
if activation == 'lrelu':
self.activate = FusedLeakyReLU(out_channel)
else:
raise NotImplementedError
def forward(self, x, style, noise=None):
out = self.conv(x, style)
out = self.noise(out, noise=noise)
out = self.activate(out)
return out
class ToRGB(nn.Module):
def __init__(self, in_channel, style_dim, upsample=True, blur_kernel=(1, 3, 3, 1)):
super().__init__()
if upsample:
self.upsample = Upsample(blur_kernel)
self.conv = ModulatedConv2d(in_channel, 3, 1, style_dim, demodulate=False)
self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1))
def forward(self, x, style, skip=None):
out = self.conv(x, style)
out = out + self.bias
if skip is not None:
skip = self.upsample(skip)
out = out + skip
return out
class AdaptiveModulate(nn.Module):
def __init__(self, num_features, g_arch_len):
super(AdaptiveModulate, self).__init__()
self.weight_mapping = nn.Linear(g_arch_len, num_features)
self.bias_mapping = nn.Linear(g_arch_len, num_features)
def forward(self, x, g_arch):
assert x.dim() == 4
weight = self.weight_mapping(g_arch.view(1, -1)).view(-1) + 1. # add 1 to make a smooth start
bias = self.bias_mapping(g_arch.view(1, -1)).view(-1)
return x * weight.view(1, -1, 1, 1) + bias.view(1, -1, 1, 1)
class ConvLayer(nn.Sequential):
def __init__(
self,
in_channel,
out_channel,
kernel_size,
downsample=False,
blur_kernel=(1, 3, 3, 1),
bias=True,
activate='lrelu',
modulate=False,
g_arch_len=18 * 4,
):
layers = []
if downsample:
factor = 2
p = (len(blur_kernel) - factor) + (kernel_size - 1)
pad0 = (p + 1) // 2
pad1 = p // 2
layers.append(Blur(blur_kernel, pad=(pad0, pad1)))
stride = 2
self.padding = 0
else:
stride = 1
self.padding = kernel_size // 2
layers.append(
EqualConv2d(in_channel,
out_channel,
kernel_size,
padding=self.padding,
stride=stride,
bias=bias and not activate,
)
)
# if conditioned on g_arch
if modulate:
layers.append(AdaptiveModulate(out_channel, g_arch_len))
assert bias == (activate != 'none')
if activate == 'lrelu': # if activate then bias = True
layers.append(FusedLeakyReLU(out_channel))
else:
assert activate == 'none'
super().__init__(*layers)
def forward(self, x, g_arch=None):
for module in self:
if isinstance(module, AdaptiveModulate):
x = module(x, g_arch)
else:
x = module(x)
return x
class ResBlock(nn.Module):
def __init__(self, in_channel, out_channel, blur_kernel=(1, 3, 3, 1), act_func='lrelu',
modulate=False, g_arch_len=18 * 4):
super().__init__()
self.out_channel = out_channel
self.conv1 = ConvLayer(in_channel, in_channel, 3, activate=act_func, modulate=modulate, g_arch_len=g_arch_len)
self.conv2 = ConvLayer(in_channel, out_channel, 3, downsample=True, blur_kernel=blur_kernel, activate=act_func,
modulate=modulate, g_arch_len=g_arch_len)
self.skip = ConvLayer(in_channel, out_channel, 1, downsample=True, activate='none', bias=False,
modulate=modulate, g_arch_len=g_arch_len)
def forward(self, x, g_arch=None):
out = self.conv1(x, g_arch)
out = self.conv2(out, g_arch)
skip = self.skip(x, g_arch)
out = (out + skip) / math.sqrt(2)
return out
| 6,280 |
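A minimal CPU forward pass through the modulated-convolution path above can serve as a shape check. The sketch assumes the classes from this snippet are in scope and that the native-op fallback import succeeded (so no CUDA kernels are required); channel sizes are arbitrary:

```python
# Shape-check sketch for StyledConv/ToRGB as defined above; values are illustrative only.
import torch

style_dim = 512
conv = StyledConv(in_channel=256, out_channel=128, kernel_size=3,
                  style_dim=style_dim, upsample=True)   # modulated conv + noise + fused lrelu
to_rgb = ToRGB(in_channel=128, style_dim=style_dim)

x = torch.randn(2, 256, 8, 8)     # feature maps for a batch of 2
w = torch.randn(2, style_dim)     # per-sample style vectors
feat = conv(x, w)                 # upsample=True doubles the resolution -> (2, 128, 16, 16)
img = to_rgb(feat, w)             # 1x1 modulated conv to RGB -> (2, 3, 16, 16)
print(feat.shape, img.shape)
```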
5,169 | <reponame>Gantios/Specs
{
"name": "MappedinVenueFormat",
"version": "1.0.7",
"summary": "This is a library to download mvf files",
"homepage": "https://mappedin.com",
"module_name": "MappedinVenueFormat",
"license": {
"type": "Commercial",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "https://github.com/MappedIn/MVFDownloader-iOS.git",
"tag": "1.0.7"
},
"libraries": "z",
"xcconfig": {
"FRAMEWORK_SEARCH_PATHS": "\"$(PODS_ROOT)/MappedinVenueFormat/\""
},
"requires_arc": true,
"dependencies": {
"Zip": [
"1.1.0"
]
},
"swift_versions": "5.0.0",
"vendored_frameworks": "MappedinVenueFormat.xcframework",
"swift_version": "5.0.0"
}
| 369 |
1,503 | #include "textformat.hpp"
using Microsoft::WRL::ComPtr;
DWRITE_FONT_STYLE dwrite_style(const FontOpts& fo)
{
if (fo & FontOpts::Italic) return DWRITE_FONT_STYLE_ITALIC;
return DWRITE_FONT_STYLE_NORMAL;
}
DWRITE_FONT_WEIGHT dwrite_weight(const FontOpts& fo)
{
if (fo & FontOpts::Normal) return DWRITE_FONT_WEIGHT_NORMAL;
if (fo & FontOpts::Thin) return DWRITE_FONT_WEIGHT_THIN;
if (fo & FontOpts::Light) return DWRITE_FONT_WEIGHT_LIGHT;
if (fo & FontOpts::Medium) return DWRITE_FONT_WEIGHT_MEDIUM;
if (fo & FontOpts::SemiBold) return DWRITE_FONT_WEIGHT_SEMI_BOLD;
if (fo & FontOpts::Bold) return DWRITE_FONT_WEIGHT_BOLD;
if (fo & FontOpts::ExtraBold) return DWRITE_FONT_WEIGHT_EXTRA_BOLD;
return DWRITE_FONT_WEIGHT_NORMAL;
}
DWRITE_HIT_TEST_METRICS metrics_for(
std::wstring_view text,
IDWriteFactory* factory,
IDWriteTextFormat* text_format
)
{
ComPtr<IDWriteTextLayout> tl = nullptr;
factory->CreateTextLayout(
text.data(),
(UINT32) text.size(),
text_format,
std::numeric_limits<float>::max(),
std::numeric_limits<float>::max(),
&tl
);
DWRITE_HIT_TEST_METRICS ht_metrics;
float ignore;
tl->HitTestTextPosition(0, 0, &ignore, &ignore, &ht_metrics);
return ht_metrics;
}
TextFormat::TextFormat(
IDWriteFactory* factory,
const std::wstring& name,
float pointsize,
float dpi,
FontOpts default_weight,
FontOpts default_style
)
{
auto w = dwrite_weight(default_weight);
auto s = dwrite_style(default_style);
// Create reg with default weight
const auto create = [&](auto pptf, auto weight, auto style) {
factory->CreateTextFormat(
name.c_str(),
nullptr,
weight,
style,
DWRITE_FONT_STRETCH_NORMAL,
pointsize * (dpi / 72.0f),
L"en-us",
pptf
);
};
create(reg.GetAddressOf(), w, s);
create(bold.GetAddressOf(), DWRITE_FONT_WEIGHT_BOLD, s);
create(italic.GetAddressOf(), w, DWRITE_FONT_STYLE_ITALIC);
create(bolditalic.GetAddressOf(), DWRITE_FONT_WEIGHT_BOLD, DWRITE_FONT_STYLE_ITALIC);
auto w_metrics = metrics_for(L"W", factory, reg.Get());
auto a_metrics = metrics_for(L"a", factory, reg.Get());
is_mono = w_metrics.width == a_metrics.width;
}
IDWriteTextFormat* TextFormat::font_for(const FontOptions& fo) const
{
if (fo & FontOpts::Italic && fo & FontOpts::Bold)
{
return bolditalic.Get();
}
else if (fo & FontOpts::Bold) return bold.Get();
else if (fo & FontOpts::Italic) return italic.Get();
else return reg.Get();
}
bool TextFormat::is_monospace() const
{
return is_mono;
}
| 1,080 |
2,603 | /*
* FreeRTOS V202107.00
* Copyright (C) 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* http://www.FreeRTOS.org
* http://aws.amazon.com/freertos
*
* 1 tab == 4 spaces!
*/
/*
Changes from V2.4.0
+ Made serial ISR handling more complete and robust.
Changes from V2.4.1
+ Split serial.c into serial.c and serialISR.c. serial.c can be
compiled using ARM or THUMB modes. serialISR.c must always be
compiled in ARM mode.
+ Another small change to cSerialPutChar().
Changed from V2.5.1
+ In cSerialPutChar() an extra check is made to ensure the post to
the queue was successful if then attempting to retrieve the posted
character.
*/
/*
BASIC INTERRUPT DRIVEN SERIAL PORT DRIVER FOR UART0.
This file contains all the serial port components that can be compiled to
either ARM or THUMB mode. Components that must be compiled to ARM mode are
contained in serialISR.c.
*/
/* Standard includes. */
#include <stdlib.h>
/* Scheduler includes. */
#include "FreeRTOS.h"
#include "queue.h"
#include "task.h"
/* Demo application includes. */
#include "serial.h"
/*-----------------------------------------------------------*/
/* Constants to setup and access the UART. */
#define serDLAB ( ( unsigned char ) 0x80 )
#define serENABLE_INTERRUPTS ( ( unsigned char ) 0x03 )
#define serNO_PARITY ( ( unsigned char ) 0x00 )
#define ser1_STOP_BIT ( ( unsigned char ) 0x00 )
#define ser8_BIT_CHARS ( ( unsigned char ) 0x03 )
#define serFIFO_ON ( ( unsigned char ) 0x01 )
#define serCLEAR_FIFO ( ( unsigned char ) 0x06 )
#define serWANTED_CLOCK_SCALING ( ( unsigned long ) 16 )
/* Constants to setup and access the VIC. */
#define serUART0_VIC_CHANNEL ( ( unsigned long ) 0x0006 )
#define serUART0_VIC_CHANNEL_BIT ( ( unsigned long ) 0x0040 )
#define serUART0_VIC_ENABLE ( ( unsigned long ) 0x0020 )
#define serCLEAR_VIC_INTERRUPT ( ( unsigned long ) 0 )
#define serINVALID_QUEUE ( ( QueueHandle_t ) 0 )
#define serHANDLE ( ( xComPortHandle ) 1 )
#define serNO_BLOCK ( ( TickType_t ) 0 )
/*-----------------------------------------------------------*/
/* Queues used to hold received characters, and characters waiting to be
transmitted. */
static QueueHandle_t xRxedChars;
static QueueHandle_t xCharsForTx;
/*-----------------------------------------------------------*/
/* Communication flag between the interrupt service routine and serial API. */
static volatile long *plTHREEmpty;
/*
* The queues are created in serialISR.c as they are used from the ISR.
* Obtain references to the queues and THRE Empty flag.
*/
extern void vSerialISRCreateQueues( unsigned portBASE_TYPE uxQueueLength, QueueHandle_t *pxRxedChars, QueueHandle_t *pxCharsForTx, long volatile **pplTHREEmptyFlag );
/*-----------------------------------------------------------*/
xComPortHandle xSerialPortInitMinimal( unsigned long ulWantedBaud, unsigned portBASE_TYPE uxQueueLength )
{
unsigned long ulDivisor, ulWantedClock;
xComPortHandle xReturn = serHANDLE;
extern void ( vUART_ISR_Wrapper )( void );
	/* The queues are used in the serial ISR routine, so are created from
	serialISR.c (which is always compiled to ARM mode). */
vSerialISRCreateQueues( uxQueueLength, &xRxedChars, &xCharsForTx, &plTHREEmpty );
if(
( xRxedChars != serINVALID_QUEUE ) &&
( xCharsForTx != serINVALID_QUEUE ) &&
( ulWantedBaud != ( unsigned long ) 0 )
)
{
portENTER_CRITICAL();
{
/* Setup the baud rate: Calculate the divisor value. */
ulWantedClock = ulWantedBaud * serWANTED_CLOCK_SCALING;
ulDivisor = configCPU_CLOCK_HZ / ulWantedClock;
/* Set the DLAB bit so we can access the divisor. */
UART0_LCR |= serDLAB;
/* Setup the divisor. */
UART0_DLL = ( unsigned char ) ( ulDivisor & ( unsigned long ) 0xff );
ulDivisor >>= 8;
UART0_DLM = ( unsigned char ) ( ulDivisor & ( unsigned long ) 0xff );
/* Turn on the FIFO's and clear the buffers. */
UART0_FCR = ( serFIFO_ON | serCLEAR_FIFO );
/* Setup transmission format. */
UART0_LCR = serNO_PARITY | ser1_STOP_BIT | ser8_BIT_CHARS;
/* Setup the VIC for the UART. */
VICIntSelect &= ~( serUART0_VIC_CHANNEL_BIT );
VICIntEnable |= serUART0_VIC_CHANNEL_BIT;
VICVectAddr1 = ( long ) vUART_ISR_Wrapper;
VICVectCntl1 = serUART0_VIC_CHANNEL | serUART0_VIC_ENABLE;
/* Enable UART0 interrupts. */
UART0_IER |= serENABLE_INTERRUPTS;
}
portEXIT_CRITICAL();
}
else
{
xReturn = ( xComPortHandle ) 0;
}
return xReturn;
}
/*-----------------------------------------------------------*/
signed portBASE_TYPE xSerialGetChar( xComPortHandle pxPort, signed char *pcRxedChar, TickType_t xBlockTime )
{
/* The port handle is not required as this driver only supports UART0. */
( void ) pxPort;
	/* Get the next character from the buffer. Return false if no characters
	are available, or none arrive before xBlockTime expires. */
if( xQueueReceive( xRxedChars, pcRxedChar, xBlockTime ) )
{
return pdTRUE;
}
else
{
return pdFALSE;
}
}
/*-----------------------------------------------------------*/
void vSerialPutString( xComPortHandle pxPort, const signed char * const pcString, unsigned short usStringLength )
{
signed char *pxNext;
/* NOTE: This implementation does not handle the queue being full as no
block time is used! */
/* The port handle is not required as this driver only supports UART0. */
( void ) pxPort;
( void ) usStringLength;
/* Send each character in the string, one at a time. */
pxNext = ( signed char * ) pcString;
while( *pxNext )
{
xSerialPutChar( pxPort, *pxNext, serNO_BLOCK );
pxNext++;
}
}
/*-----------------------------------------------------------*/
signed portBASE_TYPE xSerialPutChar( xComPortHandle pxPort, signed char cOutChar, TickType_t xBlockTime )
{
signed portBASE_TYPE xReturn;
/* This demo driver only supports one port so the parameter is not used. */
( void ) pxPort;
portENTER_CRITICAL();
{
/* Is there space to write directly to the UART? */
if( *plTHREEmpty == ( long ) pdTRUE )
{
			/* We wrote the character directly to the UART, so the write was
			successful. */
*plTHREEmpty = pdFALSE;
UART0_THR = cOutChar;
xReturn = pdPASS;
}
else
{
/* We cannot write directly to the UART, so queue the character.
Block for a maximum of xBlockTime if there is no space in the
queue. */
xReturn = xQueueSend( xCharsForTx, &cOutChar, xBlockTime );
/* Depending on queue sizing and task prioritisation: While we
			were blocked waiting to post, interrupts were not disabled. It is
possible that the serial ISR has emptied the Tx queue, in which
case we need to start the Tx off again. */
if( ( *plTHREEmpty == ( long ) pdTRUE ) && ( xReturn == pdPASS ) )
{
xQueueReceive( xCharsForTx, &cOutChar, serNO_BLOCK );
*plTHREEmpty = pdFALSE;
UART0_THR = cOutChar;
}
}
}
portEXIT_CRITICAL();
return xReturn;
}
/*-----------------------------------------------------------*/
void vSerialClose( xComPortHandle xPort )
{
/* Not supported as not required by the demo application. */
( void ) xPort;
}
/*-----------------------------------------------------------*/
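/* Illustrative sketch only (not part of the original driver): a task that
brings up UART0 and sends a short string. The 115200 baud rate and queue
length of 64 are example values, not requirements of the driver. The function
is unreferenced and shown purely for illustration. */
static void prvExampleSerialTask( void *pvParameters )
{
xComPortHandle xPort;

	( void ) pvParameters;

	/* Create the Rx/Tx queues and configure UART0. */
	xPort = xSerialPortInitMinimal( ( unsigned long ) 115200, ( unsigned portBASE_TYPE ) 64 );

	if( xPort != ( xComPortHandle ) 0 )
	{
		/* Queue the string for transmission, character by character. */
		vSerialPutString( xPort, ( const signed char * ) "Hello from UART0\r\n", ( unsigned short ) 18 );
	}

	for( ;; );
}
/*-----------------------------------------------------------*/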
| 3,116 |
3,702 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package app.metatron.discovery.domain.workbench.util;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.util.Assert;
import java.util.HashMap;
import java.util.Map;
import app.metatron.discovery.common.exception.BadRequestException;
import app.metatron.discovery.common.exception.ResourceNotFoundException;
import app.metatron.discovery.domain.dataconnection.DataConnection;
import app.metatron.discovery.domain.dataconnection.DataConnectionHelper;
import app.metatron.discovery.domain.dataconnection.DataConnectionRepository;
import app.metatron.discovery.domain.user.CachedUserService;
import app.metatron.discovery.domain.user.User;
import app.metatron.discovery.extension.dataconnection.jdbc.connector.JdbcConnector;
import app.metatron.discovery.extension.dataconnection.jdbc.dialect.JdbcDialect;
import app.metatron.discovery.extension.dataconnection.jdbc.exception.JdbcDataConnectionException;
import app.metatron.discovery.util.AuthUtils;
/**
 * Manages workbench data sources keyed by web socket id.
*/
@Component
public class WorkbenchDataSourceManager {
private static final Logger LOGGER = LoggerFactory.getLogger(WorkbenchDataSourceManager.class);
private Map<String, WorkbenchDataSource> pooledDataSourceList = new HashMap<>();
@Autowired
DataConnectionRepository dataConnectionRepository;
@Autowired
CachedUserService cachedUserService;
public WorkbenchDataSource findDataSourceInfo(String webSocketId){
WorkbenchDataSource dataSourceInfo = pooledDataSourceList.get(webSocketId);
if(dataSourceInfo != null){
LOGGER.debug("Created datasourceInfo Existed : {} ", webSocketId);
return dataSourceInfo;
} else {
LOGGER.debug("Created datasourceInfo Not Existed : {} ", webSocketId);
return null;
}
}
/**
* Destroy data source.
*
* @param webSocketId the web socket id
* @throws JdbcDataConnectionException the jdbc data connection exception
*/
public void destroyDataSource(String webSocketId) throws JdbcDataConnectionException {
Assert.isTrue(!webSocketId.isEmpty(), "webSocketId Required.");
WorkbenchDataSource dataSourceInfo = pooledDataSourceList.get(webSocketId);
if(dataSourceInfo != null){
pooledDataSourceList.remove(webSocketId);
LOGGER.debug("datasource Destroy : {} - {}", dataSourceInfo.getConnectionId(), webSocketId);
dataSourceInfo.destroy();
dataSourceInfo = null;
}
}
/**
   * Creates a single-connection data source info.
*
* @param dataConnection the connection
* @param webSocketId the web socket id
* @return the single connection data source info
* @throws JdbcDataConnectionException the jdbc data connection exception
*/
private WorkbenchDataSource createDataSourceInfo(DataConnection dataConnection,
String webSocketId,
String username,
String password) throws JdbcDataConnectionException{
JdbcDialect jdbcDialect = DataConnectionHelper.lookupDialect(dataConnection);
JdbcConnector jdbcConnector = DataConnectionHelper.lookupJdbcConnector(dataConnection, jdbcDialect);
WorkbenchDataSource dataSourceInfo = new WorkbenchDataSource(dataConnection.getId(), webSocketId, dataConnection, jdbcConnector);
dataSourceInfo.setUsername(username);
dataSourceInfo.setPassword(password);
pooledDataSourceList.put(webSocketId, dataSourceInfo);
return dataSourceInfo;
}
public WorkbenchDataSource createDataSourceInfo(DataConnection connection,
String webSocketId) throws JdbcDataConnectionException{
return createDataSourceInfo(connection, webSocketId, connection.getUsername(), connection.getPassword());
}
public Map<String, WorkbenchDataSource> getCurrentConnections(){
return pooledDataSourceList;
}
public WorkbenchDataSource getWorkbenchDataSource(String dataConnectionId, String webSocketId, String username, String password) {
WorkbenchDataSource dataSource = this.findDataSourceInfo(webSocketId);
if(dataSource == null){
DataConnection dataConnection = dataConnectionRepository.findOne(dataConnectionId);
if(dataConnection == null){
throw new ResourceNotFoundException("DataConnection(" + dataConnectionId + ")");
}
dataSource = this.getWorkbenchDataSource(dataConnection, webSocketId, username, password);
}
return dataSource;
}
public WorkbenchDataSource getWorkbenchDataSource(DataConnection jdbcDataConnection, String webSocketId, String username, String password){
WorkbenchDataSource dataSource = this.findDataSourceInfo(webSocketId);
if(dataSource == null){
String connectionUsername;
String connectionPassword;
DataConnection.AuthenticationType authenticationType = jdbcDataConnection.getAuthenticationType();
if(authenticationType == null){
authenticationType = DataConnection.AuthenticationType.MANUAL;
}
switch (authenticationType){
case USERINFO:
connectionUsername = AuthUtils.getAuthUserName();
User user = cachedUserService.findUser(connectionUsername);
if(user == null){
throw new ResourceNotFoundException("User(" + connectionUsername + ")");
}
          connectionPassword = cachedUserService.findUser(connectionUsername).getPassword();
break;
case MANUAL:
connectionUsername = jdbcDataConnection.getUsername();
          connectionPassword = jdbcDataConnection.getPassword();
break;
default:
if(StringUtils.isEmpty(username)){
throw new BadRequestException("Empty username");
}
if(StringUtils.isEmpty(password)){
throw new BadRequestException("Empty password");
}
connectionUsername = username;
connectionPassword = password;
break;
}
dataSource = this.createDataSourceInfo(jdbcDataConnection, webSocketId, connectionUsername, connectionPassword);
}
return dataSource;
}
}
| 2,329 |
4,879 | #pragma once
#include "map/booking_filter_params.hpp"
#include "map/everywhere_search_params.hpp"
#include "map/search_product_info.hpp"
#include "search/result.hpp"
#include <functional>
#include <vector>
namespace search
{
// An on-results-callback that should be used for search over all
// maps.
//
// *NOTE* the class is NOT thread safe.
class EverywhereSearchCallback
{
public:
class Delegate
{
public:
virtual void FilterResultsForHotelsQuery(booking::filter::Tasks const & filterTasks,
search::Results const & results, bool inViewport) = 0;
};
EverywhereSearchCallback(Delegate & hotelsDelegate, ProductInfo::Delegate & productInfoDelegate,
booking::filter::Tasks const & bookingFilterTasks,
EverywhereSearchParams::OnResults onResults);
void operator()(Results const & results);
private:
Delegate & m_hotelsDelegate;
ProductInfo::Delegate & m_productInfoDelegate;
EverywhereSearchParams::OnResults m_onResults;
std::vector<ProductInfo> m_productInfo;
booking::filter::Tasks m_bookingFilterTasks;
};
} // namespace search
| 426 |
5,169 | {
"name": "HideKeyboardWhenTappedAround",
"version": "1.0.0",
"summary": "As the name suggest, it hides the keyboard when user tapped around.",
"description": "It hides the keyboard when user tapped anywhere around a UIViewController.",
"homepage": "https://github.com/Shritesh99/HideKeyboardWhenTappedAround",
"screenshots": "https://raw.githubusercontent.com/Shritesh99/HideKeyboardWhenTappedAround/master/gif/ss.gif",
"license": "MIT",
"authors": {
"Shritesh": "<EMAIL>"
},
"platforms": {
"ios": "10.0"
},
"source": {
"git": "https://github.com/Shritesh99/HideKeyboardWhenTappedAround.git",
"tag": "1.0.0"
},
"source_files": "HideKeyboardWhenTappedAround/*.swift",
"swift_versions": "4.2",
"swift_version": "4.2"
}
| 284 |
348 | {"nom":"Laneuvelotte","dpt":"Meurthe-et-Moselle","inscrits":359,"abs":71,"votants":288,"blancs":6,"nuls":42,"exp":240,"res":[{"panneau":"1","voix":155},{"panneau":"2","voix":85}]} | 77 |
2,338 | for (int c0 = 1; c0 <= 101; c0 += 1)
for (int c1 = (c0 % 2) + c0; c1 <= 400; c1 += 2)
s0(c0, c1);
| 60 |
1,644 | <gh_stars>1000+
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.tools.params;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.joda.money.CurrencyUnit;
/**
* Combined converter and validator class for key-value map JCommander argument strings.
*
* <p>These strings have the form {@code <K-str>=<V-str>,[<K-str>=<V-str>]*} where
* {@code <K-str>} and {@code <V-str>} are strings that can be parsed into instances of some key
* type {@code K} and value type {@code V}, respectively. This class converts a string into an
* ImmutableMap mapping {@code K} to {@code V}. Validation and conversion share the same logic;
* validation is just done by attempting conversion and throwing exceptions if need be.
*
* <p>Subclasses must implement parseKey() and parseValue() to define how to parse {@code <K-str>}
* and {@code <V-str>} into {@code K} and {@code V}, respectively.
*
* @param <K> instance key type
* @param <V> instance value type
*/
public abstract class KeyValueMapParameter<K, V>
extends ParameterConverterValidator<ImmutableMap<K, V>> {
public KeyValueMapParameter(String messageForInvalid) {
super(messageForInvalid);
}
public KeyValueMapParameter() {
super("Not formatted correctly.");
}
/** Override to define how to parse rawKey into an object of type K. */
protected abstract K parseKey(String rawKey);
/** Override to define how to parse rawValue into an object of type V. */
protected abstract V parseValue(String rawValue);
/** Override to perform any post-processing on the map. */
protected ImmutableMap<K, V> processMap(ImmutableMap<K, V> map) {
return map;
}
@Override
public final ImmutableMap<K, V> convert(String keyValueMapString) {
ImmutableMap.Builder<K, V> builder = new ImmutableMap.Builder<>();
if (!Strings.isNullOrEmpty(keyValueMapString)) {
for (Map.Entry<String, String> entry :
Splitter.on(',').withKeyValueSeparator('=').split(keyValueMapString).entrySet()) {
builder.put(parseKey(entry.getKey()), parseValue(entry.getValue()));
}
}
return processMap(builder.build());
}
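  // Illustrative behaviour (example values, not taken from the original source):
  // with a StringToIntegerMap, convert("en=2,ja=4") yields {en=2, ja=4}, while
  // convert(null) or convert("") yields an empty map.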
/** Combined converter and validator class for string-to-string Map argument strings. */
public static class StringToStringMap extends KeyValueMapParameter<String, String> {
@Override
protected String parseKey(String rawKey) {
return rawKey;
}
@Override
protected String parseValue(String value) {
return value;
}
}
/** Combined converter and validator class for string-to-integer Map argument strings. */
public static class StringToIntegerMap extends KeyValueMapParameter<String, Integer> {
@Override
protected String parseKey(String rawKey) {
return rawKey;
}
@Override
protected Integer parseValue(String value) {
return Integer.parseInt(value);
}
}
/** Combined converter and validator class for currency unit-to-string Map argument strings. */
public static class CurrencyUnitToStringMap extends KeyValueMapParameter<CurrencyUnit, String> {
@Override
protected CurrencyUnit parseKey(String rawKey) {
return CurrencyUnit.of(rawKey);
}
@Override
protected String parseValue(String value) {
return value;
}
}
}
| 1,213 |
812 | package edu.stanford.nlp.sempre.interactive.test;
import static org.testng.AssertJUnit.assertEquals;
import java.util.*;
import java.util.function.Predicate;
import fig.basic.*;
import edu.stanford.nlp.sempre.*;
import edu.stanford.nlp.sempre.Parser.Spec;
import edu.stanford.nlp.sempre.interactive.InteractiveBeamParser;
import edu.stanford.nlp.sempre.interactive.DALExecutor;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.collections.Lists;
/**
* Test the parser, and some floating examples
* @author sidaw
*/
@Test(groups = { "InteractiveLearning" })
public class FloatingParsingTest {
Predicate<Example> contains(String formula) {
Formula answer = Formulas.fromLispTree(LispTree.proto.parseFromString(formula));
return e -> e.predDerivations.stream().anyMatch(d -> d.formula.equals(answer));
}
Predicate<Example> moreThan(int count) {
return e -> e.predDerivations.size() > count;
}
Predicate<Example> hasAll(String...substrings) {
return new Predicate<Example>() {
List<String> required = Lists.newArrayList(substrings);
@Override
public boolean test(Example e) {
int match = 0;
for (Derivation deriv : e.predDerivations) {
String formula = deriv.formula.toString();
if (required.stream().anyMatch(s -> formula.indexOf(s)!=-1)) {
match ++;
LogInfo.log("Got a match: " + formula);
}
}
if (match == 0)
throw new RuntimeException("Failed to match " + required.toString() + " : " + e.utterance);
return true;
}
};
}
private static Spec defaultSpec() {
FloatingParser.opts.defaultIsFloating = true;
DALExecutor.opts.convertNumberValues = true;
DALExecutor.opts.printStackTrace = true;
DALExecutor.opts.worldType = "VoxelWorld";
Grammar.opts.inPaths = Lists.newArrayList("./interactive/voxelurn.grammar");
Grammar.opts.useApplyFn = "interactive.ApplyFn";
Grammar.opts.binarizeRules = false;
DALExecutor executor = new DALExecutor();
DALExecutor.opts.worldType = "BlocksWorld";
FeatureExtractor extractor = new FeatureExtractor(executor);
FeatureExtractor.opts.featureDomains.add("rule");
ValueEvaluator valueEvaluator = new ExactValueEvaluator();
Grammar grammar = new Grammar();
grammar.read();
grammar.write();
return new Parser.Spec(grammar, extractor, executor, valueEvaluator);
}
protected static void parse(String beamUtt, String floatUtt, ContextValue context, Predicate<Example> checker) {
LogInfo.begin_track("Cannonical: %s\t Float: %s", beamUtt, floatUtt);
Example.Builder b = new Example.Builder();
b.setId("session:test");
b.setUtterance(floatUtt);
b.setContext(context);
Example ex = b.createExample();
ex.preprocess();
Spec defSpec = defaultSpec();
Parser parser = new InteractiveBeamParser(defSpec);
ParserState state = parser.parse(new Params(), ex, false);
LogInfo.end_track();
// Add the floating parser and check?
if (checker != null) {
if (!checker.test(ex)) {
Assert.fail(floatUtt);
}
}
}
private static ContextValue getContext(String blocks) {
// a hack to pass in the world state without much change to the code
String strigify2 = Json.writeValueAsStringHard(blocks); // some parsing issue inside lisptree parser
return ContextValue.fromString(String.format("(context (graph NaiveKnowledgeGraph ((string \"%s\") (name b) (name c))))", strigify2));
}
public void basicTest() {
String defaultBlocks = "[[1,1,1,\"Green\",[]],[1,2,1,\"Blue\",[]],[2,2,1,\"Red\",[]],[3,2,2,\"Yellow\",[]]]";
ContextValue context = getContext(defaultBlocks);
LogInfo.begin_track("testJoin");
parse("select all", "select all", context, contains("(: select *)"));
// parse("select has color red", "red blocks", context, contains("(:for (color red) (: select))"));
parse("select has color red", "red blocks", context, hasAll("(color red)", "(: select)", ":foreach"));
parse("add red top", "add some to top of red blocks", context, contains("(: add red top)"));
parse("for has color red [ remove ]", "remove red blocks", context, contains("(:foreach (color red) (: remove))"));
parse("repeat 3 [add red]", "add red 3 times", context, contains("(:loop (number 3) (: add red))"));
parse("for has color red [ add yellow top ]", "add red to top of yellow", context, moreThan(0));
// parse("select has row 3", "select row 3", context, moreThan(0));
// parse("select has color red or has color green", "select red and green", context, contains("(:for (or (color red) (color green)) (: select))"));
// parse("select has color red or has color green", "select red or green", context, contains("(:for (or (color red) (color green)) (: select))"));
parse("remove has color red ; remove has color blue", "remove red then remove blue", context, moreThan(0));
LogInfo.end_track();
}
public void advanced() {
String defaultBlocks = "[[1,1,1,\"Green\",[]],[1,2,1,\"Blue\",[]],[2,2,1,\"Red\",[]],[3,2,2,\"Yellow\",[]]]";
ContextValue context = getContext(defaultBlocks);
LogInfo.begin_track("testJoin");
parse("repeat 4 [add yellow]", "add 4 yellow blocks", context, hasAll("(:loop", "(number 4)", "(color yellow)"));
parse("repeat 4 [for has color red [ add yellow left ] ]", "put 4 yellow left of red", context, hasAll(":for", "red", "left"));
parse("", "put 4 yellow to the left of red", context, hasAll(":foreach", "red", "left"));
parse("", "select has color red or has color green", context, hasAll("(color red)", "(color green)", "(: select)"));
parse("", "add red then add green and then add yellow", context, hasAll("(color red)", "(color green)", "(color yellow)"));
parse("", "add red then add green and then add yellow", context, hasAll("(: add", "(color red)", "(color green)", "(color yellow)"));
parse("", "remove the very left yellow block", context, moreThan(0));
parse("", "add red top then add yellow then add green", context, moreThan(0));
parse("", "add 4 yellow to red or green", context, moreThan(0));
// might be manageable with projectivity
parse("", "add 4 yellow to the left of red or green", context, moreThan(0));
parse("", "repeat 3 [delete top of all]", context, moreThan(0));
parse("", "repeat 3 [delete top of all]", context, moreThan(0));
// parse("", "add 3 red to left", context, hasAll("(:loop (number 3) (: add red left))"));
// parse("repeat 5 [ add red left ]", "add 5 red left", context, hasAll("(:loop (number 5) (: add red left))"));
LogInfo.end_track();
}
// things we won't handle
public void outOfScope() {
String defaultBlocks = "[[1,1,1,\"Green\",[]],[1,2,1,\"Blue\",[]],[2,2,1,\"Red\",[]],[3,2,2,\"Yellow\",[]]]";
ContextValue context = getContext(defaultBlocks);
LogInfo.begin_track("testJoin");
parse("", "repeat 3 [ repeat 3 [delete very top of all] ]", context, moreThan(0));
LogInfo.end_track();
}
}
| 2,526 |
318 | <reponame>adam11grafik/maven-surefire
package org.apache.maven.surefire.its;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import com.googlecode.junittoolbox.ParallelParameterized;
import org.apache.maven.surefire.its.fixture.OutputValidator;
import org.apache.maven.surefire.its.fixture.SurefireJUnit4IntegrationTestCase;
import org.apache.maven.surefire.its.fixture.SurefireLauncher;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
/**
* Test project using -Dtest=mtClass#myMethod
*
* @author <NAME>
*/
@RunWith( ParallelParameterized.class )
public class TestMethodPatternIT
extends SurefireJUnit4IntegrationTestCase
{
private static final String RUNNING_WITH_PROVIDER47 = "parallel='none', perCoreThreadCount=true, threadCount=0";
private static final String LEGACY_FORK_NODE =
"org.apache.maven.plugin.surefire.extensions.LegacyForkNodeFactory";
private static final String SUREFIRE_FORK_NODE =
"org.apache.maven.plugin.surefire.extensions.SurefireForkNodeFactory";
@Parameters
public static Iterable<Object[]> data()
{
ArrayList<Object[]> args = new ArrayList<>();
args.add( new Object[] { "tcp" } );
args.add( new Object[] { null } );
return args;
}
@Parameter
@SuppressWarnings( "checkstyle:visibilitymodifier" )
public String profileId;
private OutputValidator runMethodPattern( String projectName, Map<String, String> props, String... goals )
throws Exception
{
SurefireLauncher launcher = unpack( projectName, profileId == null ? "" : "-" + profileId );
if ( profileId != null )
{
launcher.activateProfile( profileId );
}
for ( Entry<String, String> entry : props.entrySet() )
{
launcher.sysProp( entry.getKey(), entry.getValue() );
}
for ( String goal : goals )
{
launcher.addGoal( goal );
}
String cls = profileId == null ? LEGACY_FORK_NODE : SUREFIRE_FORK_NODE;
return launcher.showErrorStackTraces().debugLogging()
.executeTest()
.assertTestSuiteResults( 2, 0, 0, 0 )
.assertThatLogLine(
containsString( "Found implementation of fork node factory: " + cls ),
equalTo( 1 ) );
}
@Test
public void testJUnit44()
throws Exception
{
runMethodPattern( "junit44-method-pattern", Collections.<String, String>emptyMap() );
}
@Test
public void testJUnit48Provider4()
throws Exception
{
runMethodPattern( "junit48-method-pattern", Collections.<String, String>emptyMap(), "-P surefire-junit4" );
}
@Test
public void testJUnit48Provider47()
throws Exception
{
runMethodPattern( "junit48-method-pattern", Collections.<String, String>emptyMap(), "-P surefire-junit47" )
.verifyTextInLog( RUNNING_WITH_PROVIDER47 );
}
@Test
public void testJUnit48WithCategoryFilter() throws Exception
{
String cls = profileId == null ? LEGACY_FORK_NODE : SUREFIRE_FORK_NODE;
SurefireLauncher launcher = unpack( "junit48-method-pattern", profileId == null ? "" : "-" + profileId );
if ( profileId != null )
{
launcher.activateProfile( profileId );
}
launcher.debugLogging()
.addGoal( "-Dgroups=junit4.SampleCategory" )
.executeTest()
.assertTestSuiteResults( 1, 0, 0, 0 )
.assertThatLogLine(
containsString( "Found implementation of fork node factory: " + cls ),
equalTo( 1 ) );
}
@Test
public void testTestNgMethodBefore()
throws Exception
{
Map<String, String> props = new HashMap<>();
props.put( "testNgVersion", "5.7" );
props.put( "testNgClassifier", "jdk15" );
runMethodPattern( "testng-method-pattern-before", props );
}
@Test
public void testTestNGMethodPattern()
throws Exception
{
Map<String, String> props = new HashMap<>();
props.put( "testNgVersion", "5.7" );
props.put( "testNgClassifier", "jdk15" );
runMethodPattern( "/testng-method-pattern", props );
}
@Test
public void testMethodPatternAfter() throws Exception
{
String cls = profileId == null ? LEGACY_FORK_NODE : SUREFIRE_FORK_NODE;
SurefireLauncher launcher = unpack( "testng-method-pattern-after", profileId == null ? "" : "-" + profileId );
if ( profileId != null )
{
launcher.activateProfile( profileId );
}
launcher.debugLogging()
.sysProp( "testNgVersion", "5.7" )
.sysProp( "testNgClassifier", "jdk15" )
.executeTest()
.verifyErrorFree( 2 )
.verifyTextInLog( "Called tearDown" )
.assertThatLogLine(
containsString( "Found implementation of fork node factory: " + cls ),
equalTo( 1 ) );
}
}
| 2,448 |
450 | package com.zhizus.forest.controller;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/**
* Created by Dempe on 2016/12/29.
*/
@Controller
@RequestMapping("/configuration")
public class ConfigurationController {
}
| 83 |
6,904 | <reponame>wonism/anypixel
{
"name": "anypixel-emulator",
"version": "1.0.0",
"main": "index.js",
"license" : "Apache-2.0",
"scripts": {
"build": "browserify -d src/emulator.js > index.js"
}
}
| 95 |
1,338 | /*
* Copyright 2017 Haiku, Inc. All rights reserved.
* Distributed under the terms of the MIT License.
*/
#ifndef _BSD_ENDIAN_H_
#define _BSD_ENDIAN_H_
#include_next <endian.h>
#include <features.h>
#ifdef _DEFAULT_SOURCE
#include <config/HaikuConfig.h>
#include <support/ByteOrder.h>
#include <support/SupportDefs.h>
#ifdef __cplusplus
extern "C" {
#endif
/*
* General byte order swapping functions.
*/
#define bswap16(x) __swap_int16(x)
#define bswap32(x) __swap_int32(x)
#define bswap64(x) __swap_int64(x)
/*
* Host to big endian, host to little endian, big endian to host, and little
* endian to host byte order functions as detailed in byteorder(9).
*/
#if BYTE_ORDER == LITTLE_ENDIAN
#define htobe16(x) bswap16((x))
#define htobe32(x) bswap32((x))
#define htobe64(x) bswap64((x))
#define htole16(x) ((uint16_t)(x))
#define htole32(x) ((uint32_t)(x))
#define htole64(x) ((uint64_t)(x))
#define be16toh(x) bswap16((x))
#define be32toh(x) bswap32((x))
#define be64toh(x) bswap64((x))
#define le16toh(x) ((uint16_t)(x))
#define le32toh(x) ((uint32_t)(x))
#define le64toh(x) ((uint64_t)(x))
#else /* BYTE_ORDER != LITTLE_ENDIAN */
#define htobe16(x) ((uint16_t)(x))
#define htobe32(x) ((uint32_t)(x))
#define htobe64(x) ((uint64_t)(x))
#define htole16(x) bswap16((x))
#define htole32(x) bswap32((x))
#define htole64(x) bswap64((x))
#define be16toh(x) ((uint16_t)(x))
#define be32toh(x) ((uint32_t)(x))
#define be64toh(x) ((uint64_t)(x))
#define le16toh(x) bswap16((x))
#define le32toh(x) bswap32((x))
#define le64toh(x) bswap64((x))
#endif /* BYTE_ORDER == LITTLE_ENDIAN */
/* Alignment-agnostic encode/decode bytestream to/from little/big endian. */
static __inline uint32_t
be32dec(const void *pp)
{
uint8_t const *p = (uint8_t const *)pp;
return (((unsigned)p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3]);
}
static __inline uint64_t
be64dec(const void *pp)
{
uint8_t const *p = (uint8_t const *)pp;
return (((uint64_t)be32dec(p) << 32) | be32dec(p + 4));
}
static __inline void
be32enc(void *pp, uint32_t u)
{
uint8_t *p = (uint8_t *)pp;
p[0] = (u >> 24) & 0xff;
p[1] = (u >> 16) & 0xff;
p[2] = (u >> 8) & 0xff;
p[3] = u & 0xff;
}
static __inline void
be64enc(void *pp, uint64_t u)
{
uint8_t *p = (uint8_t *)pp;
be32enc(p, (uint32_t)(u >> 32));
be32enc(p + 4, (uint32_t)(u & 0xffffffffU));
}
#ifdef __cplusplus
}
#endif
#endif
#endif /* _BSD_ENDIAN_H_ */
| 1,164 |
348 | <filename>docs/data/leg-t2/078/07806367.json
{"nom":"Mareil-Marly","circ":"6ème circonscription","dpt":"Yvelines","inscrits":2609,"abs":1070,"votants":1539,"blancs":48,"nuls":9,"exp":1482,"res":[{"nuance":"REM","nom":"<NAME>","voix":856},{"nuance":"LR","nom":"<NAME>","voix":626}]} | 118 |
653 | //===- ArithmeticOps.cpp - MLIR Arithmetic dialect ops implementation -----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "mlir/Dialect/Arithmetic/IR/Arithmetic.h"
#include "mlir/Dialect/CommonFolders.h"
#include "mlir/IR/Builders.h"
#include "mlir/IR/Matchers.h"
#include "mlir/IR/OpImplementation.h"
#include "mlir/IR/PatternMatch.h"
#include "mlir/IR/TypeUtilities.h"
using namespace mlir;
using namespace mlir::arith;
//===----------------------------------------------------------------------===//
// Pattern helpers
//===----------------------------------------------------------------------===//
static IntegerAttr addIntegerAttrs(PatternRewriter &builder, Value res,
Attribute lhs, Attribute rhs) {
return builder.getIntegerAttr(res.getType(),
lhs.cast<IntegerAttr>().getInt() +
rhs.cast<IntegerAttr>().getInt());
}
static IntegerAttr subIntegerAttrs(PatternRewriter &builder, Value res,
Attribute lhs, Attribute rhs) {
return builder.getIntegerAttr(res.getType(),
lhs.cast<IntegerAttr>().getInt() -
rhs.cast<IntegerAttr>().getInt());
}
/// Invert an integer comparison predicate.
static arith::CmpIPredicate invertPredicate(arith::CmpIPredicate pred) {
switch (pred) {
case arith::CmpIPredicate::eq:
return arith::CmpIPredicate::ne;
case arith::CmpIPredicate::ne:
return arith::CmpIPredicate::eq;
case arith::CmpIPredicate::slt:
return arith::CmpIPredicate::sge;
case arith::CmpIPredicate::sle:
return arith::CmpIPredicate::sgt;
case arith::CmpIPredicate::sgt:
return arith::CmpIPredicate::sle;
case arith::CmpIPredicate::sge:
return arith::CmpIPredicate::slt;
case arith::CmpIPredicate::ult:
return arith::CmpIPredicate::uge;
case arith::CmpIPredicate::ule:
return arith::CmpIPredicate::ugt;
case arith::CmpIPredicate::ugt:
return arith::CmpIPredicate::ule;
case arith::CmpIPredicate::uge:
return arith::CmpIPredicate::ult;
}
llvm_unreachable("unknown cmpi predicate kind");
}
static arith::CmpIPredicateAttr invertPredicate(arith::CmpIPredicateAttr pred) {
return arith::CmpIPredicateAttr::get(pred.getContext(),
invertPredicate(pred.getValue()));
}
//===----------------------------------------------------------------------===//
// TableGen'd canonicalization patterns
//===----------------------------------------------------------------------===//
namespace {
#include "ArithmeticCanonicalization.inc"
} // end anonymous namespace
//===----------------------------------------------------------------------===//
// AddIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::AddIOp::fold(ArrayRef<Attribute> operands) {
// addi(x, 0) -> x
if (matchPattern(rhs(), m_Zero()))
return lhs();
return constFoldBinaryOp<IntegerAttr>(operands,
[](APInt a, APInt b) { return a + b; });
}
void arith::AddIOp::getCanonicalizationPatterns(
OwningRewritePatternList &patterns, MLIRContext *context) {
patterns.insert<AddIAddConstant, AddISubConstantRHS, AddISubConstantLHS>(
context);
}
//===----------------------------------------------------------------------===//
// SubIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::SubIOp::fold(ArrayRef<Attribute> operands) {
// subi(x,x) -> 0
if (getOperand(0) == getOperand(1))
return Builder(getContext()).getZeroAttr(getType());
// subi(x,0) -> x
if (matchPattern(rhs(), m_Zero()))
return lhs();
return constFoldBinaryOp<IntegerAttr>(operands,
[](APInt a, APInt b) { return a - b; });
}
void arith::SubIOp::getCanonicalizationPatterns(
OwningRewritePatternList &patterns, MLIRContext *context) {
patterns.insert<SubIRHSAddConstant, SubILHSAddConstant, SubIRHSSubConstantRHS,
SubIRHSSubConstantLHS, SubILHSSubConstantRHS,
SubILHSSubConstantLHS>(context);
}
//===----------------------------------------------------------------------===//
// MulIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::MulIOp::fold(ArrayRef<Attribute> operands) {
// muli(x, 0) -> 0
if (matchPattern(rhs(), m_Zero()))
return rhs();
// muli(x, 1) -> x
if (matchPattern(rhs(), m_One()))
return getOperand(0);
// TODO: Handle the overflow case.
// default folder
return constFoldBinaryOp<IntegerAttr>(operands,
[](APInt a, APInt b) { return a * b; });
}
//===----------------------------------------------------------------------===//
// DivUIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::DivUIOp::fold(ArrayRef<Attribute> operands) {
// Don't fold if it would require a division by zero.
bool div0 = false;
auto result = constFoldBinaryOp<IntegerAttr>(operands, [&](APInt a, APInt b) {
if (div0 || !b) {
div0 = true;
return a;
}
return a.udiv(b);
});
// Fold out division by one. Assumes all tensors of all ones are splats.
if (auto rhs = operands[1].dyn_cast_or_null<IntegerAttr>()) {
if (rhs.getValue() == 1)
return lhs();
} else if (auto rhs = operands[1].dyn_cast_or_null<SplatElementsAttr>()) {
if (rhs.getSplatValue<IntegerAttr>().getValue() == 1)
return lhs();
}
return div0 ? Attribute() : result;
}
//===----------------------------------------------------------------------===//
// DivSIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::DivSIOp::fold(ArrayRef<Attribute> operands) {
// Don't fold if it would overflow or if it requires a division by zero.
bool overflowOrDiv0 = false;
auto result = constFoldBinaryOp<IntegerAttr>(operands, [&](APInt a, APInt b) {
if (overflowOrDiv0 || !b) {
overflowOrDiv0 = true;
return a;
}
return a.sdiv_ov(b, overflowOrDiv0);
});
// Fold out division by one. Assumes all tensors of all ones are splats.
if (auto rhs = operands[1].dyn_cast_or_null<IntegerAttr>()) {
if (rhs.getValue() == 1)
return lhs();
} else if (auto rhs = operands[1].dyn_cast_or_null<SplatElementsAttr>()) {
if (rhs.getSplatValue<IntegerAttr>().getValue() == 1)
return lhs();
}
return overflowOrDiv0 ? Attribute() : result;
}
//===----------------------------------------------------------------------===//
// Ceil and floor division folding helpers
//===----------------------------------------------------------------------===//
static APInt signedCeilNonnegInputs(APInt a, APInt b, bool &overflow) {
// Returns (a-1)/b + 1
APInt one(a.getBitWidth(), 1, true); // Signed value 1.
APInt val = a.ssub_ov(one, overflow).sdiv_ov(b, overflow);
return val.sadd_ov(one, overflow);
}
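// Worked example (illustrative): for a = 7, b = 2 the helper evaluates
// (7 - 1) / 2 + 1 = 4, i.e. ceil(7 / 2), with any overflow reported through
// the `overflow` flag rather than being silently wrapped.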
//===----------------------------------------------------------------------===//
// CeilDivSIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::CeilDivSIOp::fold(ArrayRef<Attribute> operands) {
// Don't fold if it would overflow or if it requires a division by zero.
bool overflowOrDiv0 = false;
auto result = constFoldBinaryOp<IntegerAttr>(operands, [&](APInt a, APInt b) {
if (overflowOrDiv0 || !b) {
overflowOrDiv0 = true;
return a;
}
unsigned bits = a.getBitWidth();
APInt zero = APInt::getZero(bits);
if (a.sgt(zero) && b.sgt(zero)) {
// Both positive, return ceil(a, b).
return signedCeilNonnegInputs(a, b, overflowOrDiv0);
}
if (a.slt(zero) && b.slt(zero)) {
// Both negative, return ceil(-a, -b).
APInt posA = zero.ssub_ov(a, overflowOrDiv0);
APInt posB = zero.ssub_ov(b, overflowOrDiv0);
return signedCeilNonnegInputs(posA, posB, overflowOrDiv0);
}
if (a.slt(zero) && b.sgt(zero)) {
// A is negative, b is positive, return - ( -a / b).
APInt posA = zero.ssub_ov(a, overflowOrDiv0);
APInt div = posA.sdiv_ov(b, overflowOrDiv0);
return zero.ssub_ov(div, overflowOrDiv0);
}
// A is positive (or zero), b is negative, return - (a / -b).
APInt posB = zero.ssub_ov(b, overflowOrDiv0);
APInt div = a.sdiv_ov(posB, overflowOrDiv0);
return zero.ssub_ov(div, overflowOrDiv0);
});
  // Fold out ceil division by one. Assumes all tensors of all ones are
// splats.
if (auto rhs = operands[1].dyn_cast_or_null<IntegerAttr>()) {
if (rhs.getValue() == 1)
return lhs();
} else if (auto rhs = operands[1].dyn_cast_or_null<SplatElementsAttr>()) {
if (rhs.getSplatValue<IntegerAttr>().getValue() == 1)
return lhs();
}
return overflowOrDiv0 ? Attribute() : result;
}
//===----------------------------------------------------------------------===//
// FloorDivSIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::FloorDivSIOp::fold(ArrayRef<Attribute> operands) {
// Don't fold if it would overflow or if it requires a division by zero.
bool overflowOrDiv0 = false;
auto result = constFoldBinaryOp<IntegerAttr>(operands, [&](APInt a, APInt b) {
if (overflowOrDiv0 || !b) {
overflowOrDiv0 = true;
return a;
}
unsigned bits = a.getBitWidth();
APInt zero = APInt::getZero(bits);
if (a.sge(zero) && b.sgt(zero)) {
// Both positive (or a is zero), return a / b.
return a.sdiv_ov(b, overflowOrDiv0);
}
if (a.sle(zero) && b.slt(zero)) {
// Both negative (or a is zero), return -a / -b.
APInt posA = zero.ssub_ov(a, overflowOrDiv0);
APInt posB = zero.ssub_ov(b, overflowOrDiv0);
return posA.sdiv_ov(posB, overflowOrDiv0);
}
if (a.slt(zero) && b.sgt(zero)) {
// A is negative, b is positive, return - ceil(-a, b).
APInt posA = zero.ssub_ov(a, overflowOrDiv0);
APInt ceil = signedCeilNonnegInputs(posA, b, overflowOrDiv0);
return zero.ssub_ov(ceil, overflowOrDiv0);
}
// A is positive, b is negative, return - ceil(a, -b).
APInt posB = zero.ssub_ov(b, overflowOrDiv0);
APInt ceil = signedCeilNonnegInputs(a, posB, overflowOrDiv0);
return zero.ssub_ov(ceil, overflowOrDiv0);
});
// Fold out floor division by one. Assumes all tensors of all ones are
// splats.
if (auto rhs = operands[1].dyn_cast_or_null<IntegerAttr>()) {
if (rhs.getValue() == 1)
return lhs();
} else if (auto rhs = operands[1].dyn_cast_or_null<SplatElementsAttr>()) {
if (rhs.getSplatValue<IntegerAttr>().getValue() == 1)
return lhs();
}
return overflowOrDiv0 ? Attribute() : result;
}
//===----------------------------------------------------------------------===//
// RemUIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::RemUIOp::fold(ArrayRef<Attribute> operands) {
auto rhs = operands.back().dyn_cast_or_null<IntegerAttr>();
if (!rhs)
return {};
auto rhsValue = rhs.getValue();
// x % 1 = 0
if (rhsValue.isOneValue())
return IntegerAttr::get(rhs.getType(), APInt(rhsValue.getBitWidth(), 0));
// Don't fold if it requires division by zero.
if (rhsValue.isNullValue())
return {};
auto lhs = operands.front().dyn_cast_or_null<IntegerAttr>();
if (!lhs)
return {};
return IntegerAttr::get(lhs.getType(), lhs.getValue().urem(rhsValue));
}
//===----------------------------------------------------------------------===//
// RemSIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::RemSIOp::fold(ArrayRef<Attribute> operands) {
auto rhs = operands.back().dyn_cast_or_null<IntegerAttr>();
if (!rhs)
return {};
auto rhsValue = rhs.getValue();
// x % 1 = 0
if (rhsValue.isOneValue())
return IntegerAttr::get(rhs.getType(), APInt(rhsValue.getBitWidth(), 0));
// Don't fold if it requires division by zero.
if (rhsValue.isNullValue())
return {};
auto lhs = operands.front().dyn_cast_or_null<IntegerAttr>();
if (!lhs)
return {};
return IntegerAttr::get(lhs.getType(), lhs.getValue().srem(rhsValue));
}
//===----------------------------------------------------------------------===//
// AndIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::AndIOp::fold(ArrayRef<Attribute> operands) {
/// and(x, 0) -> 0
if (matchPattern(rhs(), m_Zero()))
return rhs();
/// and(x, allOnes) -> x
APInt intValue;
if (matchPattern(rhs(), m_ConstantInt(&intValue)) && intValue.isAllOnes())
return lhs();
/// and(x, x) -> x
if (lhs() == rhs())
return rhs();
return constFoldBinaryOp<IntegerAttr>(operands,
[](APInt a, APInt b) { return a & b; });
}
//===----------------------------------------------------------------------===//
// OrIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::OrIOp::fold(ArrayRef<Attribute> operands) {
/// or(x, 0) -> x
if (matchPattern(rhs(), m_Zero()))
return lhs();
/// or(x, x) -> x
if (lhs() == rhs())
return rhs();
return constFoldBinaryOp<IntegerAttr>(operands,
[](APInt a, APInt b) { return a | b; });
}
//===----------------------------------------------------------------------===//
// XOrIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::XOrIOp::fold(ArrayRef<Attribute> operands) {
/// xor(x, 0) -> x
if (matchPattern(rhs(), m_Zero()))
return lhs();
/// xor(x, x) -> 0
if (lhs() == rhs())
return Builder(getContext()).getZeroAttr(getType());
return constFoldBinaryOp<IntegerAttr>(operands,
[](APInt a, APInt b) { return a ^ b; });
}
void arith::XOrIOp::getCanonicalizationPatterns(
OwningRewritePatternList &patterns, MLIRContext *context) {
patterns.insert<XOrINotCmpI>(context);
}
//===----------------------------------------------------------------------===//
// AddFOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::AddFOp::fold(ArrayRef<Attribute> operands) {
return constFoldBinaryOp<FloatAttr>(
operands, [](APFloat a, APFloat b) { return a + b; });
}
//===----------------------------------------------------------------------===//
// SubFOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::SubFOp::fold(ArrayRef<Attribute> operands) {
return constFoldBinaryOp<FloatAttr>(
operands, [](APFloat a, APFloat b) { return a - b; });
}
//===----------------------------------------------------------------------===//
// MulFOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::MulFOp::fold(ArrayRef<Attribute> operands) {
return constFoldBinaryOp<FloatAttr>(
operands, [](APFloat a, APFloat b) { return a * b; });
}
//===----------------------------------------------------------------------===//
// DivFOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::DivFOp::fold(ArrayRef<Attribute> operands) {
return constFoldBinaryOp<FloatAttr>(
operands, [](APFloat a, APFloat b) { return a / b; });
}
//===----------------------------------------------------------------------===//
// Verifiers for integer and floating point extension/truncation ops
//===----------------------------------------------------------------------===//
// Extend ops can only extend to a wider type.
template <typename ValType, typename Op>
static LogicalResult verifyExtOp(Op op) {
Type srcType = getElementTypeOrSelf(op.in().getType());
Type dstType = getElementTypeOrSelf(op.getType());
if (srcType.cast<ValType>().getWidth() >= dstType.cast<ValType>().getWidth())
return op.emitError("result type ")
<< dstType << " must be wider than operand type " << srcType;
return success();
}
// Truncate ops can only truncate to a shorter type.
template <typename ValType, typename Op>
static LogicalResult verifyTruncateOp(Op op) {
Type srcType = getElementTypeOrSelf(op.in().getType());
Type dstType = getElementTypeOrSelf(op.getType());
if (srcType.cast<ValType>().getWidth() <= dstType.cast<ValType>().getWidth())
return op.emitError("result type ")
<< dstType << " must be shorter than operand type " << srcType;
return success();
}
//===----------------------------------------------------------------------===//
// ExtUIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::ExtUIOp::fold(ArrayRef<Attribute> operands) {
if (auto lhs = operands[0].dyn_cast_or_null<IntegerAttr>())
return IntegerAttr::get(
getType(), lhs.getValue().zext(getType().getIntOrFloatBitWidth()));
return {};
}
//===----------------------------------------------------------------------===//
// ExtSIOp
//===----------------------------------------------------------------------===//
OpFoldResult arith::ExtSIOp::fold(ArrayRef<Attribute> operands) {
if (auto lhs = operands[0].dyn_cast_or_null<IntegerAttr>())
return IntegerAttr::get(
getType(), lhs.getValue().sext(getType().getIntOrFloatBitWidth()));
return {};
}
// TODO temporary fixes until second patch is in
OpFoldResult arith::TruncFOp::fold(ArrayRef<Attribute> operands) {
return {};
}
bool arith::TruncFOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
OpFoldResult arith::TruncIOp::fold(ArrayRef<Attribute> operands) {
return {};
}
bool arith::TruncIOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
bool arith::ExtUIOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
bool arith::ExtSIOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
bool arith::ExtFOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
OpFoldResult arith::ConstantOp::fold(ArrayRef<Attribute> operands) {
return {};
}
bool arith::SIToFPOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
bool arith::UIToFPOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
bool arith::FPToSIOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
bool arith::FPToUIOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
return true;
}
//===----------------------------------------------------------------------===//
// IndexCastOp
//===----------------------------------------------------------------------===//
bool arith::IndexCastOp::areCastCompatible(TypeRange inputs,
TypeRange outputs) {
assert(inputs.size() == 1 && outputs.size() == 1 &&
"index_cast op expects one result and one result");
// Shape equivalence is guaranteed by op traits.
auto srcType = getElementTypeOrSelf(inputs.front());
auto dstType = getElementTypeOrSelf(outputs.front());
return (srcType.isIndex() && dstType.isSignlessInteger()) ||
(srcType.isSignlessInteger() && dstType.isIndex());
}
OpFoldResult arith::IndexCastOp::fold(ArrayRef<Attribute> operands) {
// index_cast(constant) -> constant
// A little hack because we go through int. Otherwise, the size of the
// constant might need to change.
if (auto value = operands[0].dyn_cast_or_null<IntegerAttr>())
return IntegerAttr::get(getType(), value.getInt());
return {};
}
void arith::IndexCastOp::getCanonicalizationPatterns(
OwningRewritePatternList &patterns, MLIRContext *context) {
patterns.insert<IndexCastOfIndexCast, IndexCastOfExtSI>(context);
}
//===----------------------------------------------------------------------===//
// BitcastOp
//===----------------------------------------------------------------------===//
bool arith::BitcastOp::areCastCompatible(TypeRange inputs, TypeRange outputs) {
assert(inputs.size() == 1 && outputs.size() == 1 &&
"bitcast op expects one operand and one result");
// Shape equivalence is guaranteed by op traits.
auto srcType = getElementTypeOrSelf(inputs.front());
auto dstType = getElementTypeOrSelf(outputs.front());
  // Types are guaranteed to be integers or floats by constraints.
return srcType.getIntOrFloatBitWidth() == dstType.getIntOrFloatBitWidth();
}
OpFoldResult arith::BitcastOp::fold(ArrayRef<Attribute> operands) {
assert(operands.size() == 1 && "bitcast op expects 1 operand");
auto resType = getType();
auto operand = operands[0];
if (!operand)
return {};
/// Bitcast dense elements.
if (auto denseAttr = operand.dyn_cast_or_null<DenseElementsAttr>())
return denseAttr.bitcast(resType.cast<ShapedType>().getElementType());
/// Other shaped types unhandled.
if (resType.isa<ShapedType>())
return {};
/// Bitcast integer or float to integer or float.
APInt bits = operand.isa<FloatAttr>()
? operand.cast<FloatAttr>().getValue().bitcastToAPInt()
: operand.cast<IntegerAttr>().getValue();
if (auto resFloatType = resType.dyn_cast<FloatType>())
return FloatAttr::get(resType,
APFloat(resFloatType.getFloatSemantics(), bits));
return IntegerAttr::get(resType, bits);
}
void arith::BitcastOp::getCanonicalizationPatterns(
OwningRewritePatternList &patterns, MLIRContext *context) {
patterns.insert<BitcastOfBitcast>(context);
}
//===----------------------------------------------------------------------===//
// Helpers for compare ops
//===----------------------------------------------------------------------===//
/// Return the type of the same shape (scalar, vector or tensor) containing i1.
static Type getI1SameShape(Type type) {
auto i1Type = IntegerType::get(type.getContext(), 1);
if (auto tensorType = type.dyn_cast<RankedTensorType>())
return RankedTensorType::get(tensorType.getShape(), i1Type);
if (type.isa<UnrankedTensorType>())
return UnrankedTensorType::get(i1Type);
if (auto vectorType = type.dyn_cast<VectorType>())
return VectorType::get(vectorType.getShape(), i1Type);
return i1Type;
}
//===----------------------------------------------------------------------===//
// CmpIOp
//===----------------------------------------------------------------------===//
/// Compute `lhs` `pred` `rhs`, where `pred` is one of the known integer
/// comparison predicates.
bool mlir::arith::applyCmpPredicate(arith::CmpIPredicate predicate,
const APInt &lhs, const APInt &rhs) {
switch (predicate) {
case arith::CmpIPredicate::eq:
return lhs.eq(rhs);
case arith::CmpIPredicate::ne:
return lhs.ne(rhs);
case arith::CmpIPredicate::slt:
return lhs.slt(rhs);
case arith::CmpIPredicate::sle:
return lhs.sle(rhs);
case arith::CmpIPredicate::sgt:
return lhs.sgt(rhs);
case arith::CmpIPredicate::sge:
return lhs.sge(rhs);
case arith::CmpIPredicate::ult:
return lhs.ult(rhs);
case arith::CmpIPredicate::ule:
return lhs.ule(rhs);
case arith::CmpIPredicate::ugt:
return lhs.ugt(rhs);
case arith::CmpIPredicate::uge:
return lhs.uge(rhs);
}
llvm_unreachable("unknown cmpi predicate kind");
}
/// Returns true if the predicate is true for two equal operands.
static bool applyCmpPredicateToEqualOperands(arith::CmpIPredicate predicate) {
switch (predicate) {
case arith::CmpIPredicate::eq:
case arith::CmpIPredicate::sle:
case arith::CmpIPredicate::sge:
case arith::CmpIPredicate::ule:
case arith::CmpIPredicate::uge:
return true;
case arith::CmpIPredicate::ne:
case arith::CmpIPredicate::slt:
case arith::CmpIPredicate::sgt:
case arith::CmpIPredicate::ult:
case arith::CmpIPredicate::ugt:
return false;
}
llvm_unreachable("unknown cmpi predicate kind");
}
OpFoldResult arith::CmpIOp::fold(ArrayRef<Attribute> operands) {
assert(operands.size() == 2 && "cmpi takes two operands");
// cmpi(pred, x, x)
if (lhs() == rhs()) {
auto val = applyCmpPredicateToEqualOperands(getPredicate());
return BoolAttr::get(getContext(), val);
}
auto lhs = operands.front().dyn_cast_or_null<IntegerAttr>();
auto rhs = operands.back().dyn_cast_or_null<IntegerAttr>();
if (!lhs || !rhs)
return {};
auto val = applyCmpPredicate(getPredicate(), lhs.getValue(), rhs.getValue());
return BoolAttr::get(getContext(), val);
}
//===----------------------------------------------------------------------===//
// CmpFOp
//===----------------------------------------------------------------------===//
/// Compute `lhs` `pred` `rhs`, where `pred` is one of the known floating point
/// comparison predicates.
bool mlir::arith::applyCmpPredicate(arith::CmpFPredicate predicate,
const APFloat &lhs, const APFloat &rhs) {
auto cmpResult = lhs.compare(rhs);
switch (predicate) {
case arith::CmpFPredicate::AlwaysFalse:
return false;
case arith::CmpFPredicate::OEQ:
return cmpResult == APFloat::cmpEqual;
case arith::CmpFPredicate::OGT:
return cmpResult == APFloat::cmpGreaterThan;
case arith::CmpFPredicate::OGE:
return cmpResult == APFloat::cmpGreaterThan ||
cmpResult == APFloat::cmpEqual;
case arith::CmpFPredicate::OLT:
return cmpResult == APFloat::cmpLessThan;
case arith::CmpFPredicate::OLE:
return cmpResult == APFloat::cmpLessThan || cmpResult == APFloat::cmpEqual;
case arith::CmpFPredicate::ONE:
return cmpResult != APFloat::cmpUnordered && cmpResult != APFloat::cmpEqual;
case arith::CmpFPredicate::ORD:
return cmpResult != APFloat::cmpUnordered;
case arith::CmpFPredicate::UEQ:
return cmpResult == APFloat::cmpUnordered || cmpResult == APFloat::cmpEqual;
case arith::CmpFPredicate::UGT:
return cmpResult == APFloat::cmpUnordered ||
cmpResult == APFloat::cmpGreaterThan;
case arith::CmpFPredicate::UGE:
return cmpResult == APFloat::cmpUnordered ||
cmpResult == APFloat::cmpGreaterThan ||
cmpResult == APFloat::cmpEqual;
case arith::CmpFPredicate::ULT:
return cmpResult == APFloat::cmpUnordered ||
cmpResult == APFloat::cmpLessThan;
case arith::CmpFPredicate::ULE:
return cmpResult == APFloat::cmpUnordered ||
cmpResult == APFloat::cmpLessThan || cmpResult == APFloat::cmpEqual;
case arith::CmpFPredicate::UNE:
return cmpResult != APFloat::cmpEqual;
case arith::CmpFPredicate::UNO:
return cmpResult == APFloat::cmpUnordered;
case arith::CmpFPredicate::AlwaysTrue:
return true;
}
llvm_unreachable("unknown cmpf predicate kind");
}
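// Illustrative sketch, not part of the original dialect sources: the ordered
// vs. unordered split above means any comparison involving NaN is false for
// the O* predicates and true for the corresponding U* predicates.
static void cmpfPredicateExample() {
  APFloat nan = APFloat::getNaN(APFloat::IEEEsingle());
  APFloat one(1.0f);
  assert(!arith::applyCmpPredicate(arith::CmpFPredicate::OEQ, nan, one));
  assert(arith::applyCmpPredicate(arith::CmpFPredicate::UNE, nan, one));
  assert(arith::applyCmpPredicate(arith::CmpFPredicate::UNO, nan, one));
  (void)nan;
  (void)one;
}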
OpFoldResult arith::CmpFOp::fold(ArrayRef<Attribute> operands) {
assert(operands.size() == 2 && "cmpf takes two operands");
auto lhs = operands.front().dyn_cast_or_null<FloatAttr>();
auto rhs = operands.back().dyn_cast_or_null<FloatAttr>();
if (!lhs || !rhs)
return {};
auto val = applyCmpPredicate(getPredicate(), lhs.getValue(), rhs.getValue());
return BoolAttr::get(getContext(), val);
}
//===----------------------------------------------------------------------===//
// TableGen'd op method definitions
//===----------------------------------------------------------------------===//
#define GET_OP_CLASSES
#include "mlir/Dialect/Arithmetic/IR/ArithmeticOps.cpp.inc"
//===----------------------------------------------------------------------===//
// TableGen'd enum attribute definitions
//===----------------------------------------------------------------------===//
#include "mlir/Dialect/Arithmetic/IR/ArithmeticOpsEnums.cpp.inc"
| 9,867 |
704 | <filename>monero/external/supercop/crypto_sign/ed25519/amd64-51-30k/ge25519_base.c<gh_stars>100-1000
#include "ge25519.h"
/* Base point in P^3 coordinates (with Z=1) */
const ge25519 ge25519_base = {{{0x00062d608f25d51a, 0x000412a4b4f6592a, 0x00075b7171a4b31d, 0x0001ff60527118fe, 0x000216936d3cd6e5}},
{{0x0006666666666658, 0x0004cccccccccccc, 0x0001999999999999, 0x0003333333333333, 0x0006666666666666}},
{{0x0000000000000001, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000}},
{{0x00068AB3A5B7DDA3, 0x00000EEA2A5EADBB, 0x0002AF8DF483C27E, 0x000332B375274732, 0x00067875F0FD78B7}}};
| 363 |
308 | <gh_stars>100-1000
import unittest
import torch
from torchvision.transforms import Compose, ToTensor
from super_gradients.training.transforms.transforms import Rescale, RandomRescale, CropImageAndMask, PadShortToCropSize
from PIL import Image
from super_gradients.training.datasets.segmentation_datasets.segmentation_dataset import SegmentationDataSet
class SegmentationTransformsTest(unittest.TestCase):
def setUp(self) -> None:
self.default_image_value = 0
self.default_mask_value = 0
def create_sample(self, size):
sample = {
"image": Image.new(mode="RGB", size=size, color=self.default_image_value),
"mask": Image.new(mode="L", size=size, color=self.default_mask_value)
}
return sample
def test_rescale_with_scale_factor(self):
# test raise exception for negative and zero scale factor
kwargs = {"scale_factor": -2}
self.failUnlessRaises(ValueError, Rescale, **kwargs)
kwargs = {"scale_factor": 0}
self.failUnlessRaises(ValueError, Rescale, **kwargs)
# test scale down
sample = self.create_sample((1024, 512))
rescale_scale05 = Rescale(scale_factor=0.5)
out = rescale_scale05(sample)
self.assertEqual((512, 256), out["image"].size)
# test scale up
sample = self.create_sample((1024, 512))
rescale_scale2 = Rescale(scale_factor=2.0)
out = rescale_scale2(sample)
self.assertEqual((2048, 1024), out["image"].size)
# test scale_factor is stronger than other params
sample = self.create_sample((1024, 512))
rescale_scale05 = Rescale(scale_factor=0.5, short_size=300, long_size=600)
out = rescale_scale05(sample)
self.assertEqual((512, 256), out["image"].size)
def test_rescale_with_short_size(self):
# test raise exception for negative and zero short_size
kwargs = {"short_size": 0}
self.failUnlessRaises(ValueError, Rescale, **kwargs)
kwargs = {"short_size": -200}
self.failUnlessRaises(ValueError, Rescale, **kwargs)
# test scale by short size
sample = self.create_sample((1024, 512))
rescale_short256 = Rescale(short_size=256)
out = rescale_short256(sample)
self.assertEqual((512, 256), out["image"].size)
# test short_size is stronger than long_size
sample = self.create_sample((1024, 512))
rescale_scale05 = Rescale(short_size=301, long_size=301)
out = rescale_scale05(sample)
self.assertEqual((602, 301), out["image"].size)
def test_rescale_with_long_size(self):
# test raise exception for negative and zero short_size
kwargs = {"long_size": 0}
self.failUnlessRaises(ValueError, Rescale, **kwargs)
kwargs = {"long_size": -200}
self.failUnlessRaises(ValueError, Rescale, **kwargs)
# test scale by long size
sample = self.create_sample((1024, 512))
rescale_long256 = Rescale(long_size=256)
out = rescale_long256(sample)
self.assertEqual((256, 128), out["image"].size)
def test_random_rescale(self):
# test passing scales argument
random_rescale = RandomRescale(scales=0.1)
self.assertEqual((0.1, 1), random_rescale.scales)
random_rescale = RandomRescale(scales=1.2)
self.assertEqual((1, 1.2), random_rescale.scales)
random_rescale = RandomRescale(scales=(0.5, 1.2))
self.assertEqual((0.5, 1.2), random_rescale.scales)
kwargs = {"scales": -0.5}
self.failUnlessRaises(ValueError, RandomRescale, **kwargs)
# test random rescale
size = [1024, 512]
scales = [0.8, 1.2]
sample = self.create_sample(size)
random_rescale = RandomRescale(scales=(0.8, 1.2))
min_size = [scales[0] * s for s in size]
max_size = [scales[1] * s for s in size]
out = random_rescale(sample)
for i in range(len(min_size)):
self.assertGreaterEqual(out["image"].size[i], min_size[i])
self.assertLessEqual(out["image"].size[i], max_size[i])
def test_padding(self):
# test arguments are valid
pad = PadShortToCropSize(crop_size=200)
self.assertEqual((200, 200), pad.crop_size)
kwargs = {"crop_size": (0, 200)}
self.failUnlessRaises(ValueError, PadShortToCropSize, **kwargs)
kwargs = {"crop_size": 200, "fill_image": 256}
self.failUnlessRaises(ValueError, PadShortToCropSize, **kwargs)
kwargs = {"crop_size": 200, "fill_mask": 256}
self.failUnlessRaises(ValueError, PadShortToCropSize, **kwargs)
in_size = (512, 256)
out_size = (512, 512)
sample = self.create_sample(in_size)
padding = PadShortToCropSize(crop_size=out_size)
out = padding(sample)
self.assertEqual(out_size, out["image"].size)
# pad to odd size
out_size = (512, 501)
sample = self.create_sample(in_size)
padding = PadShortToCropSize(crop_size=out_size)
out = padding(sample)
self.assertEqual(out_size, out["image"].size)
def test_padding_fill_values(self):
image_to_tensor = ToTensor()
# test fill mask
in_size = (256, 128)
out_size = (256, 256)
# padding fill values
fill_mask_value = 32
fill_image_value = 127
sample = self.create_sample(in_size)
padding = PadShortToCropSize(crop_size=out_size, fill_mask=fill_mask_value, fill_image=fill_image_value)
out = padding(sample)
out_mask = SegmentationDataSet.target_transform(out["mask"])
# same as SegmentationDataset transform just without normalization to easily keep track of values.
out_image = image_to_tensor(out["image"])
# test transformed mask values
original_values = out_mask[128 // 2:-128 // 2].unique().tolist()
pad_values = torch.cat([out_mask[:128 // 2], out_mask[-128 // 2:]], dim=0).unique().tolist()
self.assertEqual(len(original_values), 1)
self.assertEqual(original_values[0], self.default_mask_value)
self.assertEqual(len(pad_values), 1)
self.assertEqual(pad_values[0], fill_mask_value)
# test transformed image values
original_values = out_image[:, 128 // 2:-128 // 2].unique().tolist()
pad_values = torch.cat([out_image[:, :128 // 2], out_image[:, -128 // 2:]], dim=1).unique().tolist()
self.assertEqual(len(original_values), 1)
self.assertEqual(original_values[0], self.default_image_value)
self.assertEqual(len(pad_values), 1)
self.assertAlmostEqual(pad_values[0], fill_image_value / 255, delta=1e-5)
def test_crop(self):
# test arguments are valid
pad = CropImageAndMask(crop_size=200, mode="center")
self.assertEqual((200, 200), pad.crop_size)
kwargs = {"crop_size": (0, 200), "mode": "random"}
self.failUnlessRaises(ValueError, CropImageAndMask, **kwargs)
# test unsupported mode
kwargs = {"crop_size": (200, 200), "mode": "deci"}
self.failUnlessRaises(ValueError, CropImageAndMask, **kwargs)
in_size = (1024, 512)
out_size = (128, 256)
crop_center = CropImageAndMask(crop_size=out_size, mode="center")
crop_random = CropImageAndMask(crop_size=out_size, mode="random")
sample = self.create_sample(in_size)
out_center = crop_center(sample)
sample = self.create_sample(in_size)
out_random = crop_random(sample)
self.assertEqual(out_size, out_center["image"].size)
self.assertEqual(out_size, out_random["image"].size)
def test_rescale_padding(self):
in_size = (1024, 512)
out_size = (512, 512)
sample = self.create_sample(in_size)
transform = Compose([
Rescale(long_size=out_size[0]), # rescale to (512, 256)
PadShortToCropSize(crop_size=out_size) # pad to (512, 512)
])
out = transform(sample)
self.assertEqual(out_size, out["image"].size)
def test_random_rescale_padding_random_crop(self):
img_size = (1024, 512)
crop_size = (256, 128)
sample = self.create_sample(img_size)
transform = Compose([
RandomRescale(scales=(0.1, 2.0)),
PadShortToCropSize(crop_size=crop_size),
CropImageAndMask(crop_size=crop_size, mode="random")
])
out = transform(sample)
self.assertEqual(crop_size, out["image"].size)
if __name__ == '__main__':
unittest.main()
| 3,787 |
1,116 | <gh_stars>1000+
/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2011, <NAME>, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Willow Garage nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
/* Author: <NAME> */
#pragma once
#include <moveit_msgs/Constraints.h>
#include <geometry_msgs/PointStamped.h>
#include <geometry_msgs/PoseStamped.h>
#include <geometry_msgs/QuaternionStamped.h>
#include <moveit/robot_state/robot_state.h>
#include <limits>
namespace XmlRpc
{
class XmlRpcValue;
}
namespace kinematic_constraints
{
/**
* \brief Merge two sets of constraints into one.
*
* This just does appending of all constraints except joint
* constraints. For members of type \ref JointConstraint, the bounds
* specified in the parameter \e first take precedence over parameter
* \e second
*
* @param [in] first The first constraint to merge
* @param [in] second The second constraint to merge
*
* @return The merged set of constraints
*/
moveit_msgs::Constraints mergeConstraints(const moveit_msgs::Constraints& first, const moveit_msgs::Constraints& second);
/** \brief Check if any constraints were specified */
[[deprecated("Use moveit/utils/message_checks.h instead")]] bool isEmpty(const moveit_msgs::Constraints& constr);
std::size_t countIndividualConstraints(const moveit_msgs::Constraints& constr);
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a joint group. The full constraint will contain a
* vector of type \ref JointConstraint, one for each DOF in the group.
*
* @param [in] state The state from which to generate goal joint constraints
* @param [in] jmg The group for which to generate goal joint constraints
* @param [in] tolerance_below The below tolerance to apply to all constraints
* @param [in] tolerance_above The above tolerance to apply to all constraints
*
* @return A full constraint message containing all the joint constraints
*/
moveit_msgs::Constraints constructGoalConstraints(const moveit::core::RobotState& state,
const moveit::core::JointModelGroup* jmg, double tolerance_below,
double tolerance_above);
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a joint group. The full constraint will contain a
* vector of type \ref JointConstraint, one for each DOF in the group.
*
* @param [in] state The state from which to generate goal joint constraints
* @param [in] jmg The group for which to generate joint constraints
* @param [in] tolerance A tolerance to apply both above and below for all constraints
*
* @return A full constraint message containing all the joint constraints
*/
moveit_msgs::Constraints constructGoalConstraints(const moveit::core::RobotState& state,
const moveit::core::JointModelGroup* jmg,
double tolerance = std::numeric_limits<double>::epsilon());
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a given link. The full constraint will contain a
* \ref PositionConstraint and a \ref OrientationConstraint,
* constructed from the pose. A sphere will be used to represent the
* constraint region for the \ref PositionConstraint.
*
* @param [in] link_name The link name for both constraints
* @param [in] pose The pose stamped to be used for the target region.
* @param [in] tolerance_pos The dimension of the sphere associated with the target region of the \ref
*PositionConstraint
* @param [in] tolerance_angle The value to assign to the absolute tolerances of the \ref OrientationConstraint
*
* @return A full constraint message containing both constraints
*/
moveit_msgs::Constraints constructGoalConstraints(const std::string& link_name, const geometry_msgs::PoseStamped& pose,
double tolerance_pos = 1e-3, double tolerance_angle = 1e-2);
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a given link. The full constraint will contain a
* \ref PositionConstraint and a \ref OrientationConstraint,
* constructed from the pose. A box will be used to represent the
* constraint region for the \ref PositionConstraint.
*
* @param [in] link_name The link name for both constraints
* @param [in] pose The pose stamped to be used for the target region.
* @param [in] tolerance_pos The dimensions of the box (xyz) associated with the target region of the \ref
*PositionConstraint
* @param [in] tolerance_angle The values to assign to the absolute tolerances (xyz) of the \ref OrientationConstraint
*
* @return A full constraint message containing both constraints
*/
moveit_msgs::Constraints constructGoalConstraints(const std::string& link_name, const geometry_msgs::PoseStamped& pose,
const std::vector<double>& tolerance_pos,
const std::vector<double>& tolerance_angle);
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a given link. The full constraint message will
* contain only an \ref OrientationConstraint.
*
* @param [in] link_name The link name for the \ref OrientationConstraint
* @param [in] quat The quaternion for the \ref OrientationConstraint
* @param [in] tolerance The absolute axes tolerances to apply to the \ref OrientationConstraint
*
* @return A full constraint message containing the orientation constraint
*/
moveit_msgs::Constraints constructGoalConstraints(const std::string& link_name,
const geometry_msgs::QuaternionStamped& quat,
double tolerance = 1e-2);
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a given link. The full constraint message will
* contain only a \ref PositionConstraint. A sphere will be used to
* represent the constraint region.
*
* @param [in] link_name The link name for the \ref PositionConstraint
* @param [in] reference_point A point corresponding to the target_point_offset of the \ref PositionConstraint
* @param [in] goal_point The position associated with the constraint region
* @param [in] tolerance The radius associated with the sphere volume associated with the constraint region
*
* @return A full constraint message containing the position constraint
*/
moveit_msgs::Constraints constructGoalConstraints(const std::string& link_name,
const geometry_msgs::Point& reference_point,
const geometry_msgs::PointStamped& goal_point,
double tolerance = 1e-3);
/**
* \brief Generates a constraint message intended to be used as a goal
* constraint for a given link. The full constraint message will
* contain only a \ref PositionConstraint. A sphere will be used to
* represent the constraint region.
*
* @param [in] link_name The link name for the \ref PositionConstraint
* @param [in] goal_point The position associated with the constraint region
* @param [in] tolerance The radius associated with the sphere volume associated with the constraint region
*
* @return A full constraint message containing the position constraint
*/
moveit_msgs::Constraints constructGoalConstraints(const std::string& link_name,
const geometry_msgs::PointStamped& goal_point,
double tolerance = 1e-3);
/**
* \brief extract constraint message from XmlRpc node.
*
* This can be used to construct a Constraints message from
* specifications uploaded on the parameter server.
*
* @param [in] params XmlRpc node of the parameter specification
* @param [out] constraints The constructed constraints message
*
* @return was the construction successful?
*/
bool constructConstraints(XmlRpc::XmlRpcValue& params, moveit_msgs::Constraints& constraints);
/**
* \brief Resolves frames used in constraints to links in the robot model.
*
* The link_name field of a constraint is changed from the name of an object's frame or subframe
* to the name of the robot link that the object is attached to.
*
* This is used in a planning request adapter which ensures that the planning problem is defined
* properly (the attached objects' frames are not known to the planner).
*
* @param [in] state The RobotState used to resolve frames.
* @param [in] constraints The constraint to resolve.
*/
bool resolveConstraintFrames(const moveit::core::RobotState& state, moveit_msgs::Constraints& constraints);
} // namespace kinematic_constraints
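// Illustrative usage sketch, not part of the original header: building a pose
// goal with the helper declared above. The link name "tool0", the frame
// "base_link" and the pose values are made-up placeholders; only the
// constructGoalConstraints() signature comes from this file.
inline moveit_msgs::Constraints makeExamplePoseGoal()
{
  geometry_msgs::PoseStamped pose;
  pose.header.frame_id = "base_link";
  pose.pose.position.x = 0.4;
  pose.pose.position.y = 0.0;
  pose.pose.position.z = 0.6;
  pose.pose.orientation.w = 1.0;
  // 1 cm position tolerance, ~0.05 rad tolerance on each orientation axis.
  return kinematic_constraints::constructGoalConstraints("tool0", pose, 0.01, 0.05);
}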
| 3,336 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.download.home.filter;
import org.chromium.components.offline_items_collection.OfflineItem;
import java.util.Collection;
/**
* A filtered source of {@link OfflineItem}s. This class supports querying a collection of
* {@link OfflineItem}s as well as registration of observers to be notified of changes.
*/
public interface OfflineItemFilterSource {
/**
* @return The current collection of {@link OfflineItem}s as understood by this source. Note
* that this list may be filtered by the source itself if it deems it necessary.
*/
Collection<OfflineItem> getItems();
/**
* @return Whether or not the items are available, which is meant to help determine the
* difference between an empty set and a set that hasn't loaded yet.
*/
boolean areItemsAvailable();
/**
* Registers {@code observer} to be notified of changes to the item collection managed by this
* source.
*/
void addObserver(OfflineItemFilterObserver observer);
/**
* Unregisters {@code observer} from notifications of changes to the item collection managed by
* this source.
*/
void removeObserver(OfflineItemFilterObserver observer);
} | 415 |
416 | <gh_stars>100-1000
//
// CSSearchableItemAttributeSet_Images.h
// CoreSpotlight
//
// Copyright © 2015 Apple. All rights reserved.
//
#import <CoreSpotlight/CSSearchableItemAttributeSet.h>
NS_ASSUME_NONNULL_BEGIN
@interface CSSearchableItemAttributeSet (CSImages)
//The height of the item in pixels (ie Image height or Video frame height)
@property(nullable, strong) NSNumber *pixelHeight;
//The width of the item in pixels (ie Image width or Video frame width)
@property(nullable, strong) NSNumber *pixelWidth;
//The total number of pixels in the item.
@property(nullable, strong) NSNumber *pixelCount;
//What color space model is this item following (For example, "RGB", "CMYK", "YUV", "YCbCr")
@property(nullable, copy) NSString *colorSpace;
//Number of bits per sample
//For example bit depth of an image (8-bit, 16-bit etc..) or bit
//depth per audio sample of uncompressed audio data (8, 16, 24, 32, 64, etc..)
@property(nullable, strong) NSNumber *bitsPerSample;
//Indicates if the flash was used to take the picture. Should be 1 if flash is on, 0 otherwise.
@property(nullable, strong, getter=isFlashOn) NSNumber *flashOn;
//The actual focal length of the lens in mm.
@property(nullable, strong) NSNumber *focalLength;
//Indicates if the focal length is 35mm. Should be 1 if true, 0 otherwise
@property(nullable, strong, getter=isFocalLength35mm) NSNumber *focalLength35mm;
//Device make that was used to acquire this item
@property(nullable, copy) NSString *acquisitionMake;
//Device model that was used to acquire this item
@property(nullable, copy) NSString *acquisitionModel;
//The owner of the camera used to capture this image.
@property(nullable, copy) NSString *cameraOwner;
//The model of the lens used to capture this image.
@property(nullable, copy) NSString *lensModel;
//The ISO Speed the camera was set to when the image was taken. Examples are 100, 200, 400, etc.
@property(nullable, strong) NSNumber *ISOSpeed;
//The orientation of the data. Should be 0 for Landscape or 1 for Portrait.
@property(nullable, strong) NSNumber *orientation;
//The names of the various layers in the file
@property(nullable, copy) NSArray<NSString*> *layerNames;
//The white balance setting of the camera when the image was acquired. Should be 0 for Auto or 1 for Manual.
@property(nullable, strong) NSNumber *whiteBalance;
//The size of the lens aperture as a log-scale APEX value when the image was acquired.
@property(nullable, strong) NSNumber *aperture;
//Name of the color profile used for the image
@property(nullable, copy) NSString *profileName;
//Resolution width of this image in DPI
@property(nullable, strong) NSNumber *resolutionWidthDPI;
//Resolution height of this image in DPI
@property(nullable, strong) NSNumber *resolutionHeightDPI;
//Mode that was used for the exposure. Should be 0 for AutoExposure, 1 for Manual, 2 for AutoBracket.
@property(nullable, strong) NSNumber *exposureMode;
//Time that the lens was open during exposure in seconds
@property(nullable, strong) NSNumber *exposureTime;
//The version of the EXIF header that was used to generate the metadata
@property(nullable, copy) NSString *EXIFVersion;
//The version of GPSInfoIFD header that was used to generate the metadata
@property(nullable, copy) NSString *EXIFGPSVersion;
//Indicates if this image file has an alpha channel. Should be 0 for no alpha channel, 1 for alpha channel.
@property(nullable, strong) NSNumber *hasAlphaChannel;
//Indicates if red-eye reduction was used to take the picture. Should be 0 for no red-eye, 1 for red-eye
@property(nullable, strong,getter=isRedEyeOn) NSNumber *redEyeOn;
//The metering mode for the image (Average, Partial, Pattern, ...)
@property(nullable, copy) NSString *meteringMode;
//The smallest F number of the lens. The unit is the APEX
//value. Ordinarily it is given in the range of 00.00 to 99.99.
@property(nullable, strong) NSNumber *maxAperture;
//The focal length of the lens divided by the diameter of the aperture when the image was acquired.
@property(nullable, strong) NSNumber *fNumber;
//The class of the program used by the camera to set exposure when the picture is taken (Manual, Normal, Aperture Priority, ...)
@property(nullable, copy) NSString *exposureProgram;
//The time of the exposure as a string, e.g. "1/250 seconds".
@property(nullable, copy) NSString *exposureTimeString;
@end
NS_ASSUME_NONNULL_END
| 1,300 |
335 | {
"word": "Notwithstanding",
"definitions": [
"In spite of."
],
"parts-of-speech": "Preposition"
} | 56 |
392 | {"name":"seven_zip","version":"2.0.2","description":"Installs/Configures the 7-zip file archiver","long_description":"[](https://supermarket.chef.io/cookbooks/seven_zip)\n[](https://ci.appveyor.com/project/ChefWindowsCookbooks65871/seven-zip/branch/master)\n\n# seven_zip Cookbook\n[7-Zip](http://www.7-zip.org/) is a file archiver with a high compression ratio. This cookbook installs the full 7-zip suite of tools (GUI and CLI). This cookbook replaces the older [7-zip cookbook](https://github.com/sneal/7-zip).\n\n# Requirements\n## Platforms\n- Windows XP\n- Windows Vista\n- Windows 7\n- Windows 8, 8.1\n- Windows 10\n- Windows Server 2003 R2\n- Windows Server 2008 (R1, R2)\n- Windows Server 2012 (R1, R2)\n\n## Chef\n- Chef >= 11.6\n\n## Cookbooks\n- windows\n\n# Attributes\n## Optional\n<table>\n <tr>\n <th>Key</th>\n <th>Type</th>\n <th>Description</th>\n <th>Default</th>\n </tr>\n <tr>\n <td><code>['seven_zip']['home']</code></td>\n <td>String</td>\n <td>7-Zip installation directory.</td>\n <td></td>\n </tr>\n <tr>\n <td><code>['seven_zip']['syspath']</code></td>\n <td>Boolean</td>\n <td>If true, adds 7-zip directory to system PATH environment variable.</td>\n <td></td>\n </tr>\n <tr>\n <td><code>['seven_zip']['default_extract_timeout']</code></td>\n <td>Integer</td>\n <td>The default timeout for an extract operation in seconds. This can be overridden by a resource attribute.</td>\n <td>600</td>\n </tr>\n</table>\n\n# Usage\n## default\n\nAdd `seven_zip::default` to your run\\_list which will download and install 7-zip for the current Windows platform. \n\n# Resource/Provider\n## seven_zip_archive\nExtracts a 7-zip compatible archive (iso, zip, 7z etc) to the specified destination directory.\n\n#### Actions\n- `:extract` - Extract a 7-zip compatible archive\n\n#### Attribute Parameters\n- `path` - Name attribute. The destination to extract to.\n- `source` - The file path to the archive to extract.\n- `overwrite` - Defaults to false. If true, the destination files will be overwritten.\n- `checksum` - The archive file checksum.\n- `timeout` - The extract action timeout in seconds, defaults to `node['seven_zip']['default_extract_timeout']`.\n\n#### Examples\nExtract 7-zip source files to `C:\\seven_zip_source`.\n\n```ruby\nseven_zip_archive 'seven_zip_source' do\n path 'C:\\seven_zip_source'\n source 'http://www.7-zip.org/a/7z1514-src.7z'\n overwrite true\n checksum '3713aed72728eae8f6649e4803eba0b3676785200c76df6269034c520df4bbd5'\n timeout 30\nend\n```\n\n# Recipes\n## default\n\nInstalls 7-zip and adds it to your system PATH.\n\n# License & Authors\n- Author:: <NAME> (<<EMAIL>>)\n- Author:: <NAME> (<<EMAIL>>)\n\n```text\nCopyright:: 2011-2016, Chef Software, Inc.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n```\n","maintainer":"<NAME>","maintainer_email":"<EMAIL>","license":"Apache 2.0","platforms":{"windows":">= 0.0.0"},"dependencies":{"windows":">= 1.2.2"},"recommendations":{},"suggestions":{},"conflicting":{},"providing":{},"replacing":{},"attributes":{},"groupings":{},"recipes":{}} | 1,374 |
6,851 | from pyramid.config import Configurator
from pyramid.response import Response
def hello_world(request):
return Response('Hello world!')
def goodbye_world(request):
return Response('Goodbye world!')
config = Configurator()
config.add_view(hello_world)
config.add_view(goodbye_world, name='goodbye')
app = config.make_wsgi_app()
| 105 |
4,339 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.client.thin;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.ignite.IgniteBinary;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.util.GridConcurrentHashSet;
/**
* Client cache partition awareness context.
*/
public class ClientCacheAffinityContext {
/** Binary data processor. */
private final IgniteBinary binary;
/** Contains last topology version and known nodes of this version. */
private final AtomicReference<TopologyNodes> lastTop = new AtomicReference<>();
/** Current affinity mapping. */
private volatile ClientCacheAffinityMapping affinityMapping;
/** Cache IDs, which should be included to the next affinity mapping request. */
private final Set<Integer> pendingCacheIds = new GridConcurrentHashSet<>();
/**
* @param binary Binary data processor.
*/
public ClientCacheAffinityContext(IgniteBinary binary) {
this.binary = binary;
}
/**
* Update topology version if it's greater than current version and store nodes for last topology.
*
* @param topVer Topology version.
* @param nodeId Node id.
* @return {@code True} if last topology was updated to the new version.
*/
public boolean updateLastTopologyVersion(AffinityTopologyVersion topVer, UUID nodeId) {
while (true) {
TopologyNodes lastTop = this.lastTop.get();
if (lastTop == null || topVer.compareTo(lastTop.topVer) > 0) {
if (this.lastTop.compareAndSet(lastTop, new TopologyNodes(topVer, nodeId)))
return true;
}
else if (topVer.equals(lastTop.topVer)) {
lastTop.nodes.add(nodeId);
return false;
}
else
return false;
}
}
/**
* Is affinity update required for given cache.
*
* @param cacheId Cache id.
*/
public boolean affinityUpdateRequired(int cacheId) {
TopologyNodes top = lastTop.get();
if (top == null) { // Don't know current topology.
pendingCacheIds.add(cacheId);
return false;
}
ClientCacheAffinityMapping mapping = affinityMapping;
if (mapping == null) {
pendingCacheIds.add(cacheId);
return true;
}
if (top.topVer.compareTo(mapping.topologyVersion()) > 0) {
pendingCacheIds.add(cacheId);
return true;
}
if (mapping.cacheIds().contains(cacheId))
return false;
else {
pendingCacheIds.add(cacheId);
return true;
}
}
/**
* @param ch Payload output channel.
*/
public void writePartitionsUpdateRequest(PayloadOutputChannel ch) {
ClientCacheAffinityMapping.writeRequest(ch, pendingCacheIds);
}
/**
* @param ch Payload input channel.
*/
public synchronized boolean readPartitionsUpdateResponse(PayloadInputChannel ch) {
if (lastTop.get() == null)
return false;
ClientCacheAffinityMapping newMapping = ClientCacheAffinityMapping.readResponse(ch);
ClientCacheAffinityMapping oldMapping = affinityMapping;
if (oldMapping == null || newMapping.topologyVersion().compareTo(oldMapping.topologyVersion()) > 0) {
affinityMapping = newMapping;
if (oldMapping != null)
pendingCacheIds.addAll(oldMapping.cacheIds());
pendingCacheIds.removeAll(newMapping.cacheIds());
return true;
}
if (newMapping.topologyVersion().equals(oldMapping.topologyVersion())) {
affinityMapping = ClientCacheAffinityMapping.merge(oldMapping, newMapping);
pendingCacheIds.removeAll(newMapping.cacheIds());
return true;
}
// Obsolete mapping.
return true;
}
/**
* Gets last topology information.
*/
public TopologyNodes lastTopology() {
return lastTop.get();
}
/**
* Resets affinity context.
*
* @param top Topology which triggers reset.
*/
public synchronized void reset(TopologyNodes top) {
if (lastTop.compareAndSet(top, null)) {
affinityMapping = null;
pendingCacheIds.clear();
}
}
/**
* Calculates affinity node for given cache and key.
*
* @param cacheId Cache ID.
* @param key Key.
* @return Affinity node id or {@code null} if affinity node can't be determined for given cache and key.
*/
public UUID affinityNode(int cacheId, Object key) {
ClientCacheAffinityMapping mapping = currentMapping();
return mapping == null ? null : mapping.affinityNode(binary, cacheId, key);
}
/**
* Calculates affinity node for given cache and partition.
*
* @param cacheId Cache ID.
* @param part Partition.
* @return Affinity node id or {@code null} if affinity node can't be determined for given cache and partition.
*/
public UUID affinityNode(int cacheId, int part) {
ClientCacheAffinityMapping mapping = currentMapping();
return mapping == null ? null : mapping.affinityNode(cacheId, part);
}
/**
* Current affinity mapping.
*/
private ClientCacheAffinityMapping currentMapping() {
TopologyNodes top = lastTop.get();
if (top == null)
return null;
ClientCacheAffinityMapping mapping = affinityMapping;
if (mapping == null)
return null;
if (top.topVer.compareTo(mapping.topologyVersion()) > 0)
return null;
return mapping;
}
/**
* Holder for list of nodes for topology version.
*/
static class TopologyNodes {
/** Topology version. */
private final AffinityTopologyVersion topVer;
/** Nodes. */
private final Collection<UUID> nodes = new ConcurrentLinkedQueue<>();
/**
* @param topVer Topology version.
* @param nodeId Node id.
*/
private TopologyNodes(AffinityTopologyVersion topVer, UUID nodeId) {
this.topVer = topVer;
nodes.add(nodeId);
}
/**
* Gets nodes of this topology.
*/
public Iterable<UUID> nodes() {
return Collections.unmodifiableCollection(nodes);
}
}
}
| 2,903 |
634 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.ex;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiFile;
import com.intellij.util.SequentialModalProgressTask;
import com.intellij.util.SequentialTask;
import consulo.logging.Logger;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
class SequentialCleanupTask implements SequentialTask {
private static final Logger LOG = Logger.getInstance(SequentialCleanupTask.class);
private final Project myProject;
private final LinkedHashMap<PsiFile, List<HighlightInfo>> myResults;
private Iterator<PsiFile> myFileIterator;
private final SequentialModalProgressTask myProgressTask;
private int myCount = 0;
public SequentialCleanupTask(Project project, LinkedHashMap<PsiFile, List<HighlightInfo>> results, SequentialModalProgressTask task) {
myProject = project;
myResults = results;
myProgressTask = task;
myFileIterator = myResults.keySet().iterator();
}
@Override
public void prepare() {}
@Override
public boolean isDone() {
return myFileIterator == null || !myFileIterator.hasNext();
}
@Override
public boolean iteration() {
final ProgressIndicator indicator = myProgressTask.getIndicator();
if (indicator != null) {
indicator.setFraction((double) myCount++/myResults.size());
}
final PsiFile file = myFileIterator.next();
final List<HighlightInfo> infos = myResults.get(file);
Collections.reverse(infos); //sort bottom - top
for (HighlightInfo info : infos) {
for (final Pair<HighlightInfo.IntentionActionDescriptor, TextRange> actionRange : info.quickFixActionRanges) {
try {
actionRange.getFirst().getAction().invoke(myProject, null, file);
}
catch (ProcessCanceledException e) {
throw e;
}
catch (Exception e) {
LOG.error(e);
}
}
}
return true;
}
@Override
public void stop() {
myFileIterator = null;
}
}
| 951 |
466 | <filename>libgpos/include/gpos/common/CSyncHashtable.h
//---------------------------------------------------------------------------
// Greenplum Database
// Copyright (C) 2008 Greenplum, Inc.
//
// @filename:
// CSyncHashtable.h
//
// @doc:
// Allocation-less static hashtable;
// Manages client objects without additional allocations; this is a
// requirement for system programming tasks to ensure the hashtable
// works in exception situations, e.g. OOM;
//
// 1) Hashtable is static and cannot resize during operations;
// 2) expects target type to have SLink (see CList.h) and Key
//		members with appropriate accessors;
// 3) clients must provide their own hash function;
// 4) hashtable is not thread-safe, despite the name;
//---------------------------------------------------------------------------
#ifndef GPOS_CSyncHashtable_H
#define GPOS_CSyncHashtable_H
#include "gpos/base.h"
#include "gpos/common/CAutoRg.h"
#include "gpos/common/CList.h"
#include "gpos/task/CAutoSuspendAbort.h"
namespace gpos
{
// prototypes
template <class T, class K>
class CSyncHashtableAccessorBase;
template <class T, class K>
class CSyncHashtableAccessByKey;
template <class T, class K>
class CSyncHashtableIter;
template <class T, class K>
class CSyncHashtableAccessByIter;
//---------------------------------------------------------------------------
// @class:
// CSyncHashtable<T, K, S>
//
// @doc:
// Allocation-less static hash table;
//
// Ideally the offset of the key would be a template parameter too in order
//	to avoid accidental tampering with this value -- not all compilers allow
// the use of the offset macro in the template definition, however.
//
//---------------------------------------------------------------------------
template <class T, class K>
class CSyncHashtable
{
// accessor and iterator classes are friends
friend class CSyncHashtableAccessorBase<T, K>;
friend class CSyncHashtableAccessByKey<T, K>;
friend class CSyncHashtableAccessByIter<T, K>;
friend class CSyncHashtableIter<T, K>;
private:
// hash bucket is a list of entries
struct SBucket
{
private:
// no copy ctor
SBucket(const SBucket &);
public:
// ctor
SBucket(){};
// hash chain
CList<T> m_chain;
#ifdef GPOS_DEBUG
// bucket number
ULONG m_bucket_idx;
#endif // GPOS_DEBUG
};
// range of buckets
SBucket *m_buckets;
// number of ht buckets
ULONG m_nbuckets;
// number of ht entries
ULONG_PTR m_size;
// offset of key
ULONG m_key_offset;
// invalid key - needed for iteration
const K *m_invalid_key;
// pointer to hashing function
ULONG (*m_hashfn)(const K &);
// pointer to key equality function
BOOL (*m_eqfn)(const K &, const K &);
// function to compute bucket index for key
ULONG
GetBucketIndex(const K &key) const
{
GPOS_ASSERT(IsValid(key) && "Invalid key is inaccessible");
return m_hashfn(key) % m_nbuckets;
}
// function to get bucket by index
SBucket &
GetBucket(const ULONG index) const
{
GPOS_ASSERT(index < m_nbuckets && "Invalid bucket index");
return m_buckets[index];
}
// extract key out of type
K &
Key(T *value) const
{
GPOS_ASSERT(gpos::ulong_max != m_key_offset &&
"Key offset not initialized.");
K &k = *(K *) ((BYTE *) value + m_key_offset);
return k;
}
// key validity check
BOOL
IsValid(const K &key) const
{
return !m_eqfn(key, *m_invalid_key);
}
public:
// type definition of function used to cleanup element
typedef void (*DestroyEntryFuncPtr)(T *);
// ctor
CSyncHashtable<T, K>()
: m_buckets(NULL),
m_nbuckets(0),
m_size(0),
m_key_offset(gpos::ulong_max),
m_invalid_key(NULL)
{
}
// dtor
// deallocates hashtable internals, does not destroy
// client objects
~CSyncHashtable<T, K>()
{
Cleanup();
}
// Initialization of hashtable
void
Init(CMemoryPool *mp, ULONG size, ULONG link_offset, ULONG key_offset,
const K *invalid_key, ULONG (*func_hash)(const K &),
BOOL (*func_equal)(const K &, const K &))
{
GPOS_ASSERT(NULL == m_buckets);
GPOS_ASSERT(0 == m_nbuckets);
GPOS_ASSERT(NULL != invalid_key);
GPOS_ASSERT(NULL != func_hash);
GPOS_ASSERT(NULL != func_equal);
m_nbuckets = size;
m_key_offset = key_offset;
m_invalid_key = invalid_key;
m_hashfn = func_hash;
m_eqfn = func_equal;
m_buckets = GPOS_NEW_ARRAY(mp, SBucket, m_nbuckets);
// NOTE: 03/25/2008; since it's the only allocation in the
// constructor the protection is not needed strictly speaking;
// Using auto range here just for cleanliness;
CAutoRg<SBucket> argbucket;
argbucket = m_buckets;
for (ULONG i = 0; i < m_nbuckets; i++)
{
m_buckets[i].m_chain.Init(link_offset);
#ifdef GPOS_DEBUG
// add serial number
m_buckets[i].m_bucket_idx = i;
#endif // GPOS_DEBUG
}
// unhook from protector
argbucket.RgtReset();
}
// dealloc bucket range and reset members
void
Cleanup()
{
GPOS_DELETE_ARRAY(m_buckets);
m_buckets = NULL;
m_nbuckets = 0;
}
// iterate over all entries and call destroy function on each entry
void
DestroyEntries(DestroyEntryFuncPtr pfunc_destroy)
{
// need to suspend cancellation while cleaning up
CAutoSuspendAbort asa;
T *value = NULL;
CSyncHashtableIter<T, K> it(*this);
// since removing an entry will automatically advance iter's
// position, we need to make sure that advance iter is called
// only when we do not have an entry to delete
while (NULL != value || it.Advance())
{
if (NULL != value)
{
pfunc_destroy(value);
}
{
CSyncHashtableAccessByIter<T, K> acc(it);
if (NULL != (value = acc.Value()))
{
acc.Remove(value);
}
}
}
#ifdef GPOS_DEBUG
CSyncHashtableIter<T, K> it_snd(*this);
GPOS_ASSERT(!it_snd.Advance());
#endif // GPOS_DEBUG
}
// insert function;
void
Insert(T *value)
{
K &key = Key(value);
GPOS_ASSERT(IsValid(key));
// determine target bucket
SBucket &bucket = GetBucket(GetBucketIndex(key));
// inserting at bucket's head is required by hashtable iteration
bucket.m_chain.Prepend(value);
// increase number of entries
m_size++;
}
// return number of entries
ULONG_PTR
Size() const
{
return m_size;
}
}; // class CSyncHashtable
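//---------------------------------------------------------------------------
//	Illustrative sketch, not part of the original header: the shape of a
//	client type this hashtable expects. The member names and the hash and
//	equality functions below are made up; only the requirement of an embedded
//	SLink, a key member, and client-provided hash/equality functions comes
//	from the class comment above.
//---------------------------------------------------------------------------
struct SExampleEntry
{
	// key member; its offset is passed to CSyncHashtable::Init as key_offset
	ULONG m_id;

	// embedded hash chain link; its offset is passed as link_offset
	SLink m_link;

	// client-provided hash function
	static ULONG
	HashValue(const ULONG &id)
	{
		return id;
	}

	// client-provided equality function
	static BOOL
	Equals(const ULONG &left, const ULONG &right)
	{
		return left == right;
	}
};	// struct SExampleEntry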
} // namespace gpos
#endif // !GPOS_CSyncHashtable_H
// EOF
| 2,314 |
1,830 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH under
* one or more contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright ownership.
* Licensed under the Zeebe Community License 1.1. You may not use this file
* except in compliance with the Zeebe Community License 1.1.
*/
package io.camunda.zeebe.snapshots.impl;
import io.camunda.zeebe.protocol.Protocol;
import io.camunda.zeebe.snapshots.SnapshotChunk;
import io.camunda.zeebe.snapshots.SnapshotChunkReader;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.NavigableSet;
import java.util.NoSuchElementException;
import java.util.TreeSet;
import org.agrona.AsciiSequenceView;
import org.agrona.DirectBuffer;
import org.agrona.concurrent.UnsafeBuffer;
/**
* Implements a chunk reader where each chunk is a single file in a root directory. Chunks are then
* ordered lexicographically, and the files are assumed to be immutable, i.e. no more are added to
* the directory once this is created.
*/
public final class FileBasedSnapshotChunkReader implements SnapshotChunkReader {
static final Charset ID_CHARSET = StandardCharsets.US_ASCII;
private final Path directory;
private final NavigableSet<CharSequence> chunks;
private final CharSequenceView chunkIdView;
private NavigableSet<CharSequence> chunksView;
private final int totalCount;
private final long snapshotChecksum;
private final String snapshotID;
FileBasedSnapshotChunkReader(final Path directory, final long checksum) throws IOException {
this.directory = directory;
chunks = collectChunks(directory);
totalCount = chunks.size();
chunksView = chunks;
chunkIdView = new CharSequenceView();
snapshotChecksum = checksum;
snapshotID = directory.getFileName().toString();
}
private NavigableSet<CharSequence> collectChunks(final Path directory) throws IOException {
final var set = new TreeSet<>(CharSequence::compare);
try (final var stream = Files.list(directory).sorted()) {
stream.map(directory::relativize).map(Path::toString).forEach(set::add);
}
return set;
}
@Override
public void seek(final ByteBuffer id) {
if (id == null) {
return;
}
final var path = decodeChunkId(id);
chunksView = chunks.tailSet(path, true);
}
@Override
public ByteBuffer nextId() {
if (chunksView.isEmpty()) {
return null;
}
return encodeChunkId(chunksView.first());
}
@Override
public void close() {
chunks.clear();
chunksView.clear();
}
@Override
public boolean hasNext() {
return !chunksView.isEmpty();
}
@Override
public SnapshotChunk next() {
final var chunkName = chunksView.pollFirst();
if (chunkName == null) {
throw new NoSuchElementException();
}
final var path = directory.resolve(chunkName.toString());
try {
return SnapshotChunkUtil.createSnapshotChunkFromFile(
path, snapshotID, totalCount, snapshotChecksum);
} catch (final IOException e) {
throw new UncheckedIOException(e);
}
}
private ByteBuffer encodeChunkId(final CharSequence path) {
return ByteBuffer.wrap(path.toString().getBytes(ID_CHARSET)).order(Protocol.ENDIANNESS);
}
private CharSequence decodeChunkId(final ByteBuffer id) {
return chunkIdView.wrap(id);
}
private static final class CharSequenceView {
private final DirectBuffer wrapper = new UnsafeBuffer();
private final AsciiSequenceView view = new AsciiSequenceView();
private CharSequence wrap(final ByteBuffer buffer) {
wrapper.wrap(buffer);
return view.wrap(wrapper, 0, wrapper.capacity());
}
}
}
| 1,261 |
427 | //===- YAMLOutputStyle.cpp ------------------------------------ *- C++ --*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include "YAMLOutputStyle.h"
#include "PdbYaml.h"
#include "llvm-pdbdump.h"
#include "llvm/DebugInfo/MSF/MappedBlockStream.h"
#include "llvm/DebugInfo/PDB/Raw/DbiStream.h"
#include "llvm/DebugInfo/PDB/Raw/InfoStream.h"
#include "llvm/DebugInfo/PDB/Raw/ModStream.h"
#include "llvm/DebugInfo/PDB/Raw/PDBFile.h"
#include "llvm/DebugInfo/PDB/Raw/RawConstants.h"
#include "llvm/DebugInfo/PDB/Raw/TpiStream.h"
using namespace llvm;
using namespace llvm::pdb;
YAMLOutputStyle::YAMLOutputStyle(PDBFile &File)
: File(File), Out(outs()), Obj(File.getAllocator()) {}
Error YAMLOutputStyle::dump() {
if (opts::pdb2yaml::StreamDirectory)
opts::pdb2yaml::StreamMetadata = true;
if (opts::pdb2yaml::DbiModuleSyms)
opts::pdb2yaml::DbiModuleInfo = true;
if (opts::pdb2yaml::DbiModuleSourceFileInfo)
opts::pdb2yaml::DbiModuleInfo = true;
if (opts::pdb2yaml::DbiModuleInfo)
opts::pdb2yaml::DbiStream = true;
if (auto EC = dumpFileHeaders())
return EC;
if (auto EC = dumpStreamMetadata())
return EC;
if (auto EC = dumpStreamDirectory())
return EC;
if (auto EC = dumpPDBStream())
return EC;
if (auto EC = dumpDbiStream())
return EC;
if (auto EC = dumpTpiStream())
return EC;
if (auto EC = dumpIpiStream())
return EC;
flush();
return Error::success();
}
Error YAMLOutputStyle::dumpFileHeaders() {
if (opts::pdb2yaml::NoFileHeaders)
return Error::success();
yaml::MSFHeaders Headers;
Obj.Headers.emplace();
Obj.Headers->SuperBlock.NumBlocks = File.getBlockCount();
Obj.Headers->SuperBlock.BlockMapAddr = File.getBlockMapIndex();
Obj.Headers->SuperBlock.BlockSize = File.getBlockSize();
auto Blocks = File.getDirectoryBlockArray();
Obj.Headers->DirectoryBlocks.assign(Blocks.begin(), Blocks.end());
Obj.Headers->NumDirectoryBlocks = File.getNumDirectoryBlocks();
Obj.Headers->SuperBlock.NumDirectoryBytes = File.getNumDirectoryBytes();
Obj.Headers->NumStreams =
opts::pdb2yaml::StreamMetadata ? File.getNumStreams() : 0;
Obj.Headers->SuperBlock.FreeBlockMapBlock = File.getFreeBlockMapBlock();
Obj.Headers->SuperBlock.Unknown1 = File.getUnknown1();
Obj.Headers->FileSize = File.getFileSize();
return Error::success();
}
Error YAMLOutputStyle::dumpStreamMetadata() {
if (!opts::pdb2yaml::StreamMetadata)
return Error::success();
Obj.StreamSizes.emplace();
Obj.StreamSizes->assign(File.getStreamSizes().begin(),
File.getStreamSizes().end());
return Error::success();
}
Error YAMLOutputStyle::dumpStreamDirectory() {
if (!opts::pdb2yaml::StreamDirectory)
return Error::success();
auto StreamMap = File.getStreamMap();
Obj.StreamMap.emplace();
for (auto &Stream : StreamMap) {
pdb::yaml::StreamBlockList BlockList;
BlockList.Blocks.assign(Stream.begin(), Stream.end());
Obj.StreamMap->push_back(BlockList);
}
return Error::success();
}
Error YAMLOutputStyle::dumpPDBStream() {
if (!opts::pdb2yaml::PdbStream)
return Error::success();
auto IS = File.getPDBInfoStream();
if (!IS)
return IS.takeError();
auto &InfoS = IS.get();
Obj.PdbStream.emplace();
Obj.PdbStream->Age = InfoS.getAge();
Obj.PdbStream->Guid = InfoS.getGuid();
Obj.PdbStream->Signature = InfoS.getSignature();
Obj.PdbStream->Version = InfoS.getVersion();
for (auto &NS : InfoS.named_streams()) {
yaml::NamedStreamMapping Mapping;
Mapping.StreamName = NS.getKey();
Mapping.StreamNumber = NS.getValue();
Obj.PdbStream->NamedStreams.push_back(Mapping);
}
return Error::success();
}
Error YAMLOutputStyle::dumpDbiStream() {
if (!opts::pdb2yaml::DbiStream)
return Error::success();
auto DbiS = File.getPDBDbiStream();
if (!DbiS)
return DbiS.takeError();
auto &DS = DbiS.get();
Obj.DbiStream.emplace();
Obj.DbiStream->Age = DS.getAge();
Obj.DbiStream->BuildNumber = DS.getBuildNumber();
Obj.DbiStream->Flags = DS.getFlags();
Obj.DbiStream->MachineType = DS.getMachineType();
Obj.DbiStream->PdbDllRbld = DS.getPdbDllRbld();
Obj.DbiStream->PdbDllVersion = DS.getPdbDllVersion();
Obj.DbiStream->VerHeader = DS.getDbiVersion();
if (opts::pdb2yaml::DbiModuleInfo) {
for (const auto &MI : DS.modules()) {
yaml::PdbDbiModuleInfo DMI;
DMI.Mod = MI.Info.getModuleName();
DMI.Obj = MI.Info.getObjFileName();
if (opts::pdb2yaml::DbiModuleSourceFileInfo)
DMI.SourceFiles = MI.SourceFiles;
if (opts::pdb2yaml::DbiModuleSyms &&
MI.Info.getModuleStreamIndex() != kInvalidStreamIndex) {
DMI.Modi.emplace();
auto ModStreamData = msf::MappedBlockStream::createIndexedStream(
File.getMsfLayout(), File.getMsfBuffer(),
MI.Info.getModuleStreamIndex());
pdb::ModStream ModS(MI.Info, std::move(ModStreamData));
if (auto EC = ModS.reload())
return EC;
DMI.Modi->Signature = ModS.signature();
bool HadError = false;
for (auto &Sym : ModS.symbols(&HadError)) {
pdb::yaml::PdbSymbolRecord Record{Sym};
DMI.Modi->Symbols.push_back(Record);
}
}
Obj.DbiStream->ModInfos.push_back(DMI);
}
}
return Error::success();
}
Error YAMLOutputStyle::dumpTpiStream() {
if (!opts::pdb2yaml::TpiStream)
return Error::success();
auto TpiS = File.getPDBTpiStream();
if (!TpiS)
return TpiS.takeError();
auto &TS = TpiS.get();
Obj.TpiStream.emplace();
Obj.TpiStream->Version = TS.getTpiVersion();
for (auto &Record : TS.types(nullptr)) {
yaml::PdbTpiRecord R;
// It's not necessary to set R.RecordData here. That only exists as a
// way to have the `PdbTpiRecord` structure own the memory that `R.Record`
// references. In the case of reading an existing PDB though, that memory
// is owned by the backing stream.
R.Record = Record;
Obj.TpiStream->Records.push_back(R);
}
return Error::success();
}
Error YAMLOutputStyle::dumpIpiStream() {
if (!opts::pdb2yaml::IpiStream)
return Error::success();
auto IpiS = File.getPDBIpiStream();
if (!IpiS)
return IpiS.takeError();
auto &IS = IpiS.get();
Obj.IpiStream.emplace();
Obj.IpiStream->Version = IS.getTpiVersion();
for (auto &Record : IS.types(nullptr)) {
yaml::PdbTpiRecord R;
R.Record = Record;
Obj.IpiStream->Records.push_back(R);
}
return Error::success();
}
void YAMLOutputStyle::flush() {
Out << Obj;
outs().flush();
}
| 2,666 |
460 | /*
* Copyright (C) 1999 <NAME> (<EMAIL>)
* (C) 1999 <NAME> (<EMAIL>)
* Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef RenderInline_h
#define RenderInline_h
#include "RenderBoxModelObject.h"
#include "RenderLineBoxList.h"
namespace WebCore {
class Position;
class RenderInline : public RenderBoxModelObject {
public:
RenderInline(Node*);
virtual void destroy();
virtual void addChild(RenderObject* newChild, RenderObject* beforeChild = 0);
virtual int marginLeft() const;
virtual int marginRight() const;
virtual void absoluteRects(Vector<IntRect>&, int tx, int ty);
virtual void absoluteQuads(Vector<FloatQuad>&);
virtual IntSize offsetFromContainer(RenderObject*, const IntPoint&) const;
IntRect linesBoundingBox() const;
IntRect linesVisibleOverflowBoundingBox() const;
InlineFlowBox* createAndAppendInlineFlowBox();
void dirtyLineBoxes(bool fullLayout);
RenderLineBoxList* lineBoxes() { return &m_lineBoxes; }
const RenderLineBoxList* lineBoxes() const { return &m_lineBoxes; }
InlineFlowBox* firstLineBox() const { return m_lineBoxes.firstLineBox(); }
InlineFlowBox* lastLineBox() const { return m_lineBoxes.lastLineBox(); }
RenderBoxModelObject* continuation() const { return m_continuation; }
virtual void updateDragState(bool dragOn);
IntSize relativePositionedInlineOffset(const RenderBox* child) const;
virtual void addFocusRingRects(Vector<IntRect>&, int tx, int ty);
void paintOutline(GraphicsContext*, int tx, int ty);
int verticalPositionFromCache(bool firstLine) const;
void invalidateVerticalPosition() { m_verticalPosition = PositionUndefined; }
private:
virtual RenderObjectChildList* virtualChildren() { return children(); }
virtual const RenderObjectChildList* virtualChildren() const { return children(); }
const RenderObjectChildList* children() const { return &m_children; }
RenderObjectChildList* children() { return &m_children; }
virtual const char* renderName() const;
virtual bool isRenderInline() const { return true; }
void addChildToContinuation(RenderObject* newChild, RenderObject* beforeChild);
virtual void addChildIgnoringContinuation(RenderObject* newChild, RenderObject* beforeChild = 0);
void splitInlines(RenderBlock* fromBlock, RenderBlock* toBlock, RenderBlock* middleBlock,
RenderObject* beforeChild, RenderBoxModelObject* oldCont);
void splitFlow(RenderObject* beforeChild, RenderBlock* newBlockBox,
RenderObject* newChild, RenderBoxModelObject* oldCont);
virtual void layout() { ASSERT_NOT_REACHED(); } // Do nothing for layout()
virtual void paint(PaintInfo&, int tx, int ty);
virtual bool nodeAtPoint(const HitTestRequest&, HitTestResult&, int x, int y, int tx, int ty, HitTestAction);
virtual bool requiresLayer() const { return isRelPositioned() || isTransparent() || hasMask(); }
virtual int offsetLeft() const;
virtual int offsetTop() const;
virtual int offsetWidth() const { return linesBoundingBox().width(); }
virtual int offsetHeight() const { return linesBoundingBox().height(); }
// Just ignore top/bottom margins on RenderInlines.
virtual int marginTop() const { return 0; }
virtual int marginBottom() const { return 0; }
virtual IntRect clippedOverflowRectForRepaint(RenderBoxModelObject* repaintContainer);
virtual IntRect rectWithOutlineForRepaint(RenderBoxModelObject* repaintContainer, int outlineWidth);
virtual void computeRectForRepaint(RenderBoxModelObject* repaintContainer, IntRect& rect, bool fixed);
virtual void mapLocalToContainer(RenderBoxModelObject* repaintContainer, bool fixed, bool useTransforms, TransformState&) const;
virtual void mapAbsoluteToLocalPoint(bool fixed, bool useTransforms, TransformState&) const;
virtual VisiblePosition positionForPoint(const IntPoint&);
virtual IntRect borderBoundingBox() const
{
IntRect boundingBox = linesBoundingBox();
return IntRect(0, 0, boundingBox.width(), boundingBox.height());
}
virtual InlineFlowBox* createInlineFlowBox(); // Subclassed by SVG and Ruby
virtual void dirtyLinesFromChangedChild(RenderObject* child) { m_lineBoxes.dirtyLinesFromChangedChild(this, child); }
virtual int lineHeight(bool firstLine, bool isRootLineBox = false) const;
RenderInline* inlineContinuation() const;
void setContinuation(RenderBoxModelObject* c) { m_continuation = c; }
virtual void childBecameNonInline(RenderObject* child);
virtual void updateHitTestResult(HitTestResult&, const IntPoint&);
virtual void imageChanged(WrappedImagePtr, const IntRect* = 0);
#if ENABLE(DASHBOARD_SUPPORT)
virtual void addDashboardRegions(Vector<DashboardRegionValue>&);
#endif
virtual void styleDidChange(StyleDifference, const RenderStyle* oldStyle);
virtual void updateBoxModelInfoFromStyle();
static RenderInline* cloneInline(RenderInline* src);
void paintOutlineForLine(GraphicsContext*, int tx, int ty, const IntRect& prevLine, const IntRect& thisLine, const IntRect& nextLine);
RenderBoxModelObject* continuationBefore(RenderObject* beforeChild);
RenderObjectChildList m_children;
RenderLineBoxList m_lineBoxes; // All of the line boxes created for this inline flow. For example, <i>Hello<br>world.</i> will have two <i> line boxes.
RenderBoxModelObject* m_continuation; // Can be either a block or an inline. <b><i><p>Hello</p></i></b>. In this example the <i> will have a block as its continuation but the
// <b> will just have an inline as its continuation.
mutable int m_lineHeight;
mutable int m_verticalPosition;
};
inline RenderInline* toRenderInline(RenderObject* object)
{
ASSERT(!object || object->isRenderInline());
return static_cast<RenderInline*>(object);
}
inline const RenderInline* toRenderInline(const RenderObject* object)
{
ASSERT(!object || object->isRenderInline());
return static_cast<const RenderInline*>(object);
}
// This will catch anyone doing an unnecessary cast.
void toRenderInline(const RenderInline*);
} // namespace WebCore
#endif // RenderInline_h
| 2,408 |
4,013 | <reponame>cclauss/checkov
import unittest
import hcl2
from checkov.common.models.enums import CheckResult
from checkov.terraform.checks.resource.gcp.GoogleSubnetworkLoggingEnabled import check
class TestGoogleSubnetworkLoggingEnabled(unittest.TestCase):
def test_failure(self):
hcl_res = hcl2.loads("""
resource "google_compute_subnetwork" "without logging" {
name = "log-test-subnetwork"
ip_cidr_range = "10.2.0.0/16"
region = "us-central1"
network = google_compute_network.custom-test.id
}
""")
resource_conf = hcl_res['resource'][0]['google_compute_subnetwork']['without logging']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.FAILED, scan_result)
def test_success(self):
hcl_res = hcl2.loads("""
resource "google_compute_subnetwork" "with logging" {
name = "log-test-subnetwork"
ip_cidr_range = "10.2.0.0/16"
region = "us-central1"
network = google_compute_network.custom-test.id
log_config {
aggregation_interval = "INTERVAL_10_MIN"
flow_sampling = 0.5
metadata = "INCLUDE_ALL_METADATA"
}
}
""")
resource_conf = hcl_res['resource'][0]['google_compute_subnetwork']['with logging']
scan_result = check.scan_resource_conf(conf=resource_conf)
self.assertEqual(CheckResult.PASSED, scan_result)
if __name__ == '__main__':
unittest.main()
| 772 |
3,372 | <reponame>MC-JY/aws-sdk-java
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.opensearch;
import javax.annotation.Generated;
import com.amazonaws.services.opensearch.model.*;
import com.amazonaws.*;
/**
* Abstract implementation of {@code AmazonOpenSearch}. Convenient method forms pass through to the corresponding
* overload that takes a request object, which throws an {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAmazonOpenSearch implements AmazonOpenSearch {
protected AbstractAmazonOpenSearch() {
}
@Override
public AcceptInboundConnectionResult acceptInboundConnection(AcceptInboundConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public AddTagsResult addTags(AddTagsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public AssociatePackageResult associatePackage(AssociatePackageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CancelServiceSoftwareUpdateResult cancelServiceSoftwareUpdate(CancelServiceSoftwareUpdateRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateDomainResult createDomain(CreateDomainRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreateOutboundConnectionResult createOutboundConnection(CreateOutboundConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public CreatePackageResult createPackage(CreatePackageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteDomainResult deleteDomain(DeleteDomainRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteInboundConnectionResult deleteInboundConnection(DeleteInboundConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeleteOutboundConnectionResult deleteOutboundConnection(DeleteOutboundConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DeletePackageResult deletePackage(DeletePackageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeDomainResult describeDomain(DescribeDomainRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeDomainAutoTunesResult describeDomainAutoTunes(DescribeDomainAutoTunesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeDomainConfigResult describeDomainConfig(DescribeDomainConfigRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeDomainsResult describeDomains(DescribeDomainsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeInboundConnectionsResult describeInboundConnections(DescribeInboundConnectionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeInstanceTypeLimitsResult describeInstanceTypeLimits(DescribeInstanceTypeLimitsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeOutboundConnectionsResult describeOutboundConnections(DescribeOutboundConnectionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribePackagesResult describePackages(DescribePackagesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeReservedInstanceOfferingsResult describeReservedInstanceOfferings(DescribeReservedInstanceOfferingsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DescribeReservedInstancesResult describeReservedInstances(DescribeReservedInstancesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public DissociatePackageResult dissociatePackage(DissociatePackageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetCompatibleVersionsResult getCompatibleVersions(GetCompatibleVersionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetPackageVersionHistoryResult getPackageVersionHistory(GetPackageVersionHistoryRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetUpgradeHistoryResult getUpgradeHistory(GetUpgradeHistoryRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public GetUpgradeStatusResult getUpgradeStatus(GetUpgradeStatusRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListDomainNamesResult listDomainNames(ListDomainNamesRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListDomainsForPackageResult listDomainsForPackage(ListDomainsForPackageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListInstanceTypeDetailsResult listInstanceTypeDetails(ListInstanceTypeDetailsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListPackagesForDomainResult listPackagesForDomain(ListPackagesForDomainRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListTagsResult listTags(ListTagsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public ListVersionsResult listVersions(ListVersionsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public PurchaseReservedInstanceOfferingResult purchaseReservedInstanceOffering(PurchaseReservedInstanceOfferingRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RejectInboundConnectionResult rejectInboundConnection(RejectInboundConnectionRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public RemoveTagsResult removeTags(RemoveTagsRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public StartServiceSoftwareUpdateResult startServiceSoftwareUpdate(StartServiceSoftwareUpdateRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdateDomainConfigResult updateDomainConfig(UpdateDomainConfigRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpdatePackageResult updatePackage(UpdatePackageRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public UpgradeDomainResult upgradeDomain(UpgradeDomainRequest request) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public void shutdown() {
throw new java.lang.UnsupportedOperationException();
}
@Override
public com.amazonaws.ResponseMetadata getCachedResponseMetadata(com.amazonaws.AmazonWebServiceRequest request) {
throw new java.lang.UnsupportedOperationException();
}
}
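// A minimal usage sketch (illustrative only; TestOpenSearchClient and the canned
// result below are hypothetical, not part of the SDK): a subclass overrides just
// the operations it needs and inherits UnsupportedOperationException for the rest.
//
//   class TestOpenSearchClient extends AbstractAmazonOpenSearch {
//       @Override
//       public ListDomainNamesResult listDomainNames(ListDomainNamesRequest request) {
//           return new ListDomainNamesResult(); // canned response for tests
//       }
//   }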
| 2,450 |
1,367 | <reponame>developer-inspur/SwissArmyKnife<gh_stars>1000+
package com.wanjian.sak.layer.impl_tmp;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.view.View;
import com.wanjian.sak.R;
/**
* Created by wanjian on 2016/10/24.
*/
@Deprecated
public class InfoLayer {
public static final int INFO_KEY = R.layout.sak_view_info_layout;
// public InfoLayer(Context context) {
// super(context);
// }
//
//
// @Override
// protected String getTxt(View view) {
// Object obj = view.getTag(INFO_KEY);
// String info;
// if (obj == null) {
// info = "";
// } else {
// info = obj.toString();
// }
// return info;
//
// }
//
// @Override
// public Drawable icon() {
// return getContext().getResources().getDrawable(R.drawable.sak_custom_info_icon);
// }
//
// @Override
// public String description() {
// return getContext().getString(R.string.sak_personal_info);
// }
}
| 441 |
348 | <filename>docs/data/leg-t1/080/08005442.json
{"nom":"Hombleux","circ":"5ème circonscription","dpt":"Somme","inscrits":805,"abs":433,"votants":372,"blancs":4,"nuls":4,"exp":364,"res":[{"nuance":"UDI","nom":"<NAME>","voix":163},{"nuance":"FN","nom":"M. <NAME>","voix":89},{"nuance":"FI","nom":"M. <NAME>","voix":54},{"nuance":"COM","nom":"Mme <NAME>","voix":30},{"nuance":"DLF","nom":"Mme <NAME>","voix":10},{"nuance":"DIV","nom":"M. <NAME>","voix":8},{"nuance":"EXD","nom":"Mme <NAME>","voix":5},{"nuance":"EXG","nom":"M. <NAME>","voix":4},{"nuance":"DIV","nom":"M. <NAME>","voix":1}]} | 241 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-8857-6ch2-mpr7",
"modified": "2022-04-29T01:26:35Z",
"published": "2022-04-29T01:26:35Z",
"aliases": [
"CVE-2003-0525"
],
"details": "The getCanonicalPath function in Windows NT 4.0 may free memory that it does not own and cause heap corruption, which allows attackers to cause a denial of service (crash) via requests that cause a long file name to be passed to getCanonicalPath, as demonstrated on the IBM JVM using a long string to the java.io.getCanonicalPath Java method.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0525"
},
{
"type": "WEB",
"url": "https://docs.microsoft.com/en-us/security-updates/securitybulletins/2003/ms03-029"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/12701"
},
{
"type": "WEB",
"url": "https://oval.cisecurity.org/repository/search/definition/oval%3Aorg.mitre.oval%3Adef%3A319"
},
{
"type": "WEB",
"url": "http://www.atstake.com/research/advisories/2003/a072303-1.txt"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 570 |
1,655 | #ifndef FILEITERABLES_HPP_
#define FILEITERABLES_HPP_
#include "RecursiveFileIterator.hpp"
#include "FileIterator.hpp"
namespace Tungsten {
class FileIterable
{
Path _path;
Path _extension;
public:
FileIterable(const Path &p, const Path &extension)
: _path(p),
_extension(extension)
{
}
FileIterator begin() const
{
return FileIterator(_path, false, true, _extension);
}
FileIterator end() const
{
return FileIterator();
}
};
class DirectoryIterable
{
Path _path;
public:
DirectoryIterable(const Path &p)
: _path(p)
{
}
FileIterator begin() const
{
return FileIterator(_path, true, false, Path());
}
FileIterator end() const
{
return FileIterator();
}
};
class RecursiveIterable
{
Path _path;
public:
RecursiveIterable(const Path &p)
: _path(p)
{
}
RecursiveFileIterator begin() const
{
return RecursiveFileIterator(_path);
}
RecursiveFileIterator end() const
{
return RecursiveFileIterator();
}
};
}
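// A minimal usage sketch (illustrative only; "scenes", ".json" and handleFile are
// hypothetical): each iterable above is meant to be consumed with a range-based for.
//
//   Tungsten::FileIterable jsonFiles(Tungsten::Path("scenes"), Tungsten::Path(".json"));
//   for (const Tungsten::Path &p : jsonFiles)
//       handleFile(p);   // every *.json file directly inside "scenes"
//
//   Tungsten::RecursiveIterable tree(Tungsten::Path("scenes"));
//   for (const Tungsten::Path &p : tree)
//       handleFile(p);   // every entry below "scenes", recursively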
#endif /* FILEITERABLES_HPP_ */
| 484 |
945 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.integration;
import org.apache.iotdb.commons.exception.IllegalPathException;
import org.apache.iotdb.commons.path.PartialPath;
import org.apache.iotdb.db.utils.EnvironmentUtils;
import org.apache.iotdb.itbase.category.LocalStandaloneTest;
import org.apache.iotdb.jdbc.Config;
import org.apache.iotdb.tsfile.utils.Pair;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@Category({LocalStandaloneTest.class})
public class IoTDBContinuousQueryIT {
private static final int EXPECTED_TEST_SIZE = 3;
private Statement statement;
private Connection connection;
private volatile Exception exception = null;
private PartialPath[] partialPathArray;
private final Thread dataGenerator =
new Thread() {
@Override
public void run() {
try (Connection connection =
DriverManager.getConnection(
Config.IOTDB_URL_PREFIX + "127.0.0.1:6667/", "root", "root");
Statement statement = connection.createStatement()) {
do {
for (PartialPath partialPath : partialPathArray) {
statement.execute(
String.format(
"insert into %s(timestamp, %s) values(now(), %.3f)",
partialPath.getDevicePath(),
partialPath.getMeasurement(),
200 * Math.random()));
}
} while (!isInterrupted());
} catch (Exception e) {
exception = e;
}
}
};
private void startDataGenerator() {
dataGenerator.start();
}
private void stopDataGenerator() throws InterruptedException {
dataGenerator.interrupt();
dataGenerator.join();
}
private void createTimeSeries(String[] timeSeriesArray) throws SQLException {
initPartialPaths(timeSeriesArray);
for (PartialPath partialPath : partialPathArray) {
statement.execute(
String.format(
"create timeseries %s with datatype=FLOAT,encoding=RLE", partialPath.getFullPath()));
}
}
private void initPartialPaths(String[] timeSeriesArray) {
partialPathArray = new PartialPath[timeSeriesArray.length];
for (int i = 0; i < timeSeriesArray.length; ++i) {
try {
partialPathArray[i] = new PartialPath(timeSeriesArray[i]);
} catch (IllegalPathException e) {
fail(e.getMessage());
}
}
}
@Before
public void setUp() throws Exception {
EnvironmentUtils.envSetUp();
Class.forName(Config.JDBC_DRIVER_NAME);
connection = DriverManager.getConnection("jdbc:iotdb://127.0.0.1:6667/", "root", "root");
statement = connection.createStatement();
}
@After
public void tearDown() throws Exception {
statement.close();
connection.close();
EnvironmentUtils.cleanEnv();
}
@Test
public void testCreateAndDropContinuousQuery() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.wt01.ws01.temperature",
"root.ln.wf01.wt01.ws02.temperature",
"root.ln.wf01.wt02.ws01.temperature",
"root.ln.wf01.wt02.ws02.temperature",
"root.ln.wf02.wt01.ws01.temperature",
"root.ln.wf02.wt01.ws02.temperature",
"root.ln.wf02.wt02.ws01.temperature",
"root.ln.wf02.wt02.ws02.temperature"
});
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT max_value(temperature) INTO temperature_max FROM root.ln.*.*.* "
+ "GROUP BY time(1s) END");
statement.execute(
"CREATE CONTINUOUS QUERY cq2 "
+ "BEGIN SELECT avg(temperature) INTO temperature_cnt FROM root.ln.wf01.*.* "
+ " GROUP BY time(1s), level=3 END");
statement.execute(
"CREATE CONTINUOUS QUERY cq3 "
+ "RESAMPLE EVERY 2s FOR 2s "
+ "BEGIN SELECT min_value(temperature) INTO temperature_avg FROM root.ln.wf01.*.* "
+ "GROUP BY time(1s), level=2 END");
statement.execute("DROP CONTINUOUS QUERY cq1");
statement.execute("DROP CONTINUOUS QUERY cq2");
checkShowContinuousQueriesResult(new String[] {"cq3"});
statement.close();
connection.close();
EnvironmentUtils.shutdownDaemon();
EnvironmentUtils.stopDaemon();
setUp();
checkShowContinuousQueriesResult(new String[] {"cq3"});
try {
statement.execute(
"CREATE CONTINUOUS QUERY cq3 "
+ "RESAMPLE EVERY 2s FOR 2s "
+ "BEGIN SELECT avg(temperature) INTO temperature_avg FROM root.ln.wf01.*.* "
+ "GROUP BY time(1s), level=2 END");
} catch (Exception e) {
assertTrue(e.getMessage().contains("already exists"));
}
try {
statement.execute("DROP CONTINUOUS QUERY cq1");
} catch (Exception e) {
assertTrue(e.getMessage().contains("not exist"));
}
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT sum(temperature) INTO temperature_max FROM root.ln.*.*.* "
+ "GROUP BY time(1s) END");
statement.execute(
"CREATE CONTINUOUS QUERY cq2 "
+ "BEGIN SELECT avg(temperature) INTO temperature_cnt FROM root.ln.wf01.*.* "
+ " GROUP BY time(1s), level=3 END");
checkShowContinuousQueriesResult(new String[] {"cq3", "cq1", "cq2"});
statement.execute("DROP CONTINUOUS QUERY cq1");
statement.execute("DROP CONTINUOUS QUERY cq2");
statement.execute("DROP CONTINUOUS QUERY cq3");
}
@Test
public void testContinuousQueryResultSeriesWithLevels() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.wt01.ws01.temperature",
"root.ln.wf01.wt01.ws02.temperature",
"root.ln.wf01.wt02.ws01.temperature",
"root.ln.wf01.wt02.ws02.temperature",
"root.ln.wf02.wt01.ws01.temperature",
"root.ln.wf02.wt01.ws02.temperature",
"root.ln.wf02.wt02.ws01.temperature",
"root.ln.wf02.wt02.ws02.temperature"
});
startDataGenerator();
Thread.sleep(500);
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT count(temperature) INTO temperature_cnt FROM root.ln.*.*.* "
+ "GROUP BY time(1s), level=1,2 END");
Thread.sleep(5500);
checkShowTimeSeriesResult(
new String[] {
"root.ln.wf01.wt01.ws01.temperature",
"root.ln.wf01.wt01.ws02.temperature",
"root.ln.wf01.wt02.ws01.temperature",
"root.ln.wf01.wt02.ws02.temperature",
"root.ln.wf02.wt01.ws01.temperature",
"root.ln.wf02.wt01.ws02.temperature",
"root.ln.wf02.wt02.ws01.temperature",
"root.ln.wf02.wt02.ws02.temperature",
"root.ln.wf01.temperature_cnt",
"root.ln.wf02.temperature_cnt"
});
statement.execute("DROP CONTINUOUS QUERY cq1");
stopDataGenerator();
}
public void testContinuousQueryResultSeriesWithLevels1() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.wt01.ws01.`(temperature)`",
"root.ln.wf01.wt01.ws02.`(temperature)`",
"root.ln.wf01.wt02.ws01.`(temperature)`",
"root.ln.wf01.wt02.ws02.`(temperature)`",
"root.ln.wf02.wt01.ws01.`(temperature)`",
"root.ln.wf02.wt01.ws02.`(temperature)`",
"root.ln.wf02.wt02.ws01.`(temperature)`",
"root.ln.wf02.wt02.ws02.`(temperature)`"
});
startDataGenerator();
Thread.sleep(500);
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT count(`(temperature)`) INTO temperature_cnt FROM root.ln.*.*.* "
+ "GROUP BY time(1s), level=1,2 END");
Thread.sleep(5500);
checkShowTimeSeriesResult(
new String[] {
"root.ln.wf01.wt01.ws01.`(temperature)`",
"root.ln.wf01.wt01.ws02.`(temperature)`",
"root.ln.wf01.wt02.ws01.`(temperature)`",
"root.ln.wf01.wt02.ws02.`(temperature)`",
"root.ln.wf02.wt01.ws01.`(temperature)`",
"root.ln.wf02.wt01.ws02.`(temperature)`",
"root.ln.wf02.wt02.ws01.`(temperature)`",
"root.ln.wf02.wt02.ws02.`(temperature)`",
"root.ln.wf01.temperature_cnt",
"root.ln.wf02.temperature_cnt"
});
statement.execute("DROP CONTINUOUS QUERY cq1");
stopDataGenerator();
}
@Test
public void testContinuousQueryResultSeriesWithDuplicatedTargetPaths() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.ws02.temperature",
"root.ln.wf01.ws01.temperature",
"root.ln.wf02.wt01.temperature",
"root.ln.wf02.wt02.temperature",
});
startDataGenerator();
Thread.sleep(500);
try {
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT avg(temperature) INTO root.target.{2}.{3}.avg FROM root.ln.*.* "
+ "GROUP BY time(1s) END");
} catch (Exception e) {
assertTrue(e.getMessage().contains("duplicated"));
} finally {
stopDataGenerator();
}
}
@Test
public void testContinuousQueryResultSeriesWithoutLevels1() throws Exception {
String[] timeSeriesArray = new String[30];
int wsIndex = 1;
for (int i = 1; i <= 30; ++i) {
timeSeriesArray[i - 1] =
"root.ln.wf0" + (i < 15 ? 1 : 2) + ".ws" + wsIndex++ + ".temperature";
}
createTimeSeries(timeSeriesArray);
startDataGenerator();
Thread.sleep(500);
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT avg(temperature) INTO root.target.${2}.${3}_avg FROM root.ln.*.* "
+ "GROUP BY time(1s) END");
Thread.sleep(5500);
checkShowTimeSeriesCount(2 * timeSeriesArray.length);
statement.execute("DROP CONTINUOUS QUERY cq1");
stopDataGenerator();
}
@Test
public void testContinuousQueryResultSeriesWithoutLevels2() throws Exception {
String[] timeSeriesArray = new String[30];
int wsIndex = 1;
for (int i = 1; i <= 30; ++i) {
timeSeriesArray[i - 1] =
"root.ln.wf0" + (i < 15 ? 1 : 2) + ".ws" + wsIndex++ + ".temperature";
}
createTimeSeries(timeSeriesArray);
startDataGenerator();
Thread.sleep(500);
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT avg(temperature) INTO root.target.${2}.${3}.avg FROM root.ln.*.* "
+ "GROUP BY time(1s) END");
Thread.sleep(5500);
checkShowTimeSeriesCount(2 * timeSeriesArray.length);
statement.execute("DROP CONTINUOUS QUERY cq1");
stopDataGenerator();
}
@Test
public void testInterval1000() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.wt01.ws01.temperature",
"root.ln.wf01.wt01.ws02.temperature",
"root.ln.wf01.wt02.ws01.temperature",
"root.ln.wf01.wt02.ws02.temperature",
"root.ln.wf02.wt01.ws01.temperature",
"root.ln.wf02.wt01.ws02.temperature",
"root.ln.wf02.wt02.ws01.temperature",
"root.ln.wf02.wt02.ws02.temperature"
});
startDataGenerator();
statement.execute(
"CREATE CQ cq1 "
+ "RESAMPLE EVERY 1s FOR 1s "
+ "BEGIN SELECT avg(temperature) INTO temperature_avg FROM root.ln.wf01.*.* "
+ "GROUP BY time(1s), level=1,2 END");
checkCQExecutionResult(1000);
statement.execute("DROP CQ cq1");
stopDataGenerator();
}
@Test
public void testInterval2000() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.wt01.ws01.temperature",
"root.ln.wf01.wt01.ws02.temperature",
"root.ln.wf01.wt02.ws01.temperature",
"root.ln.wf01.wt02.ws02.temperature",
"root.ln.wf02.wt01.ws01.temperature",
"root.ln.wf02.wt01.ws02.temperature",
"root.ln.wf02.wt02.ws01.temperature",
"root.ln.wf02.wt02.ws02.temperature"
});
startDataGenerator();
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "RESAMPLE EVERY 2s "
+ "BEGIN SELECT avg(temperature) INTO temperature_avg FROM root.ln.wf01.*.* "
+ "GROUP BY time(2s), level=1,2 END");
checkCQExecutionResult(2000);
statement.execute("DROP CQ cq1");
stopDataGenerator();
}
@Test
public void testInterval3000() throws Exception {
createTimeSeries(
new String[] {
"root.ln.wf01.wt01.ws01.temperature",
"root.ln.wf01.wt01.ws02.temperature",
"root.ln.wf01.wt02.ws01.temperature",
"root.ln.wf01.wt02.ws02.temperature",
"root.ln.wf02.wt01.ws01.temperature",
"root.ln.wf02.wt01.ws02.temperature",
"root.ln.wf02.wt02.ws01.temperature",
"root.ln.wf02.wt02.ws02.temperature"
});
startDataGenerator();
statement.execute(
"CREATE CONTINUOUS QUERY cq1 "
+ "BEGIN SELECT avg(temperature) INTO temperature_avg FROM root.ln.wf01.*.* "
+ "GROUP BY time(3s), level=1,2 END");
checkCQExecutionResult(3000);
statement.execute("DROP CQ cq1");
stopDataGenerator();
}
private void checkCQExecutionResult(long groupByInterval)
throws SQLException, InterruptedException {
// IOTDB-1821
// ignore the check when the background data generation thread's connection is broken
if (exception != null) {
return;
}
long waitMillSeconds = 0;
List<Pair<Long, Double>> actualResult;
do {
Thread.sleep(waitMillSeconds);
waitMillSeconds += 100;
statement.execute("select temperature_avg from root.ln.wf01");
actualResult = collectQueryResult();
} while (actualResult.size() < EXPECTED_TEST_SIZE);
long actualWindowBegin = actualResult.get(0).left;
long actualWindowEnd = actualResult.get(actualResult.size() - 1).left;
statement.execute(
String.format(
"select avg(temperature) from root.ln.wf01.*.* GROUP BY ([%d, %d), %dms), level=1,2 without null all",
actualWindowBegin, actualWindowEnd + groupByInterval, groupByInterval));
List<Pair<Long, Double>> expectedResult = collectQueryResult();
assertEquals(expectedResult.size(), actualResult.size());
final int size = expectedResult.size();
for (int i = 0; i < size; ++i) {
Pair<Long, Double> expected = expectedResult.get(i);
Pair<Long, Double> actual = actualResult.get(i);
assertEquals(expected.left, actual.left);
assertEquals(expected.right, actual.right, 10e-6);
}
}
private List<Pair<Long, Double>> collectQueryResult() {
List<Pair<Long, Double>> result = new ArrayList<>();
try (ResultSet resultSet = statement.getResultSet()) {
while (resultSet.next()) {
String timestamp = resultSet.getString(1);
String value = resultSet.getString(2);
result.add(new Pair<>(Long.parseLong(timestamp), Double.parseDouble(value)));
}
} catch (SQLException throwable) {
fail(throwable.getMessage());
}
return result;
}
private void checkShowContinuousQueriesResult(String[] continuousQueryArray) throws SQLException {
Assert.assertTrue(statement.execute("show continuous queries"));
List<String> resultList = new ArrayList<>();
try (ResultSet resultSet = statement.getResultSet()) {
while (resultSet.next()) {
resultList.add(resultSet.getString("cq name"));
}
}
Assert.assertEquals(continuousQueryArray.length, resultList.size());
List<String> collect =
resultList.stream()
.sorted(Comparator.comparingInt(e -> e.split("\\.").length))
.collect(Collectors.toList());
for (String s : continuousQueryArray) {
Assert.assertTrue(collect.contains(s));
}
}
private void checkShowTimeSeriesResult(String[] timeSeriesArray) throws SQLException {
Assert.assertTrue(statement.execute("show timeseries"));
List<String> resultList = new ArrayList<>();
try (ResultSet resultSet = statement.getResultSet()) {
while (resultSet.next()) {
resultList.add(resultSet.getString("timeseries"));
}
}
Assert.assertEquals(timeSeriesArray.length, resultList.size());
List<String> collect =
resultList.stream()
.sorted(Comparator.comparingInt(e -> e.split("\\.").length))
.collect(Collectors.toList());
for (String s : timeSeriesArray) {
Assert.assertTrue(collect.contains(s));
}
}
private void checkShowTimeSeriesCount(int expected) throws SQLException {
Assert.assertTrue(statement.execute("show timeseries"));
int actual = 0;
try (ResultSet resultSet = statement.getResultSet()) {
while (resultSet.next()) {
++actual;
}
}
Assert.assertEquals(expected, actual);
}
}
| 7,853 |
348 | <filename>docs/data/leg-t2/080/08004098.json<gh_stars>100-1000
{"nom":"Bettembos","circ":"4ème circonscription","dpt":"Somme","inscrits":83,"abs":35,"votants":48,"blancs":5,"nuls":1,"exp":42,"res":[{"nuance":"FN","nom":"<NAME>","voix":29},{"nuance":"REM","nom":"<NAME>","voix":13}]} | 119 |
636 | package cn.org.atool.fluent.mybatis.generator.shared2.dao.impl;
import cn.org.atool.fluent.mybatis.generator.shared2.dao.base.HomeAddressBaseDao;
import cn.org.atool.fluent.mybatis.generator.shared2.dao.intf.HomeAddressDao;
import org.springframework.stereotype.Repository;
/**
 * HomeAddressDaoImpl: data access interface implementation
* <p>
 * This is just a template file meant to cut down on hand-written boilerplate.
 * Feel free to add methods and implementations, change the author, and rename the class.
* <p/>@author Powered By Fluent Mybatis
*/
@Repository
public class HomeAddressDaoImpl extends HomeAddressBaseDao implements HomeAddressDao {
}
| 273 |
535 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "net/instaweb/rewriter/public/css_inline_import_to_link_filter.h"
#include "net/instaweb/rewriter/public/rewrite_driver.h"
#include "net/instaweb/rewriter/public/rewrite_options.h"
#include "pagespeed/kernel/base/string.h"
#include "pagespeed/kernel/base/string_util.h"
#include "pagespeed/kernel/http/content_type.h"
#include "test/net/instaweb/rewriter/rewrite_test_base.h"
#include "test/pagespeed/kernel/base/gtest.h"
namespace net_instaweb {
namespace {
const char kCssFile[] = "assets/styles.css";
const char kCssTail[] = "styles.css";
const char kCssSubdir[] = "assets/";
const char kCssData[] = ".blue {color: blue; src: url(dummy.png);}";
class CssInlineImportToLinkFilterTest : public RewriteTestBase {
protected:
void SetUp() override {
RewriteTestBase::SetUp();
SetHtmlMimetype();
}
// Test general situations.
void ValidateStyleToLink(const GoogleString& input_style,
const GoogleString& expected_style) {
const GoogleString html_input = "<head>\n" + input_style +
"</head>\n"
"<body>Hello, world!</body>\n";
// Rewrite the HTML page.
ParseUrl("http://test.com/test.html", html_input);
// Check the output HTML.
const GoogleString expected_output = "<head>\n" + expected_style +
"</head>\n"
"<body>Hello, world!</body>\n";
EXPECT_EQ(AddHtmlBody(expected_output), output_buffer_);
}
void ValidateStyleUnchanged(const GoogleString& import_equals_output) {
ValidateStyleToLink(import_equals_output, import_equals_output);
}
};
TEST_F(CssInlineImportToLinkFilterTest, CssPreserveURLOff) {
options()->EnableFilter(RewriteOptions::kInlineImportToLink);
options()->set_css_preserve_urls(false);
static const char kLink[] =
"<link rel=\"stylesheet\" href=\"assets/styles.css\">";
rewrite_driver()->AddFilters();
ValidateStyleToLink("<style>@import url(assets/styles.css);</style>", kLink);
}
TEST_F(CssInlineImportToLinkFilterTest, AlwaysAllowUnauthorizedDomain) {
options()->EnableFilter(RewriteOptions::kInlineImportToLink);
options()->set_css_preserve_urls(false);
rewrite_driver()->AddFilters();
ValidateStyleToLink(
"<style>@import url(http://unauth.com/assets/styles.css);</style>",
"<link rel=\"stylesheet\" href=\"http://unauth.com/assets/styles.css\">");
}
// Tests for converting styles to links.
TEST_F(CssInlineImportToLinkFilterTest, ConvertGoodStyle) {
AddFilter(RewriteOptions::kInlineImportToLink);
static const char kLink[] =
"<link rel=\"stylesheet\" href=\"assets/styles.css\">";
// These all get converted to the above link.
ValidateStyleToLink("<style>@import url(assets/styles.css);</style>", kLink);
ValidateStyleToLink("<style>@import url(\"assets/styles.css\");</style>",
kLink);
ValidateStyleToLink("<style>\n\t@import \"assets/styles.css\"\t;\n\t</style>",
kLink);
ValidateStyleToLink("<style>@import 'assets/styles.css';</style>", kLink);
ValidateStyleToLink("<style>@import url( assets/styles.css);</style>", kLink);
ValidateStyleToLink("<style>@import url('assets/styles.css');</style>",
kLink);
ValidateStyleToLink("<style>@import url( 'assets/styles.css' );</style>",
kLink);
// According to the latest DRAFT CSS spec this is invalid due to the missing
// final semicolon, however according to the 2003 spec it is valid. Some
// browsers seem to accept it and some don't, so we will accept it.
ValidateStyleToLink("<style>@import url(assets/styles.css)</style>", kLink);
}
TEST_F(CssInlineImportToLinkFilterTest, DoNotConvertScoped) {
// <style scoped> can't be converted to a link.
// (https://github.com/apache/incubator-pagespeed-mod/issues/918)
AddFilter(RewriteOptions::kInlineImportToLink);
ValidateStyleUnchanged(
"<style type=\"text/css\" scoped>"
"@import url(assets/styles.css);</style>");
}
TEST_F(CssInlineImportToLinkFilterTest, ConvertStyleWithMultipleImports) {
AddFilter(RewriteOptions::kInlineImportToLink);
ValidateStyleToLink(
"<style>"
"@import \"first.css\" all;\n"
"@import url(\"second.css\" );\n"
"@import 'third.css';\n"
"</style>",
"<link rel=\"stylesheet\" href=\"first.css\" media=\"all\">"
"<link rel=\"stylesheet\" href=\"second.css\">"
"<link rel=\"stylesheet\" href=\"third.css\">");
ValidateStyleToLink(
"<style>"
"@import \"first.css\" screen;\n"
"@import \"third.css\" print;\n"
"</style>",
"<link rel=\"stylesheet\" href=\"first.css\" media=\"screen\">"
"<link rel=\"stylesheet\" href=\"third.css\" media=\"print\">");
// Example from modpagespeed issue #491. Note that all the attributes from
// the style are copied to the end of every link.
ValidateStyleToLink(
"<style type=\"text/css\" title=\"currentStyle\" media=\"screen\">"
" @import \"http://example.com/universal.css?63310\";"
" @import \"http://example.com/navigation_beta.css?123\";"
" @import \"http://example.com/navigation.css?321\";"
" @import \"http://example.com/teases.css\";"
" @import \"http://example.com/homepage.css?nocache=987\";"
" @import \"http://example.com/yourPicks.css?nocache=123\";"
" @import \"http://example.com/sportsTabsHomepage.css\";"
" @import \"http://example.com/businessTabsHomepage.css\";"
" @import \"http://example.com/slider.css?09\";"
" @import \"http://example.com/weather.css\";"
" @import \"http://example.com/style3.css\";"
" @import \"http://example.com/style3_tmp.css\";"
"</style>",
"<link rel=\"stylesheet\""
" href=\"http://example.com/universal.css?63310\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/navigation_beta.css?123\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/navigation.css?321\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/teases.css\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/homepage.css?nocache=987\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/yourPicks.css?nocache=123\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/sportsTabsHomepage.css\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/businessTabsHomepage.css\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/slider.css?09\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/weather.css\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/style3.css\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">"
"<link rel=\"stylesheet\""
" href=\"http://example.com/style3_tmp.css\" type=\"text/css\""
" title=\"currentStyle\" media=\"screen\">");
// Pull out @import statements, even if there is trailing CSS.
ValidateStyleToLink(
"<style>"
"@import \"first.css\" all;\n"
"@import url('second.css' );\n"
"@import \"third.css\";\n"
".a { background-color: red }"
"</style>",
"<link rel=\"stylesheet\" href=\"first.css\" media=\"all\">"
"<link rel=\"stylesheet\" href=\"second.css\">"
"<link rel=\"stylesheet\" href=\"third.css\">"
"<style>"
".a { background-color: red }"
"</style>");
// Variations where there's more than just valid @imports.
// We do not convert because of the invalid @import.
ValidateStyleUnchanged(
"<style>"
"@import \"first.css\" all;\n"
"@import url( );\n"
"@import \"third.css\";\n"
"</style>");
// We do not convert because of the @charset
ValidateStyleUnchanged(
"<style>"
"@charset \"ISO-8859-1\";\n"
"@import \"first.css\" all;\n"
"@import url('second.css' );\n"
"@import \"third.css\";\n"
"</style>");
// These could be handled as it's "obvious" what the right thing is, but
// at the moment we don't handle all perms-and-combs of media [queries].
// The first 4 could "ignore" the style's media as it includes the imports.
ValidateStyleUnchanged(
"<style>"
"@import \"first.css\" screen;\n"
"@import \"third.css\" not screen;\n"
"</style>");
ValidateStyleUnchanged(
"<style media=\"all\">"
"@import \"first.css\" screen;\n"
"@import \"third.css\" print;\n"
"</style>");
ValidateStyleUnchanged(
"<style media=\"all\">"
"@import \"first.css\" screen;\n"
"@import \"third.css\" not screen;\n");
ValidateStyleUnchanged(
"<style media=\"screen, not screen\">"
"@import \"first.css\" screen;\n"
"@import \"third.css\" not screen;\n"
"</style>");
// This one could determine that the intersection of screen & not screen
// is the empty set and therefore drop the 2nd import/link completely.
ValidateStyleUnchanged(
"<style media=\"screen\">"
"@import \"first.css\" screen;\n"
"@import \"third.css\" not screen;\n"
"</style>");
}
TEST_F(CssInlineImportToLinkFilterTest, OnlyConvertPrefix) {
AddFilter(RewriteOptions::kInlineImportToLink);
// Trailing content.
ValidateStyleToLink(
"<style>@import url(assets/styles.css);\n"
"a { color: red; }</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\">"
"<style>a { color: red; }</style>");
// Nonsense @-rule.
ValidateStyleToLink(
"<style>@import url(assets/styles.css);\n"
"@foobar</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\">"
"<style>@foobar</style>");
// @import later in the CSS.
ValidateStyleToLink(
"<style>@import url(a.css);\n"
"@font-face { src: url(b.woff) }\n"
"@import url(c.css);</style>",
"<link rel=\"stylesheet\" href=\"a.css\">"
"<style>@font-face { src: url(b.woff) }\n"
"@import url(c.css);</style>");
}
TEST_F(CssInlineImportToLinkFilterTest, ConvertStyleWithAttributes) {
AddFilter(RewriteOptions::kInlineImportToLink);
ValidateStyleToLink(
"<style type=\"text/css\">"
"@import url(assets/styles.css);</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\">");
ValidateStyleToLink(
"<style type=\"text/css\" media=\"screen\">"
"@import url(assets/styles.css);</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\" media=\"screen\">");
}
TEST_F(CssInlineImportToLinkFilterTest, ConvertStyleWithSameMedia) {
AddFilter(RewriteOptions::kInlineImportToLink);
ValidateStyleToLink("<style>@import url(assets/styles.css) all</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" media=\"all\">");
ValidateStyleToLink(
"<style type=\"text/css\">"
"@import url(assets/styles.css) all;</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\" media=\"all\">");
ValidateStyleToLink(
"<style type=\"text/css\" media=\"screen\">"
"@import url(assets/styles.css) screen;</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\" media=\"screen\">");
ValidateStyleToLink(
"<style type=\"text/css\" media=\"screen,printer\">"
"@import url(assets/styles.css) printer,screen;</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\" media=\"screen,printer\">");
ValidateStyleToLink(
"<style type=\"text/css\" media=\" screen , printer \">"
"@import 'assets/styles.css' printer, screen ;</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\" media=\" screen , printer \">");
}
TEST_F(CssInlineImportToLinkFilterTest, ConvertStyleWithDifferentMedia) {
AddFilter(RewriteOptions::kInlineImportToLink);
ValidateStyleUnchanged(
"<style type=\"text/css\" media=\"screen\">"
"@import url(assets/styles.css) all;</style>");
ValidateStyleUnchanged(
"<style type=\"text/css\" media=\"screen,printer\">"
"@import url(assets/styles.css) screen;</style>");
}
TEST_F(CssInlineImportToLinkFilterTest, MediaQueries) {
AddFilter(RewriteOptions::kInlineImportToLink);
// If @import has no media, we'll keep the complex media query in the
// media attribute.
ValidateStyleToLink(
"<style type=\"text/css\" media=\"not screen\">"
"@import url(assets/styles.css);</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\""
" type=\"text/css\" media=\"not screen\">");
// Generally we just give up on complex media queries. Note, these could
// be rewritten in the future, just change the tests to produce sane results.
ValidateStyleUnchanged(
"<style type=\"text/css\">"
"@import url(assets/styles.css) not screen;</style>");
ValidateStyleUnchanged(
"<style type=\"text/css\" media=\"not screen\">"
"@import url(assets/styles.css) not screen;</style>");
ValidateStyleUnchanged(
"<style media=\"not screen and (color), only print\">"
"@import url(assets/styles.css)"
" not screen and (color), only print;</style>");
ValidateStyleUnchanged(
"<style type=\"text/css\" media=\"not screen\">"
"@import url(assets/styles.css) screen;</style>");
ValidateStyleUnchanged(
"<style type=\"text/css\" media=\"screen and (x)\">"
"@import url(assets/styles.css) screen;</style>");
}
TEST_F(CssInlineImportToLinkFilterTest, DoNotConvertBadStyle) {
AddFilter(RewriteOptions::kInlineImportToLink);
// These all are problematic in some way so are not changed at all.
ValidateStyleUnchanged("<style/>");
ValidateStyleUnchanged("<style></style>");
ValidateStyleUnchanged("<style>@import assets/styles.css;</style>");
ValidateStyleUnchanged("<style>@import assets/styles.css</style>");
ValidateStyleUnchanged("<style>@import styles.css</style>");
ValidateStyleUnchanged("<style>@import foo</style>");
ValidateStyleUnchanged("<style>@import url (assets/styles.css);</style>");
ValidateStyleUnchanged("<style>@ import url(assets/styles.css)</style>");
ValidateStyleUnchanged("<style>*border: 0px</style>");
ValidateStyleUnchanged(
"<style>@charset \"ISO-8859-1\";\n"
"@import \"mystyle.css\" all;</style>");
ValidateStyleUnchanged("<style><p/>@import url(assets/styles.css)</style>");
ValidateStyleUnchanged("<style><![CDATA[@import url(assets/styles.css);]]\n");
ValidateStyleUnchanged(
"<style><![CDATA[\njunky junk junk!\n]]\\>\n"
"@import url(assets/styles.css);</style>");
ValidateStyleUnchanged(
"<style><!-- comment -->"
"@import url(assets/styles.css);</style>");
ValidateStyleUnchanged("<style href='x'>@import url(styles.css);</style>");
ValidateStyleUnchanged("<style rel='x'>@import url(styles.css);</style>");
ValidateStyleUnchanged(
"<style type=\"text/javascript\">"
"@import url(assets/styles.css);</style>");
ValidateStyleUnchanged("<style>@import url(styles.css)<style/></style>");
// These are fine to convert. These have errors, but only after valid
// @import statements. Turning them into links is safe.
ValidateStyleToLink("<style>@import url(assets/styles.css);<p/</style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\">"
"<style><p/</style>");
ValidateStyleToLink(
"<style>@import url(assets/styles.css);\n"
"<![CDATA[\njunky junk junk!\n]]\\></style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\">"
"<style><![CDATA[\njunky junk junk!\n]]\\></style>");
ValidateStyleToLink(
"<style>@import url(assets/styles.css);"
"<!-- comment --></style>",
"<link rel=\"stylesheet\" href=\"assets/styles.css\">"
"<style><!-- comment --></style>");
}
class CssInlineImportToLinkFilterTestNoTags
: public CssInlineImportToLinkFilterTest {
public:
bool AddHtmlTags() const override { return false; }
};
TEST_F(CssInlineImportToLinkFilterTestNoTags, UnclosedStyleGetsConverted) {
options()->EnableFilter(RewriteOptions::kInlineImportToLink);
rewrite_driver()->AddFilters();
ValidateExpected("unclosed_style", "<style>@import url(assets/styles.css)",
"<link rel=\"stylesheet\" href=\"assets/styles.css\">");
}
TEST_F(CssInlineImportToLinkFilterTest, ConvertThenCacheExtend) {
options()->EnableFilter(RewriteOptions::kInlineImportToLink);
options()->EnableFilter(RewriteOptions::kExtendCacheCss);
rewrite_driver()->AddFilters();
// Cache for 100s.
SetResponseWithDefaultHeaders(kCssFile, kContentTypeCss, kCssData, 100);
ValidateExpected(
"script_to_link_then_cache_extend",
StrCat("<style>@import url(", kCssFile, ");</style>"),
StrCat("<link rel=\"stylesheet\" href=\"",
Encode(kCssSubdir, "ce", "0", kCssTail, "css"), "\">"));
}
TEST_F(CssInlineImportToLinkFilterTest, DontConvertOrCacheExtend) {
options()->EnableFilter(RewriteOptions::kInlineImportToLink);
options()->EnableFilter(RewriteOptions::kExtendCacheCss);
rewrite_driver()->AddFilters();
// Cache for 100s.
SetResponseWithDefaultHeaders(kCssFile, kContentTypeCss, kCssData, 100);
// Note: This @import is not converted because it is preceded by a @foobar.
const GoogleString kStyleElement = StrCat(
"<style>\n"
"@foobar ;\n"
"@import url(",
kCssFile, ");\n", "body { color: red; }\n", "</style>");
ValidateNoChanges("dont_touch_script_but_cache_extend", kStyleElement);
}
} // namespace
} // namespace net_instaweb
| 7,305 |
2,970 | <filename>scripts/cluster-serving/overall-correctness-check.py
from os import system
# build zoo package -> install zoo package -> install and start Redis and Flink -> start cluster serving
system("../../zoo/make-dist.sh && \
echo install zoo package && \
cd ../../pyzoo && \
pwd && \
python setup.py sdist && \
pip install dist/*.tar.gz && \
cd .. && \
mkdir tmp && \
echo REDIS && \
cd tmp && \
export REDIS_VERSION=5.0.5 && \
wget http://download.redis.io/releases/redis-${REDIS_VERSION}.tar.gz && \
tar xzf redis-${REDIS_VERSION}.tar.gz && \
rm redis-${REDIS_VERSION}.tar.gz && \
cd redis-${REDIS_VERSION} && \
make && \
export REDIS_HOME=$(pwd) && \
echo install flink && \
cd .. && \
export FLINK_VERSION=1.11.2 && \
wget https://archive.apache.org/dist/flink/flink-${FLINK_VERSION}/flink-${FLINK_VERSION}-bin-scala_2.11.tgz && \
tar xzf flink-${FLINK_VERSION}-bin-scala_2.11.tgz && \
rm flink-${FLINK_VERSION}-bin-scala_2.11.tgz && \
cd flink-${FLINK_VERSION} && \
export FLINK_HOME=$(pwd) && \
$FLINK_HOME/bin/start-cluster.sh && \
$FLINK_HOME/bin/flink list && \
cd ../.. && \
echo start cluster serving && \
cd dist/bin/cluster-serving && \
pwd && \
bash cluster-serving-init && \
bash cluster-serving-start && \
echo CHECK_FLINK && \
$FLINK_HOME/bin/flink list && \
rm -r ../../../tmp ")
# predict
print("predict")
#system("conda list")
from zoo.serving.client import *
#system("conda env list")
import numpy
import cv2
input_api = InputQueue()
path="/home/qihong/Documents/test.jpg"
img = cv2.imread(path)
img = cv2.resize(img, (224, 224))
data = cv2.imencode(".jpg", img)[1]
img_encoded = base64.b64encode(data).decode("utf-8")
input_api.enqueue("my-image2", t={"b64": img_encoded})
import time
time.sleep(3)
print("output")
output_api = OutputQueue()
result_ndarray = output_api.query("my-image2")
print(result_ndarray) | 958 |
4,036 | <gh_stars>1000+
class Base(object):
class C(object): pass
def meth(self):
pass
class Derived(Base):
def meth(self):
super(Derived, self).meth()
super(Derived, self).x
class Derived2(Base):
def meth(self):
pass
class C(object): pass
| 131 |
558 | <reponame>jiashiwen/redissyncer-server
package syncer.replica.event.iter.datatype;
public class BatchedKeyStringValueStringEvent extends BatchedKeyValuePairEvent<byte[], byte[]> {
private static final long serialVersionUID = 1L;
}
| 77 |
1,838 | // Feature tests of parsing Checked C interop annotations that declare
// an alternate checked pointer type for a variable or member with
// unchecked pointer type.
//
// The following lines are for the LLVM test harness:
//
// RUN: %clang_cc1 -verify %s
#include <stdchecked.h>
//
// parameters with interop type annotations
//
// first parameter has interop type annotation
extern void f1(int *p : itype(ptr<int>), int y) {
*p = y;
}
extern void f2(int *p : itype(array_ptr<int>), int y) {
}
extern void f3(int *p : itype(int checked[]), int y) {
}
extern void f4(int *p : itype(int checked[10]), int y) {
}
extern void f5(int **p : itype(ptr<ptr<int>>), int y) {
**p = y;
}
extern void f6(int **p : itype(array_ptr<ptr<int>>), int y) {
}
extern void f7(int **p : itype(ptr<int> checked[]), int y) {
}
extern void f8(int **p : itype(int * checked[10]), int y) {
}
// Second parameter has interop type annotation
extern void g1(int y, int *p : itype(ptr<int>)) {
*p = y;
}
extern void g2(int y, int *p : itype(array_ptr<int>)) {
}
extern void g3(int y, int *p : itype(int checked[])) {
}
extern void g4(int y, int *p : itype(int checked[10])) {
}
extern void g5(int y, int **p : itype(ptr<ptr<int>>)) {
y = **p;
}
extern void g6(int y, int **p : itype(ptr<array_ptr<int>>)) {
}
extern void g7(int y, int **p : itype(array_ptr<ptr<int>>)) {
}
extern void g8(int y, int **p : itype(ptr<int> checked[])) {
}
extern void g9(int y, int **p : itype(int * checked[10])) {
}
//
// returns an unchecked pointer type with an
// interop type annotation.
//
extern int *h1(int y, ptr<int> p) : itype(ptr<int>) {
*p = y;
return 0;
}
extern int *h2 (int y, const ptr<int> p) : itype(array_ptr<int>) {
return 0;
}
extern int **h3(void) : itype(ptr<ptr<int>>) {
return 0;
}
extern int **h4(void) : itype(array_ptr<ptr<int>>) {
return 0;
}
//
// Global variables with interop type annotations
//
int *a1 : itype(ptr<int>) = 0;
int *a2 : itype(array_ptr<int>) = 0;
int **a3 : itype(ptr<ptr<int>>) = 0;
int **a4 : itype(ptr<array_ptr<int>>) = 0;
int **a5 : itype(array_ptr<ptr<int>>) = 0;
int **a6 : itype(array_ptr<array_ptr<int>>) = 0;
int ***a7 : itype(ptr<ptr<ptr<int>>>) = 0;
int a8[10] : itype(int checked[10]);
extern int a9[] : itype(int checked[]);
//
// Structure members with interop pointer type annotations
//
struct S1 {
float *data1 : itype(ptr<float>);
float *data2 : itype(array_ptr<float>);
float **data3 : itype(ptr<ptr<float>>);
float **data4 : itype(ptr<array_ptr<float>>);
float **data5 : itype(array_ptr<ptr<float>>);
float ***data6 : itype(ptr<ptr<ptr<float>>>);
float data7[4] : itype(float checked[4]);
float data8[] : itype(float checked[]);
};
///
/// The interop type can have modifiers
///
extern void f30(const int * const x : itype(const ptr<const int>)) {
}
extern void f31(const int a[] : itype(const int checked[])) {
}
extern void f32(const int a[10] : itype(const int checked[10])) {
}
extern void f33(const int *x : itype(ptr<const int>)) {
}
extern const int *f34(void) : itype(ptr<const int>) {
return 0;
}
const int *a10 : itype(ptr<const int>) = 0;
int *const a11 : itype(const ptr<int>) = 0;
// First dimension of an array interop type for a parameter can
// have modifiers or the static keyword
extern void f35(int a[const 10] : itype(int checked[const 10])) {
}
extern void f36(int a[static const 10] : itype(int checked[static const 10])) {
}
extern void f37(int a[volatile 10] : itype(int checked[volatile 10])) {
}
extern void f38(const int *const x : itype(const int checked[const])) {
}
///
/// Typedef'ed names can be used as interop types
///
typedef ptr<int> pint;
typedef ptr<const int> pcint;
extern void f40(int *x : itype(pint)) {
}
extern void f41(const int *x : itype(pcint)) {
}
// Identifier not allowed in a type name
void f50(int *p : itype(ptr<int> a)) { // expected-error {{type name cannot have identifier in it}}
}
| 1,568 |
2,107 | <filename>outdated/win/gem/load_img.c<gh_stars>1000+
/*
* $NHDT-Date: 1432512809 2015/05/25 00:13:29 $ $NHDT-Branch: master $:$NHDT-Revision: 1.5 $
*/
#define __TCC_COMPAT__
#include <stdio.h>
#include <string.h>
#include <osbind.h>
#include <memory.h>
#include <aesbind.h>
#include <vdibind.h>
#include <gemfast.h>
#include <e_gem.h>
#include "load_img.h"
#ifndef FALSE
#define FALSE 0
#define TRUE !FALSE
#endif
/* VDI <-> Device palette order conversion matrices: */
/* Four-plane vdi-device */
int vdi2dev4[] = { 0, 15, 1, 2, 4, 6, 3, 5, 7, 8, 9, 10, 12, 14, 11, 13 };
/* Two-plane vdi-device */
int vdi2dev2[] = { 0, 3, 1, 2 };
void
get_colors(int handle, short *palette, int col)
{
int i, idx;
/* get current color palette */
for (i = 0; i < col; i++) {
/* device->vdi->device palette order */
switch (planes) {
case 1:
idx = i;
break;
case 2:
idx = vdi2dev2[i];
break;
case 4:
idx = vdi2dev4[i];
break;
default:
if (i < 16)
idx = vdi2dev4[i];
else
idx = i == 255 ? 1 : i;
}
vq_color(handle, i, 0, (int *) palette + idx * 3);
}
}
void
img_set_colors(int handle, short *palette, int col)
{
int i, idx, end;
/* set color palette */
end = min(1 << col, 1 << planes);
for (i = 0; i < end; i++) {
        switch (planes) { /* MAR -- was col 10.01.2001 */
case 1:
idx = i;
break;
case 2:
idx = vdi2dev2[i];
break;
case 4:
idx = vdi2dev4[i];
break;
default:
if (i < 16)
idx = vdi2dev4[i];
else
idx = i == 255 ? 1 : i;
}
vs_color(handle, i, (int *) palette + idx * 3);
}
}
int
convert(MFDB *image, long size)
{
int plane, mplanes;
char *line_addr, *buf_addr, *new_addr, *new1_addr, *image_addr,
*screen_addr;
MFDB dev_form, tmp;
long new_size;
/* convert size from words to bytes */
size <<= 1;
/* memory for the device raster */
new_size = size * (long) planes;
if ((new_addr = (char *) calloc(1, new_size)) == NULL)
return (FALSE);
/* initialize MFDBs */
tmp = *image;
tmp.fd_nplanes = planes;
tmp.fd_addr = new_addr;
tmp.fd_stand = 1; /* standard format */
dev_form = tmp;
screen_addr = new_addr;
dev_form.fd_stand = 0; /* device format */
image_addr = (char *) image->fd_addr;
/* initialize some variables and zero temp. line buffer */
mplanes = min(image->fd_nplanes, planes);
/* convert image */
line_addr = image_addr;
buf_addr = screen_addr;
if (mplanes > 1) {
/* cut/pad color planes into temp buf */
for (plane = 0; plane < mplanes; plane++) {
memcpy(buf_addr, line_addr, size);
line_addr += size;
buf_addr += size;
}
} else {
/* fill temp line bitplanes with a b&w line */
for (plane = 0; plane < planes; plane++) {
memcpy(buf_addr, line_addr, size);
buf_addr += size;
}
}
free(image->fd_addr);
/* convert image line in temp into current device raster format */
if ((new1_addr = (char *) calloc(1, new_size)) == NULL)
return (FALSE);
dev_form.fd_addr = new1_addr;
vr_trnfm(x_handle, &tmp, &dev_form);
free(new_addr);
/* change image description */
image->fd_stand = 0; /* device format */
image->fd_addr = new1_addr;
image->fd_nplanes = planes;
return (TRUE);
}
int
transform_img(MFDB *image)
{ /* return FALSE if transform_img fails */
int success;
long size;
if (!image->fd_addr)
return (FALSE);
size = (long) ((long) image->fd_wdwidth * (long) image->fd_h);
success = convert(
image, size); /* Use vr_trfm(), which needs quite a lot memory. */
if (success)
return (TRUE);
/* else show_error(ERR_ALLOC); */
return (FALSE);
}
/* Loads & depacks IMG (0 if succeeded, else error). */
/* Bitplanes are one after another in address IMG_HEADER.addr. */
int
depack_img(char *name, IMG_header *pic)
{
int b, line, plane, width, word_aligned, opcode, patt_len, pal_size,
byte_repeat, patt_repeat, scan_repeat, error = FALSE;
char *pattern, *to, *endline, *puffer, sol_pat;
long size;
FILE *fp;
if ((fp = fopen(name, "rb")) == NULL)
return (ERR_FILE);
setvbuf(fp, NULL, _IOLBF, BUFSIZ);
/* read header info (bw & ximg) into image structure */
fread((char *) &(pic->version), 2, 8 + 3, fp);
/* only 2-256 color imgs */
if (pic->planes < 1 || pic->planes > 8) {
error = ERR_COLOR;
goto end_depack;
}
/* if XIMG, read info */
if (pic->magic == XIMG && pic->paltype == 0) {
pal_size = (1 << pic->planes) * 3 * 2;
if ((pic->palette = (short *) calloc(1, pal_size))) {
fread((char *) pic->palette, 1, pal_size, fp);
}
} else {
pic->palette = NULL;
}
/* width in bytes word aliged */
word_aligned = (pic->img_w + 15) >> 4;
word_aligned <<= 1;
/* width byte aligned */
width = (pic->img_w + 7) >> 3;
/* allocate memory for the picture */
free(pic->addr);
size = (long) ((long) word_aligned * (long) pic->img_h
* (long) pic->planes); /*MAR*/
/* check for header validity & malloc long... */
if (pic->length > 7 && pic->planes < 33 && pic->img_w > 0
&& pic->img_h > 0) {
if (!(pic->addr = (char *) calloc(1, size))) {
error = ERR_ALLOC;
goto end_depack;
}
} else {
error = ERR_HEADER;
goto end_depack;
}
patt_len = pic->pat_len;
/* jump over the header and possible (XIMG) info */
fseek(fp, (long) pic->length * 2L, SEEK_SET);
for (line = 0, to = pic->addr; line < pic->img_h;
line += scan_repeat) { /* depack whole img */
for (plane = 0, scan_repeat = 1; plane < pic->planes;
plane++) { /* depack one scan line */
puffer = to =
pic->addr
+ (long) (line + plane * pic->img_h) * (long) word_aligned;
endline = puffer + width;
do { /* depack one line in one bitplane */
switch ((opcode = fgetc(fp))) {
case 0: /* pattern or scan repeat */
if ((patt_repeat = fgetc(fp))) { /* repeat a pattern */
fread(to, patt_len, 1, fp);
pattern = to;
to += patt_len;
while (--patt_repeat) { /* copy pattern */
memcpy(to, pattern, patt_len);
to += patt_len;
}
} else { /* repeat a line */
if (fgetc(fp) == 0xFF)
scan_repeat = fgetc(fp);
else {
error = ERR_DEPACK;
goto end_depack;
}
}
break;
case 0x80: /* Literal */
byte_repeat = fgetc(fp);
fread(to, byte_repeat, 1, fp);
to += byte_repeat;
break;
default: /* Solid run */
byte_repeat = opcode & 0x7F;
sol_pat = opcode & 0x80 ? 0xFF : 0x00;
while (byte_repeat--)
*to++ = sol_pat;
}
} while (to < endline);
if (to == endline) {
/* ensure that lines aren't repeated past the end of the img
*/
if (line + scan_repeat > pic->img_h)
scan_repeat = pic->img_h - line;
/* copy line to image buffer */
if (scan_repeat > 1) {
/* calculate address of a current line in a current
* bitplane */
/* to=pic->addr+(long)(line+1+plane*pic->img_h)*(long)word_aligned;*/
for (b = scan_repeat - 1; b; --b) {
memcpy(to, puffer, width);
to += word_aligned;
}
}
} else {
error = ERR_DEPACK;
goto end_depack;
}
}
}
end_depack:
fclose(fp);
return (error);
}
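/* half_img: shrink source raster s to half width and half height into d by
 * blitting every second column into a temporary MFDB, then every second row
 * of that buffer into the destination. */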
int
half_img(MFDB *s, MFDB *d)
{
int pxy[8], i, j;
MFDB tmp;
mfdb(&tmp, NULL, s->fd_w / 2, s->fd_h, s->fd_stand, s->fd_nplanes);
tmp.fd_w = s->fd_w / 2;
tmp.fd_addr = calloc(1, mfdb_size(&tmp));
if (!tmp.fd_addr)
return (FALSE);
pxy[1] = pxy[5] = 0;
pxy[3] = pxy[7] = s->fd_h - 1;
for (i = 0; i < s->fd_w / 2; i++) {
pxy[0] = pxy[2] = 2 * i;
pxy[4] = pxy[6] = i;
vro_cpyfm(x_handle, S_ONLY, pxy, s, &tmp);
}
pxy[0] = pxy[4] = 0;
pxy[2] = pxy[6] = s->fd_w / 2 - 1;
for (j = 0; j < s->fd_h / 2; j++) {
pxy[1] = pxy[3] = 2 * j;
pxy[5] = pxy[7] = j;
vro_cpyfm(x_handle, S_ONLY, pxy, &tmp, d);
}
free(tmp.fd_addr);
return (TRUE);
}
| 4,975 |
616 | /*
* Copyright (C) 2015 <NAME> & <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.larswerkman.lobsterpicker.adapters;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.support.annotation.ColorInt;
import android.support.annotation.DrawableRes;
import com.larswerkman.lobsterpicker.ColorAdapter;
/**
* {@link ColorAdapter} implementation that uses a {@link android.graphics.drawable.Drawable} resource
* as source. Each vertical pixel represents a color and each horizontal pixel represents a shade for
* that color
*/
public class BitmapColorAdapter implements ColorAdapter {
private Bitmap bitmap;
public BitmapColorAdapter(Context context, @DrawableRes int resource) {
bitmap = BitmapFactory.decodeResource(context.getResources(), resource);
}
@Override
public @ColorInt int color(int color, int shade) {
int pixel = bitmap.getPixel(shade, color);
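        // A 0x0 (fully transparent) pixel means this shade column lies past the
        // drawn colors; walk back toward lower shade indices until a
        // non-transparent pixel is found.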
while(pixel == 0x0){
pixel = bitmap.getPixel(shade--, color);
}
return pixel;
}
@Override
public int size() {
return bitmap.getHeight();
}
@Override
public int shades(int position) {
int[] pixels = new int[bitmap.getWidth()];
bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, position, bitmap.getWidth(), 1);
for (int i = 0; i < pixels.length; i++) {
if (pixels[i] == 0x0) {
return i;
}
}
return bitmap.getWidth();
}
} | 720 |
930 | package com.foxinmy.weixin4j.mp.message;
import java.io.Serializable;
import com.foxinmy.weixin4j.tuple.NotifyTuple;
/**
 * Customer service message (no limit on the number of sends within 48 hours)
*
* @author jinyu(<EMAIL>)
 * @date 2014-04-04
* @since JDK 1.6
* @see com.foxinmy.weixin4j.tuple.Text
* @see com.foxinmy.weixin4j.tuple.Image
* @see com.foxinmy.weixin4j.tuple.Voice
* @see com.foxinmy.weixin4j.tuple.Video
* @see com.foxinmy.weixin4j.tuple.Music
* @see com.foxinmy.weixin4j.tuple.News
* @see <a
 *      href="https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1421140547&token=&lang=zh_CN">Send customer service message</a>
*/
public class NotifyMessage implements Serializable {
private static final long serialVersionUID = 7190233634431087729L;
/**
 * The user's openid
*/
private String touser;
/**
 * Message object
*/
private NotifyTuple tuple;
public NotifyMessage(String touser, NotifyTuple tuple) {
this.touser = touser;
this.tuple = tuple;
}
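    // Illustrative usage (assumes Text implements NotifyTuple and takes a String):
    //   NotifyMessage msg = new NotifyMessage(openid, new Text("hello"));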
public String getTouser() {
return touser;
}
public NotifyTuple getTuple() {
return tuple;
}
@Override
public String toString() {
return "NotifyMessage [touser=" + touser + ", tuple=" + tuple + "]";
}
} | 593 |
648 | import os.path as osp
import numpy as np
import pytest
from mmcv.utils import assert_dict_has_keys
from mmaction.datasets import AudioFeatureDataset
from .base import BaseTestDataset
class TestAudioFeatureDataset(BaseTestDataset):
def test_audio_feature_dataset(self):
audio_dataset = AudioFeatureDataset(
self.audio_feature_ann_file,
self.audio_feature_pipeline,
data_prefix=self.data_prefix)
audio_infos = audio_dataset.video_infos
feature_path = osp.join(self.data_prefix, 'test.npy')
assert audio_infos == [
dict(audio_path=feature_path, total_frames=100, label=127)
] * 2
def test_audio_feature_pipeline(self):
target_keys = [
'audio_path', 'label', 'start_index', 'modality', 'audios',
'total_frames'
]
# Audio feature dataset not in test mode
audio_feature_dataset = AudioFeatureDataset(
self.audio_feature_ann_file,
self.audio_feature_pipeline,
data_prefix=self.data_prefix,
test_mode=False)
result = audio_feature_dataset[0]
assert assert_dict_has_keys(result, target_keys)
# Audio dataset in test mode
audio_feature_dataset = AudioFeatureDataset(
self.audio_feature_ann_file,
self.audio_feature_pipeline,
data_prefix=self.data_prefix,
test_mode=True)
result = audio_feature_dataset[0]
assert assert_dict_has_keys(result, target_keys)
def test_audio_feature_evaluate(self):
audio_dataset = AudioFeatureDataset(
self.audio_feature_ann_file,
self.audio_feature_pipeline,
data_prefix=self.data_prefix)
with pytest.raises(TypeError):
# results must be a list
audio_dataset.evaluate('0.5')
with pytest.raises(AssertionError):
# The length of results must be equal to the dataset len
audio_dataset.evaluate([0] * 5)
with pytest.raises(TypeError):
# topk must be int or tuple of int
audio_dataset.evaluate(
[0] * len(audio_dataset),
metric_options=dict(top_k_accuracy=dict(topk=1.)))
with pytest.raises(KeyError):
# unsupported metric
audio_dataset.evaluate([0] * len(audio_dataset), metrics='iou')
# evaluate top_k_accuracy and mean_class_accuracy metric
results = [np.array([0.1, 0.5, 0.4])] * 2
eval_result = audio_dataset.evaluate(
results, metrics=['top_k_accuracy', 'mean_class_accuracy'])
assert set(eval_result) == set(
['top1_acc', 'top5_acc', 'mean_class_accuracy'])
| 1,286 |
2,270 | /*
==============================================================================
This file is part of the JUCE library.
Copyright (c) 2020 - Raw Material Software Limited
JUCE is an open source library subject to commercial or open-source
licensing.
By using JUCE, you agree to the terms of both the JUCE 6 End-User License
Agreement and JUCE Privacy Policy (both effective as of the 16th June 2020).
End User License Agreement: www.juce.com/juce-6-licence
Privacy Policy: www.juce.com/juce-privacy-policy
Or: You may also use this code under the terms of the GPL v3 (see
www.gnu.org/licenses).
JUCE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY, AND ALL WARRANTIES, WHETHER
EXPRESSED OR IMPLIED, INCLUDING MERCHANTABILITY AND FITNESS FOR PURPOSE, ARE
DISCLAIMED.
==============================================================================
*/
namespace juce
{
namespace VariantHelpers
{
inline void clear (VARIANT* variant)
{
variant->vt = VT_EMPTY;
}
inline void setInt (int value, VARIANT* variant)
{
variant->vt = VT_I4;
variant->lVal = value;
}
inline void setBool (bool value, VARIANT* variant)
{
variant->vt = VT_BOOL;
variant->boolVal = value ? -1 : 0;
}
inline void setString (const String& value, VARIANT* variant)
{
variant->vt = VT_BSTR;
variant->bstrVal = SysAllocString ((const OLECHAR*) value.toWideCharPointer());
}
inline void setDouble (double value, VARIANT* variant)
{
variant->vt = VT_R8;
variant->dblVal = value;
}
}
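// Builds a SAFEARRAY of IRawElementProviderSimple pointers for the given
// accessibility handlers, skipping null entries; returns E_FAIL if an
// element cannot be stored in the array.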
inline JUCE_COMRESULT addHandlersToArray (const std::vector<const AccessibilityHandler*>& handlers, SAFEARRAY** pRetVal)
{
auto numHandlers = handlers.size();
*pRetVal = SafeArrayCreateVector (VT_UNKNOWN, 0, (ULONG) numHandlers);
    if (*pRetVal != nullptr)
{
for (LONG i = 0; i < (LONG) numHandlers; ++i)
{
auto* handler = handlers[(size_t) i];
if (handler == nullptr)
continue;
ComSmartPtr<IRawElementProviderSimple> provider;
JUCE_BEGIN_IGNORE_WARNINGS_GCC_LIKE ("-Wlanguage-extension-token")
handler->getNativeImplementation()->QueryInterface (IID_PPV_ARGS (provider.resetAndGetPointerAddress()));
JUCE_END_IGNORE_WARNINGS_GCC_LIKE
auto hr = SafeArrayPutElement (*pRetVal, &i, provider);
if (FAILED (hr))
return E_FAIL;
}
}
return S_OK;
}
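// Common guard for UIA getters: rejects a null out-parameter, clears it to a
// default value, checks that the element is still valid, and only then runs
// the supplied callback.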
template <typename Value, typename Object, typename Callback>
inline JUCE_COMRESULT withCheckedComArgs (Value* pRetVal, Object& handle, Callback&& callback)
{
if (pRetVal == nullptr)
return E_INVALIDARG;
*pRetVal = Value{};
if (! handle.isElementValid())
return (HRESULT) UIA_E_ELEMENTNOTAVAILABLE;
return callback();
}
} // namespace juce
| 1,293 |
4,526 | <gh_stars>1000+
#include <test.hpp>
#include <vtzero/feature.hpp>
#include <vtzero/layer.hpp>
#include <vtzero/vector_tile.hpp>
TEST_CASE("default constructed feature") {
vtzero::feature feature{};
REQUIRE_FALSE(feature.valid());
REQUIRE_FALSE(feature);
REQUIRE(feature.id() == 0);
REQUIRE_FALSE(feature.has_id());
REQUIRE(feature.geometry_type() == vtzero::GeomType::UNKNOWN);
REQUIRE_ASSERT(feature.geometry());
REQUIRE(feature.empty());
REQUIRE(feature.num_properties() == 0);
}
TEST_CASE("read a feature") {
const auto data = load_test_tile();
vtzero::vector_tile tile{data};
auto layer = tile.get_layer_by_name("bridge");
REQUIRE(layer.valid());
auto feature = layer.next_feature();
REQUIRE(feature.valid());
REQUIRE(feature);
REQUIRE(feature.id() == 0);
REQUIRE(feature.has_id());
REQUIRE(feature.geometry_type() == vtzero::GeomType::LINESTRING);
REQUIRE_FALSE(feature.empty());
REQUIRE(feature.num_properties() == 4);
}
TEST_CASE("iterate over all properties of a feature") {
const auto data = load_test_tile();
vtzero::vector_tile tile{data};
auto layer = tile.get_layer_by_name("bridge");
auto feature = layer.next_feature();
int count = 0;
SECTION("external iterator") {
while (auto p = feature.next_property()) {
++count;
if (p.key() == "type") {
REQUIRE(p.value().type() == vtzero::property_value_type::string_value);
REQUIRE(p.value().string_value() == "primary");
}
}
}
SECTION("internal iterator") {
feature.for_each_property([&count](const vtzero::property& p) {
++count;
if (p.key() == "type") {
REQUIRE(p.value().type() == vtzero::property_value_type::string_value);
REQUIRE(p.value().string_value() == "primary");
}
return true;
});
}
REQUIRE(count == 4);
}
TEST_CASE("iterate over some properties of a feature") {
const auto data = load_test_tile();
vtzero::vector_tile tile{data};
auto layer = tile.get_layer_by_name("bridge");
REQUIRE(layer.valid());
auto feature = layer.next_feature();
REQUIRE(feature.valid());
int count = 0;
SECTION("external iterator") {
while (auto p = feature.next_property()) {
++count;
if (p.key() == "oneway") {
break;
}
}
}
SECTION("internal iterator") {
feature.for_each_property([&count](const vtzero::property& p) {
++count;
return p.key() != "oneway";
});
}
REQUIRE(count == 2);
}
| 1,184 |
1,127 | // Copyright (C) 2018-2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <tuple>
#include <vector>
#include <string>
#include <memory>
#include "shared_test_classes/base/layer_test_utils.hpp"
#include "base/behavior_test_utils.hpp"
#include "ngraph_functions/builders.hpp"
#include "pugixml.hpp"
namespace ExecutionGraphTests {
class ExecGraphUniqueNodeNames : public testing::WithParamInterface<LayerTestsUtils::basicParams>,
public CommonTestUtils::TestsCommon {
public:
static std::string getTestCaseName(testing::TestParamInfo<LayerTestsUtils::basicParams> obj);
void SetUp() override;
void TearDown() override;
protected:
std::string targetDevice;
std::shared_ptr<ngraph::Function> fnPtr;
};
class ExecGraphSerializationTest : public CommonTestUtils::TestsCommon, public testing::WithParamInterface<std::string> {
public:
static std::string getTestCaseName(testing::TestParamInfo<std::string> obj);
void SetUp() override;
void TearDown() override;
private:
// walker traverse (DFS) xml document and store layer & data nodes in
// vector which is later used for comparison
struct exec_graph_walker : pugi::xml_tree_walker {
std::vector<pugi::xml_node> nodes;
bool for_each(pugi::xml_node &node) override;
};
// compare_docs() helper
std::pair<bool, std::string> compare_nodes(const pugi::xml_node &node1,
const pugi::xml_node &node2);
protected:
// checks if two exec graph xml's are equivalent:
// - the same count of <layer> and <data> nodes
// - the same count of attributes of each node
// - the same name of each attribute (value is not checked, since it can differ
    //   between different devices)
std::pair<bool, std::string> compare_docs(const pugi::xml_document &doc1,
const pugi::xml_document &doc2);
std::string deviceName, m_out_xml_path, m_out_bin_path;
};
} // namespace ExecutionGraphTests
| 785 |
2,151 | <filename>third_party/angle/src/libANGLE/renderer/d3d/d3d11/IndexBuffer11.h<gh_stars>1000+
//
// Copyright (c) 2012 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// IndexBuffer11.h: Defines the D3D11 IndexBuffer implementation.
#ifndef LIBANGLE_RENDERER_D3D_D3D11_INDEXBUFFER11_H_
#define LIBANGLE_RENDERER_D3D_D3D11_INDEXBUFFER11_H_
#include "libANGLE/renderer/d3d/IndexBuffer.h"
#include "libANGLE/renderer/d3d/d3d11/ResourceManager11.h"
namespace rx
{
class Renderer11;
class IndexBuffer11 : public IndexBuffer
{
public:
explicit IndexBuffer11(Renderer11 *const renderer);
~IndexBuffer11() override;
gl::Error initialize(unsigned int bufferSize, GLenum indexType, bool dynamic) override;
gl::Error mapBuffer(unsigned int offset, unsigned int size, void **outMappedMemory) override;
gl::Error unmapBuffer() override;
GLenum getIndexType() const override;
unsigned int getBufferSize() const override;
gl::Error setSize(unsigned int bufferSize, GLenum indexType) override;
gl::Error discard() override;
DXGI_FORMAT getIndexFormat() const;
const d3d11::Buffer &getBuffer() const;
private:
Renderer11 *const mRenderer;
d3d11::Buffer mBuffer;
unsigned int mBufferSize;
GLenum mIndexType;
bool mDynamicUsage;
};
} // namespace rx
#endif // LIBANGLE_RENDERER_D3D_D3D11_INDEXBUFFER11_H_
| 532 |
13,885 | <filename>filament/src/details/Texture.h
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef TNT_FILAMENT_DETAILS_TEXTURE_H
#define TNT_FILAMENT_DETAILS_TEXTURE_H
#include "upcast.h"
#include <backend/Handle.h>
#include <filament/Texture.h>
#include <utils/compiler.h>
namespace filament {
class FEngine;
class FStream;
class FTexture : public Texture {
public:
FTexture(FEngine& engine, const Builder& builder);
// frees driver resources, object becomes invalid
void terminate(FEngine& engine);
backend::Handle<backend::HwTexture> getHwHandle() const noexcept { return mHandle; }
size_t getWidth(size_t level = 0) const noexcept;
size_t getHeight(size_t level = 0) const noexcept;
size_t getDepth(size_t level = 0) const noexcept;
size_t getLevelCount() const noexcept { return mLevelCount; }
size_t getMaxLevelCount() const noexcept { return FTexture::maxLevelCount(mWidth, mHeight); }
Sampler getTarget() const noexcept { return mTarget; }
InternalFormat getFormat() const noexcept { return mFormat; }
Usage getUsage() const noexcept { return mUsage; }
void setImage(FEngine& engine, size_t level,
uint32_t xoffset, uint32_t yoffset, uint32_t width, uint32_t height,
PixelBufferDescriptor&& buffer) const;
void setImage(FEngine& engine, size_t level,
uint32_t xoffset, uint32_t yoffset, uint32_t zoffset,
uint32_t width, uint32_t height, uint32_t depth,
PixelBufferDescriptor&& buffer) const;
void setImage(FEngine& engine, size_t level,
PixelBufferDescriptor&& buffer, const FaceOffsets& faceOffsets) const;
void generatePrefilterMipmap(FEngine& engine,
PixelBufferDescriptor&& buffer, const FaceOffsets& faceOffsets,
PrefilterOptions const* options);
void setExternalImage(FEngine& engine, void* image) noexcept;
void setExternalImage(FEngine& engine, void* image, size_t plane) noexcept;
void setExternalStream(FEngine& engine, FStream* stream) noexcept;
void generateMipmaps(FEngine& engine) const noexcept;
void setSampleCount(size_t sampleCount) noexcept { mSampleCount = uint8_t(sampleCount); }
size_t getSampleCount() const noexcept { return mSampleCount; }
bool isMultisample() const noexcept { return mSampleCount > 1; }
bool isCompressed() const noexcept { return backend::isCompressedFormat(mFormat); }
bool isCubemap() const noexcept { return mTarget == Sampler::SAMPLER_CUBEMAP; }
FStream const* getStream() const noexcept { return mStream; }
/*
* Utilities
*/
// synchronous call to the backend. returns whether a backend supports a particular format.
static bool isTextureFormatSupported(FEngine& engine, InternalFormat format) noexcept;
// synchronous call to the backend. returns whether a backend supports texture swizzling.
static bool isTextureSwizzleSupported(FEngine& engine) noexcept;
// storage needed on the CPU side for texture data uploads
static size_t computeTextureDataSize(Texture::Format format, Texture::Type type,
size_t stride, size_t height, size_t alignment) noexcept;
    // Size of a pixel in bytes for the given format
static size_t getFormatSize(InternalFormat format) noexcept;
    // Returns the width or height for a given mipmap level from the base value.
static inline size_t valueForLevel(uint8_t level, size_t baseLevelValue) {
return std::max(size_t(1), baseLevelValue >> level);
}
// Returns the max number of levels for a texture of given max dimensions
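    // e.g. maxDimension = 1024 yields std::ilogbf(1024) + 1 = 10 + 1 = 11 levels.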
static inline uint8_t maxLevelCount(uint32_t maxDimension) noexcept {
return std::max(1, std::ilogbf(maxDimension) + 1);
}
// Returns the max number of levels for a texture of given dimensions
static inline uint8_t maxLevelCount(uint32_t width, uint32_t height) noexcept {
return std::max(1, std::ilogbf(std::max(width, height)) + 1);
}
static bool validatePixelFormatAndType(backend::TextureFormat internalFormat,
backend::PixelDataFormat format, backend::PixelDataType type) noexcept;
private:
friend class Texture;
FStream* mStream = nullptr;
backend::Handle<backend::HwTexture> mHandle;
uint32_t mWidth = 1;
uint32_t mHeight = 1;
uint32_t mDepth = 1;
InternalFormat mFormat = InternalFormat::RGBA8;
Sampler mTarget = Sampler::SAMPLER_2D;
uint8_t mLevelCount = 1;
uint8_t mSampleCount = 1;
Usage mUsage = Usage::DEFAULT;
};
FILAMENT_UPCAST(Texture)
} // namespace filament
#endif // TNT_FILAMENT_DETAILS_TEXTURE_H
| 1,747 |
15,577 | <reponame>tianyiYoung/ClickHouse<filename>src/AggregateFunctions/AggregateFunctionDeltaSumTimestamp.h
#pragma once
#include <type_traits>
#include <experimental/type_traits>
#include <IO/ReadHelpers.h>
#include <IO/WriteHelpers.h>
#include <Columns/ColumnVector.h>
#include <DataTypes/DataTypesDecimal.h>
#include <DataTypes/DataTypesNumber.h>
#include <AggregateFunctions/IAggregateFunction.h>
namespace DB
{
template <typename ValueType, typename TimestampType>
struct AggregationFunctionDeltaSumTimestampData
{
ValueType sum = 0;
ValueType first = 0;
ValueType last = 0;
TimestampType first_ts = 0;
TimestampType last_ts = 0;
bool seen = false;
};
template <typename ValueType, typename TimestampType>
class AggregationFunctionDeltaSumTimestamp final
: public IAggregateFunctionDataHelper<
AggregationFunctionDeltaSumTimestampData<ValueType, TimestampType>,
AggregationFunctionDeltaSumTimestamp<ValueType, TimestampType>
>
{
public:
AggregationFunctionDeltaSumTimestamp(const DataTypes & arguments, const Array & params)
: IAggregateFunctionDataHelper<
AggregationFunctionDeltaSumTimestampData<ValueType, TimestampType>,
AggregationFunctionDeltaSumTimestamp<ValueType, TimestampType>
>{arguments, params}
{}
AggregationFunctionDeltaSumTimestamp()
: IAggregateFunctionDataHelper<
AggregationFunctionDeltaSumTimestampData<ValueType, TimestampType>,
AggregationFunctionDeltaSumTimestamp<ValueType, TimestampType>
>{}
{}
bool allocatesMemoryInArena() const override { return false; }
String getName() const override { return "deltaSumTimestamp"; }
DataTypePtr getReturnType() const override { return std::make_shared<DataTypeNumber<ValueType>>(); }
void NO_SANITIZE_UNDEFINED ALWAYS_INLINE add(AggregateDataPtr __restrict place, const IColumn ** columns, size_t row_num, Arena *) const override
{
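        // Accumulate only positive deltas between consecutive values, and track
        // the first/last value and timestamp of this state so partial aggregates
        // can be ordered and combined later in merge().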
auto value = assert_cast<const ColumnVector<ValueType> &>(*columns[0]).getData()[row_num];
auto ts = assert_cast<const ColumnVector<TimestampType> &>(*columns[1]).getData()[row_num];
if ((this->data(place).last < value) && this->data(place).seen)
{
this->data(place).sum += (value - this->data(place).last);
}
this->data(place).last = value;
this->data(place).last_ts = ts;
if (!this->data(place).seen)
{
this->data(place).first = value;
this->data(place).seen = true;
this->data(place).first_ts = ts;
}
}
// before returns true if lhs is before rhs or false if it is not or can't be determined
bool ALWAYS_INLINE before (
const AggregationFunctionDeltaSumTimestampData<ValueType, TimestampType> * lhs,
const AggregationFunctionDeltaSumTimestampData<ValueType, TimestampType> * rhs
) const
{
if (lhs->last_ts < rhs->first_ts)
{
return true;
}
if (lhs->last_ts == rhs->first_ts && (lhs->last_ts < rhs->last_ts || lhs->first_ts < rhs->first_ts))
{
return true;
}
return false;
}
void NO_SANITIZE_UNDEFINED ALWAYS_INLINE merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override
{
auto place_data = &this->data(place);
auto rhs_data = &this->data(rhs);
if (!place_data->seen && rhs_data->seen)
{
place_data->sum = rhs_data->sum;
place_data->seen = true;
place_data->first = rhs_data->first;
place_data->first_ts = rhs_data->first_ts;
place_data->last = rhs_data->last;
place_data->last_ts = rhs_data->last_ts;
}
else if (place_data->seen && !rhs_data->seen)
return;
else if (before(place_data, rhs_data))
{
// This state came before the rhs state
if (rhs_data->first > place_data->last)
place_data->sum += (rhs_data->first - place_data->last);
place_data->sum += rhs_data->sum;
place_data->last = rhs_data->last;
place_data->last_ts = rhs_data->last_ts;
}
else if (before(rhs_data, place_data))
{
// This state came after the rhs state
if (place_data->first > rhs_data->last)
place_data->sum += (place_data->first - rhs_data->last);
place_data->sum += rhs_data->sum;
place_data->first = rhs_data->first;
place_data->first_ts = rhs_data->first_ts;
}
else
{
            // If none of those conditions matched, it means both states we are merging have
            // all the same timestamps. We have to pick either the smaller or larger value so that the
// result is deterministic.
if (place_data->first < rhs_data->first)
{
place_data->first = rhs_data->first;
place_data->last = rhs_data->last;
}
}
}
void serialize(ConstAggregateDataPtr __restrict place, WriteBuffer & buf) const override
{
writeIntBinary(this->data(place).sum, buf);
writeIntBinary(this->data(place).first, buf);
writeIntBinary(this->data(place).first_ts, buf);
writeIntBinary(this->data(place).last, buf);
writeIntBinary(this->data(place).last_ts, buf);
writePODBinary<bool>(this->data(place).seen, buf);
}
void deserialize(AggregateDataPtr __restrict place, ReadBuffer & buf, Arena *) const override
{
readIntBinary(this->data(place).sum, buf);
readIntBinary(this->data(place).first, buf);
readIntBinary(this->data(place).first_ts, buf);
readIntBinary(this->data(place).last, buf);
readIntBinary(this->data(place).last_ts, buf);
readPODBinary<bool>(this->data(place).seen, buf);
}
void insertResultInto(AggregateDataPtr __restrict place, IColumn & to, Arena *) const override
{
assert_cast<ColumnVector<ValueType> &>(to).getData().push_back(this->data(place).sum);
}
};
}
| 2,633 |
11,356 | /*-----------------------------------------------------------------------------+
Copyright (c) 2008-2009: <NAME>
+------------------------------------------------------------------------------+
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENCE.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
+-----------------------------------------------------------------------------*/
#ifndef BOOST_ICL_FASTEST_TOTAL_INTERVAL_QUANTIFIER_CASES_HPP_JOFA_090703
#define BOOST_ICL_FASTEST_TOTAL_INTERVAL_QUANTIFIER_CASES_HPP_JOFA_090703
//------------------------------------------------------------------------------
// total_absorber
//------------------------------------------------------------------------------
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_monoid_plus_4_bicremental_types)
{ icl_quantifier_check_monoid_plus_4_bicremental_types<bicremental_type_1, std::string, total_absorber, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_monoid_et_4_bicremental_types)
{ icl_quantifier_check_monoid_et_4_bicremental_types<bicremental_type_2, int, total_absorber, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_abelian_monoid_plus_4_bicremental_types)
{ icl_quantifier_check_abelian_monoid_plus_4_bicremental_types<bicremental_type_3, std::string, total_absorber, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_abelian_monoid_et_4_bicremental_types)
{ icl_quantifier_check_abelian_monoid_et_4_bicremental_types<bicremental_type_4, float, total_absorber, INTERVAL_MAP>();}
// (0-x) + x = 0 | total absorber
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_abelian_group_plus_4_bicremental_domain_and_discrete_codomain)
{ icl_quantifier_check_abelian_group_plus_4_bicremental_types<bicremental_type_5, int, total_absorber, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_abelian_group_plus_4_bicremental_domain_and_continuous_codomain_1)
{ icl_quantifier_check_abelian_group_plus_4_bicremental_types<bicremental_type_5, double, total_absorber, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_icl_quantifier_check_abelian_group_plus_4_bicremental_domain_and_continuous_codomain_2)
{ icl_quantifier_check_abelian_group_plus_4_bicremental_types<bicremental_type_5, boost::rational<int>, total_absorber, INTERVAL_MAP>();}
//------------------------------------------------------------------------------
// total_enricher
//------------------------------------------------------------------------------
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_monoid_plus_4_bicremental_types)
{ icl_quantifier_check_monoid_plus_4_bicremental_types<bicremental_type_6, std::string, total_enricher, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_monoid_et_4_bicremental_types)
{ icl_quantifier_check_monoid_et_4_bicremental_types<bicremental_type_7, int, total_enricher, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_abelian_monoid_plus_4_bicremental_types)
{ icl_quantifier_check_abelian_monoid_plus_4_bicremental_types<bicremental_type_8, std::string, total_enricher, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_abelian_monoid_et_4_bicremental_types)
{ icl_quantifier_check_abelian_monoid_et_4_bicremental_types<bicremental_type_1, double, total_enricher, INTERVAL_MAP>();}
// (0-x) + x =d= 0 | total enricher
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_abelian_group_plus_prot_inv_4_bicremental_domain_discrete_codomain)
{ icl_quantifier_check_abelian_group_plus_prot_inv_4_bicremental_types<bicremental_type_2, int, total_enricher, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_abelian_group_plus_prot_inv_4_bicremental_domain_continuous_codomain_1)
{ icl_quantifier_check_abelian_group_plus_prot_inv_4_bicremental_types<bicremental_type_3, float, total_enricher, INTERVAL_MAP>();}
BOOST_AUTO_TEST_CASE
(fastest_itl_total_enricher_icl_quantifier_check_abelian_group_plus_prot_inv_4_bicremental_domain_continuous_codomain_2)
{ icl_quantifier_check_abelian_group_plus_prot_inv_4_bicremental_types<bicremental_type_4, boost::rational<int>, total_enricher, INTERVAL_MAP>();}
// absorber enricher
// partial x - x == 0 x - x =d= 0 partiality of subtraction
// total (-x)+ x == 0 (-x)+ x =d= 0 totality of subtraction
#endif // BOOST_ICL_FASTEST_TOTAL_INTERVAL_QUANTIFIER_CASES_HPP_JOFA_090703
| 2,129 |
2,151 | <filename>catapult_build/temp_deployment_dir.py
#!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import os
import shutil
import tempfile
@contextlib.contextmanager
def TempDeploymentDir(paths, use_symlinks=True):
"""Sets up and tears down a directory for deploying an app."""
if use_symlinks:
link_func = os.symlink
else:
link_func = _Copy
try:
deployment_dir = tempfile.mkdtemp(prefix='deploy-')
_PopulateDeploymentDir(deployment_dir, paths, link_func)
yield deployment_dir
finally:
shutil.rmtree(deployment_dir)
def _Copy(src, dst):
if os.path.isdir(src):
shutil.copytree(src, dst)
else:
shutil.copy2(src, dst)
def _PopulateDeploymentDir(deployment_dir, paths, link_func):
"""Fills the deployment directory using the link_func specified."""
for path in paths:
destination = os.path.join(deployment_dir, os.path.basename(path))
link_func(path, destination)
| 375 |
563 | <gh_stars>100-1000
package com.gentics.mesh.context;
import com.gentics.mesh.core.data.branch.HibBranch;
import com.gentics.mesh.core.data.project.HibProject;
import com.gentics.mesh.core.data.schema.HibSchemaVersion;
import com.gentics.mesh.core.endpoint.migration.MigrationStatusHandler;
import com.gentics.mesh.core.rest.event.node.SchemaMigrationCause;
public interface NodeMigrationActionContext extends InternalActionContext {
/**
* Return referenced project.
*
* @return current project
*/
HibProject getProject();
/**
* Return referenced branch.
*
* @return branch
*/
HibBranch getBranch();
/**
* Return the from schema version.
*
* @return version
*/
HibSchemaVersion getFromVersion();
/**
* Return the to schema version.
*
* @return version
*/
HibSchemaVersion getToVersion();
/**
* Return migration cause.
*
* @return cause
*/
SchemaMigrationCause getCause();
/**
* Return the status handler.
*
* @return status
*/
MigrationStatusHandler getStatus();
/**
* Validate that all needed information is present in the context.
*/
void validate();
}
| 379 |
1,091 | <gh_stars>1000+
/*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.util.tokens.impl;
import org.tribuo.util.tokens.Tokenizer;
import org.tribuo.util.tokens.TokenizerTestBase;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.tribuo.util.tokens.universal.UniversalTokenizer;
import java.io.File;
import java.io.IOException;
import java.util.Locale;
import static org.tribuo.util.tokens.TokenizerTestWrapper.serializeAndDeserialize;
public class TokenizerSerializationTest extends TokenizerTestBase {
private File f;
@BeforeEach
public void setUp() throws IOException {
f = File.createTempFile("serialized-tokenizer", ".ser", new File("target"));
f.deleteOnExit();
}
@Test
public void testSerializeDeserialize() throws IOException, ClassNotFoundException {
//SplitPatternTokenizer
Tokenizer tokenizer = new SplitPatternTokenizer();
test(tokenizer, "a, b, and c", "a", "b", "and", "c");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "a, b, and c", "a", "b", "and", "c");
tokenizer = new SplitPatternTokenizer("\\s+");
test(tokenizer, "a b c", "a", "b", "c");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "a b c", "a", "b", "c");
//SplitCharactersTokenizer
tokenizer = new SplitCharactersTokenizer();
test(tokenizer, "a*b(c)d&e[f]g{h}i`j'k|l!m", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "a*b(c)d&e[f]g{h}i`j'k|l!m", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m");
tokenizer = new SplitCharactersTokenizer(new char[]{'*', '(', ')', '&', '[', ']', '{', '}', '`', '\'', '|', ';', ':', '\\', '!', '-', '?'}, new char[]{',', '/'});
test(tokenizer, "Washington, D.C.", "Washington", "D.C.");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "Washington, D.C.", "Washington", "D.C.");
//ShapeTokenizer
tokenizer = new ShapeTokenizer();
test(tokenizer, "ABCIndustries AB123 A1B2", "ABCIndustries", "AB", "123", "A", "1", "B", "2");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "ABCIndustries AB123 A1B2", "ABCIndustries", "AB", "123", "A", "1", "B", "2");
//NonTokenizer
tokenizer = new NonTokenizer();
test(tokenizer, "Hello there!", "Hello there!");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "Hello there!", "Hello there!");
//BreakIteratorTokenizer
tokenizer = new BreakIteratorTokenizer(Locale.US);
test(tokenizer, "http://www.acme.com/SH55126545/VD55177927", "http", ":", "/", "/", "www.acme.com", "/", "SH55126545", "/", "VD55177927");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "http://www.acme.com/SH55126545/VD55177927", "http", ":", "/", "/", "www.acme.com", "/", "SH55126545", "/", "VD55177927");
//UniversalTokenizer
tokenizer = new UniversalTokenizer(true);
test(tokenizer, "4:36 PM", "4", ":", "36", "PM");
tokenizer = serializeAndDeserialize(f, tokenizer);
test(tokenizer, "4:36 PM", "4", ":", "36", "PM");
}
}
| 1,699 |
4,559 | //
// SUTouchBarButtonGroup.h
// Sparkle
//
// Created by <NAME> on 05/01/2017.
// Copyright © 2017 Sparkle Project. All rights reserved.
//
#if SPARKLE_BUILD_UI_BITS || !BUILDING_SPARKLE
#import <Cocoa/Cocoa.h>
NS_ASSUME_NONNULL_BEGIN
@interface SUTouchBarButtonGroup : NSViewController
@property (nonatomic, readonly, copy) NSArray<NSButton *> *buttons;
- (instancetype)initByReferencingButtons:(NSArray<NSButton *> *)buttons;
@end
NS_ASSUME_NONNULL_END
#endif
| 187 |
4,036 | // Generated automatically from com.google.common.collect.Range for testing purposes
package com.google.common.collect;
import com.google.common.base.Predicate;
import com.google.common.collect.BoundType;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.RangeGwtSerializationDependencies;
import java.io.Serializable;
public class Range<C extends Comparable> extends RangeGwtSerializationDependencies implements Predicate<C>, Serializable
{
protected Range() {}
public BoundType lowerBoundType(){ return null; }
public BoundType upperBoundType(){ return null; }
public C lowerEndpoint(){ return null; }
public C upperEndpoint(){ return null; }
public Range<C> canonical(DiscreteDomain<C> p0){ return null; }
public Range<C> gap(Range<C> p0){ return null; }
public Range<C> intersection(Range<C> p0){ return null; }
public Range<C> span(Range<C> p0){ return null; }
public String toString(){ return null; }
public boolean apply(C p0){ return false; }
public boolean contains(C p0){ return false; }
public boolean containsAll(Iterable<? extends C> p0){ return false; }
public boolean encloses(Range<C> p0){ return false; }
public boolean equals(Object p0){ return false; }
public boolean hasLowerBound(){ return false; }
public boolean hasUpperBound(){ return false; }
public boolean isConnected(Range<C> p0){ return false; }
public boolean isEmpty(){ return false; }
public int hashCode(){ return 0; }
public static <C extends Comparable<? extends Object>> Range<C> all(){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> atLeast(C p0){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> atMost(C p0){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> closed(C p0, C p1){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> closedOpen(C p0, C p1){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> downTo(C p0, BoundType p1){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> encloseAll(Iterable<C> p0){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> greaterThan(C p0){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> lessThan(C p0){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> open(C p0, C p1){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> openClosed(C p0, C p1){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> range(C p0, BoundType p1, C p2, BoundType p3){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> singleton(C p0){ return null; }
public static <C extends Comparable<? extends Object>> Range<C> upTo(C p0, BoundType p1){ return null; }
}
| 926 |
2,138 | <gh_stars>1000+
package org.jeecgframework.web.cgform.enhance;
import java.text.SimpleDateFormat;
import java.util.Date;
import net.sf.json.JSONObject;
import org.apache.commons.lang.math.RandomUtils;
/**
 * Implementation of the order number generation rule
* @author YanDong
*/
public class OrderNumFillRule implements IFillRuleHandler{
@Override
public Object execute(String paramJson) {
String prefix="CN";
        // The order prefix defaults to "CN"; if the rule parameter is not empty, use the custom prefix instead
if(paramJson!=null && !"".equals(paramJson)){
JSONObject jsonObject = JSONObject.fromObject(paramJson);
Object obj = jsonObject.get("prefix");
if(obj!=null)prefix=obj.toString();
}
SimpleDateFormat format=new SimpleDateFormat("yyyyMMddHHmmss");
int random=RandomUtils.nextInt(90)+10;
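        // Resulting order number: prefix + yyyyMMddHHmmss + two-digit random,
        // e.g. "CN" + "20200109214455" + "37" (illustrative values).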
return prefix+format.format(new Date())+random;
}
}
| 349 |
485 | <gh_stars>100-1000
# !/usr/bin/python
# -*- coding: utf-8 -*-
# @time : 2020/1/9 21:44
# @author : Mo
# @function: CRF
from macropodus.network.base.graph import graph
from macropodus.network.layers.crf import CRF
import tensorflow as tf
class CRFGraph(graph):
def __init__(self, hyper_parameters):
"""
            Initialization
            :param hyper_parameters: json, hyper parameters
"""
self.crf_mode = hyper_parameters["model"].get("crf_mode", "reg") # "reg", pad
self.supports_masking = hyper_parameters["model"].get("supports_masking", True) # True or False
super().__init__(hyper_parameters)
def create_model(self, hyper_parameters):
"""
            Build the neural network
        :param hyper_parameters: json, hyper parameters of network
        :return: tensor, model
"""
super().create_model(hyper_parameters)
x = self.word_embedding.output
# TimeDistributed
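        # applies the wrapped Dense layer independently at every timestep of the sequence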
x_64 = tf.keras.layers.TimeDistributed(tf.keras.layers.Dense(128, activation="softmax"),
name='layer_time_distributed')(x)
        # dense to a smaller number of units
tensor = tf.keras.layers.Dense(units=self.label, activation=self.activate_rnn, name="layer_dense_64")(x_64)
# crf, "pad" or "reg"
if self.crf_mode == "pad":
# length of real sentence
x_mask = tf.keras.layers.Input(shape=(1), dtype=tf.int32)
self.crf = CRF(self.label, mode="pad", supports_masking=True, name="layer_crf")
self.output = self.crf([tensor, x_mask])
if self.embedding_type in ["bert", "albert"]:
self.inputs = [self.word_embedding.input[0], self.word_embedding.input[1], x_mask]
else:
self.inputs = [self.word_embedding.input, x_mask]
else:
self.crf = CRF(self.label, mode="reg", name="layer_crf")
self.output = self.crf(tensor)
self.inputs = self.word_embedding.input
self.model = tf.keras.Model(self.inputs, self.output)
self.model.summary(132)
def create_compile(self):
"""
            Build the optimizer, loss function and evaluation metric
:return:
"""
self.loss = self.crf.loss
self.metrics = self.crf.viterbi_accuracy
super().create_compile()
| 1,166 |
190,993 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Benchmarks for low-level graph building primitives.
To run CPU benchmarks:
bazel run -c opt graph_building_benchmarks -- --benchmarks=.
To run GPU benchmarks:
bazel run --config=cuda -c opt --copt="-mavx" graph_building_benchmarks -- \
--benchmarks=.
To run a subset of benchmarks using --benchmarks flag.
--benchmarks: the list of benchmarks to run. The specified value is interpreted
as a regular expression and any benchmark whose name contains a partial match
to the regular expression is executed.
e.g. --benchmarks=".*MatMul.*" will run all matmul related benchmarks.
"""
import time
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.platform import test
def run_benchmark(func, num_iters):
start = time.time()
for _ in range(num_iters):
func()
end = time.time()
return end - start
class SingleOpBenchmarks(test.Benchmark):
"""Benchmark for graph building time of ops."""
def _run_and_report(self, func, num_iters):
total_time = run_benchmark(func, num_iters)
mean_us = total_time * 1e6 / num_iters
self.report_benchmark(
iters=num_iters,
wall_time=mean_us,
extras={
"examples_per_sec": float("{0:.3f}".format(num_iters / total_time)),
})
def benchmarkAddScalars(self):
with context.execution_mode(context.GRAPH_MODE):
x = array_ops.placeholder(shape=[], dtype=dtypes.float32, name="x")
y = array_ops.placeholder(shape=[], dtype=dtypes.float32, name="y")
def bench():
return gen_math_ops.add(x, y)
self._run_and_report(bench, 1000)
def benchmarkAddBatchedMatrices(self):
with context.execution_mode(context.GRAPH_MODE):
x = array_ops.placeholder(
shape=[32, 784, 1000], dtype=dtypes.float32, name="x")
y = array_ops.placeholder(
shape=[32, 784, 1000], dtype=dtypes.float32, name="y")
def bench():
return gen_math_ops.add(x, y)
self._run_and_report(bench, 1000)
def benchmarkMatMul(self):
with context.execution_mode(context.GRAPH_MODE):
x = array_ops.placeholder(
shape=[784, 1000], dtype=dtypes.float32, name="x")
y = array_ops.placeholder(
shape=[1000, 1000], dtype=dtypes.float32, name="y")
def bench():
return gen_math_ops.mat_mul(x, y)
self._run_and_report(bench, 1000)
if __name__ == "__main__":
test.main()
| 1,137 |
539 | <filename>nRF Toolbox/BGM/Model/ContinuousGlucoseReading.h
//
// ContinuousGlucoseReading.h
// nRF Toolbox
//
// Created by <NAME> on 28/04/16.
// Copyright © 2016 Nordic Semiconductor. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "ContinuousGlucoseFeatureData.h"
typedef enum
{
MG_DL = 0,
} CgmMeasurementUnit;
typedef enum
{
CGMTrendInfoPresent = 0,
CGMQualityInfoPresent = 1,
CGMSesnorStatusWarningPresent = 5,
CGMSensorStatusCalTempOctetPresent = 6,
CGMSensorStatusStatusOctetPresent = 7,
} CgmFlags;
typedef enum
{
CGMSessionStopped = 0,
CGMDeviceBatteryLow = 1,
CGMSensorTypeIncorrectForDevice = 2,
CGMSensorMalfunction = 3,
CGMDeviceSpecificAlert = 4,
CGMGeneralDeviceFaultOccurredInSensor = 5,
CGMTimeSynchronizationRequired = 8,
CGMCalibrationNotAllowed = 9,
CGMCalibrationRecommended = 10,
CGMCalibrationRequired = 11,
CGMSensorTemperatureTooHighForValidMeasurement = 12,
CGMSensorTemperatureTooLowForValidMeasurement = 13,
CGMSensorResultLowerThanPatientLowLevel = 16,
    CGMSensorResultHigherThanPatientHighLevel = 17,
CGMSensorResultLowerThanHypoLevel = 18,
CGMSensorResultHigherThanHyperLevel = 19,
CGMSensorRateOfDecreaseExceeded = 20,
CGMSensorRateOfIncreaseExceeded = 21,
CGMSensorResultLowerThanTheDeviceCanProcess = 22,
CGMSensorResultHigherThanTheDeviceCanProcess = 23,
} CGMSensorAnnuciation;
@interface ContinuousGlucoseReading : NSObject
// Glucose Measurement values
@property (weak, nonatomic) ContinuousGlucoseFeatureData* CGMfeatureData;
@property (assign, nonatomic) UInt8 measurementSize;
@property (strong, nonatomic) NSDate* timesStamp;
@property (assign, nonatomic) SInt16 timeOffsetSinceSessionStart;
@property (assign, nonatomic) Float32 glucoseConcentration;
@property (assign, nonatomic) Float32 trendInfo;
@property (assign, nonatomic) Float32 quality;
@property (assign, nonatomic) UInt32 sensorStatusAnnunciation;
@property (assign, nonatomic) CgmMeasurementUnit unit;
@property (assign, nonatomic) BOOL sensorStatusAnnunciationPresent;
@property (assign, nonatomic) BOOL sensorTrendInfoPresent;
@property (assign, nonatomic) BOOL sensorWarningPresent;
@property (assign, nonatomic) BOOL sensorCalTempPresent;
@property (assign, nonatomic) BOOL sensorQualityPresent;
@property (assign, nonatomic) BOOL e2eCrcPresent;
+ (ContinuousGlucoseReading*) readingFromBytes:(uint8_t*) bytes;
- (void) updateFromBytes:(uint8_t*) bytes;
- (NSString*) typeAsString;
- (NSString*) locationAsString;
@end
| 1,020 |
428 | void start(__attribute__((annotate("an annotation"))) int);
| 17 |
10,504 | import pytest
from pymilvus import DataType, ParamError, BaseException
from utils import *
from constants import *
ADD_TIMEOUT = 60
uid = "test_insert"
field_name = default_float_vec_field_name
binary_field_name = default_binary_vec_field_name
default_single_query = {
"bool": {
"must": [
{"vector": {field_name: {"topk": 10, "query": gen_vectors(1, default_dim), "metric_type": "L2",
"params": {"nprobe": 10}}}}
]
}
}
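# A single top-10 vector query on the default float vector field (L2 metric,
# nprobe=10), reused by the search-related cases below.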
class TestInsertBase:
"""
******************************************************************
The following cases are used to test `insert` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request, connect):
# if str(connect._cmd("mode")) == "CPU":
if request.param["index_type"] in index_cpu_not_support():
pytest.skip("CPU not support index_type: ivf_sq8h")
logging.getLogger().info(request.param)
return request.param
@pytest.fixture(
scope="function",
params=gen_single_filter_fields()
)
def get_filter_field(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_single_vector_fields()
)
def get_vector_field(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_empty_entity(self, connect, collection):
'''
target: test insert with empty entity list
method: set empty entity list as insert method params
expected: raises a ParamError exception
'''
entities = []
with pytest.raises(ParamError) as e:
connect.insert(collection, entities)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_None(self, connect, collection):
'''
target: test insert with None
method: set None as insert method params
expected: raises a ParamError
'''
entity = None
with pytest.raises(Exception) as e:
connect.insert(collection, entity)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_collection_not_existed(self, connect):
'''
target: test insert, with collection not existed
method: insert entity into a random named collection
expected: raise a BaseException
'''
collection_name = gen_unique_str(uid)
with pytest.raises(BaseException) as e:
connect.insert(collection_name, default_entities)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_without_connect(self, dis_connect, collection):
'''
target: test insert entities without connection
method: create collection and insert entities in it, check if inserted successfully
expected: raise exception
'''
with pytest.raises(Exception) as e:
dis_connect.insert(collection, default_entities)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_drop_collection(self, connect, collection):
'''
target: test delete collection after insert entities
method: insert entities and drop collection
expected: has_collection false
'''
ids = connect.insert(collection, default_entity)
assert len(ids) == 1
connect.drop_collection(collection)
assert connect.has_collection(collection) == False
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_flush_drop_collection(self, connect, collection):
'''
target: test drop collection after insert entities for a while
method: insert entities, sleep, and delete collection
expected: has_collection false
'''
ids = connect.insert(collection, default_entity)
assert len(ids) == 1
connect.flush([collection])
connect.drop_collection(collection)
assert connect.has_collection(collection) == False
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_create_index(self, connect, collection, get_simple_index):
'''
        target: test building index after inserting entities
method: insert entities and build index
expected: no error raised
'''
ids = connect.insert(collection, default_entities)
assert len(ids) == default_nb
connect.flush([collection])
connect.create_index(collection, field_name, get_simple_index)
if get_simple_index["index_type"] != "FLAT":
index = connect.describe_index(collection, "")
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_after_create_index(self, connect, collection, get_simple_index):
'''
        target: test inserting entities after building index
        method: build index, then insert entities
expected: no error raised
'''
connect.create_index(collection, field_name, get_simple_index)
ids = connect.insert(collection, default_entities)
assert len(ids) == default_nb
if get_simple_index["index_type"] != "FLAT":
index = connect.describe_index(collection, "")
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_search(self, connect, collection):
'''
target: test searching entities after they have been inserted
method: insert entities, flush, load the collection and search
expected: no error raised
'''
ids = connect.insert(collection, default_entities)
connect.flush([collection])
connect.load_collection(collection)
res = connect.search(collection, default_single_query)
assert len(res[0]) == default_top_k
@pytest.mark.tags(CaseLabel.L2)
def _test_insert_segment_row_count(self, connect, collection):
nb = default_segment_row_limit + 1
res_ids = connect.insert(collection, gen_entities(nb))
connect.flush([collection])
assert len(res_ids) == nb
stats = connect.get_collection_stats(collection)
assert len(stats['partitions'][0]['segments']) == 2
for segment in stats['partitions'][0]['segments']:
assert segment['row_count'] in [default_segment_row_limit, 1]
@pytest.fixture(
scope="function",
params=[
1,
2000
],
)
def insert_count(self, request):
yield request.param
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_ids(self, connect, id_collection, insert_count):
'''
target: test insert entities in collection, use customize ids
method: create collection and insert entities in it, check the ids returned and the collection length after entities inserted
expected: the length of returned ids and the collection row count both equal the number of inserted entities
'''
nb = insert_count
ids = [i for i in range(nb)]
entities = gen_entities(nb)
entities[0]["values"] = ids
res_ids = connect.insert(id_collection, entities)
connect.flush([id_collection])
assert len(res_ids) == nb
assert res_ids == ids
stats = connect.get_collection_stats(id_collection)
assert stats[row_count] == nb
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_the_same_ids(self, connect, id_collection, insert_count):
'''
target: test insert vectors in collection, using the same customized id for every entity
method: create collection and insert vectors in it, check the ids returned and the collection row count after insertion
expected: the length of returned ids and the collection row count both equal the number of inserted entities
'''
nb = insert_count
ids = [1 for i in range(nb)]
entities = gen_entities(nb)
entities[0]["values"] = ids
res_ids = connect.insert(id_collection, entities)
connect.flush([id_collection])
assert len(res_ids) == nb
assert res_ids == ids
stats = connect.get_collection_stats(id_collection)
assert stats[row_count] == nb
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_ids_fields(self, connect, get_filter_field, get_vector_field):
'''
target: test creating collections with different filter/vector fields and inserting entities with customized ids
method: create collection with different fields (metric/field_type/...), insert, and check the count
expected: row count correct
'''
nb = 5
filter_field = get_filter_field
vector_field = get_vector_field
collection_name = gen_unique_str("test_collection")
fields = {
"fields": [gen_primary_field(), filter_field, vector_field],
"auto_id": False
}
connect.create_collection(collection_name, fields)
ids = [i for i in range(nb)]
entities = gen_entities_by_fields(fields["fields"], nb, default_dim, ids)
logging.getLogger().info(entities)
res_ids = connect.insert(collection_name, entities)
assert res_ids == ids
connect.flush([collection_name])
stats = connect.get_collection_stats(collection_name)
assert stats[row_count] == nb
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_ids_not_match(self, connect, id_collection, insert_count):
'''
target: test insert entities in collection without ids
method: create id_collection and insert entities without the id field
expected: exception raised
'''
nb = insert_count
with pytest.raises(Exception) as e:
entities = gen_entities(nb)
del entities[0]
connect.insert(id_collection, entities)
# TODO
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_twice_ids_no_ids(self, connect, id_collection):
'''
target: check the result of insert, with params ids and no ids
method: insert twice, first with customized ids and then without ids
expected: BaseException raised
'''
ids = [i for i in range(default_nb)]
entities = copy.deepcopy(default_entities)
entities[0]["values"] = ids
connect.insert(id_collection, entities)
with pytest.raises(Exception) as e:
del entities[0]
connect.insert(id_collection, entities)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_not_ids(self, connect, id_collection):
'''
target: test insert into an id_collection without providing ids
method: remove the id field from the entities and insert them
expected: error raised
'''
entities = copy.deepcopy(default_entities)
del entities[0]
with pytest.raises(Exception) as e:
connect.insert(id_collection, entities)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_ids_length_not_match_batch(self, connect, id_collection):
'''
target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
method: create collection and insert vectors in it
expected: raise an exception
'''
ids = [i for i in range(1, default_nb)]
logging.getLogger().info(len(ids))
entities = copy.deepcopy(default_entities)
entities[0]["values"] = ids
with pytest.raises(Exception) as e:
connect.insert(id_collection, entities)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(ADD_TIMEOUT)
def test_insert_ids_length_not_match_single(self, connect, id_collection):
'''
target: test insert vectors in collection, use customize ids, len(ids) != len(vectors)
method: create collection and insert vectors in it
expected: raise an exception
'''
ids = [i for i in range(1, default_nb)]
logging.getLogger().info(len(ids))
entity = copy.deepcopy(default_entity)
entity[0]["values"] = ids
with pytest.raises(Exception) as e:
connect.insert(id_collection, entity)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_partition(self, connect, collection):
'''
target: test insert entities in collection created before
method: create collection and insert entities in it, with the partition_name param
expected: the collection row count equals nb
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
assert len(ids) == default_nb
assert connect.has_partition(collection, default_tag)
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
# TODO
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_partition_with_ids(self, connect, id_collection):
'''
target: test insert entities in collection created before, insert with ids
method: create collection and insert entities in it, with the partition_name param
expected: the returned ids equal the customized ids
'''
connect.create_partition(id_collection, default_tag)
ids = [i for i in range(default_nb)]
entities = gen_entities(default_nb)
entities[0]["values"] = ids
res_ids = connect.insert(id_collection, entities, partition_name=default_tag)
assert res_ids == ids
logging.getLogger().info(connect.describe_collection(id_collection))
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_default_partition(self, connect, collection):
'''
target: test insert entities into default partition
method: create partition and insert into the collection without specifying a partition
expected: the collection row count equals to nb
'''
ids = connect.insert(collection, default_entities, partition_name=default_partition_name)
assert len(ids) == default_nb
connect.flush([collection])
stats = connect.get_collection_stats(collection)
assert stats[row_count] == default_nb
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_partition_not_existed(self, connect, collection):
'''
target: test insert entities in collection created before
method: create collection and insert entities in it, with a partition_name that does not exist
expected: error raised
'''
tag = gen_unique_str()
with pytest.raises(Exception) as e:
connect.insert(collection, default_entities, partition_name=tag)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_partition_repeatedly(self, connect, collection):
'''
target: test insert entities in collection created before
method: create a partition and insert entities into it repeatedly
expected: the collection row count equals 2 * nb
'''
connect.create_partition(collection, default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
ids = connect.insert(collection, default_entities, partition_name=default_tag)
connect.flush([collection])
res = connect.get_collection_stats(collection)
assert res[row_count] == 2 * default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_dim_not_matched(self, connect, collection):
'''
target: test insert entities, the vector dimension is not equal to the collection dimension
method: the entities dimension is half of the collection dimension, check the status
expected: error raised
'''
vectors = gen_vectors(default_nb, int(default_dim) // 2)
insert_entities = copy.deepcopy(default_entities)
insert_entities[-1]["values"] = vectors
with pytest.raises(Exception) as e:
connect.insert(collection, insert_entities)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_field_name_not_match(self, connect, collection):
'''
target: test insert entities, with the entity field name updated
method: update entity field name
expected: error raised
'''
tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", "int64new")
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_field_type_not_match(self, connect, collection):
'''
target: test insert entities, with the entity field type updated
method: update entity field type
expected: error raised
'''
tmp_entity = update_field_type(copy.deepcopy(default_entity), "int64", DataType.FLOAT)
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_field_value_not_match(self, connect, collection):
'''
target: test insert entities, with the entity field value updated
method: update entity field value
expected: error raised
'''
tmp_entity = update_field_value(copy.deepcopy(default_entity), DataType.FLOAT, 's')
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_field_more(self, connect, collection):
'''
target: test insert entities, with more fields than collection schema
method: add entity field
expected: error raised
'''
tmp_entity = add_field(copy.deepcopy(default_entity))
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_field_vector_more(self, connect, collection):
'''
target: test insert entities, with more fields than collection schema
method: add entity vector field
expected: error raised
'''
tmp_entity = add_vector_field(default_nb, default_dim)
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_field_less(self, connect, collection):
'''
target: test insert entities, with less fields than collection schema
method: remove entity field
expected: error raised
'''
tmp_entity = remove_field(copy.deepcopy(default_entity))
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_field_vector_less(self, connect, collection):
'''
target: test insert entities, with less fields than collection schema
method: remove entity vector field
expected: error raised
'''
tmp_entity = remove_vector_field(copy.deepcopy(default_entity))
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_no_field_vector_value(self, connect, collection):
'''
target: test insert entities, with no vector field value
method: remove entity values of vector field
expected: error raised
'''
tmp_entity = copy.deepcopy(default_entity)
del tmp_entity[-1]["values"]
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_no_field_vector_type(self, connect, collection):
'''
target: test insert entities, with no vector field type
method: remove the type key of the entity vector field
expected: error raised
'''
tmp_entity = copy.deepcopy(default_entity)
del tmp_entity[-1]["type"]
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_with_no_field_vector_name(self, connect, collection):
'''
target: test insert entities, with no vector field name
method: remove the name key of the entity vector field
expected: error raised
'''
tmp_entity = copy.deepcopy(default_entity)
del tmp_entity[-1]["name"]
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
# todo fix timeout
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.timeout(30)
def test_collection_insert_rows_count_multi_threading(self, args, collection):
'''
target: test collection rows_count is correct or not with multi threading
method: insert entities into the collection concurrently from multiple threads,
then assert that the reported row count equals the total number of inserted entities
expected: the count is equal to the length of entities
'''
if args["handler"] == "HTTP":
pytest.skip("Skip test in http mode")
thread_num = 8
threads = []
milvus = get_milvus(host=args["ip"], port=args["port"], handler=args["handler"], try_connect=False)
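# Each worker thread inserts default_nb entities into the shared collection;
# after all threads join, the total row count should be thread_num * default_nb.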
def insert(thread_i):
logging.getLogger().info("In thread-%d" % thread_i)
res_ids = milvus.insert(collection, default_entities)
milvus.flush([collection])
for i in range(thread_num):
x = threading.Thread(target=insert, args=(i,))
threads.append(x)
x.start()
for th in threads:
th.join()
stats = milvus.get_collection_stats(collection)
assert stats[row_count] == thread_num * default_nb
# TODO: unable to set config
@pytest.mark.tags(CaseLabel.L2)
def _test_insert_disable_auto_flush(self, connect, collection):
'''
target: test insert entities, with disable autoflush
method: disable autoflush and insert, get entity
expected: inserted entities are not yet retrievable because auto-flush is disabled
'''
delete_nums = 500
disable_flush(connect)
ids = connect.insert(collection, default_entities)
res = connect.get_entity_by_id(collection, ids[:delete_nums])
assert len(res) == delete_nums
assert res[0] is None
class TestInsertBinary:
@pytest.fixture(
scope="function",
params=gen_binary_index()
)
def get_binary_index(self, request):
request.param["metric_type"] = "JACCARD"
return request.param
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_binary_entities(self, connect, binary_collection):
'''
target: test insert entities in binary collection
method: create collection and insert binary entities in it
expected: the collection row count equals to nb
'''
ids = connect.insert(binary_collection, default_binary_entities)
assert len(ids) == default_nb
connect.flush([binary_collection])
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_binary_partition(self, connect, binary_collection):
'''
target: test insert entities and create partition tag
method: create collection and insert binary entities in it, with the partition_name param
expected: the collection row count equals to nb
'''
connect.create_partition(binary_collection, default_tag)
ids = connect.insert(binary_collection, default_binary_entities, partition_name=default_tag)
assert len(ids) == default_nb
assert connect.has_partition(binary_collection, default_tag)
connect.flush([binary_collection])
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="issue 7027")
def test_insert_binary_multi_times(self, connect, binary_collection):
'''
target: test inserting entities multiple times with a single flush at the end
method: insert a single binary entity repeatedly, then flush once
expected: the collection row count equals to nb
'''
for i in range(default_nb):
ids = connect.insert(binary_collection, default_binary_entity)
assert len(ids) == 1
connect.flush([binary_collection])
stats = connect.get_collection_stats(binary_collection)
assert stats[row_count] == default_nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_binary_after_create_index(self, connect, binary_collection, get_binary_index):
'''
target: test insert binary entities after build index
method: build index and insert entities
expected: no error raised
'''
connect.create_index(binary_collection, binary_field_name, get_binary_index)
ids = connect.insert(binary_collection, default_binary_entities)
assert len(ids) == default_nb
connect.flush([binary_collection])
index = connect.describe_index(binary_collection, "")
create_target_index(get_binary_index, binary_field_name)
assert index == get_binary_index
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_binary_create_index(self, connect, binary_collection, get_binary_index):
'''
target: test building a binary index after inserting entities
method: insert binary entities and then build the index
expected: no error raised
'''
ids = connect.insert(binary_collection, default_binary_entities)
assert len(ids) == default_nb
connect.flush([binary_collection])
connect.create_index(binary_collection, binary_field_name, get_binary_index)
index = connect.describe_index(binary_collection, "")
create_target_index(get_binary_index, binary_field_name)
assert index == get_binary_index
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_binary_search(self, connect, binary_collection):
'''
target: test searching binary entities after they have been inserted
method: insert binary entities, flush, load the collection and search
expected: no error raised
'''
ids = connect.insert(binary_collection, default_binary_entities)
connect.flush([binary_collection])
query, vecs = gen_query_vectors(binary_field_name, default_binary_entities, default_top_k, 1,
metric_type="JACCARD")
connect.load_collection(binary_collection)
res = connect.search(binary_collection, query)
logging.getLogger().debug(res)
assert len(res[0]) == default_top_k
class TestInsertAsync:
@pytest.fixture(scope="function", autouse=True)
def skip_http_check(self, args):
if args["handler"] == "HTTP":
pytest.skip("skip in http mode")
@pytest.fixture(
scope="function",
params=[
1,
1000
],
)
def insert_count(self, request):
yield request.param
def check_status(self, result):
logging.getLogger().info("In callback check status")
assert not result
def check_result(self, result):
logging.getLogger().info("In callback check results")
assert result
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_async(self, connect, collection, insert_count):
'''
target: test insert vectors with different length of vectors
method: set different vectors as insert method params
expected: length of ids is equal to the length of vectors
'''
nb = insert_count
future = connect.insert(collection, gen_entities(nb), _async=True)
ids = future.result()
connect.flush([collection])
assert len(ids) == nb
@pytest.mark.tags(CaseLabel.L2)
def test_insert_async_false(self, connect, collection, insert_count):
'''
target: test insert vectors with different length of vectors
method: set different vectors as insert method params
expected: length of ids is equal to the length of vectors
'''
nb = insert_count
ids = connect.insert(collection, gen_entities(nb), _async=False)
# ids = future.result()
connect.flush([collection])
assert len(ids) == nb
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_async_callback(self, connect, collection, insert_count):
'''
target: test insert vectors with different length of vectors
method: set different vectors as insert method params
expected: length of ids is equal to the length of vectors
'''
nb = insert_count
future = connect.insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
future.done()
ids = future.result()
assert len(ids) == nb
@pytest.mark.tags(CaseLabel.L2)
def test_insert_async_long(self, connect, collection):
'''
target: test async insert with a large batch of entities
method: insert 50000 entities asynchronously and wait for the result
expected: length of ids is equal to the number of inserted entities
'''
nb = 50000
future = connect.insert(collection, gen_entities(nb), _async=True, _callback=self.check_result)
ids = future.result()
assert len(ids) == nb
connect.flush([collection])
stats = connect.get_collection_stats(collection)
logging.getLogger().info(stats)
assert stats[row_count] == nb
@pytest.mark.tags(CaseLabel.L2)
def test_insert_async_callback_timeout(self, connect, collection):
'''
target: test async insert with a short timeout
method: insert a large batch asynchronously with timeout=1
expected: exception raised when fetching the result
'''
nb = 100000
future = connect.insert(collection, gen_entities(nb), _async=True, _callback=self.check_status, timeout=1)
with pytest.raises(Exception) as e:
result = future.result()
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_async_invalid_params(self, connect):
'''
target: test async insert into a collection that does not exist
method: insert asynchronously into a randomly named, non-existent collection
expected: exception raised when fetching the result
'''
collection_new = gen_unique_str()
future = connect.insert(collection_new, default_entities, _async=True)
future.done()
with pytest.raises(Exception) as e:
ids = future.result()
# 1339
@pytest.mark.tags(CaseLabel.L2)
def test_insert_async_invalid_params_raise_exception(self, connect, collection):
'''
target: test async insert with an empty entity list
method: insert an empty list asynchronously
expected: exception raised when fetching the result
'''
entities = []
future = connect.insert(collection, entities, _async=True)
future.done()
with pytest.raises(Exception) as e:
future.result()
class TestInsertMultiCollections:
"""
******************************************************************
The following cases are used to test `insert` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request, connect):
logging.getLogger().info(request.param)
# if str(connect._cmd("mode")) == "CPU":
# if request.param["index_type"] in index_cpu_not_support():
# pytest.skip("sq8h not support in CPU mode")
return request.param
@pytest.mark.tags(CaseLabel.L2)
def test_insert_entity_multi_collections(self, connect):
'''
target: test insert entities
method: create 10 collections and insert entities into them in turn
expected: row count correct for each collection
'''
collection_num = 10
collection_list = []
for i in range(collection_num):
collection_name = gen_unique_str(uid)
collection_list.append(collection_name)
connect.create_collection(collection_name, default_fields)
ids = connect.insert(collection_name, default_entities)
connect.flush([collection_name])
assert len(ids) == default_nb
stats = connect.get_collection_stats(collection_name)
assert stats[row_count] == default_nb
for i in range(collection_num):
connect.drop_collection(collection_list[i])
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_drop_collection_insert_entity_another(self, connect, collection):
'''
target: test insert vector to collection_1 after collection_2 deleted
method: delete collection_2 and insert vector to collection_1
expected: row count equals the length of entities inserted
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
connect.drop_collection(collection)
ids = connect.insert(collection_name, default_entity)
connect.flush([collection_name])
assert len(ids) == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_create_index_insert_entity_another(self, connect, collection, get_simple_index):
'''
target: test insert vector to collection_2 after build index for collection_1
method: build index and insert vector
expected: status ok
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
connect.create_index(collection, field_name, get_simple_index)
ids = connect.insert(collection_name, default_entity)
assert len(ids) == 1
if get_simple_index["index_type"] != "FLAT":
index = connect.describe_index(collection, "")
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
connect.drop_collection(collection_name)
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_entity_create_index_another(self, connect, collection, get_simple_index):
'''
target: test insert entity into collection_1 and then build an index for collection_2
method: insert entity, flush, then build index on the other collection
expected: status ok
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
ids = connect.insert(collection, default_entity)
connect.flush([collection])
connect.create_index(collection_name, field_name, get_simple_index)
if get_simple_index["index_type"] != "FLAT":
index = connect.describe_index(collection_name, "")
create_target_index(get_simple_index, field_name)
assert index == get_simple_index
stats = connect.get_collection_stats(collection)
assert stats[row_count] == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_entity_sleep_create_index_another(self, connect, collection, get_simple_index):
'''
target: test insert entity into collection_1 and then build an index for collection_2
method: insert entity, flush, then build index on the other collection
expected: status ok
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
ids = connect.insert(collection, default_entity)
connect.flush([collection])
connect.create_index(collection_name, field_name, get_simple_index)
stats = connect.get_collection_stats(collection)
assert stats[row_count] == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_search_entity_insert_entity_another(self, connect, collection):
'''
target: test insert entity to collection_1 after search collection_2
method: search collection and insert entity
expected: status ok
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
connect.load_collection(collection)
res = connect.search(collection, default_single_query)
assert len(res[0]) == 0
connect.insert(collection_name, default_entity)
connect.flush([collection_name])
stats = connect.get_collection_stats(collection_name)
assert stats[row_count] == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.tags_smoke)
def test_insert_entity_search_entity_another(self, connect, collection):
'''
target: test search collection_2 after inserting an entity into collection_1
method: insert entity into one collection, then search the other
expected: status ok
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
ids = connect.insert(collection, default_entity)
connect.flush([collection])
connect.load_collection(collection_name)
res = connect.search(collection_name, default_single_query)
stats = connect.get_collection_stats(collection)
assert stats[row_count] == 1
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_entity_sleep_search_entity_another(self, connect, collection):
'''
target: test search collection_2 after inserting an entity into collection_1
method: insert entity, flush, then load and search the other collection
expected: status ok
'''
collection_name = gen_unique_str(uid)
connect.create_collection(collection_name, default_fields)
ids = connect.insert(collection, default_entity)
connect.flush([collection])
connect.load_collection(collection_name)
res = connect.search(collection_name, default_single_query)
assert len(res[0]) == 0
@pytest.mark.timeout(ADD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def _test_insert_entity_during_release_collection(self, connect, collection):
'''
target: test insert entity during release
method: release collection async, then do insert operation
expected: insert ok
'''
for i in range(10):
connect.insert(collection, default_entities)
connect.flush([collection])
connect.load_collection(collection)
def release():
connect.release_collection(collection)
t = threading.Thread(target=release, args=(collection,))
t.start()
ids = connect.insert(collection, default_entities)
assert len(ids) == default_nb
class TestInsertInvalid(object):
"""
Test inserting entities with invalid parameters (collection name, partition name, field name/type/value, ids)
"""
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_collection_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_tag_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_field_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_field_type(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_field_int_value(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_ints()
)
def get_entity_id(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_vectors()
)
def get_field_vectors_value(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def test_insert_ids_invalid(self, connect, id_collection, get_entity_id):
'''
target: test insert with customized ids that are not int64
method: create collection and insert entities in it
expected: raise an exception
'''
entity_id = get_entity_id
ids = [entity_id for _ in range(default_nb)]
with pytest.raises(Exception):
connect.insert(id_collection, default_entities, ids)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_collection_name(self, connect, get_collection_name):
collection_name = get_collection_name
with pytest.raises(Exception):
connect.insert(collection_name, default_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_partition_name(self, connect, collection, get_tag_name):
tag_name = get_tag_name
connect.create_partition(collection, default_tag)
if tag_name is not None:
with pytest.raises(Exception):
connect.insert(collection, default_entity, partition_name=tag_name)
else:
connect.insert(collection, default_entity, partition_name=tag_name)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_name(self, connect, collection, get_field_name):
tmp_entity = update_field_name(copy.deepcopy(default_entity), "int64", get_field_name)
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_type(self, connect, collection, get_field_type):
field_type = get_field_type
tmp_entity = update_field_type(copy.deepcopy(default_entity), 'float', field_type)
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_value(self, connect, collection, get_field_int_value):
field_value = get_field_int_value
tmp_entity = update_field_type(copy.deepcopy(default_entity), 'int64', field_value)
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_entity_value(self, connect, collection, get_field_vectors_value):
tmp_entity = copy.deepcopy(default_entity)
src_vector = tmp_entity[-1]["values"]
src_vector[0][1] = get_field_vectors_value
with pytest.raises(Exception):
connect.insert(collection, tmp_entity)
class TestInsertInvalidBinary(object):
"""
Test inserting binary entities with invalid parameters (field name/type/value, ids)
"""
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_collection_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_tag_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_field_name(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_field_type(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_field_int_value(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_ints()
)
def get_entity_id(self, request):
yield request.param
@pytest.fixture(
scope="function",
params=gen_invalid_vectors()
)
def get_field_vectors_value(self, request):
yield request.param
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_name(self, connect, binary_collection, get_field_name):
tmp_entity = update_field_name(copy.deepcopy(default_binary_entity), "int64", get_field_name)
with pytest.raises(Exception):
connect.insert(binary_collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_value(self, connect, binary_collection, get_field_int_value):
tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', get_field_int_value)
with pytest.raises(Exception):
connect.insert(binary_collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_entity_value(self, connect, binary_collection, get_field_vectors_value):
tmp_entity = copy.deepcopy(default_binary_entity)
src_vectors = tmp_entity[-1]["values"]
src_vectors[0] = get_field_vectors_value
with pytest.raises(Exception):
connect.insert(binary_collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_ids_invalid(self, connect, binary_id_collection, get_entity_id):
'''
target: test insert with customized ids that are not int64
method: create collection and insert entities in it
expected: raise an exception
'''
entity_id = get_entity_id
ids = [entity_id for _ in range(default_nb)]
with pytest.raises(Exception):
connect.insert(binary_id_collection, default_binary_entities, ids)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_type(self, connect, binary_collection, get_field_type):
field_type = get_field_type
tmp_entity = update_field_type(copy.deepcopy(default_binary_entity), 'int64', field_type)
with pytest.raises(Exception):
connect.insert(binary_collection, tmp_entity)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_with_invalid_field_entities_value(self, connect, binary_collection, get_field_vectors_value):
tmp_entities = copy.deepcopy(default_binary_entities)
src_vector = tmp_entities[-1]["values"]
src_vector[1] = get_field_vectors_value
with pytest.raises(Exception):
connect.insert(binary_collection, tmp_entities)
| 18,962 |
1,940 | <gh_stars>1000+
package org.opentech.activities;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import com.nispok.snackbar.Snackbar;
import com.nispok.snackbar.listeners.ActionClickListener;
import org.opentech.R;
import org.opentech.db.DatabaseManager;
import org.opentech.fragments.ScheduleFragment;
public class TrackActivity extends ActionBarActivity {
private String track;
private String map;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_track);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
track = getIntent().getStringExtra("TRACK");
getSupportFragmentManager().beginTransaction().replace(R.id.container, ScheduleFragment.newInstance(track), ScheduleFragment.TAG).addToBackStack(null).commit();
setTitle("Tracks: " + track);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.map, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.directions) {
//Get Map location from Database made from getting data from sheet.
DatabaseManager db = DatabaseManager.getInstance();
map = db.getTrackMapUrl(track);
Log.d("TRACK ACTIVITY", String.valueOf(map));
if (!TextUtils.isEmpty(map)){
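// Hand the map URL off to any app that can view it (browser or maps application).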
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(map));
startActivity(intent);
}
else{
Snackbar.with(getApplicationContext())
.text(R.string.no_location)
.color(getResources().getColor(R.color.color_primary))
.show(this);
}
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onBackPressed() {
finish();
}
}
| 1,086 |
14,668 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.net.smoke;
import android.content.Context;
import org.json.JSONObject;
import org.chromium.net.ExperimentalCronetEngine;
import java.io.File;
/**
* Provides support for tests, so they can be run in different environments against different
* servers. It contains methods whose behavior can differ between testing environments.
* The concrete implementation of this interface is determined dynamically at runtime by reading
* the value of |TestSupportImplClass| from the Android string resource file.
*/
public interface TestSupport {
enum Protocol {
HTTP1,
HTTP2,
QUIC,
}
/**
* Creates a new test server that supports a given {@code protocol}.
*
* @param context context.
* @param protocol protocol that should be supported by the server.
* @return an instance of the server.
*
* @throws UnsupportedOperationException if the implementation of this interface
* does not support a given {@code protocol}.
*/
TestServer createTestServer(Context context, Protocol protocol);
/**
* This method is called at the end of a test run if the netlog is available. An implementer
* of {@link TestSupport} can use it to process the resulting netlog; e.g., to copy the netlog
* to a directory where all test logs are collected. This method is optional and can be a no-op.
*
* @param file the netlog file.
*/
void processNetLog(Context context, File file);
/**
* Adds host resolver rules to a given experimental option JSON file.
* This method is optional.
*
* @param experimentalOptionsJson experimental options.
*/
void addHostResolverRules(JSONObject experimentalOptionsJson);
/**
* Installs mock certificate verifier for a given {@code builder}.
* This method is optional.
*
* @param builder that should have the verifier installed.
*/
void installMockCertVerifierForTesting(ExperimentalCronetEngine.Builder builder);
/**
* Loads a native library required for testing, if any.
*/
void loadTestNativeLibrary();
/**
* A test server.
*/
interface TestServer {
/**
* Starts the server.
*
* @return true if the server started successfully.
*/
boolean start();
/**
* Shuts down the server.
*/
void shutdown();
/**
* Returns a URL that the test code can fetch to receive a successful response.
*
* @return the URL as a string.
*/
String getSuccessURL();
}
}
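// --------------------------------------------------------------------------------------
// Illustrative sketch (not part of the original Chromium file): a hypothetical no-op
// implementation of TestSupport for an environment that only exercises HTTP/1.1 against
// an externally managed local server. The class name and the URL below are assumptions.
// --------------------------------------------------------------------------------------
class NoOpTestSupport implements TestSupport {
    @Override
    public TestServer createTestServer(Context context, Protocol protocol) {
        if (protocol != Protocol.HTTP1) {
            throw new UnsupportedOperationException("Unsupported protocol: " + protocol);
        }
        return new TestServer() {
            @Override
            public boolean start() {
                return true; // nothing to launch; the server is managed externally
            }
            @Override
            public void shutdown() {
                // nothing to tear down
            }
            @Override
            public String getSuccessURL() {
                return "http://127.0.0.1:8080/success"; // assumed endpoint
            }
        };
    }
    @Override
    public void processNetLog(Context context, File file) {
        // optional hook; intentionally a no-op here
    }
    @Override
    public void addHostResolverRules(JSONObject experimentalOptionsJson) {
        // no extra resolver rules needed in this environment
    }
    @Override
    public void installMockCertVerifierForTesting(ExperimentalCronetEngine.Builder builder) {
        // real certificates are used, so no mock verifier is installed
    }
    @Override
    public void loadTestNativeLibrary() {
        // no additional native library required
    }
}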
| 983 |
310 | {
"name": "Metro (iOS)",
"description": "A news app.",
"url": "https://itunes.apple.com/us/app/id465059515"
} | 48 |
369 | <gh_stars>100-1000
// Copyright (c) 2017-2021, <NAME>. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#ifndef DATECOMMON_H
#define DATECOMMON_H
#include "time_conversion_factory.hpp"
#include <Utils.hpp>
#include <date/date.h>
#include <random>
using Clock = std::chrono::system_clock;
using TimePoint = std::chrono::time_point<Clock>;
namespace calendar
{
using YearMonthDay = date::year_month_day;
using YearMonthDayLast = date::year_month_day_last;
class Timestamp : public utils::time::Timestamp
{
public:
enum class GetParameters
{
Hour,
Minute,
Day,
Month,
Year
};
explicit Timestamp(time_t newtime) : utils::time::Timestamp(newtime)
{}
uint32_t get_date_time_sub_value(GetParameters param)
{
auto timeinfo = *std::localtime(&time);
switch (param) {
case GetParameters::Hour:
return timeinfo.tm_hour;
case GetParameters::Minute:
return timeinfo.tm_min;
case GetParameters::Day:
return timeinfo.tm_mday;
case GetParameters::Month:
return timeinfo.tm_mon + 1;
case GetParameters::Year:
return timeinfo.tm_year + 1900;
}
return UINT32_MAX;
}
uint32_t get_UTC_date_time_sub_value(GetParameters param)
{
std::tm tm = *std::gmtime(&time);
switch (param) {
case GetParameters::Hour:
return tm.tm_hour;
case GetParameters::Minute:
return tm.tm_min;
case GetParameters::Day:
return tm.tm_mday;
case GetParameters::Month:
return tm.tm_mon + 1;
case GetParameters::Year:
return tm.tm_year + 1900;
}
return UINT32_MAX;
}
};
} // namespace calendar
inline constexpr auto max_month_day = 31;
inline constexpr auto unix_epoch_year = 1900;
enum class Reminder
{
never = 0xFFFF,
event_time = 0,
five_min_before = 5,
fifteen_min_before = 15,
thirty_min_before = 30,
one_hour_before = 60,
two_hour_before = 120,
one_day_before = 1440,
two_days_before = 2880,
one_week_before = 10080
};
enum class Repeat
{
never,
daily,
weekly,
biweekly,
monthly,
yearly
};
inline constexpr TimePoint TIME_POINT_INVALID = date::sys_days{date::January / 1 / 1970};
inline constexpr TimePoint TIME_POINT_MAX = date::sys_days{date::April / 11 / 2262};
inline constexpr uint32_t yearDigitsNumb = 4, monthDigitsNumb = 2, dayDigitsNumb = 2, HourDigitsNumb = 2,
MinDigitsNumb = 2, SecDigitsNumb = 2;
inline std::tm CreateTmStruct(int year, int month, int day, int hour, int minutes, int seconds)
{
struct tm tm_ret;
tm_ret.tm_isdst = -1;
tm_ret.tm_sec = seconds;
tm_ret.tm_min = minutes;
tm_ret.tm_hour = hour;
tm_ret.tm_mday = day;
tm_ret.tm_mon = month - 1;
tm_ret.tm_year = year - 1900;
return tm_ret;
}
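// Computes the offset, in seconds, between local time and UTC by round-tripping a fixed
// reference date through std::localtime/std::gmtime and comparing the std::mktime results.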
inline time_t GetDiffLocalWithUTCTime()
{
std::tm tm = CreateTmStruct(2000, 1, 1, 0, 0, 0);
std::time_t basetime = std::mktime(&tm);
std::time_t diff;
tm = *std::localtime(&basetime);
tm.tm_isdst = -1;
diff = std::mktime(&tm);
tm = *std::gmtime(&basetime);
tm.tm_isdst = -1;
diff -= std::mktime(&tm);
return diff;
}
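// Interprets the given calendar fields as a UTC wall-clock time and returns the matching time_t.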
inline time_t GetAsUTCTime(int year, int month, int day, int hour = 0, int minutes = 0, int seconds = 0)
{
std::tm tm = CreateTmStruct(year, month, day, hour, minutes, seconds);
std::time_t basetime = std::mktime(&tm);
return basetime + GetDiffLocalWithUTCTime();
}
inline TimePoint TimePointFromTimeT(const time_t &time)
{
return std::chrono::system_clock::from_time_t(time);
}
inline time_t TimePointToTimeT(const TimePoint &tp)
{
return std::chrono::system_clock::to_time_t(tp);
}
inline TimePoint TimePointNow()
{
return TimePointFromTimeT(std::time(nullptr));
}
inline std::string TimePointToString(const TimePoint &tp)
{
return date::format("%F %T", std::chrono::time_point_cast<std::chrono::seconds>(tp));
}
inline auto TimePointToHourMinSec(const TimePoint &tp)
{
auto dp = date::floor<date::days>(tp);
return date::make_time(tp - dp);
}
inline uint32_t TimePointToHour24H(const TimePoint &tp)
{
auto time = TimePointToTimeT(tp);
calendar::Timestamp timestamp(time);
auto hour = timestamp.get_date_time_sub_value(calendar::Timestamp::GetParameters::Hour);
return hour;
}
inline auto LocalizedHoursToUtcHours(int hour = 0)
{
std::tm tm = CreateTmStruct(unix_epoch_year, 1, 1, hour, 0, 0);
std::time_t basetime = std::mktime(&tm);
basetime -= GetDiffLocalWithUTCTime();
return TimePointToHour24H(TimePointFromTimeT(basetime));
}
inline uint32_t TimePointToMinutes(const TimePoint &tp)
{
auto time = TimePointToTimeT(tp);
calendar::Timestamp timestamp(time);
auto minute = timestamp.get_date_time_sub_value(calendar::Timestamp::GetParameters::Minute);
return minute;
}
inline TimePoint getFirstWeekDay(const TimePoint &tp)
{
auto time_of_day = TimePointToHourMinSec(tp);
auto yearMonthDay = date::year_month_day{date::floor<date::days>(tp)};
while (date::weekday{yearMonthDay} != date::mon) {
auto decrementedDay = --yearMonthDay.day();
yearMonthDay = yearMonthDay.year() / yearMonthDay.month() / decrementedDay;
}
auto finalDate = date::sys_days{yearMonthDay.year() / yearMonthDay.month() / yearMonthDay.day()};
auto finalDateTime = finalDate + time_of_day.hours() + time_of_day.minutes();
return finalDateTime;
}
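// Formats tp shifted forward by the given number of months; if the source date falls on the
// last day of its month, the result is clamped to the last day of the target month.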
inline std::string TimePointToString(const TimePoint &tp, date::months months)
{
date::year_month_day yearMonthDay = date::year_month_day{date::floor<date::days>(tp)};
date::year_month_day yearMonthDayLast = yearMonthDay.year() / yearMonthDay.month() / date::last;
auto tpHourMinuteSecond = TimePointToHourMinSec(tp).to_duration();
TimePoint timePoint;
if ((static_cast<unsigned>(yearMonthDay.month()) + months.count()) <= 12) {
if (yearMonthDayLast.day() == yearMonthDay.day()) {
yearMonthDayLast = yearMonthDay.year() / (yearMonthDay.month() + months) / date::last;
timePoint = date::sys_days{yearMonthDayLast.year() / yearMonthDayLast.month() / yearMonthDayLast.day()};
}
else {
timePoint = date::sys_days{yearMonthDay.year() / (yearMonthDay.month() + months) / yearMonthDay.day()};
}
}
else {
date::month incrementedMonths = date::month(months.count()) - (date::month(12) - yearMonthDay.month());
yearMonthDay = (yearMonthDay.year() + date::years{1}) / incrementedMonths / yearMonthDay.day();
if (yearMonthDayLast.day() == yearMonthDay.day()) {
yearMonthDayLast = yearMonthDay.year() / incrementedMonths / date::last;
timePoint = date::sys_days{yearMonthDayLast.year() / yearMonthDayLast.month() / yearMonthDayLast.day()};
}
else {
timePoint = date::sys_days{yearMonthDay.year() / yearMonthDay.month() / yearMonthDay.day()};
}
}
auto time_of_day = TimePointToHourMinSec(tp);
return date::format(
"%F %T",
std::chrono::time_point_cast<std::chrono::seconds>(timePoint + time_of_day.hours() + time_of_day.minutes()));
}
inline std::string TimePointToString(const TimePoint &tp, date::years years)
{
auto yearMonthDay = date::year_month_day{date::floor<date::days>(tp)};
auto yearMonthDayLast = (yearMonthDay.year() + date::years(years)) / yearMonthDay.month() / date::last;
TimePoint timePoint =
date::sys_days{yearMonthDayLast.year() / yearMonthDayLast.month() /
((yearMonthDayLast.day() == yearMonthDay.day()) ? yearMonthDayLast.day() : yearMonthDay.day())};
auto time_of_day = TimePointToHourMinSec(tp);
return date::format(
"%F %T",
std::chrono::time_point_cast<std::chrono::seconds>(timePoint + time_of_day.hours() + time_of_day.minutes()));
}
inline std::string HHMMToLocalizedString(std::chrono::hours hours,
std::chrono::minutes minutes,
utils::time::TimestampType type,
const std::string format = "")
{
using namespace utils::time;
const auto nowTimeT = std::chrono::system_clock::to_time_t(std::chrono::system_clock::now());
const auto nowLocal = std::localtime(&nowTimeT);
nowLocal->tm_hour = hours.count();
nowLocal->tm_min = minutes.count();
auto timestamp = TimestampFactory().createTimestamp(type, std::mktime(nowLocal));
return timestamp->str(format);
}
inline TimePoint TimePointFromString(const char *s1)
{
TimePoint tp;
std::istringstream(s1) >> date::parse("%F %T", tp);
return tp;
}
inline calendar::YearMonthDay TimePointToYearMonthDay(const TimePoint &tp)
{
return date::year_month_day{date::floor<date::days>(tp)};
}
inline TimePoint TimePointFromYearMonthDay(const calendar::YearMonthDay &ymd)
{
return date::sys_days{ymd.year() / ymd.month() / ymd.day()};
}
inline time_t TimePointToMin(const TimePoint &tp)
{
auto time = TimePointToTimeT(tp);
auto duration = utils::time::Duration(time);
auto minutes = duration.getMinutes();
return minutes;
}
inline uint32_t TimePointToHour12H(const TimePoint &tp)
{
auto time = TimePointToTimeT(tp);
calendar::Timestamp timestamp(time);
auto hour = timestamp.get_date_time_sub_value(calendar::Timestamp::GetParameters::Hour);
if (hour > 12) {
hour -= 12;
}
return hour;
}
inline std::string TimePointToHourString12H(const TimePoint &tp)
{
auto hour =
calendar::Timestamp(TimePointToTimeT(tp)).get_UTC_date_time_sub_value(calendar::Timestamp::GetParameters::Hour);
auto hour12h = date::make12(std::chrono::hours(hour)).count();
return utils::to_string(hour12h);
}
inline std::string TimePointToHourString24H(const TimePoint &tp)
{
auto hour =
calendar::Timestamp(TimePointToTimeT(tp)).get_UTC_date_time_sub_value(calendar::Timestamp::GetParameters::Hour);
return utils::to_string(hour);
}
inline std::string TimePointToMinutesString(const TimePoint &tp)
{
auto minute = TimePointToMinutes(tp);
auto minuteString = std::to_string(minute);
if (minute < 10) {
minuteString = "0" + minuteString;
}
return minuteString;
}
// 0: Monday, 1: Tuesday ... 6: Sunday
inline unsigned int WeekdayIndexFromTimePoint(const TimePoint &tp)
{
auto ymw = date::year_month_weekday{std::chrono::floor<date::days>(tp)};
return ymw.weekday().iso_encoding() - 1;
}
inline TimePoint TimePointFloorMinutes(const TimePoint &tp)
{
return std::chrono::floor<std::chrono::minutes>(tp);
}
/// Returns a TimePoint within 24 hours after relativeTime, using tp as the source of the time of day.
inline TimePoint GetFollowingDayTime(const TimePoint &tp, const TimePoint &relativeTime)
{
auto diff = relativeTime - tp;
if (diff > std::chrono::hours{0}) {
return tp + std::chrono::ceil<date::days>(diff);
}
else if (diff == std::chrono::hours{0}) {
return tp + date::days{1};
}
else if (diff < std::chrono::hours{0} && std::chrono::abs(diff) > std::chrono::hours{24}) {
return tp - std::chrono::floor<date::days>(std::chrono::abs(diff));
}
else {
return tp;
}
}
inline TimePoint nextTimePointFromHHMM(std::chrono::hours hours, std::chrono::minutes minutes, const TimePoint &from)
{
const auto fromTimeT = std::chrono::system_clock::to_time_t(from);
const auto fromLocal = std::localtime(&fromTimeT);
fromLocal->tm_hour = hours.count();
fromLocal->tm_min = minutes.count();
auto nextTime = TimePointFloorMinutes(std::chrono::system_clock::from_time_t(std::mktime(fromLocal)));
return GetFollowingDayTime(nextTime, from);
}
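// Builds a simple identifier of the form <timestamp>-<random 1..100>; uniqueness is only
// best-effort and is not guaranteed for rapid successive calls.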
inline std::string createUID()
{
constexpr uint32_t bufferLimit = 16;
char Buffer[bufferLimit];
utils::time::Timestamp timestamp(std::time(nullptr));
std::string UID{timestamp.str("%Y%m%dT%H%M%S")};
UID += '-';
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> distrib(1, 100);
snprintf(Buffer, sizeof(Buffer), "%d", distrib(gen));
UID += Buffer;
return UID;
}
#endif
// DATECOMMON_H
| 5,401 |
495 | <reponame>StudistCorporation/media-for-mobile<filename>domain/src/main/java/org/m4m/domain/pipeline/OutputFormatChangedHandler.java
/*
* Copyright 2014-2016 Media for Mobile
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.m4m.domain.pipeline;
import org.m4m.domain.Command;
import org.m4m.domain.Frame;
import org.m4m.domain.ICommandHandler;
import org.m4m.domain.IFrameAllocator;
import org.m4m.domain.IOutput;
import org.m4m.domain.MultipleMediaSource;
import org.m4m.domain.Plugin;
class OutputFormatChangedHandler implements ICommandHandler {
//Logger log = Logger.getLogger(getClass().getSimpleName());
protected IOutput output;
protected Plugin plugin;
private IFrameAllocator inputWithAllocator;
public OutputFormatChangedHandler(IOutput output, Plugin plugin, IFrameAllocator frameAllocator) {
this.output = output;
this.plugin = plugin;
this.inputWithAllocator = frameAllocator;
}
@Override
public void handle() {
if (output instanceof MultipleMediaSource) {
Frame frame = inputWithAllocator.findFreeFrame();
if (frame == null) {
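// No free frame is available yet: re-queue the format-changed command and ask for more data.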
restoreCommands();
return;
}
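// A frame is available: drain it, restart the plugin with the new media format,
// then remap the track and advance the multiplexed source to the next file.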
plugin.drain(frame.getBufferIndex());
plugin.stop();
plugin.setMediaFormat(output.getMediaFormatByType(plugin.getMediaFormatType()));
plugin.configure();
plugin.start();
plugin.setTrackId(plugin.getTrackId());
MultipleMediaSource multipleMediaSource = (MultipleMediaSource) output;
int trackId = multipleMediaSource.getTrackIdByMediaType(plugin.getMediaFormatType());
multipleMediaSource.selectTrack(trackId);
multipleMediaSource.setTrackMap(trackId, plugin.getTrackId());
multipleMediaSource.nextFile();
}
}
private void restoreCommands() {
output.getOutputCommandQueue().queue(Command.OutputFormatChanged, plugin.getTrackId());
plugin.getInputCommandQueue().clear();
plugin.skipProcessing();
plugin.getInputCommandQueue().queue(Command.NeedData, plugin.getTrackId());
}
}
| 967 |
553 | #include "jsb_cocos2dx_pluginx_auto.hpp"
#include "jsb_pluginx_spidermonkey_specifics.h"
#include "jsb_pluginx_basic_conversions.h"
using namespace pluginx;
#include "PluginManager.h"
#include "ProtocolAnalytics.h"
#include "ProtocolIAP.h"
#include "ProtocolAds.h"
#include "ProtocolShare.h"
#include "ProtocolSocial.h"
#include "ProtocolUser.h"
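// Generated helper: constructs a native object of type T, wraps it in a new JS object of the
// registered class, and roots the proxy so the pair stays alive across garbage collections.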
template<class T>
static bool dummy_constructor(JSContext *cx, uint32_t argc, jsval *vp) {
JS::RootedValue initializing(cx);
bool isNewValid = true;
if (isNewValid)
{
TypeTest<T> t;
js_type_class_t *typeClass = nullptr;
std::string typeName = t.s_name();
auto typeMapIter = _js_global_type_map.find(typeName);
CCASSERT(typeMapIter != _js_global_type_map.end(), "Can't find the class type!");
typeClass = typeMapIter->second;
CCASSERT(typeClass, "The value is null.");
JSObject *_tmp = JS_NewObject(cx, typeClass->jsclass, typeClass->proto, typeClass->parentProto);
T* cobj = new T();
js_proxy_t *pp = jsb_new_proxy(cobj, _tmp);
JS_AddObjectRoot(cx, &pp->obj);
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(_tmp));
return true;
}
return false;
}
static bool empty_constructor(JSContext *cx, uint32_t argc, jsval *vp) {
return false;
}
static bool js_is_native_obj(JSContext *cx, JS::HandleObject obj, JS::HandleId id, JS::MutableHandleValue vp)
{
vp.set(BOOLEAN_TO_JSVAL(true));
return true;
}
JSClass *jsb_cocos2d_plugin_PluginProtocol_class;
JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
bool js_pluginx_protocols_PluginProtocol_getPluginName(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::PluginProtocol* cobj = (cocos2d::plugin::PluginProtocol *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_PluginProtocol_getPluginName : Invalid Native Object");
if (argc == 0) {
const char* ret = cobj->getPluginName();
jsval jsret = JSVAL_NULL;
jsret = c_string_to_jsval(cx, ret);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginProtocol_getPluginName : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_PluginProtocol_getPluginVersion(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::PluginProtocol* cobj = (cocos2d::plugin::PluginProtocol *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_PluginProtocol_getPluginVersion : Invalid Native Object");
if (argc == 0) {
std::string ret = cobj->getPluginVersion();
jsval jsret = JSVAL_NULL;
jsret = std_string_to_jsval(cx, ret);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginProtocol_getPluginVersion : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_PluginProtocol_getSDKVersion(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::PluginProtocol* cobj = (cocos2d::plugin::PluginProtocol *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_PluginProtocol_getSDKVersion : Invalid Native Object");
if (argc == 0) {
std::string ret = cobj->getSDKVersion();
jsval jsret = JSVAL_NULL;
jsret = std_string_to_jsval(cx, ret);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginProtocol_getSDKVersion : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_PluginProtocol_setDebugMode(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::PluginProtocol* cobj = (cocos2d::plugin::PluginProtocol *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_PluginProtocol_setDebugMode : Invalid Native Object");
if (argc == 1) {
bool arg0;
arg0 = JS::ToBoolean(JS::RootedValue(cx, argv[0]));
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_PluginProtocol_setDebugMode : Error processing arguments");
cobj->setDebugMode(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginProtocol_setDebugMode : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
void js_cocos2d_plugin_PluginProtocol_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (PluginProtocol)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::PluginProtocol *nobj = static_cast<cocos2d::plugin::PluginProtocol *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_PluginProtocol(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_PluginProtocol_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_PluginProtocol_class->name = "PluginProtocol";
jsb_cocos2d_plugin_PluginProtocol_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_PluginProtocol_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_PluginProtocol_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_PluginProtocol_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_PluginProtocol_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_PluginProtocol_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_PluginProtocol_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_PluginProtocol_class->finalize = js_cocos2d_plugin_PluginProtocol_finalize;
jsb_cocos2d_plugin_PluginProtocol_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("getPluginName", js_pluginx_protocols_PluginProtocol_getPluginName, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("getPluginVersion", js_pluginx_protocols_PluginProtocol_getPluginVersion, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("getSDKVersion", js_pluginx_protocols_PluginProtocol_getSDKVersion, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("setDebugMode", js_pluginx_protocols_PluginProtocol_setDebugMode, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_PluginProtocol_prototype = JS_InitClass(
cx, global,
NULL, // parent proto
jsb_cocos2d_plugin_PluginProtocol_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "PluginProtocol", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::PluginProtocol> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_PluginProtocol_class;
p->proto = jsb_cocos2d_plugin_PluginProtocol_prototype;
p->parentProto = NULL;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_PluginManager_class;
JSObject *jsb_cocos2d_plugin_PluginManager_prototype;
bool js_pluginx_protocols_PluginManager_unloadPlugin(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::PluginManager* cobj = (cocos2d::plugin::PluginManager *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_PluginManager_unloadPlugin : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_PluginManager_unloadPlugin : Error processing arguments");
cobj->unloadPlugin(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginManager_unloadPlugin : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_PluginManager_loadPlugin(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::PluginManager* cobj = (cocos2d::plugin::PluginManager *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_PluginManager_loadPlugin : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_PluginManager_loadPlugin : Error processing arguments");
cocos2d::plugin::PluginProtocol* ret = cobj->loadPlugin(arg0);
jsval jsret = JSVAL_NULL;
do {
if (ret) {
js_proxy_t *jsProxy = js_get_or_create_proxy<cocos2d::plugin::PluginProtocol>(cx, (cocos2d::plugin::PluginProtocol*)ret);
jsret = OBJECT_TO_JSVAL(jsProxy->obj);
} else {
jsret = JSVAL_NULL;
}
} while (0);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginManager_loadPlugin : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_PluginManager_end(JSContext *cx, uint32_t argc, jsval *vp)
{
if (argc == 0) {
cocos2d::plugin::PluginManager::end();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginManager_end : wrong number of arguments");
return false;
}
bool js_pluginx_protocols_PluginManager_getInstance(JSContext *cx, uint32_t argc, jsval *vp)
{
if (argc == 0) {
cocos2d::plugin::PluginManager* ret = cocos2d::plugin::PluginManager::getInstance();
jsval jsret = JSVAL_NULL;
do {
if (ret) {
js_proxy_t *jsProxy = js_get_or_create_proxy<cocos2d::plugin::PluginManager>(cx, (cocos2d::plugin::PluginManager*)ret);
jsret = OBJECT_TO_JSVAL(jsProxy->obj);
} else {
jsret = JSVAL_NULL;
}
} while (0);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_PluginManager_getInstance : wrong number of arguments");
return false;
}
void js_cocos2d_plugin_PluginManager_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (PluginManager)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::PluginManager *nobj = static_cast<cocos2d::plugin::PluginManager *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_PluginManager(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_PluginManager_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_PluginManager_class->name = "PluginManager";
jsb_cocos2d_plugin_PluginManager_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_PluginManager_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_PluginManager_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_PluginManager_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_PluginManager_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_PluginManager_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_PluginManager_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_PluginManager_class->finalize = js_cocos2d_plugin_PluginManager_finalize;
jsb_cocos2d_plugin_PluginManager_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("unloadPlugin", js_pluginx_protocols_PluginManager_unloadPlugin, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("loadPlugin", js_pluginx_protocols_PluginManager_loadPlugin, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
static JSFunctionSpec st_funcs[] = {
JS_FN("end", js_pluginx_protocols_PluginManager_end, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("getInstance", js_pluginx_protocols_PluginManager_getInstance, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
jsb_cocos2d_plugin_PluginManager_prototype = JS_InitClass(
cx, global,
NULL, // parent proto
jsb_cocos2d_plugin_PluginManager_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "PluginManager", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::PluginManager> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_PluginManager_class;
p->proto = jsb_cocos2d_plugin_PluginManager_prototype;
p->parentProto = NULL;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_ProtocolAnalytics_class;
JSObject *jsb_cocos2d_plugin_ProtocolAnalytics_prototype;
bool js_pluginx_protocols_ProtocolAnalytics_logTimedEventBegin(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logTimedEventBegin : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logTimedEventBegin : Error processing arguments");
cobj->logTimedEventBegin(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_logTimedEventBegin : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_logError(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logError : Invalid Native Object");
if (argc == 2) {
const char* arg0;
const char* arg1;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
std::string arg1_tmp; ok &= jsval_to_std_string(cx, argv[1], &arg1_tmp); arg1 = arg1_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logError : Error processing arguments");
cobj->logError(arg0, arg1);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_logError : wrong number of arguments: %d, was expecting %d", argc, 2);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_setCaptureUncaughtException(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_setCaptureUncaughtException : Invalid Native Object");
if (argc == 1) {
bool arg0;
arg0 = JS::ToBoolean(JS::RootedValue(cx, argv[0]));
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_setCaptureUncaughtException : Error processing arguments");
cobj->setCaptureUncaughtException(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_setCaptureUncaughtException : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_setSessionContinueMillis(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_setSessionContinueMillis : Invalid Native Object");
if (argc == 1) {
long arg0;
ok &= jsval_to_long(cx, argv[0], (long *)&arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_setSessionContinueMillis : Error processing arguments");
cobj->setSessionContinueMillis(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_setSessionContinueMillis : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_logEvent(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logEvent : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logEvent : Error processing arguments");
cobj->logEvent(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
if (argc == 2) {
const char* arg0;
cocos2d::plugin::LogEventParamMap* arg1;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
cocos2d::plugin::LogEventParamMap arg1_tmp;
do {
ok &= pluginx::jsval_to_LogEventParamMap(cx, argv[1], &arg1);
if (ok) { arg1_tmp = *arg1; delete arg1; arg1 = &arg1_tmp; } else { arg1 = NULL; }
} while(0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logEvent : Error processing arguments");
cobj->logEvent(arg0, arg1);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_logEvent : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_startSession(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_startSession : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_startSession : Error processing arguments");
cobj->startSession(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_startSession : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_stopSession(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_stopSession : Invalid Native Object");
if (argc == 0) {
cobj->stopSession();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_stopSession : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_ProtocolAnalytics_logTimedEventEnd(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAnalytics* cobj = (cocos2d::plugin::ProtocolAnalytics *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logTimedEventEnd : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAnalytics_logTimedEventEnd : Error processing arguments");
cobj->logTimedEventEnd(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAnalytics_logTimedEventEnd : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
extern JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
void js_cocos2d_plugin_ProtocolAnalytics_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (ProtocolAnalytics)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::ProtocolAnalytics *nobj = static_cast<cocos2d::plugin::ProtocolAnalytics *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_ProtocolAnalytics(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_ProtocolAnalytics_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_ProtocolAnalytics_class->name = "ProtocolAnalytics";
jsb_cocos2d_plugin_ProtocolAnalytics_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_ProtocolAnalytics_class->finalize = js_cocos2d_plugin_ProtocolAnalytics_finalize;
jsb_cocos2d_plugin_ProtocolAnalytics_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("logTimedEventBegin", js_pluginx_protocols_ProtocolAnalytics_logTimedEventBegin, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("logError", js_pluginx_protocols_ProtocolAnalytics_logError, 2, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("setCaptureUncaughtException", js_pluginx_protocols_ProtocolAnalytics_setCaptureUncaughtException, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("setSessionContinueMillis", js_pluginx_protocols_ProtocolAnalytics_setSessionContinueMillis, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("logEvent", js_pluginx_protocols_ProtocolAnalytics_logEvent, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("startSession", js_pluginx_protocols_ProtocolAnalytics_startSession, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("stopSession", js_pluginx_protocols_ProtocolAnalytics_stopSession, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("logTimedEventEnd", js_pluginx_protocols_ProtocolAnalytics_logTimedEventEnd, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_ProtocolAnalytics_prototype = JS_InitClass(
cx, global,
jsb_cocos2d_plugin_PluginProtocol_prototype,
jsb_cocos2d_plugin_ProtocolAnalytics_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "ProtocolAnalytics", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::ProtocolAnalytics> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_ProtocolAnalytics_class;
p->proto = jsb_cocos2d_plugin_ProtocolAnalytics_prototype;
p->parentProto = jsb_cocos2d_plugin_PluginProtocol_prototype;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_ProtocolIAP_class;
JSObject *jsb_cocos2d_plugin_ProtocolIAP_prototype;
bool js_pluginx_protocols_ProtocolIAP_payForProduct(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolIAP* cobj = (cocos2d::plugin::ProtocolIAP *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolIAP_payForProduct : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TProductInfo arg0;
ok &= pluginx::jsval_to_TProductInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolIAP_payForProduct : Error processing arguments");
cobj->payForProduct(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolIAP_payForProduct : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolIAP_onPayResult(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolIAP* cobj = (cocos2d::plugin::ProtocolIAP *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolIAP_onPayResult : Invalid Native Object");
if (argc == 2) {
cocos2d::plugin::PayResultCode arg0;
const char* arg1;
ok &= jsval_to_int32(cx, argv[0], (int32_t *)&arg0);
std::string arg1_tmp; ok &= jsval_to_std_string(cx, argv[1], &arg1_tmp); arg1 = arg1_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolIAP_onPayResult : Error processing arguments");
cobj->onPayResult(arg0, arg1);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolIAP_onPayResult : wrong number of arguments: %d, was expecting %d", argc, 2);
return false;
}
bool js_pluginx_protocols_ProtocolIAP_configDeveloperInfo(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolIAP* cobj = (cocos2d::plugin::ProtocolIAP *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolIAP_configDeveloperInfo : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TIAPDeveloperInfo arg0;
ok &= pluginx::jsval_to_TIAPDeveloperInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolIAP_configDeveloperInfo : Error processing arguments");
cobj->configDeveloperInfo(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolIAP_configDeveloperInfo : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
extern JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
void js_cocos2d_plugin_ProtocolIAP_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (ProtocolIAP)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::ProtocolIAP *nobj = static_cast<cocos2d::plugin::ProtocolIAP *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_ProtocolIAP(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_ProtocolIAP_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_ProtocolIAP_class->name = "ProtocolIAP";
jsb_cocos2d_plugin_ProtocolIAP_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolIAP_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_ProtocolIAP_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolIAP_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_ProtocolIAP_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_ProtocolIAP_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_ProtocolIAP_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_ProtocolIAP_class->finalize = js_cocos2d_plugin_ProtocolIAP_finalize;
jsb_cocos2d_plugin_ProtocolIAP_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("payForProduct", js_pluginx_protocols_ProtocolIAP_payForProduct, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("onPayResult", js_pluginx_protocols_ProtocolIAP_onPayResult, 2, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("configDeveloperInfo", js_pluginx_protocols_ProtocolIAP_configDeveloperInfo, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_ProtocolIAP_prototype = JS_InitClass(
cx, global,
jsb_cocos2d_plugin_PluginProtocol_prototype,
jsb_cocos2d_plugin_ProtocolIAP_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "ProtocolIAP", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::ProtocolIAP> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_ProtocolIAP_class;
p->proto = jsb_cocos2d_plugin_ProtocolIAP_prototype;
p->parentProto = jsb_cocos2d_plugin_PluginProtocol_prototype;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_ProtocolAds_class;
JSObject *jsb_cocos2d_plugin_ProtocolAds_prototype;
bool js_pluginx_protocols_ProtocolAds_showAds(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAds* cobj = (cocos2d::plugin::ProtocolAds *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAds_showAds : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TAdsInfo arg0;
ok &= pluginx::jsval_to_TAdsInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAds_showAds : Error processing arguments");
cobj->showAds(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
if (argc == 2) {
cocos2d::plugin::TAdsInfo arg0;
cocos2d::plugin::ProtocolAds::AdsPos arg1;
ok &= pluginx::jsval_to_TAdsInfo(cx, argv[0], &arg0);
ok &= jsval_to_int32(cx, argv[1], (int32_t *)&arg1);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAds_showAds : Error processing arguments");
cobj->showAds(arg0, arg1);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAds_showAds : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAds_hideAds(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAds* cobj = (cocos2d::plugin::ProtocolAds *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAds_hideAds : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TAdsInfo arg0;
ok &= pluginx::jsval_to_TAdsInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAds_hideAds : Error processing arguments");
cobj->hideAds(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAds_hideAds : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAds_queryPoints(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAds* cobj = (cocos2d::plugin::ProtocolAds *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAds_queryPoints : Invalid Native Object");
if (argc == 0) {
cobj->queryPoints();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAds_queryPoints : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_ProtocolAds_spendPoints(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAds* cobj = (cocos2d::plugin::ProtocolAds *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAds_spendPoints : Invalid Native Object");
if (argc == 1) {
int arg0;
ok &= jsval_to_int32(cx, argv[0], (int32_t *)&arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAds_spendPoints : Error processing arguments");
cobj->spendPoints(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAds_spendPoints : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAds_configDeveloperInfo(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAds* cobj = (cocos2d::plugin::ProtocolAds *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAds_configDeveloperInfo : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TAdsDeveloperInfo arg0;
ok &= pluginx::jsval_to_TAdsDeveloperInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolAds_configDeveloperInfo : Error processing arguments");
cobj->configDeveloperInfo(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAds_configDeveloperInfo : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolAds_getAdsListener(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolAds* cobj = (cocos2d::plugin::ProtocolAds *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolAds_getAdsListener : Invalid Native Object");
if (argc == 0) {
cocos2d::plugin::AdsListener* ret = cobj->getAdsListener();
jsval jsret = JSVAL_NULL;
do {
if (ret) {
js_proxy_t *jsProxy = js_get_or_create_proxy<cocos2d::plugin::AdsListener>(cx, (cocos2d::plugin::AdsListener*)ret);
jsret = OBJECT_TO_JSVAL(jsProxy->obj);
} else {
jsret = JSVAL_NULL;
}
} while (0);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolAds_getAdsListener : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
extern JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
void js_cocos2d_plugin_ProtocolAds_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (ProtocolAds)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::ProtocolAds *nobj = static_cast<cocos2d::plugin::ProtocolAds *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_ProtocolAds(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_ProtocolAds_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_ProtocolAds_class->name = "ProtocolAds";
jsb_cocos2d_plugin_ProtocolAds_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolAds_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_ProtocolAds_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolAds_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_ProtocolAds_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_ProtocolAds_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_ProtocolAds_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_ProtocolAds_class->finalize = js_cocos2d_plugin_ProtocolAds_finalize;
jsb_cocos2d_plugin_ProtocolAds_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("showAds", js_pluginx_protocols_ProtocolAds_showAds, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("hideAds", js_pluginx_protocols_ProtocolAds_hideAds, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("queryPoints", js_pluginx_protocols_ProtocolAds_queryPoints, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("spendPoints", js_pluginx_protocols_ProtocolAds_spendPoints, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("configDeveloperInfo", js_pluginx_protocols_ProtocolAds_configDeveloperInfo, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("getAdsListener", js_pluginx_protocols_ProtocolAds_getAdsListener, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_ProtocolAds_prototype = JS_InitClass(
cx, global,
jsb_cocos2d_plugin_PluginProtocol_prototype,
jsb_cocos2d_plugin_ProtocolAds_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "ProtocolAds", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::ProtocolAds> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_ProtocolAds_class;
p->proto = jsb_cocos2d_plugin_ProtocolAds_prototype;
p->parentProto = jsb_cocos2d_plugin_PluginProtocol_prototype;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_ProtocolShare_class;
JSObject *jsb_cocos2d_plugin_ProtocolShare_prototype;
bool js_pluginx_protocols_ProtocolShare_onShareResult(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolShare* cobj = (cocos2d::plugin::ProtocolShare *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolShare_onShareResult : Invalid Native Object");
if (argc == 2) {
cocos2d::plugin::ShareResultCode arg0;
const char* arg1;
ok &= jsval_to_int32(cx, argv[0], (int32_t *)&arg0);
std::string arg1_tmp; ok &= jsval_to_std_string(cx, argv[1], &arg1_tmp); arg1 = arg1_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolShare_onShareResult : Error processing arguments");
cobj->onShareResult(arg0, arg1);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolShare_onShareResult : wrong number of arguments: %d, was expecting %d", argc, 2);
return false;
}
bool js_pluginx_protocols_ProtocolShare_share(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolShare* cobj = (cocos2d::plugin::ProtocolShare *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolShare_share : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TShareInfo arg0;
ok &= pluginx::jsval_to_TShareInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolShare_share : Error processing arguments");
cobj->share(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolShare_share : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolShare_configDeveloperInfo(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolShare* cobj = (cocos2d::plugin::ProtocolShare *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolShare_configDeveloperInfo : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TShareDeveloperInfo arg0;
ok &= pluginx::jsval_to_TShareDeveloperInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolShare_configDeveloperInfo : Error processing arguments");
cobj->configDeveloperInfo(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolShare_configDeveloperInfo : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
extern JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
void js_cocos2d_plugin_ProtocolShare_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (ProtocolShare)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::ProtocolShare *nobj = static_cast<cocos2d::plugin::ProtocolShare *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_ProtocolShare(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_ProtocolShare_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_ProtocolShare_class->name = "ProtocolShare";
jsb_cocos2d_plugin_ProtocolShare_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolShare_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_ProtocolShare_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolShare_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_ProtocolShare_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_ProtocolShare_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_ProtocolShare_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_ProtocolShare_class->finalize = js_cocos2d_plugin_ProtocolShare_finalize;
jsb_cocos2d_plugin_ProtocolShare_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("onShareResult", js_pluginx_protocols_ProtocolShare_onShareResult, 2, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("share", js_pluginx_protocols_ProtocolShare_share, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("configDeveloperInfo", js_pluginx_protocols_ProtocolShare_configDeveloperInfo, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_ProtocolShare_prototype = JS_InitClass(
cx, global,
jsb_cocos2d_plugin_PluginProtocol_prototype,
jsb_cocos2d_plugin_ProtocolShare_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "ProtocolShare", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::ProtocolShare> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_ProtocolShare_class;
p->proto = jsb_cocos2d_plugin_ProtocolShare_prototype;
p->parentProto = jsb_cocos2d_plugin_PluginProtocol_prototype;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_ProtocolSocial_class;
JSObject *jsb_cocos2d_plugin_ProtocolSocial_prototype;
bool js_pluginx_protocols_ProtocolSocial_showLeaderboard(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolSocial* cobj = (cocos2d::plugin::ProtocolSocial *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolSocial_showLeaderboard : Invalid Native Object");
if (argc == 1) {
const char* arg0;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolSocial_showLeaderboard : Error processing arguments");
cobj->showLeaderboard(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolSocial_showLeaderboard : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolSocial_showAchievements(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolSocial* cobj = (cocos2d::plugin::ProtocolSocial *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolSocial_showAchievements : Invalid Native Object");
if (argc == 0) {
cobj->showAchievements();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolSocial_showAchievements : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_ProtocolSocial_submitScore(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolSocial* cobj = (cocos2d::plugin::ProtocolSocial *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolSocial_submitScore : Invalid Native Object");
if (argc == 2) {
const char* arg0;
long arg1;
std::string arg0_tmp; ok &= jsval_to_std_string(cx, argv[0], &arg0_tmp); arg0 = arg0_tmp.c_str();
ok &= jsval_to_long(cx, argv[1], (long *)&arg1);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolSocial_submitScore : Error processing arguments");
cobj->submitScore(arg0, arg1);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolSocial_submitScore : wrong number of arguments: %d, was expecting %d", argc, 2);
return false;
}
bool js_pluginx_protocols_ProtocolSocial_configDeveloperInfo(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolSocial* cobj = (cocos2d::plugin::ProtocolSocial *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolSocial_configDeveloperInfo : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TSocialDeveloperInfo arg0;
ok &= pluginx::jsval_to_TSocialDeveloperInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolSocial_configDeveloperInfo : Error processing arguments");
cobj->configDeveloperInfo(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolSocial_configDeveloperInfo : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolSocial_unlockAchievement(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolSocial* cobj = (cocos2d::plugin::ProtocolSocial *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolSocial_unlockAchievement : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TAchievementInfo arg0;
ok &= pluginx::jsval_to_TAchievementInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolSocial_unlockAchievement : Error processing arguments");
cobj->unlockAchievement(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolSocial_unlockAchievement : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
extern JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
void js_cocos2d_plugin_ProtocolSocial_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (ProtocolSocial)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::ProtocolSocial *nobj = static_cast<cocos2d::plugin::ProtocolSocial *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_ProtocolSocial(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_ProtocolSocial_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_ProtocolSocial_class->name = "ProtocolSocial";
jsb_cocos2d_plugin_ProtocolSocial_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolSocial_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_ProtocolSocial_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolSocial_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_ProtocolSocial_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_ProtocolSocial_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_ProtocolSocial_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_ProtocolSocial_class->finalize = js_cocos2d_plugin_ProtocolSocial_finalize;
jsb_cocos2d_plugin_ProtocolSocial_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("showLeaderboard", js_pluginx_protocols_ProtocolSocial_showLeaderboard, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("showAchievements", js_pluginx_protocols_ProtocolSocial_showAchievements, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("submitScore", js_pluginx_protocols_ProtocolSocial_submitScore, 2, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("configDeveloperInfo", js_pluginx_protocols_ProtocolSocial_configDeveloperInfo, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("unlockAchievement", js_pluginx_protocols_ProtocolSocial_unlockAchievement, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_ProtocolSocial_prototype = JS_InitClass(
cx, global,
jsb_cocos2d_plugin_PluginProtocol_prototype,
jsb_cocos2d_plugin_ProtocolSocial_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "ProtocolSocial", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::ProtocolSocial> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_ProtocolSocial_class;
p->proto = jsb_cocos2d_plugin_ProtocolSocial_prototype;
p->parentProto = jsb_cocos2d_plugin_PluginProtocol_prototype;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
JSClass *jsb_cocos2d_plugin_ProtocolUser_class;
JSObject *jsb_cocos2d_plugin_ProtocolUser_prototype;
bool js_pluginx_protocols_ProtocolUser_isLogined(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolUser* cobj = (cocos2d::plugin::ProtocolUser *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolUser_isLogined : Invalid Native Object");
if (argc == 0) {
bool ret = cobj->isLogined();
jsval jsret = JSVAL_NULL;
jsret = BOOLEAN_TO_JSVAL(ret);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolUser_isLogined : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_ProtocolUser_logout(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolUser* cobj = (cocos2d::plugin::ProtocolUser *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolUser_logout : Invalid Native Object");
if (argc == 0) {
cobj->logout();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolUser_logout : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_ProtocolUser_configDeveloperInfo(JSContext *cx, uint32_t argc, jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
bool ok = true;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolUser* cobj = (cocos2d::plugin::ProtocolUser *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolUser_configDeveloperInfo : Invalid Native Object");
if (argc == 1) {
cocos2d::plugin::TUserDeveloperInfo arg0;
ok &= pluginx::jsval_to_TUserDeveloperInfo(cx, argv[0], &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pluginx_protocols_ProtocolUser_configDeveloperInfo : Error processing arguments");
cobj->configDeveloperInfo(arg0);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolUser_configDeveloperInfo : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pluginx_protocols_ProtocolUser_login(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolUser* cobj = (cocos2d::plugin::ProtocolUser *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolUser_login : Invalid Native Object");
if (argc == 0) {
cobj->login();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolUser_login : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pluginx_protocols_ProtocolUser_getSessionID(JSContext *cx, uint32_t argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
js_proxy_t *proxy = jsb_get_js_proxy(obj);
cocos2d::plugin::ProtocolUser* cobj = (cocos2d::plugin::ProtocolUser *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pluginx_protocols_ProtocolUser_getSessionID : Invalid Native Object");
if (argc == 0) {
std::string ret = cobj->getSessionID();
jsval jsret = JSVAL_NULL;
jsret = std_string_to_jsval(cx, ret);
JS_SET_RVAL(cx, vp, jsret);
return true;
}
JS_ReportError(cx, "js_pluginx_protocols_ProtocolUser_getSessionID : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
extern JSObject *jsb_cocos2d_plugin_PluginProtocol_prototype;
void js_cocos2d_plugin_ProtocolUser_finalize(JSFreeOp *fop, JSObject *obj) {
CCLOGINFO("jsbindings: finalizing JS object %p (ProtocolUser)", obj);
js_proxy_t* nproxy;
js_proxy_t* jsproxy;
jsproxy = jsb_get_js_proxy(obj);
if (jsproxy) {
nproxy = jsb_get_native_proxy(jsproxy->ptr);
cocos2d::plugin::ProtocolUser *nobj = static_cast<cocos2d::plugin::ProtocolUser *>(nproxy->ptr);
if (nobj)
delete nobj;
jsb_remove_proxy(nproxy, jsproxy);
}
}
void js_register_pluginx_protocols_ProtocolUser(JSContext *cx, JSObject *global) {
jsb_cocos2d_plugin_ProtocolUser_class = (JSClass *)calloc(1, sizeof(JSClass));
jsb_cocos2d_plugin_ProtocolUser_class->name = "ProtocolUser";
jsb_cocos2d_plugin_ProtocolUser_class->addProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolUser_class->delProperty = JS_DeletePropertyStub;
jsb_cocos2d_plugin_ProtocolUser_class->getProperty = JS_PropertyStub;
jsb_cocos2d_plugin_ProtocolUser_class->setProperty = JS_StrictPropertyStub;
jsb_cocos2d_plugin_ProtocolUser_class->enumerate = JS_EnumerateStub;
jsb_cocos2d_plugin_ProtocolUser_class->resolve = JS_ResolveStub;
jsb_cocos2d_plugin_ProtocolUser_class->convert = JS_ConvertStub;
jsb_cocos2d_plugin_ProtocolUser_class->finalize = js_cocos2d_plugin_ProtocolUser_finalize;
jsb_cocos2d_plugin_ProtocolUser_class->flags = JSCLASS_HAS_RESERVED_SLOTS(2);
static JSPropertySpec properties[] = {
{"__nativeObj", 0, JSPROP_ENUMERATE | JSPROP_PERMANENT, JSOP_WRAPPER(js_is_native_obj), JSOP_NULLWRAPPER},
{0, 0, 0, JSOP_NULLWRAPPER, JSOP_NULLWRAPPER}
};
static JSFunctionSpec funcs[] = {
JS_FN("isLogined", js_pluginx_protocols_ProtocolUser_isLogined, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("logout", js_pluginx_protocols_ProtocolUser_logout, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("configDeveloperInfo", js_pluginx_protocols_ProtocolUser_configDeveloperInfo, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("login", js_pluginx_protocols_ProtocolUser_login, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("getSessionID", js_pluginx_protocols_ProtocolUser_getSessionID, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
JSFunctionSpec *st_funcs = NULL;
jsb_cocos2d_plugin_ProtocolUser_prototype = JS_InitClass(
cx, global,
jsb_cocos2d_plugin_PluginProtocol_prototype,
jsb_cocos2d_plugin_ProtocolUser_class,
empty_constructor, 0,
properties,
funcs,
NULL, // no static properties
st_funcs);
// make the class enumerable in the registered namespace
// bool found;
//FIXME: Removed in Firefox v27
// JS_SetPropertyAttributes(cx, global, "ProtocolUser", JSPROP_ENUMERATE | JSPROP_READONLY, &found);
// add the proto and JSClass to the type->js info hash table
TypeTest<cocos2d::plugin::ProtocolUser> t;
js_type_class_t *p;
std::string typeName = t.s_name();
if (_js_global_type_map.find(typeName) == _js_global_type_map.end())
{
p = (js_type_class_t *)malloc(sizeof(js_type_class_t));
p->jsclass = jsb_cocos2d_plugin_ProtocolUser_class;
p->proto = jsb_cocos2d_plugin_ProtocolUser_prototype;
p->parentProto = jsb_cocos2d_plugin_PluginProtocol_prototype;
_js_global_type_map.insert(std::make_pair(typeName, p));
}
}
void register_all_pluginx_protocols(JSContext* cx, JSObject* obj) {
// first, try to get the ns
JS::RootedValue nsval(cx);
JS::RootedObject ns(cx);
JS_GetProperty(cx, obj, "plugin", &nsval);
if (nsval == JSVAL_VOID) {
ns = JS_NewObject(cx, NULL, NULL, NULL);
nsval = OBJECT_TO_JSVAL(ns);
JS_SetProperty(cx, obj, "plugin", nsval);
} else {
JS_ValueToObject(cx, nsval, &ns);
}
obj = ns;
js_register_pluginx_protocols_PluginProtocol(cx, obj);
js_register_pluginx_protocols_ProtocolUser(cx, obj);
js_register_pluginx_protocols_ProtocolShare(cx, obj);
js_register_pluginx_protocols_ProtocolIAP(cx, obj);
js_register_pluginx_protocols_ProtocolSocial(cx, obj);
js_register_pluginx_protocols_ProtocolAnalytics(cx, obj);
js_register_pluginx_protocols_ProtocolAds(cx, obj);
js_register_pluginx_protocols_PluginManager(cx, obj);
}
| 26,672 |
2,151 | // Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_STORAGE_STORAGE_INFO_FETCHER_H_
#define CHROME_BROWSER_STORAGE_STORAGE_INFO_FETCHER_H_
#include "base/memory/ref_counted.h"
#include "storage/browser/quota/quota_callbacks.h"
#include "third_party/blink/public/mojom/quota/quota_types.mojom.h"
namespace storage {
class QuotaManager;
}
class Profile;
// Asynchronously fetches the amount of storage used by websites.
class StorageInfoFetcher :
public base::RefCountedThreadSafe<StorageInfoFetcher> {
public:
using FetchCallback =
base::Callback<void(const storage::UsageInfoEntries&)>;
using ClearCallback =
base::Callback<void(blink::mojom::QuotaStatusCode code)>;
explicit StorageInfoFetcher(Profile* profile);
// Asynchronously fetches the StorageInfo.
void FetchStorageInfo(const FetchCallback& fetch_callback);
// Asynchronously clears storage for the given host.
void ClearStorage(const std::string& host,
blink::mojom::StorageType type,
const ClearCallback& clear_callback);
private:
virtual ~StorageInfoFetcher();
friend class base::RefCountedThreadSafe<StorageInfoFetcher>;
// Fetches the usage information.
void GetUsageInfo(storage::GetUsageInfoCallback callback);
// Called when usage information is available.
void OnGetUsageInfoInternal(const storage::UsageInfoEntries& entries);
// Reports back to all observers that information is available.
void OnFetchCompleted();
// Called when usage has been cleared.
void OnUsageClearedInternal(blink::mojom::QuotaStatusCode code);
// Reports back to all observers that storage has been deleted.
void OnClearCompleted(blink::mojom::QuotaStatusCode code);
// The quota manager to use to calculate the storage usage.
storage::QuotaManager* quota_manager_;
// Hosts and their usage.
storage::UsageInfoEntries entries_;
// The storage type to delete.
blink::mojom::StorageType type_to_delete_;
// The callback to use when fetching is complete.
FetchCallback fetch_callback_;
// The callback to use when storage has been cleared.
ClearCallback clear_callback_;
DISALLOW_COPY_AND_ASSIGN(StorageInfoFetcher);
};
#endif // CHROME_BROWSER_STORAGE_STORAGE_INFO_FETCHER_H_
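// A minimal usage sketch for the class above (illustrative only; it assumes a
// valid Profile* and the base::Bind/base::Callback conventions this header
// already uses, and the callback names below are placeholders):
//
//   void OnStorageFetched(const storage::UsageInfoEntries& entries) {
//     // Each entry pairs a host with the bytes it consumes.
//   }
//
//   void OnStorageCleared(blink::mojom::QuotaStatusCode code) {
//     // kOk indicates the host's data was removed.
//   }
//
//   void Example(Profile* profile) {
//     auto fetcher = base::MakeRefCounted<StorageInfoFetcher>(profile);
//     fetcher->FetchStorageInfo(base::Bind(&OnStorageFetched));
//     fetcher->ClearStorage("example.com",
//                           blink::mojom::StorageType::kTemporary,
//                           base::Bind(&OnStorageCleared));
//   }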
| 762 |
7,150 | <reponame>LaudateCorpus1/Learn-Algorithms
// FROM:http://www.gowrikumar.com/c/
#include <stdlib.h>
#include <stdio.h>
void Error(char* s)
{
printf(s);
return;
}
int main()
{
int *p;
p = malloc(sizeof(int));
if(p == NULL)
{
Error("Could not allocate the memory\n");
Error("Quitting....\n");
exit(1);
}
else
{
/*some stuff to use p*/
Error("Could not allocate the memory\n");
Error("Quitting....\n");
Error(5);
}
return 0;
}
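/*
 * A safer sketch of the Error helper (illustrative only): print the message as
 * data via "%s" instead of using it directly as the printf format string, and
 * keep the argument a string -- the call Error(5) above hands printf an
 * integer that is then treated as an address, which is exactly the crash
 * described in the comment that follows.
 */
void ErrorSafe(const char *s)
{
    fprintf(stderr, "%s", s);
}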
/*
The potential problem:
In the Error function, if the argument passed in is not a string -- for example
the integer 5 -- that value is reinterpreted as a memory address, and printf
faults the moment it tries to read from it:
$1 = 0x5 <error: Cannot access memory at address 0x5>
Passing the message straight through as the format string (printf(s)) is also
risky whenever the text contains '%'; the ErrorSafe sketch above sidesteps both
issues.
*/ | 447 |
388 | //
// Copyright (C) 2009-2012 <NAME> (Tonkikh)
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef BOOSTER_TEST_H
#define BOOSTER_TEST_H
#include <stdexcept>
#include <sstream>
#define TEST(X) \
do { \
if(X) break; \
std::ostringstream oss; \
oss << "Error " << __FILE__ << ":"<<__LINE__ << " "#X; \
throw std::runtime_error(oss.str()); \
}while(0)
#endif
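// A short usage sketch for the TEST macro above (illustrative only): the
// condition is evaluated once, and a failing check throws std::runtime_error
// whose message carries the file, the line and the stringified expression.
//
//   try {
//       TEST(2 + 2 == 4);   // passes, nothing happens
//       TEST(1 > 2);        // fails, throws "Error <file>:<line> 1 > 2"
//   }
//   catch(std::runtime_error const &e) {
//       // e.what() holds the location and the expression text
//   }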
| 243 |
371 | <reponame>StepNeverStop/RLs
from typing import Dict, List
import torch as th
import torch.nn as nn
from rls.nn.mlps import MLP
from rls.nn.represent_nets import RepresentationNetwork
class QTranBase(nn.Module):
def __init__(self,
n_agents,
state_spec,
rep_net_params,
a_dim,
qtran_arch,
hidden_units):
super().__init__()
self.rep_net = RepresentationNetwork(obs_spec=state_spec,
rep_net_params=rep_net_params)
self.qtran_arch = qtran_arch # QTran architecture
self.h_nums = 2 if self.rep_net.memory_net.network_type == 'lstm' else 1
# Q takes [state, agent_action_observation_encodings]
# Q(s,u)
if self.qtran_arch == "coma_critic":
# Q takes [state, u] as input
q_input_size = self.rep_net.h_dim + (n_agents * a_dim)
elif self.qtran_arch == "qtran_paper":
# Q takes [state, agent_action_observation_encodings]
ae_input = self.h_nums * self.rep_net.h_dim + a_dim
self.action_encoding = MLP(input_dim=ae_input, hidden_units=[ae_input],
layer='linear', act_fn='relu', output_shape=ae_input)
q_input_size = self.rep_net.h_dim + ae_input
else:
raise Exception(
"{} is not a valid QTran architecture".format(self.qtran_arch))
self.Q = MLP(input_dim=q_input_size, hidden_units=hidden_units,
layer='linear', act_fn='relu', output_shape=1)
# V(s)
self.V = MLP(input_dim=self.rep_net.h_dim, hidden_units=hidden_units,
layer='linear', act_fn='relu', output_shape=1)
def forward(self, state, hidden_states: List[Dict[str, th.Tensor]], actions: List[th.Tensor], **kwargs):
"""
state: [T, B, *]
hidden_states: N * [T, B, *]
actions: N * [T, B, A]
"""
# state: [T, B, *]
state_feat, _ = self.rep_net(state, **kwargs) # [T, B, *]
if self.qtran_arch == "coma_critic":
actions = th.cat(actions, dim=-1) # [T, B, N*A]
inputs = th.cat([state_feat, actions], dim=-1) # [T, B, *]
elif self.qtran_arch == "qtran_paper":
hs = [th.cat(list(hidden_state.values()), -1)
for hidden_state in hidden_states] # N * [T, B, *]
hs = th.stack(hs, dim=-2) # [T, B, N, *]
actions = th.stack(actions, dim=-2) # [T, B, N, A]
_input = th.cat((hs, actions), dim=-1) # [T, B, N, *]
agent_state_action_encoding = self.action_encoding(
_input) # [T, B, N, *]
agent_state_action_encoding = agent_state_action_encoding.sum(
dim=-2) # [T, B, *]
inputs = th.cat(
[state_feat, agent_state_action_encoding], dim=-1) # [T, B, *]
q_outputs = self.Q(inputs) # [T, B, 1]
v_outputs = self.V(state_feat) # [T, B, 1]
return q_outputs, v_outputs
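# A minimal shape sketch for QTranBase.forward (illustrative only: the
# constructor arguments depend on how the state spec and rep_net_params are
# defined elsewhere in RLs, and the dict key 'hx' below is just a placeholder
# for whatever memory state the representation net exposes):
#
#   T, B, N, A = 5, 4, 3, 6            # time steps, batch, agents, action dim
#   state = th.rand(T, B, obs_dim)     # encoded by rep_net -> [T, B, h_dim]
#   hidden_states = [{'hx': th.rand(T, B, h_dim)} for _ in range(N)]
#   actions = [th.rand(T, B, A) for _ in range(N)]    # per-agent (one-hot) actions
#   q, v = qtran(state, hidden_states, actions)       # q: [T, B, 1], v: [T, B, 1]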
| 1,667 |
2,219 | <gh_stars>1000+
// Copyright (c) 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/base/prioritized_task_runner.h"
#include <algorithm>
#include "base/bind.h"
#include "base/task_runner.h"
#include "base/task_runner_util.h"
namespace net {
PrioritizedTaskRunner::Job::Job(const base::Location& from_here,
base::OnceClosure task,
base::OnceClosure reply,
uint32_t priority,
uint32_t task_count)
: from_here(from_here),
task(std::move(task)),
reply(std::move(reply)),
priority(priority),
task_count(task_count) {}
PrioritizedTaskRunner::Job::Job() {}
PrioritizedTaskRunner::Job::~Job() = default;
PrioritizedTaskRunner::Job::Job(Job&& other) = default;
PrioritizedTaskRunner::Job& PrioritizedTaskRunner::Job::operator=(Job&& other) =
default;
PrioritizedTaskRunner::PrioritizedTaskRunner(
scoped_refptr<base::TaskRunner> task_runner)
: task_runner_(std::move(task_runner)) {}
void PrioritizedTaskRunner::PostTaskAndReply(const base::Location& from_here,
base::OnceClosure task,
base::OnceClosure reply,
uint32_t priority) {
Job job(from_here, std::move(task), std::move(reply), priority,
task_count_++);
{
base::AutoLock lock(task_job_heap_lock_);
task_job_heap_.push_back(std::move(job));
std::push_heap(task_job_heap_.begin(), task_job_heap_.end(), JobComparer());
}
task_runner_->PostTaskAndReply(
from_here,
base::BindOnce(&PrioritizedTaskRunner::RunTaskAndPostReply, this),
base::BindOnce(&PrioritizedTaskRunner::RunReply, this));
}
PrioritizedTaskRunner::~PrioritizedTaskRunner() = default;
void PrioritizedTaskRunner::RunTaskAndPostReply() {
// Find the next job to run.
Job job;
{
base::AutoLock lock(task_job_heap_lock_);
std::pop_heap(task_job_heap_.begin(), task_job_heap_.end(), JobComparer());
job = std::move(task_job_heap_.back());
task_job_heap_.pop_back();
}
std::move(job.task).Run();
// Add the job to the reply priority queue.
base::AutoLock reply_lock(reply_job_heap_lock_);
reply_job_heap_.push_back(std::move(job));
std::push_heap(reply_job_heap_.begin(), reply_job_heap_.end(), JobComparer());
}
void PrioritizedTaskRunner::RunReply() {
// Find the next job to run.
Job job;
{
base::AutoLock lock(reply_job_heap_lock_);
std::pop_heap(reply_job_heap_.begin(), reply_job_heap_.end(),
JobComparer());
job = std::move(reply_job_heap_.back());
reply_job_heap_.pop_back();
}
// Run the job.
std::move(job.reply).Run();
}
} // namespace net
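// A brief usage sketch (illustrative only; it assumes the caller already has a
// scoped_refptr<base::TaskRunner>, e.g. from base::ThreadPool, and it does not
// assert which numeric priority value JobComparer favours -- see the header):
//
//   auto prioritized = base::MakeRefCounted<net::PrioritizedTaskRunner>(
//       background_task_runner);
//   prioritized->PostTaskAndReply(
//       FROM_HERE,
//       base::BindOnce([] { /* heavy work, runs on the wrapped runner */ }),
//       base::BindOnce([] { /* reply, runs back on the posting sequence */ }),
//       /*priority=*/2);
//
// Tasks and replies are drained through the two heaps above, so among pending
// jobs the one JobComparer ranks first runs next regardless of posting order.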
| 1,248 |
1,057 | <filename>script/testdata/exec_caller_from_witness.c
#include "ckb_syscalls.h"
int main() {
int argc = 3;
char *argv[] = {"a", "b", "c"};
syscall(2043, 0, 1, 1, 0, argc, argv);
return -1;
}
| 104 |
575 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/renderer_host/input/touch_action_filter.h"
#include "content/browser/renderer_host/event_with_latency_info.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/common/input/synthetic_web_input_event_builders.h"
#include "third_party/blink/public/common/input/web_input_event.h"
#include "third_party/blink/public/mojom/input/input_event_result.mojom-shared.h"
#include "ui/events/blink/blink_features.h"
using blink::SyntheticWebGestureEventBuilder;
using blink::WebGestureEvent;
using blink::WebInputEvent;
namespace content {
namespace {
const blink::WebGestureDevice kSourceDevice =
blink::WebGestureDevice::kTouchscreen;
} // namespace
class TouchActionFilterTest : public testing::Test {
public:
TouchActionFilterTest() { filter_.OnHasTouchEventHandlers(true); }
~TouchActionFilterTest() override = default;
protected:
base::Optional<cc::TouchAction> ActiveTouchAction() const {
return filter_.active_touch_action_;
}
void ResetTouchAction() { filter_.ResetTouchAction(); }
void ResetActiveTouchAction() { filter_.active_touch_action_.reset(); }
void ResetCompositorAllowedTouchAction() {
filter_.compositor_allowed_touch_action_ = cc::TouchAction::kAuto;
}
void SetNoDeferredEvents() { filter_.has_deferred_events_ = false; }
void SetGestureSequenceInProgress() {
filter_.gesture_sequence_in_progress_ = true;
}
void ResetGestureSequenceInProgress() {
filter_.gesture_sequence_in_progress_ = false;
}
void PanTest(cc::TouchAction action,
float scroll_x,
float scroll_y,
float dx,
float dy,
float expected_dx,
float expected_dy) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
{
// Scrolls with no direction hint are permitted in the |action| direction.
ResetTouchAction();
filter_.OnSetTouchAction(action);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(0, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(dx, dy, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(expected_dx, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(expected_dy, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
{
// Scrolls biased towards the touch-action axis are permitted.
ResetTouchAction();
filter_.OnSetTouchAction(action);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(scroll_x, scroll_y,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(dx, dy, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(expected_dx, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(expected_dy, scroll_update.data.scroll_update.delta_y);
// Ensure that scrolls in the opposite direction are not filtered once
// scrolling has started. (Once scrolling is started, the direction may
// be reversed by the user even if scrolls that start in the reversed
      // direction are disallowed.)
WebGestureEvent scroll_update2 =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(-dx, -dy, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update2),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(-expected_dx, scroll_update2.data.scroll_update.delta_x);
EXPECT_EQ(-expected_dy, scroll_update2.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
{
// Scrolls biased towards the perpendicular of the touch-action axis are
// suppressed entirely.
ResetTouchAction();
filter_.OnSetTouchAction(action);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(scroll_y, scroll_x,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(dx, dy, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(dx, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(dy, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
}
void PanTestForUnidirectionalTouchAction(cc::TouchAction action,
float scroll_x,
float scroll_y) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
{
// Scrolls towards the touch-action direction are permitted.
ResetTouchAction();
filter_.OnSetTouchAction(action);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(scroll_x, scroll_y,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(scroll_x, scroll_y,
0, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
{
// Scrolls towards the exact opposite of the touch-action direction are
// suppressed entirely.
ResetTouchAction();
filter_.OnSetTouchAction(action);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(
-scroll_x, -scroll_y, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(
-scroll_x, -scroll_y, 0, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
{
// Scrolls towards the diagonal opposite of the touch-action direction are
// suppressed entirely.
ResetTouchAction();
filter_.OnSetTouchAction(action);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(
-scroll_x - scroll_y, -scroll_x - scroll_y, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(
-scroll_x - scroll_y, -scroll_x - scroll_y, 0, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
}
TouchActionFilter filter_;
};
TEST_F(TouchActionFilterTest, SimpleFilter) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(2, 3, kSourceDevice);
const float kDeltaX = 5;
const float kDeltaY = 10;
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(kDeltaX, kDeltaY, 0,
kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
// cc::TouchAction::kAuto doesn't cause any filtering.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(kDeltaX, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(kDeltaY, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// cc::TouchAction::kNone filters out all scroll events, but no other events.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(kDeltaX, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(kDeltaY, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
// When a new touch sequence begins, the state is reset.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// Setting touch action doesn't impact any in-progress gestures.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.OnSetTouchAction(cc::TouchAction::kNone);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// And the state is still cleared for the next gesture.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// Changing the touch action during a gesture has no effect.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(kDeltaX, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(kDeltaY, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
// horizontal scroll
scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(3, 2, kSourceDevice);
// kInternalPanXScrolls has no effect when active touch action is available.
{
ResetTouchAction();
// With kInternalPanXScrolls
filter_.OnSetTouchAction(cc::TouchAction::kPanX |
cc::TouchAction::kInternalPanXScrolls);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
ResetTouchAction();
// Without kInternalPanXScrolls
filter_.OnSetTouchAction(cc::TouchAction::kPanX);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
ResetTouchAction();
// We only set kInternalPanXScrolls when kPanX is set, so there is no
// kInternalPanXScrolls with kPanY case.
filter_.OnSetTouchAction(cc::TouchAction::kPanY);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
}
TEST_F(TouchActionFilterTest, PanLeft) {
const float kDX = 5;
const float kDY = 10;
const float kScrollX = 7;
const float kScrollY = 6;
PanTest(cc::TouchAction::kPanLeft, kScrollX, kScrollY, kDX, kDY, kDX, 0);
PanTestForUnidirectionalTouchAction(cc::TouchAction::kPanLeft, kScrollX, 0);
}
TEST_F(TouchActionFilterTest, PanRight) {
const float kDX = 5;
const float kDY = 10;
const float kScrollX = -7;
const float kScrollY = 6;
PanTest(cc::TouchAction::kPanRight, kScrollX, kScrollY, kDX, kDY, kDX, 0);
PanTestForUnidirectionalTouchAction(cc::TouchAction::kPanRight, kScrollX, 0);
}
TEST_F(TouchActionFilterTest, PanX) {
const float kDX = 5;
const float kDY = 10;
const float kScrollX = 7;
const float kScrollY = 6;
PanTest(cc::TouchAction::kPanX, kScrollX, kScrollY, kDX, kDY, kDX, 0);
}
TEST_F(TouchActionFilterTest, PanUp) {
const float kDX = 5;
const float kDY = 10;
const float kScrollX = 6;
const float kScrollY = 7;
PanTest(cc::TouchAction::kPanUp, kScrollX, kScrollY, kDX, kDY, 0, kDY);
PanTestForUnidirectionalTouchAction(cc::TouchAction::kPanUp, 0, kScrollY);
}
TEST_F(TouchActionFilterTest, PanDown) {
const float kDX = 5;
const float kDY = 10;
const float kScrollX = 6;
const float kScrollY = -7;
PanTest(cc::TouchAction::kPanDown, kScrollX, kScrollY, kDX, kDY, 0, kDY);
PanTestForUnidirectionalTouchAction(cc::TouchAction::kPanDown, 0, kScrollY);
}
TEST_F(TouchActionFilterTest, PanY) {
const float kDX = 5;
const float kDY = 10;
const float kScrollX = 6;
const float kScrollY = 7;
PanTest(cc::TouchAction::kPanY, kScrollX, kScrollY, kDX, kDY, 0, kDY);
}
TEST_F(TouchActionFilterTest, PanXY) {
const float kDX = 5;
const float kDY = 10;
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
{
// Scrolls hinted in the X axis are permitted and unmodified.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPan);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(-7, 6, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(kDX, kDY, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(kDX, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(kDY, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
{
// Scrolls hinted in the Y axis are permitted and unmodified.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPan);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(-6, 7, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(kDX, kDY, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(kDX, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(kDY, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
{
// A two-finger gesture is not allowed.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPan);
filter_.IncreaseActiveTouches();
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(-6, 7, kSourceDevice,
2);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(kDX, kDY, 0,
kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
}
TEST_F(TouchActionFilterTest, BitMath) {
// Verify that the simple flag mixing properties we depend on are now
// trivially true.
EXPECT_EQ(cc::TouchAction::kNone,
cc::TouchAction::kNone & cc::TouchAction::kAuto);
EXPECT_EQ(cc::TouchAction::kNone,
cc::TouchAction::kPanY & cc::TouchAction::kPanX);
EXPECT_EQ(cc::TouchAction::kPan,
cc::TouchAction::kAuto & cc::TouchAction::kPan);
EXPECT_EQ(cc::TouchAction::kManipulation,
cc::TouchAction::kAuto & ~(cc::TouchAction::kDoubleTapZoom |
cc::TouchAction::kInternalPanXScrolls));
EXPECT_EQ(cc::TouchAction::kPanX,
cc::TouchAction::kPanLeft | cc::TouchAction::kPanRight);
EXPECT_EQ(cc::TouchAction::kAuto, cc::TouchAction::kManipulation |
cc::TouchAction::kDoubleTapZoom |
cc::TouchAction::kInternalPanXScrolls);
}
TEST_F(TouchActionFilterTest, MultiTouch) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(2, 3, kSourceDevice);
const float kDeltaX = 5;
const float kDeltaY = 10;
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(kDeltaX, kDeltaY, 0,
kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
// For multiple points, the intersection is what matters.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(kDeltaX, scroll_update.data.scroll_update.delta_x);
EXPECT_EQ(kDeltaY, scroll_update.data.scroll_update.delta_y);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
// Intersection of PAN_X and PAN_Y is NONE.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPanX);
filter_.OnSetTouchAction(cc::TouchAction::kPanY);
filter_.OnSetTouchAction(cc::TouchAction::kPan);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
class TouchActionFilterPinchTest : public testing::Test {
public:
TouchActionFilterPinchTest() = default;
void RunTest(bool force_enable_zoom) {
filter_.OnHasTouchEventHandlers(true);
filter_.SetForceEnableZoom(force_enable_zoom);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(2, 3, kSourceDevice,
2);
WebGestureEvent pinch_begin = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchBegin, kSourceDevice);
WebGestureEvent pinch_update =
SyntheticWebGestureEventBuilder::BuildPinchUpdate(1.2f, 5, 5, 0,
kSourceDevice);
WebGestureEvent pinch_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchEnd, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
// Pinch is allowed with touch-action: auto.
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// Pinch is not allowed with touch-action: none.
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
// Pinch is not allowed with touch-action: pan-x pan-y except for force
// enable zoom.
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPan);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_NE(filter_.FilterGestureEvent(&scroll_begin),
force_enable_zoom
? FilterGestureEventResult::kFilterGestureEventFiltered
: FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_NE(filter_.FilterGestureEvent(&pinch_begin),
force_enable_zoom
? FilterGestureEventResult::kFilterGestureEventFiltered
: FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_NE(filter_.FilterGestureEvent(&pinch_update),
force_enable_zoom
? FilterGestureEventResult::kFilterGestureEventFiltered
: FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_NE(filter_.FilterGestureEvent(&pinch_end),
force_enable_zoom
? FilterGestureEventResult::kFilterGestureEventFiltered
: FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_NE(filter_.FilterGestureEvent(&scroll_end),
force_enable_zoom
? FilterGestureEventResult::kFilterGestureEventFiltered
: FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// Pinch is allowed with touch-action: manipulation.
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kManipulation);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_FALSE(filter_.drop_pinch_events_);
// The pinch gesture is always re-evaluated on pinch begin.
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// Pinch state is automatically reset at the end of a scroll.
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
// Scrolling is allowed when two fingers are down.
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPinchZoom);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
    // In the double-tap-drag-zoom case, the pointer_count is 1 at
    // GesturePinchBegin, so we need to evaluate whether the gesture is allowed
    // at that point.
scroll_begin.data.scroll_begin.pointer_count = 1;
filter_.ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kPinchZoom);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
private:
TouchActionFilter filter_;
};
TEST_F(TouchActionFilterPinchTest, Pinch) {
RunTest(false);
}
// Enabling force-enable-zoom overrides touch-action except for
// touch-action: none.
TEST_F(TouchActionFilterPinchTest, ForceEnableZoom) {
RunTest(true);
}
TEST_F(TouchActionFilterTest, DoubleTapWithTouchActionAuto) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent unconfirmed_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapUnconfirmed, kSourceDevice);
WebGestureEvent tap_cancel = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapCancel, kSourceDevice);
WebGestureEvent double_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureDoubleTap, kSourceDevice);
// Double tap is allowed with touch action auto.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&unconfirmed_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(unconfirmed_tap.GetType(),
WebInputEvent::Type::kGestureTapUnconfirmed);
// The tap cancel will come as part of the next touch sequence.
ResetTouchAction();
  // Changing the touch action for the second tap doesn't affect the behaviour
  // of the event.
filter_.OnSetTouchAction(cc::TouchAction::kNone);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&double_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
TEST_F(TouchActionFilterTest, DoubleTap) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent unconfirmed_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapUnconfirmed, kSourceDevice);
WebGestureEvent tap_cancel = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapCancel, kSourceDevice);
WebGestureEvent double_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureDoubleTap, kSourceDevice);
// Double tap is disabled with any touch action other than auto.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kManipulation);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&unconfirmed_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(WebInputEvent::Type::kGestureTap, unconfirmed_tap.GetType());
  // Changing the touch action for the second tap doesn't affect the behaviour
  // of the event. The tap cancel will come as part of the next touch sequence.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&double_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(WebInputEvent::Type::kGestureTap, double_tap.GetType());
EXPECT_EQ(2, double_tap.data.tap.tap_count);
filter_.DecreaseActiveTouches();
}
TEST_F(TouchActionFilterTest, SingleTapWithTouchActionAuto) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent unconfirmed_tap1 = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapUnconfirmed, kSourceDevice);
WebGestureEvent tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTap, kSourceDevice);
// Single tap is allowed with touch action auto.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&unconfirmed_tap1),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(WebInputEvent::Type::kGestureTapUnconfirmed,
unconfirmed_tap1.GetType());
EXPECT_EQ(filter_.FilterGestureEvent(&tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
TEST_F(TouchActionFilterTest, SingleTap) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent unconfirmed_tap1 = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapUnconfirmed, kSourceDevice);
WebGestureEvent tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTap, kSourceDevice);
// With touch action other than auto, tap unconfirmed is turned into tap.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&unconfirmed_tap1),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(WebInputEvent::Type::kGestureTap, unconfirmed_tap1.GetType());
EXPECT_EQ(filter_.FilterGestureEvent(&tap),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
}
TEST_F(TouchActionFilterTest, TouchActionResetsOnResetTouchAction) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTap, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(2, 3, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
TEST_F(TouchActionFilterTest, TouchActionResetMidSequence) {
filter_.OnHasTouchEventHandlers(true);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(2, 3, kSourceDevice);
WebGestureEvent pinch_begin = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchBegin, kSourceDevice);
WebGestureEvent pinch_update =
SyntheticWebGestureEventBuilder::BuildPinchUpdate(1.2f, 5, 5, 0,
kSourceDevice);
WebGestureEvent pinch_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchEnd, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
filter_.OnSetTouchAction(cc::TouchAction::kNone);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
// Even though the allowed action is auto after the reset, the remaining
// scroll and pinch events should be suppressed.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
filter_.DecreaseActiveTouches();
// A new scroll and pinch sequence should be allowed.
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
// Resetting from auto to auto mid-stream should have no effect.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kAuto);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
// This test makes sure that we do not reset scrolling touch action in the
// middle of a gesture sequence.
TEST_F(TouchActionFilterTest, TouchActionNotResetWithinGestureSequence) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
filter_.OnSetTouchAction(cc::TouchAction::kPanY);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(1, 3, kSourceDevice);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(1, 5, 0,
kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(cc::TouchAction::kPanY, ActiveTouchAction().value());
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
// Simulate a touch sequence end by calling ReportAndResetTouchAction.
filter_.DecreaseActiveTouches();
filter_.ReportAndResetTouchAction();
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
EXPECT_EQ(cc::TouchAction::kPanY, ActiveTouchAction().value());
  // In the fling or fling-boosting case, we will see a ScrollUpdate after the
  // touch end.
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
// The |allowed_touch_action_| should have been reset, but not the
// |scrolling_touch_action_|.
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
EXPECT_EQ(cc::TouchAction::kPanY, ActiveTouchAction().value());
}
// The following 3 tests ensure that when the IPC message
// OnHasTouchEventHandlers is received in the middle of a gesture sequence, the
// touch action is not reset.
TEST_F(TouchActionFilterTest, OnHasTouchEventHandlersReceivedDuringTap) {
filter_.OnHasTouchEventHandlers(false);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.OnHasTouchEventHandlers(true);
EXPECT_TRUE(ActiveTouchAction().has_value());
filter_.OnSetTouchAction(cc::TouchAction::kPan);
// Simulate a simple tap gesture.
WebGestureEvent tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTap, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
// Gesture tap indicates that there is no scroll in progress, so this should
// reset the |allowed_touch_action_|.
ResetTouchAction();
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
}
TEST_F(TouchActionFilterTest, OnHasTouchEventHandlersReceivedDuringDoubleTap) {
filter_.OnHasTouchEventHandlers(false);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent tap_cancel = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapCancel, kSourceDevice);
WebGestureEvent double_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureDoubleTap, kSourceDevice);
// Simulate a double tap gesture: GTD-->GTC-->GTD-->GTC-->GDT.
filter_.IncreaseActiveTouches();
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kAuto);
filter_.OnHasTouchEventHandlers(true);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_TRUE(ActiveTouchAction().has_value());
filter_.OnSetTouchAction(cc::TouchAction::kPan);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kPan);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&double_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.DecreaseActiveTouches();
}
TEST_F(TouchActionFilterTest, OnHasTouchEventHandlersReceivedDuringScroll) {
filter_.OnHasTouchEventHandlers(false);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent tap_cancel = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapCancel, kSourceDevice);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(5, 0, kSourceDevice);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(5, 0, 0,
kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
// Simulate a gesture scroll: GTD-->GTC-->GSB-->GSU-->GSE.
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventAllowed);
filter_.OnHasTouchEventHandlers(true);
filter_.OnSetTouchAction(cc::TouchAction::kPan);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kPan);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
// If OnHasTouchEventHandlers IPC is received after LongTap or TwoFingerTap,
// the touch action should be reset.
TEST_F(TouchActionFilterTest,
OnHasTouchEventHandlersReceivedAfterLongTapOrTwoFingerTap) {
filter_.OnHasTouchEventHandlers(false);
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
WebGestureEvent tap_cancel = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapCancel, kSourceDevice);
WebGestureEvent long_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureLongTap, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&long_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kAuto);
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
filter_.OnHasTouchEventHandlers(false);
WebGestureEvent two_finger_tap = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTwoFingerTap, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_cancel),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&two_finger_tap),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kAuto);
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
}
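// If the OnHasTouchEventHandlers IPC arrives after a touch start ack has
// already set the touch action, the existing touch action values are kept.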
TEST_F(TouchActionFilterTest, OnHasTouchEventHandlersReceivedAfterTouchStart) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
// Receive a touch start ack, set the touch action.
filter_.OnSetTouchAction(cc::TouchAction::kPanY);
filter_.IncreaseActiveTouches();
filter_.OnHasTouchEventHandlers(false);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kPanY);
EXPECT_EQ(filter_.allowed_touch_action().value(), cc::TouchAction::kPanY);
filter_.OnHasTouchEventHandlers(true);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kPanY);
EXPECT_EQ(filter_.allowed_touch_action().value(), cc::TouchAction::kPanY);
}
TEST_F(TouchActionFilterTest, ResetTouchActionWithActiveTouch) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
// Receive a touch start ack, set the touch action.
filter_.OnSetTouchAction(cc::TouchAction::kPanY);
filter_.IncreaseActiveTouches();
// Somehow we get the ACK for the second touch start before the ACK for the
// first touch end.
filter_.OnSetTouchAction(cc::TouchAction::kPan);
filter_.IncreaseActiveTouches();
// The first touch end comes, we report and reset touch action. The touch
// actions should still have value.
filter_.DecreaseActiveTouches();
filter_.ReportAndResetTouchAction();
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kPanY);
EXPECT_EQ(filter_.allowed_touch_action().value(), cc::TouchAction::kPanY);
// The ack for the second touch end comes, the touch actions will be reset.
filter_.DecreaseActiveTouches();
filter_.ReportAndResetTouchAction();
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
}
// If the renderer is busy, the gesture event might have come before the
// OnHasTouchEventHandlers IPC is received. In this case, we should allow all
// the gestures.
TEST_F(TouchActionFilterTest, GestureArrivesBeforeHasHandlerSet) {
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
TEST_F(TouchActionFilterTest,
PinchGesturesAllowedByCompositorAllowedTouchAction) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
// Compositor allowed touch action has a default value of Auto, and pinch
// related gestures should be allowed.
WebGestureEvent pinch_begin = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchBegin, kSourceDevice);
WebGestureEvent pinch_update =
SyntheticWebGestureEventBuilder::BuildPinchUpdate(1.2f, 5, 5, 0,
kSourceDevice);
WebGestureEvent pinch_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchEnd, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
// Test gesture event filtering with compositor allowed touch action. It should
// test all 3 kinds of results: Allowed / Dropped / Delayed.
TEST_F(TouchActionFilterTest, FilterWithCompositorAllowedListedTouchAction) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
int dx = 2, dy = 5;
// Test gestures that are allowed.
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(dx, dy, kSourceDevice);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(dx, dy, 0,
kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
  // Vertical scroll; kInternalPanXScrolls has no effect here.
filter_.OnSetCompositorAllowedTouchAction(
cc::TouchAction::kPan | cc::TouchAction::kInternalPanXScrolls);
EXPECT_EQ(filter_.compositor_allowed_touch_action(),
cc::TouchAction::kPan | cc::TouchAction::kInternalPanXScrolls);
SetGestureSequenceInProgress();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
// Don't have kInternalPanXScrolls, but this is a vertical scroll, so all the
// events are allowed.
ResetTouchAction();
ResetActiveTouchAction();
ResetCompositorAllowedTouchAction();
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPan);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPan);
SetGestureSequenceInProgress();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
// Pinch related gestures are always delayed.
ResetTouchAction();
ResetActiveTouchAction();
ResetCompositorAllowedTouchAction();
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPan);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPan);
WebGestureEvent pinch_begin = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchBegin, kSourceDevice);
WebGestureEvent pinch_update =
SyntheticWebGestureEventBuilder::BuildPinchUpdate(1.2f, 5, 5, 0,
kSourceDevice);
WebGestureEvent pinch_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchEnd, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventDelayed);
// Scroll updates should be delayed if the compositor allowed listed touch
  // action is PanY, because there is a delta component along a direction that
  // is not allowed.
ResetTouchAction();
ResetActiveTouchAction();
ResetCompositorAllowedTouchAction();
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPanY);
SetNoDeferredEvents();
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPanY);
SetGestureSequenceInProgress();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventDelayed);
  // Horizontal scroll without kInternalPanXScrolls: scroll events are delayed.
ResetTouchAction();
ResetActiveTouchAction();
ResetCompositorAllowedTouchAction();
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPanX);
SetNoDeferredEvents();
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPanX);
dy = 0;
scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(dx, dy, kSourceDevice);
scroll_update = SyntheticWebGestureEventBuilder::BuildScrollUpdate(
dx, dy, 0, kSourceDevice);
SetGestureSequenceInProgress();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventDelayed);
dx = 0;
dy = 5;
scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(dx, dy, kSourceDevice);
scroll_update = SyntheticWebGestureEventBuilder::BuildScrollUpdate(
dx, dy, 0, kSourceDevice);
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPanX);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPanX);
SetGestureSequenceInProgress();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventDelayed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventDelayed);
}
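// ResetTouchAction() should restore the compositor allowed touch action to its
// default value of Auto when touch event handlers are present.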
TEST_F(TouchActionFilterTest, CompositorAllowedTouchActionResetToAuto) {
filter_.OnHasTouchEventHandlers(true);
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPan);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPan);
ResetTouchAction();
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kAuto);
}
TEST_F(TouchActionFilterTest, CompositorAllowedTouchActionAutoNoHasHandlers) {
filter_.OnHasTouchEventHandlers(false);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kAuto);
ResetTouchAction();
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kAuto);
}
TEST_F(TouchActionFilterTest, ResetBeforeHasHandlerSet) {
// This should not crash, and should set touch action to auto.
ResetTouchAction();
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
TEST_F(TouchActionFilterTest,
CompositorAllowedTouchActionNotResetAtGestureScrollEnd) {
filter_.OnHasTouchEventHandlers(true);
filter_.OnSetCompositorAllowedTouchAction(cc::TouchAction::kPan);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPan);
int dx = 2, dy = 5;
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(dx, dy, kSourceDevice);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(dx, dy, 0,
kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
SetGestureSequenceInProgress();
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kPan);
}
// Having a gesture scroll begin without tap down should assume touch action is
// auto.
TEST_F(TouchActionFilterTest, ScrollBeginWithoutTapDown) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(5, 0, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.compositor_allowed_touch_action(), cc::TouchAction::kAuto);
}
// This tests a gesture tap down with |num_of_active_touches_| == 0
TEST_F(TouchActionFilterTest, TapDownWithZeroNumOfActiveTouches) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
WebGestureEvent tap_down = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureTapDown, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&tap_down),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_TRUE(ActiveTouchAction().has_value());
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kAuto);
}
// Regression test for crbug.com/771330. One can start a one-finger pan along
// the y axis and then add a second finger to pinch zoom. The pinch zoom should
// not be allowed if the allowed touch action does not permit it.
TEST_F(TouchActionFilterTest, PinchZoomStartsWithOneFingerPanDisallowed) {
filter_.OnHasTouchEventHandlers(true);
filter_.OnSetTouchAction(cc::TouchAction::kPanY);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(0, 3, kSourceDevice);
WebGestureEvent scroll_update =
SyntheticWebGestureEventBuilder::BuildScrollUpdate(5, 10, 0,
kSourceDevice);
WebGestureEvent pinch_begin = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchBegin, kSourceDevice);
WebGestureEvent pinch_update =
SyntheticWebGestureEventBuilder::BuildPinchUpdate(1.2f, 5, 5, 0,
kSourceDevice);
WebGestureEvent pinch_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGesturePinchEnd, kSourceDevice);
WebGestureEvent scroll_end = SyntheticWebGestureEventBuilder::Build(
WebInputEvent::Type::kGestureScrollEnd, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_update),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_begin),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_update),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&pinch_end),
FilterGestureEventResult::kFilterGestureEventFiltered);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_end),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
TEST_F(TouchActionFilterTest, ScrollBeginWithoutTapDownWithKnownTouchAction) {
filter_.OnHasTouchEventHandlers(true);
EXPECT_FALSE(ActiveTouchAction().has_value());
EXPECT_FALSE(filter_.allowed_touch_action().has_value());
filter_.OnSetTouchAction(cc::TouchAction::kPan);
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(5, 0, kSourceDevice);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
EXPECT_EQ(ActiveTouchAction().value(), cc::TouchAction::kPan);
EXPECT_EQ(filter_.allowed_touch_action().value(), cc::TouchAction::kPan);
}
TEST_F(TouchActionFilterTest, TouchpadScroll) {
WebGestureEvent scroll_begin =
SyntheticWebGestureEventBuilder::BuildScrollBegin(
2, 3, blink::WebGestureDevice::kTouchpad);
// cc::TouchAction::kNone filters out only touchscreen scroll events.
ResetTouchAction();
filter_.OnSetTouchAction(cc::TouchAction::kNone);
EXPECT_EQ(filter_.FilterGestureEvent(&scroll_begin),
FilterGestureEventResult::kFilterGestureEventAllowed);
}
} // namespace content
| 28,128 |
713 | <filename>leetcode.com/python/310_Minimum_Height_Trees.py
from collections import deque
class Solution(object):
def findMinHeightTrees(self, n, edges):
"""
:type n: int
:type edges: List[List[int]]
:rtype: List[int]
"""
if n <= 0:
return []
        # a single node has degree 0, so the leaf-trimming loop below would never see it; handle it separately
if n == 1:
return [0]
# a. Initialize the graph
inDegree = {i: 0 for i in range(n)} # count of incoming edges
graph = {i: [] for i in range(n)} # adjacency list graph
# b. Build the graph
for edge in edges:
node1, node2 = edge[0], edge[1]
# since this is an undirected graph, therefore, add a link for both the nodes
graph[node1].append(node2)
graph[node2].append(node1)
# increment the in-degrees of both the nodes
inDegree[node1] += 1
inDegree[node2] += 1
        # c. Find all leaves i.e., all nodes with a degree of 1 (a single edge)
leaves = deque()
for key in inDegree:
if inDegree[key] == 1:
leaves.append(key)
# d. Remove leaves level by level and subtract each leave's children's in-degrees.
# Repeat this until we are left with 1 or 2 nodes, which will be our answer.
# Any node that has already been a leaf cannot be the root of a minimum height tree, because
# its adjacent non-leaf node will always be a better candidate.
totalNodes = n
while totalNodes > 2:
leavesSize = len(leaves)
totalNodes -= leavesSize
for i in range(0, leavesSize):
vertex = leaves.popleft()
# get the node's children to decrement their in-degrees
for child in graph[vertex]:
inDegree[child] -= 1
if inDegree[child] == 1:
leaves.append(child)
return list(leaves)
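# Example (LeetCode sample input): Solution().findMinHeightTrees(4, [[1, 0], [1, 2], [1, 3]]) returns [1]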
| 1,004 |
1,338 | /*
* Copyright 2010, <NAME>, <EMAIL>.
* Distributed under the terms of the MIT License.
*/
#include "HIDCollection.h"
#include "HIDParser.h"
#include "HIDReport.h"
#include <File.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
int
main(int argc, char *argv[])
{
if (argc < 2) {
printf("usage: %s <hid_descriptor_file>\n", argv[0]);
return 1;
}
BFile file(argv[1], B_READ_ONLY);
if (!file.IsReadable()) {
printf("can't open file \"%s\" for reading\n", argv[1]);
return 2;
}
off_t descriptorLength;
file.GetSize(&descriptorLength);
uint8 *reportDescriptor = (uint8 *)malloc(descriptorLength);
if (reportDescriptor == NULL) {
printf("failed to allocate buffer of %lld bytes\n", descriptorLength);
return 3;
}
ssize_t read = file.Read(reportDescriptor, descriptorLength);
if (read != descriptorLength) {
printf("failed to read file of %lld bytes: %s\n", descriptorLength,
strerror(read));
return 4;
}
HIDParser parser(NULL);
status_t result = parser.ParseReportDescriptor(reportDescriptor,
descriptorLength);
free(reportDescriptor);
if (result != B_OK) {
printf("failed to parse descriptor: %s\n", strerror(result));
return 5;
}
parser.PrintToStream();
return 0;
}
| 477 |
325 |
#include "TestListener.hpp"
#include <iostream>
#include <chrono>
#include <ctime>
void TestListener::setCallbacks(std::function<void ()> &&codePreLoadCallback, std::function<void ()> &&codePostLoadCallback)
{
m_codePreLoadCallback = std::move(codePreLoadCallback);
m_codePostLoadCallback = std::move(codePostLoadCallback);
}
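// Writes the message to stdout prefixed with a UTC timestamp at millisecond
// resolution and a one-letter severity tag, e.g. "[2021-01-01 12:00:00:123][I] ...".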
void TestListener::onLog(jet::LogSeverity severity, const std::string &message)
{
using namespace std::chrono;
high_resolution_clock::time_point p = high_resolution_clock::now();
milliseconds ms = duration_cast<milliseconds>(p.time_since_epoch());
seconds s = duration_cast<seconds>(ms);
std::time_t t = s.count();
std::size_t milliSeconds = ms.count() % 1000;
char buf[1024];
strftime(buf, sizeof(buf), "%F %T", gmtime(&t));
std::cout << '[' << buf << ':' << milliSeconds << ']';
switch (severity) {
case jet::LogSeverity::kDebug: std::cout << "[D] "; break;
case jet::LogSeverity::kInfo: std::cout << "[I] "; break;
case jet::LogSeverity::kWarning: std::cout << "[W] "; break;
case jet::LogSeverity::kError: std::cout << "[E] "; break;
}
std::cout << message << std::endl;
}
void TestListener::onCodePreLoad()
{
if (m_codePreLoadCallback) {
m_codePreLoadCallback();
}
}
void TestListener::onCodePostLoad()
{
if (m_codePostLoadCallback) {
m_codePostLoadCallback();
}
}
| 561 |
636 | <gh_stars>100-1000
package cn.org.atool.fluent.mybatis.processor.base;
import cn.org.atool.fluent.mybatis.processor.entity.CommonField;
import cn.org.atool.fluent.mybatis.processor.filer.segment.*;
import cn.org.atool.fluent.mybatis.utility.MybatisUtil;
import com.squareup.javapoet.ClassName;
import lombok.AccessLevel;
import lombok.Getter;
import java.util.List;
import java.util.stream.Collectors;
import static cn.org.atool.fluent.mybatis.mapper.FluentConst.*;
/**
 * Base class that builds the ClassName of each generated module for a fluent entity.
*
* @author darui.wu
*/
@SuppressWarnings("unused")
public abstract class FluentClassName {
public abstract String getNoSuffix();
/**
     * Entity name without the Entity suffix and with its first letter lower-cased.
*
* @return ignore
*/
public String lowerNoSuffix() {
return MybatisUtil.lowerFirst(this.getNoSuffix(), "");
}
public abstract String getBasePack();
public abstract String getEntityPack();
public String getPackageName(String suffix) {
return this.getBasePack() + "." + suffix;
}
public abstract String getClassName();
public abstract List<CommonField> getFields();
/**
     * All column names joined together, comma separated.
*/
@Getter(AccessLevel.NONE)
private String All_Fields = null;
public String getAllFields() {
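        // e.g. for columns "id", "user_name", "age" (illustrative) this yields "id, user_name, age".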
if (this.All_Fields == null) {
All_Fields = this.getFields().stream().map(CommonField::getColumn).collect(Collectors.joining(", "));
}
return All_Fields;
}
// all ClassName
/**
* ClassName of XyzEntity
*
* @return Entity ClassName
*/
public ClassName entity() {
return ClassName.get(this.getEntityPack(), this.getClassName());
}
/**
* ClassName of XyzUpdater
*
* @return Update ClassName
*/
public ClassName updater() {
return ClassName.get(
UpdaterFiler.getPackageName(this),
UpdaterFiler.getClassName(this));
}
/**
     * ClassName of XyzEntityMapping
     *
     * @return EntityMapping ClassName
*/
public ClassName entityMapping() {
return ClassName.get(
EntityMappingFiler.getPackageName(this),
EntityMappingFiler.getClassName(this));
}
/**
* ClassName of XyzMapper
*
* @return Mapper ClassName
*/
public ClassName mapper() {
return ClassName.get(
MapperFiler.getPackageName(this),
MapperFiler.getClassName(this));
}
/**
* ClassName of XyzQuery
*
* @return Query ClassName
*/
public ClassName query() {
return ClassName.get(
QueryFiler.getPackageName(this),
QueryFiler.getClassName(this));
}
public ClassName wrapperHelper() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this));
}
public ClassName queryWhere() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_QueryWhere);
}
public ClassName updateWhere() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_UpdateWhere);
}
public ClassName selector() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_Selector);
}
public ClassName groupBy() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_GroupBy);
}
public ClassName having() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_Having);
}
public ClassName queryOrderBy() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_QueryOrderBy);
}
public ClassName updateOrderBy() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_UpdateOrderBy);
}
public ClassName updateSetter() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_UpdateSetter);
}
public ClassName segment() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_ISegment);
}
public ClassName formSetter() {
return ClassName.get(
SegmentFiler.getPackageName(this),
SegmentFiler.getClassName(this),
Suffix_EntityFormSetter);
}
} | 2,210 |
6,045 | <filename>hug/__main__.py
import hug
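# Entry point for "python -m hug": delegates to the CLI interface of hug's
# development runner, the same code path behind the "hug" console command.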
hug.development_runner.hug.interface.cli()
| 31 |
3,285 | <filename>oneflow/xrt/tensorrt/ops/op_kernel.h
/*
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef ONEFLOW_XRT_TENSORRT_OPS_OP_KERNEL_H_
#define ONEFLOW_XRT_TENSORRT_OPS_OP_KERNEL_H_
#include "oneflow/xrt/kernel/op_kernel.h"
#include "oneflow/xrt/tensorrt/ops/op_context.h"
#include "oneflow/xrt/types.h"
#include "oneflow/xrt/utility/registry.h"
#include "oneflow/xrt/utility/stl.h"
namespace oneflow {
namespace xrt {
namespace tensorrt {
class TrtOpKernel : public OpKernel<TrtOpContext> {
public:
virtual void Compile(TrtOpContext* ctx) = 0;
TrtOpKernel() = default;
virtual ~TrtOpKernel() = default;
};
using TrtOpKernelPtr = std::shared_ptr<OpKernel<TrtOpContext>>;
#define REGISTER_TRT_OP_KERNEL(OpName, KernelType) \
static OpKernelRegistrar<TrtOpContext> _trt_op_kernel_##OpName##_ __attribute__((unused)) = \
OpKernelRegistrar<TrtOpContext>(#OpName) \
.SetField(XrtEngine::TENSORRT) \
.SetDevice({XrtDevice::GPU_CUDA}) \
.SetFactory([]() -> OpKernel<TrtOpContext>* { return new KernelType; })
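// Illustrative registration (the kernel class name here is hypothetical; real
// call sites may chain additional registrar options):
//   REGISTER_TRT_OP_KERNEL(Sigmoid, SigmoidOp);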
inline TrtOpKernelPtr BuildOpKernel(const std::string& op_name) {
auto field = MakeXrtField(XrtDevice::GPU_CUDA, XrtEngine::TENSORRT);
return TrtOpKernelPtr(OpKernelBuilder<TrtOpContext>()(field, op_name));
}
} // namespace tensorrt
} // namespace xrt
} // namespace oneflow
#endif // ONEFLOW_XRT_TENSORRT_OPS_OP_KERNEL_H_
| 917 |
1,455 | // ----------------------------------------------------------------------------
// - Open3D: www.open3d.org -
// ----------------------------------------------------------------------------
// The MIT License (MIT)
//
// Copyright (c) 2018-2021 www.open3d.org
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
// ----------------------------------------------------------------------------
#pragma once
#include <string>
namespace open3d {
namespace data {
/// \brief Computes MD5 Hash for the given file.
/// \param file_path Path to the file.
std::string GetMD5(const std::string& file_path);
/// \brief Download a file from URL.
///
/// \param url File URL. The saved file name will be the last part of the URL.
/// \param md5 MD5 checksum of the file. This is required as the same
/// URL may point to different files over time.
/// \param prefix The file will be downloaded to `data_root/prefix`.
/// Typically we group data files by dataset, e.g., "kitti", "rgbd", etc. If
/// empty, the file will be downloaded to `data_root` directly.
/// \param data_root Open3D data root directory. If empty, the default data root
/// is used. The default data root is $HOME/open3d_data. For more information,
/// see open3d::data::Dataset class.
/// \returns Path to downloaded file.
/// \throw std::runtime_error If the download fails.
std::string DownloadFromURL(const std::string& url,
const std::string& md5,
const std::string& prefix,
const std::string& data_root = "");
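// Illustrative call (URL, checksum and prefix are placeholder values):
//   std::string path = open3d::data::DownloadFromURL(
//           "https://example.com/fragment.ply", "<md5-checksum>", "demo");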
} // namespace data
} // namespace open3d
| 797 |
492 | <filename>2019/07/30/Create Custom Commands in Flask/flask_commands_example/app.py<gh_stars>100-1000
from flask import Flask
import click
from flask.cli import with_appcontext
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
db = SQLAlchemy(app)
class MyTable(db.Model):
id = db.Column(db.Integer, primary_key=True)
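# Custom CLI command: registered with the app below via app.cli.add_command(),
# so it can be run as "flask create" to create the database tables.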
@click.command(name='create')
@with_appcontext
def create():
db.create_all()
app.cli.add_command(create) | 205 |