max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
603 | import numpy as np
from pandas import DataFrame, Series, period_range
def test_iat(float_frame):
for i, row in enumerate(float_frame.index):
for j, col in enumerate(float_frame.columns):
result = float_frame.iat[i, j]
expected = float_frame.at[row, col]
assert result == expected
def test_iat_duplicate_columns():
    # https://github.com/pandas-dev/pandas/issues/11754
    # .iat addresses cells purely by position, so duplicate column labels
    # must not confuse the lookup.
    frame = DataFrame([[1, 2]], columns=["x", "x"])
    first_cell = frame.iat[0, 0]
    assert first_cell == 1
def test_iat_getitem_series_with_period_index():
    # GH#4390: .iat used to index incorrectly on a PeriodIndex-backed Series.
    idx = period_range("1/1/2001", periods=10)
    values = Series(np.random.randn(10), index=idx)
    by_label = values[idx[0]]
    by_position = values.iat[0]
    assert by_label == by_position
| 320 |
679 | <reponame>Grosskopf/openoffice<filename>main/framework/source/helper/statusindicator.cxx<gh_stars>100-1000
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_framework.hxx"
//_______________________________________________
// include files of own module
#include <helper/statusindicator.hxx>
#include <threadhelp/readguard.hxx>
#include <threadhelp/writeguard.hxx>
//_______________________________________________
// namespace
namespace framework{
//_______________________________________________
// declarations
//***********************************************
// XInterface: routes queryInterface()/acquire()/release() through
// OWeakObject and exposes the two directly implemented interfaces.
DEFINE_XINTERFACE_2(StatusIndicator ,
OWeakObject ,
DIRECT_INTERFACE(css::lang::XTypeProvider ),
DIRECT_INTERFACE(css::task::XStatusIndicator))
//***********************************************
// XTypeProvider: generates getTypes()/getImplementationId() for the
// interfaces listed below. (The original comment here mislabeled this
// block as a second "XInterface".)
DEFINE_XTYPEPROVIDER_2(StatusIndicator ,
css::lang::XTypeProvider ,
css::task::XStatusIndicator)
//***********************************************
/** Construct an indicator bound to its creating factory.

    @param pFactory factory this indicator belongs to; stored in m_xFactory
                    so all XStatusIndicator calls can be forwarded to it.
*/
StatusIndicator::StatusIndicator(StatusIndicatorFactory* pFactory)
    : ThreadHelpBase ( )
    , ::cppu::OWeakObject( )
    , m_xFactory (pFactory)
{
}
//***********************************************
/** Destructor — nothing to release explicitly; members clean up themselves. */
StatusIndicator::~StatusIndicator()
{
}
//***********************************************
/** Forward start() to the owning factory, if it is still alive.

    @param sText  initial status text for this indicator
    @param nRange maximum progress value for subsequent setValue() calls
*/
void SAL_CALL StatusIndicator::start(const ::rtl::OUString& sText ,
sal_Int32 nRange)
throw(css::uno::RuntimeException)
{
    // SAFE ->
    // Resolve the factory member into a hard UNO reference while holding the
    // lock. NOTE(review): m_xFactory appears to be a weak/interface holder
    // (UNO_QUERY on .get()) — confirm against statusindicator.hxx.
    ReadGuard aReadLock(m_aLock);
    css::uno::Reference< css::task::XStatusIndicatorFactory > xFactory(m_xFactory.get(), css::uno::UNO_QUERY);
    // Release the lock BEFORE calling out, so the factory cannot dead-lock
    // against this object's mutex.
    aReadLock.unlock();
    // <- SAFE
    if (xFactory.is())
    {
        // Downcast back to the implementation class; the reference was
        // created from a StatusIndicatorFactory* (see constructor).
        StatusIndicatorFactory* pFactory = (StatusIndicatorFactory*)xFactory.get();
        pFactory->start(this, sText, nRange);
    }
}
//***********************************************
/** Forward end() to the owning factory, if it is still alive. */
void SAL_CALL StatusIndicator::end()
throw(css::uno::RuntimeException)
{
    // SAFE ->
    // Same pattern as start(): pin the factory under the lock, then unlock
    // before the out-call to avoid dead-locks.
    ReadGuard aReadLock(m_aLock);
    css::uno::Reference< css::task::XStatusIndicatorFactory > xFactory(m_xFactory.get(), css::uno::UNO_QUERY);
    aReadLock.unlock();
    // <- SAFE
    if (xFactory.is())
    {
        // Downcast back to the implementation class (see constructor).
        StatusIndicatorFactory* pFactory = (StatusIndicatorFactory*)xFactory.get();
        pFactory->end(this);
    }
}
//***********************************************
/** Forward reset() to the owning factory, if it is still alive. */
void SAL_CALL StatusIndicator::reset()
throw(css::uno::RuntimeException)
{
    // SAFE ->
    // Pin the factory under the lock, then unlock before the out-call.
    ReadGuard aReadLock(m_aLock);
    css::uno::Reference< css::task::XStatusIndicatorFactory > xFactory(m_xFactory.get(), css::uno::UNO_QUERY);
    aReadLock.unlock();
    // <- SAFE
    if (xFactory.is())
    {
        // Downcast back to the implementation class (see constructor).
        StatusIndicatorFactory* pFactory = (StatusIndicatorFactory*)xFactory.get();
        pFactory->reset(this);
    }
}
//***********************************************
/** Forward setText() to the owning factory, if it is still alive.

    @param sText new status text to show for this indicator
*/
void SAL_CALL StatusIndicator::setText(const ::rtl::OUString& sText)
throw(css::uno::RuntimeException)
{
    // SAFE ->
    // Pin the factory under the lock, then unlock before the out-call.
    ReadGuard aReadLock(m_aLock);
    css::uno::Reference< css::task::XStatusIndicatorFactory > xFactory(m_xFactory.get(), css::uno::UNO_QUERY);
    aReadLock.unlock();
    // <- SAFE
    if (xFactory.is())
    {
        // Downcast back to the implementation class (see constructor).
        StatusIndicatorFactory* pFactory = (StatusIndicatorFactory*)xFactory.get();
        pFactory->setText(this, sText);
    }
}
//***********************************************
/** Forward setValue() to the owning factory, if it is still alive.

    @param nValue new progress value (relative to the range given to start())
*/
void SAL_CALL StatusIndicator::setValue(sal_Int32 nValue)
throw(css::uno::RuntimeException)
{
    // SAFE ->
    // Pin the factory under the lock, then unlock before the out-call.
    ReadGuard aReadLock(m_aLock);
    css::uno::Reference< css::task::XStatusIndicatorFactory > xFactory(m_xFactory.get(), css::uno::UNO_QUERY);
    aReadLock.unlock();
    // <- SAFE
    if (xFactory.is())
    {
        // Downcast back to the implementation class (see constructor).
        StatusIndicatorFactory* pFactory = (StatusIndicatorFactory*)xFactory.get();
        pFactory->setValue(this, nValue);
    }
}
} // namespace framework
| 1,778 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.performance.enterprise.footprint;
import org.netbeans.jellytools.NewWebProjectNameLocationStepOperator;
import org.netbeans.jellytools.NewProjectWizardOperator;
import org.netbeans.jellytools.ProjectsTabOperator;
import org.netbeans.jellytools.RuntimeTabOperator;
import org.netbeans.jellytools.nodes.Node;
import org.netbeans.jemmy.EventTool;
import org.netbeans.jemmy.operators.JCheckBoxOperator;
import org.netbeans.jemmy.operators.JComboBoxOperator;
import org.netbeans.modules.performance.utilities.CommonUtilities;
//import org.netbeans.performance.enterprise.EPUtilities;
/**
* Utilities for Memory footprint tests
*
* @author <EMAIL>, <EMAIL>
*/
public class EPFootprintUtilities extends CommonUtilities {

    /**
     * Creates a J2EE project via the New Project wizard.
     * NOTE(review): the name "creatJ2EEeproject" is misspelled but kept
     * unchanged — callers outside this file may depend on it.
     *
     * @param category wizard category to select
     * @param project  project template to select
     * @param wait     whether to block until project creation finishes
     * @return the generated, timestamp-unique project name
     */
    static String creatJ2EEeproject(String category, String project, boolean wait) {
        return createProjectGeneral(category, project, wait, true);
    }

    /**
     * Drives the New Project wizard: selects the template, points the
     * project at the temp dir, gives it a unique name, and finishes.
     *
     * @param j2eeProject if true, performs the extra J2EE wizard step
     *                    (combo selection + "Create Application Client module")
     * @return the generated project name
     */
    private static String createProjectGeneral(String category, String project, boolean wait, boolean j2eeProject) {
        // select Projects tab
        ProjectsTabOperator.invoke();
        // create a project
        NewProjectWizardOperator wizard = NewProjectWizardOperator.invoke();
        wizard.selectCategory(category);
        wizard.selectProject(project);
        wizard.next();
        NewWebProjectNameLocationStepOperator wizard_location = new NewWebProjectNameLocationStepOperator();
        wizard_location.txtProjectLocation().clearText();
        wizard_location.txtProjectLocation().typeText(CommonUtilities.getTempDir());
        // make the project name unique so repeated runs do not collide
        String pname = wizard_location.txtProjectName().getText();
        pname = pname + "_" + System.currentTimeMillis();
        wizard_location.txtProjectName().clearText();
        wizard_location.txtProjectName().typeText(pname);
        wizard.next();
        if(j2eeProject) {
            // NOTE(review): selects item 1 of the second combo box —
            // presumably the Java EE version/server combo; confirm in the IDE.
            new JComboBoxOperator(wizard,1).selectItem(1);
            new JCheckBoxOperator(wizard,"Create Application Client module:").setSelected(true);
        }
        // let pending UI events settle before finishing the wizard
        new EventTool().waitNoEvent(1000);
        wizard.finish();
        // wait 30 seconds
        waitForProjectCreation(30000, wait);
        return pname;
    }

    /** Terminates a "run" process started for the given project. */
    public static void killRunOnProject(String project) {
        killProcessOnProject(project, "run");
    }

    /** Terminates a "debug" process started for the given project. */
    public static void killDebugOnProject(String project) {
        killProcessOnProject(project, "debug");
    }

    /**
     * Terminates the named process under Runtime tab > Processes,
     * e.g. "MyProject (run)".
     */
    private static void killProcessOnProject(String project, String process) {
        // prepare Runtime tab
        RuntimeTabOperator runtime = RuntimeTabOperator.invoke();
        // kill the execution
        Node node = new Node(runtime.getRootNode(), "Processes|"+project+ " (" + process + ")");
        node.select();
        node.performPopupAction("Terminate Process");
    }
}
| 1,264 |
360 | <reponame>28kayak/luwak
package uk.co.flax.luwak;
/*
* Copyright (c) 2015 Lemur Consulting Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Explanation;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import uk.co.flax.luwak.matchers.ExplainingMatch;
import uk.co.flax.luwak.matchers.ExplainingMatcher;
import uk.co.flax.luwak.presearcher.MatchAllPresearcher;
import uk.co.flax.luwak.queryparsers.LuceneQueryParser;
import static org.assertj.core.api.Assertions.assertThat;
public class TestInputDocument {

    @Rule
    public ExpectedException expected = ExpectedException.none();

    /** Adding a field under the reserved ID field name must be rejected. */
    @Test
    public void testCannotAddReservedFieldName() {
        expected.expect(IllegalArgumentException.class);
        expected.expectMessage("reserved");
        InputDocument.builder("id").addField(InputDocument.ID_FIELD, "test", new StandardAnalyzer()).build();
    }

    /** Same check when the reserved name arrives via a prebuilt Field object. */
    @Test
    public void testCannotAddReservedFieldObject() {
        expected.expect(IllegalArgumentException.class);
        expected.expectMessage("reserved");
        InputDocument.builder("id").addField(new StringField(InputDocument.ID_FIELD, "", Field.Store.YES)).build();
    }

    /**
     * A field indexed with omitNorms=true must be scored without norms:
     * the match explanation produced by ExplainingMatcher should say
     * "norms omitted".
     */
    @Test
    public void testOmitNorms() throws Exception {
        FieldType type = new FieldType();
        type.setOmitNorms(true);
        type.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
        Field f = new Field("text", "this is some text that will have a length norm greater than 0", type);
        InputDocument doc = InputDocument.builder("id")
                .setDefaultAnalyzer(new StandardAnalyzer())
                .addField(f).build();
        try (Monitor monitor = new Monitor(new LuceneQueryParser("text"), new MatchAllPresearcher())) {
            monitor.update(new MonitorQuery("q", "length"));
            Matches<ExplainingMatch> matches = monitor.match(doc, ExplainingMatcher.FACTORY);
            DocumentMatches<ExplainingMatch> m = matches.getMatches("id");
            for (ExplainingMatch e : m) {
                Explanation expl = e.getExplanation();
                assertThat(expl.toString()).contains("norms omitted");
            }
        }
    }
}
| 1,103 |
14,668 | <gh_stars>1000+
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMEOS_DBUS_FWUPD_DBUS_CONSTANTS_H_
#define CHROMEOS_DBUS_FWUPD_DBUS_CONSTANTS_H_

namespace chromeos {

// D-Bus identifiers for the fwupd firmware-update daemon.
const char kFwupdServiceName[] = "org.freedesktop.fwupd";
const char kFwupdServicePath[] = "/";
const char kFwupdServiceInterface[] = "org.freedesktop.fwupd";

// Signal emitted by fwupd when a device appears.
const char kFwupdDeviceAddedSignalName[] = "DeviceAdded";

// Method names invoked on the fwupd interface.
const char kFwupdGetUpgradesMethodName[] = "GetUpgrades";
const char kFwupdGetDevicesMethodName[] = "GetDevices";
const char kFwupdInstallMethodName[] = "Install";

} // namespace chromeos

#endif // CHROMEOS_DBUS_FWUPD_DBUS_CONSTANTS_H_
| 280 |
387 | <gh_stars>100-1000
/*
* Copyright (C) 2014, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The Java Pathfinder core (jpf-core) platform is licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.vm;
/**
* utility wrapper for exception handlers that /would/ handle
* a given exception type
*
* <2do> This should be a class hierarchy to properly distinguish between
* ordinary catch handlers and UncaughtHandler objects, but so far
* this isn't worth it
*/
public class HandlerContext {
    /** Kind of uncaught-exception handler, when no catch handler applies. */
    public enum UncaughtHandlerType { INSTANCE, GROUP, GLOBAL }

    ThreadInfo ti;              // thread in which the exception occurred
    ClassInfo ciException;      // type of the (would-be handled) exception

    // Set only for an ordinary catch handler:
    StackFrame frame;           // frame containing the matching handler
    ExceptionHandler handler;   // the matching catch clause
    // - or - set only for an uncaught handler:
    int uncaughtHandlerRef;
    UncaughtHandlerType uncaughtHandlerType;

    /** Context for an ordinary catch handler found in a stack frame. */
    HandlerContext (ThreadInfo ti, ClassInfo ciException, StackFrame frame, ExceptionHandler handler) {
        this.ti = ti;
        this.ciException = ciException;
        this.frame = frame;
        this.handler = handler;
    }

    /** Context for an uncaught-exception handler (instance/group/global). */
    HandlerContext (ThreadInfo ti, ClassInfo ciException, UncaughtHandlerType uncaughtHandlerType, int uncaughtHandlerRef){
        this.ti = ti;
        this.ciException = ciException;
        this.uncaughtHandlerType = uncaughtHandlerType;
        this.uncaughtHandlerRef = uncaughtHandlerRef;
    }

    public ThreadInfo getThreadInfo(){
        return ti;
    }

    /** Frame of the matching catch handler; null for uncaught handlers. */
    public StackFrame getFrame () {
        return frame;
    }

    /** Matching catch clause; null for uncaught handlers. */
    public ExceptionHandler getHandler () {
        return handler;
    }

    /** True iff this context was built by the uncaught-handler constructor. */
    public boolean isUncaughtHandler(){
        return uncaughtHandlerType != null;
    }

    public UncaughtHandlerType getUncaughtHandlerType(){
        return uncaughtHandlerType;
    }

    public int getUncaughtHandlerRef(){
        return uncaughtHandlerRef;
    }
}
| 674 |
406 | <filename>services/unsupported/OpenADRVenAgent/openadrven/models.py
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright 2020, Battelle Memorial Institute.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government. Neither the United States Government nor the
# United States Department of Energy, nor Battelle, nor any of their
# employees, nor any jurisdiction or organization that has cooperated in the
# development of these materials, makes any warranty, express or
# implied, or assumes any legal liability or responsibility for the accuracy,
# completeness, or usefulness or any information, apparatus, product,
# software, or process disclosed, or represents that its use would not infringe
# privately owned rights. Reference herein to any specific commercial product,
# process, or service by trade name, trademark, manufacturer, or otherwise
# does not necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by
# BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
from datetime import timedelta
from dateutil import parser
from sqlalchemy import Column, String, Integer, DateTime, Float
from sqlalchemy.ext.declarative import declarative_base
from volttron.platform.agent import utils
ORMBase = declarative_base()
class EiEvent(ORMBase):
    """
    Model object for an event.

    This model object, and the other models in this module, are managed by the SQLAlchemy ORM,
    and are persisted in a SQLite database.

    Note that the timestamps are stored as ISO8601 strings, by SQLite convention.
    Timezone awareness is retained when they are persisted.
    They're stored as strings -- iso_start_time, etc. -- but agent code uses property
    methods -- start_time(), etc. -- to get and set them as datetime objects.
    """

    __tablename__ = 'EiEvent'

    rowid = Column(Integer, primary_key=True, autoincrement=True)
    event_id = Column(String, nullable=False)
    request_id = Column(String, nullable=False)
    creation_time = Column(DateTime, nullable=False)
    iso_start_time = Column(String, nullable=False)          # ISO 8601 timestamp in UTC
    iso_end_time = Column(String, nullable=True)             # ISO 8601 timestamp in UTC
    signals = Column(String, nullable=False)
    status = Column(String, nullable=False)
    opt_type = Column(String, nullable=False)
    priority = Column(Integer, nullable=False)
    modification_number = Column(Integer, nullable=False)
    test_event = Column(String, nullable=False, default='false')
    iso_dtstart = Column(String, nullable=True)              # ISO 8601 timestamp in UTC
    duration = Column(String, nullable=True)                 # ISO 8601 duration string
    start_after = Column(String, nullable=True, default='')  # ISO 8601 duration string

    # Attributes exposed by as_json_compatible_object().
    attribute_names = ['event_id', 'creation_time', 'start_time', 'end_time', 'priority',
                       'signals', 'status', 'opt_type']

    STATUS_UNRESPONDED = 'unresponded'
    STATUS_FAR = 'far'
    STATUS_NEAR = 'near'
    STATUS_ACTIVE = 'active'
    STATUS_COMPLETED = 'completed'
    STATUS_CANCELED = 'cancelled'
    STATUS_VALUES = [STATUS_UNRESPONDED, STATUS_FAR, STATUS_NEAR, STATUS_ACTIVE, STATUS_COMPLETED,
                     STATUS_CANCELED]

    OPT_TYPE_OPT_IN = 'optIn'
    OPT_TYPE_OPT_OUT = 'optOut'
    OPT_TYPE_NONE = 'none'

    def __init__(self, request_id, event_id):
        """Create an event in its initial (unresponded, not-opted) state."""
        self.request_id = request_id
        self.creation_time = utils.get_aware_utc_now()
        self.event_id = event_id
        self.signals = ''
        self.status = self.STATUS_UNRESPONDED
        self.opt_type = self.OPT_TYPE_NONE
        self.priority = 1
        self.modification_number = 0  # Incremented by the VTN if/when the event changes
        self.test_event = 'false'

    def __str__(self):
        """Format the instance as a string suitable for trace display."""
        # Fix: the original appended 'event_id' a second time (after opt_type);
        # the duplicate segment has been removed.
        my_str = '{}: '.format(self.__class__.__name__)
        my_str += 'event_id:{}; '.format(self.event_id)
        my_str += 'start_time:{}; '.format(self.start_time)
        my_str += 'end_time:{}; '.format(self.end_time)
        my_str += 'opt_type:{}; '.format(self.opt_type)
        my_str += 'status:{}; '.format(self.status)
        my_str += 'priority:{}; '.format(self.priority)
        my_str += 'modification_number:{}; '.format(self.modification_number)
        my_str += 'signals:{}; '.format(self.signals)
        return my_str

    @property
    def start_time(self):
        # Parse the stored ISO 8601 string back into an aware datetime.
        return parser.parse(self.iso_start_time) if self.iso_start_time else None

    @property
    def end_time(self):
        return parser.parse(self.iso_end_time) if self.iso_end_time else None

    @property
    def dtstart(self):
        return parser.parse(self.iso_dtstart) if self.iso_dtstart else None

    @start_time.setter
    def start_time(self, t):
        self.iso_start_time = utils.format_timestamp(t) if t else None

    @end_time.setter
    def end_time(self, t):
        self.iso_end_time = utils.format_timestamp(t) if t else None

    @dtstart.setter
    def dtstart(self, t):
        self.iso_dtstart = utils.format_timestamp(t) if t else None

    @classmethod
    def sample_event(cls):
        """Return a sample EiEvent for debugging purposes."""
        sample = cls('123456', '12345')
        sample.start_time = utils.get_aware_utc_now()
        sample.end_time = sample.start_time + timedelta(hours=1)
        sample.opt_type = 'optIn'
        return sample

    def is_active(self):
        """An event is active until it completes or is cancelled."""
        return self.status not in [self.STATUS_COMPLETED, self.STATUS_CANCELED]

    def as_json_compatible_object(self):
        """Format the object as JSON that will be returned in response to an RPC, or sent in a pub/sub."""
        return {attname: getattr(self, attname) for attname in self.attribute_names}

    def copy_from_event(self, another_event):
        """Copy mutable event data from another_event, preserving local state."""
        # Do not copy creation_time from another_event
        self.event_id = another_event.event_id
        self.request_id = another_event.request_id
        self.start_time = another_event.start_time
        self.end_time = another_event.end_time
        self.priority = another_event.priority
        self.signals = another_event.signals
        # Do not copy status from another_event
        # Do not copy opt_type from another_event
        self.modification_number = another_event.modification_number
        self.test_event = another_event.test_event
        self.dtstart = another_event.dtstart
        self.duration = another_event.duration
        self.start_after = another_event.start_after
class EiReport(ORMBase):
    """Model object for a report.

    Persisted by the SQLAlchemy ORM; timestamps are stored as ISO 8601
    strings (iso_* columns) and exposed as datetimes via properties.
    """

    __tablename__ = 'EiReport'

    STATUS_INACTIVE = 'inactive'
    STATUS_ACTIVE = 'active'
    STATUS_COMPLETED = 'completed'
    STATUS_CANCELED = 'cancelled'

    rowid = Column(Integer, primary_key=True)
    created_on = Column(DateTime)
    request_id = Column(String)
    report_request_id = Column(String)
    report_specifier_id = Column(String)
    iso_start_time = Column(String, nullable=False)  # ISO 8601 timestamp in UTC
    iso_end_time = Column(String, nullable=True)     # ISO 8601 timestamp in UTC
    duration = Column(String)                        # ISO 8601 duration
    status = Column(String)
    iso_last_report = Column(String)                 # ISO 8601 timestamp in UTC
    name = Column(String)
    interval_secs = Column(Integer)
    # NOTE(review): despite the _secs name this column is declared String,
    # not Integer — confirm whether that is intentional before changing.
    granularity_secs = Column(String)
    telemetry_parameters = Column(String)

    # Attributes exposed by as_json_compatible_object().
    attribute_names = ["status", "report_specifier_id", "report_request_id", "request_id",
                       "interval_secs", "granularity_secs",
                       "start_time", "end_time", "last_report", "created_on"]

    def __init__(self, request_id, report_request_id, report_specifier_id):
        # New reports start 'inactive' (== STATUS_INACTIVE).
        self.created_on = utils.get_aware_utc_now()
        self.request_id = request_id
        self.report_request_id = report_request_id
        self.report_specifier_id = report_specifier_id
        self.status = 'inactive'
        self.last_report = utils.get_aware_utc_now()

    def __str__(self):
        """Format the instance as a string suitable for trace display."""
        my_str = '{}: '.format(self.__class__.__name__)
        my_str += 'report_request_id:{}; '.format(self.report_request_id)
        my_str += 'report_specifier_id:{}; '.format(self.report_specifier_id)
        my_str += 'start_time:{}; '.format(self.start_time)
        my_str += 'end_time:{}; '.format(self.end_time)
        my_str += 'status:{}; '.format(self.status)
        return my_str

    @property
    def start_time(self):
        # Parse the stored ISO 8601 string back into an aware datetime.
        return parser.parse(self.iso_start_time) if self.iso_start_time else None

    @property
    def end_time(self):
        return parser.parse(self.iso_end_time) if self.iso_end_time else None

    @property
    def last_report(self):
        return parser.parse(self.iso_last_report) if self.iso_last_report else None

    @start_time.setter
    def start_time(self, t):
        self.iso_start_time = utils.format_timestamp(t) if t else None

    @end_time.setter
    def end_time(self, t):
        self.iso_end_time = utils.format_timestamp(t) if t else None

    @last_report.setter
    def last_report(self, t):
        self.iso_last_report = utils.format_timestamp(t) if t else None

    def is_active(self):
        """A report is active until it completes or is cancelled."""
        return self.status not in [self.STATUS_COMPLETED, self.STATUS_CANCELED]

    def as_json_compatible_object(self):
        """Format the object as JSON that will be returned in response to an RPC, or sent in a pub/sub."""
        return {attname: getattr(self, attname) for attname in self.attribute_names}

    def copy_from_report(self, another_report):
        """(Selectively) Copy the contents of another_report to this one."""
        self.request_id = another_report.request_id
        self.report_request_id = another_report.report_request_id
        self.report_specifier_id = another_report.report_specifier_id
        self.start_time = another_report.start_time
        self.end_time = another_report.end_time
        self.duration = another_report.duration
        self.granularity_secs = another_report.granularity_secs
        # Do not copy created_on from another_report
        # Do not copy status from another_report
        # Do not copy last_report from another_report
        self.name = another_report.name
        self.interval_secs = another_report.interval_secs
        self.telemetry_parameters = another_report.telemetry_parameters
class EiTelemetryValues(ORMBase):
    """Model object for telemetry values.

    One row per telemetry sample for a given report request; timestamps are
    stored as ISO 8601 strings and exposed as datetimes via properties.
    """

    __tablename__ = 'EiTelemetryValues'

    rowid = Column(Integer, primary_key=True)
    created_on = Column(DateTime)
    report_request_id = Column(String)
    baseline_power_kw = Column(Float)
    current_power_kw = Column(Float)
    iso_start_time = Column(String)  # ISO 8601 timestamp in UTC
    iso_end_time = Column(String)    # ISO 8601 timestamp in UTC

    # Attributes exposed by as_json_compatible_object().
    attribute_names = ['created_on', 'report_request_id', 'baseline_power_kw', 'current_power_kw',
                       'start_time', 'end_time']

    def __init__(self, report_request_id=None,
                 baseline_power_kw=None, current_power_kw=None,
                 start_time=None, end_time=None):
        self.created_on = utils.get_aware_utc_now()
        self.report_request_id = report_request_id
        self.baseline_power_kw = baseline_power_kw
        self.current_power_kw = current_power_kw
        self.start_time = start_time
        self.end_time = end_time

    def __str__(self):
        """Format the instance as a string suitable for trace display."""
        my_str = '{}: '.format(self.__class__.__name__)
        my_str += 'created_on:{}; '.format(self.created_on)
        my_str += 'report_request_id:{}; '.format(self.report_request_id)
        my_str += 'baseline_power_kw:{}; '.format(self.baseline_power_kw)
        my_str += 'current_power_kw:{} '.format(self.current_power_kw)
        my_str += 'start_time:{} '.format(self.start_time)
        my_str += 'end_time:{} '.format(self.end_time)
        return my_str

    @property
    def start_time(self):
        # Parse the stored ISO 8601 string back into an aware datetime.
        return parser.parse(self.iso_start_time) if self.iso_start_time else None

    @property
    def end_time(self):
        return parser.parse(self.iso_end_time) if self.iso_end_time else None

    @start_time.setter
    def start_time(self, t):
        self.iso_start_time = utils.format_timestamp(t) if t else None

    @end_time.setter
    def end_time(self, t):
        self.iso_end_time = utils.format_timestamp(t) if t else None

    @classmethod
    def sample_values(cls):
        """Return a sample set of telemetry values for debugging purposes."""
        telemetry_values = cls()
        telemetry_values.report_request_id = '123'
        telemetry_values.baseline_power_kw = 37.1
        telemetry_values.current_power_kw = 272.3
        return telemetry_values

    def as_json_compatible_object(self):
        """Format the object as JSON that will be returned in response to an RPC, or sent in a pub/sub."""
        return {attname: getattr(self, attname) for attname in self.attribute_names}

    def get_baseline_power(self):
        return self.baseline_power_kw

    def get_current_power(self):
        return self.current_power_kw

    def get_duration(self):
        # timedelta between sample end and start; raises TypeError if either
        # timestamp is unset (None).
        return self.end_time - self.start_time
| 5,664 |
66,985 | <filename>spring-boot-project/spring-boot-tools/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationsample/simple/SimpleArrayProperties.java<gh_stars>1000+
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.configurationsample.simple;
import java.util.Map;
import org.springframework.boot.configurationsample.ConfigurationProperties;
/**
* Properties with array.
*
* @author <NAME>
*/
@ConfigurationProperties("array")
public class SimpleArrayProperties {

    // Array of a primitive element type.
    private int[] primitive;

    // Array of a simple value type.
    private String[] simple;

    // Array of a nested (inner-class) type.
    private Holder[] inner;

    // Array of a generic type.
    private Map<String, Integer>[] nameToInteger;

    public int[] getPrimitive() {
        return this.primitive;
    }

    public void setPrimitive(int[] primitive) {
        this.primitive = primitive;
    }

    public String[] getSimple() {
        return this.simple;
    }

    public void setSimple(String[] simple) {
        this.simple = simple;
    }

    public Holder[] getInner() {
        return this.inner;
    }

    public void setInner(Holder[] inner) {
        this.inner = inner;
    }

    public Map<String, Integer>[] getNameToInteger() {
        return this.nameToInteger;
    }

    public void setNameToInteger(Map<String, Integer>[] nameToInteger) {
        this.nameToInteger = nameToInteger;
    }

    // Intentionally empty nested type used as an array element above.
    static class Holder {

    }

}
| 547 |
335 | // $Id$
# ifndef CPPAD_CORE_OMP_MAX_THREAD_HPP
# define CPPAD_CORE_OMP_MAX_THREAD_HPP
/* --------------------------------------------------------------------------
CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-16 <NAME>
CppAD is distributed under multiple licenses. This distribution is under
the terms of the
GNU General Public License Version 3.
A copy of this license is included in the COPYING file of this distribution.
Please visit http://www.coin-or.org/CppAD/ for information on other licenses.
-------------------------------------------------------------------------- */
/*
$begin omp_max_thread$$
$spell
alloc
num
omp
OpenMp
CppAD
$$
$section OpenMP Parallel Setup$$
$mindex omp_max_thread$$
$head Deprecated 2011-06-23$$
Use $cref/thread_alloc::parallel_setup/ta_parallel_setup/$$
to set the number of threads.
$head Syntax$$
$codei%AD<%Base%>::omp_max_thread(%number%)
%$$
$head Purpose$$
By default, for each $codei%AD<%Base%>%$$ class there is only one
tape that records $cref/AD of Base/glossary/AD of Base/$$ operations.
This tape is a global variable and hence it cannot be used
by multiple OpenMP threads at the same time.
The $code omp_max_thread$$ function is used to set the
maximum number of OpenMP threads that can be active.
In this case, there is a different tape corresponding to each
$codei%AD<%Base%>%$$ class and thread pair.
$head number$$
The argument $icode number$$ has prototype
$codei%
size_t %number%
%$$
It must be greater than zero and specifies the maximum number of
OpenMp threads that will be active at one time.
$head Independent$$
Each call to $cref/Independent(x)/Independent/$$
creates a new $cref/active/glossary/Tape/Active/$$ tape.
All of the operations with the corresponding variables
must be preformed by the same OpenMP thread.
This includes the corresponding call to
$cref/f.Dependent(x,y)/Dependent/$$ or the
$cref/ADFun f(x, y)/FunConstruct/Sequence Constructor/$$
during which the tape stops recording and the variables
become parameters.
$head Restriction$$
No tapes can be
$cref/active/glossary/Tape/Active/$$ when this function is called.
$end
-----------------------------------------------------------------------------
*/
// BEGIN CppAD namespace
namespace CppAD {
// Deprecated (2011-06-23) entry point kept for backward compatibility; see
// the $begin omp_max_thread$$ documentation above. When OpenMP is enabled it
// forwards the thread count to thread_alloc::parallel_setup; otherwise it
// only checks that number == 1.
template <class Base>
void AD<Base>::omp_max_thread(size_t number)
{
# ifdef _OPENMP
	thread_alloc::parallel_setup(
		number, omp_alloc::in_parallel, omp_alloc::get_thread_num
	);
# else
	CPPAD_ASSERT_KNOWN(
		number == 1,
		"omp_max_thread: number > 1 and _OPENMP is not defined"
	);
# endif
	// Initialize the per-thread tape statics for this Base before any
	// parallel execution begins.
	parallel_ad<Base>();
}
} // END CppAD namespace
# endif
| 827 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.java.source;
import java.io.IOException;
import java.net.URL;
import org.netbeans.junit.NbTestCase;
import org.netbeans.modules.java.source.indexing.TransactionContext;
import org.netbeans.modules.java.source.parsing.FileManagerTransaction;
import org.netbeans.modules.java.source.parsing.ProcessorGenerated;
import org.netbeans.modules.java.source.usages.ClassIndexEventsTransaction;
import org.netbeans.modules.java.source.usages.ClassIndexImpl.State;
import org.netbeans.modules.java.source.usages.ClassIndexManager;
import org.netbeans.modules.java.source.usages.PersistentClassIndex;
import org.netbeans.modules.parsing.api.ParsingTestBase;
/**
*
* @author sdedic
*/
public class ClassIndexTestCase extends NbTestCase {

    public ClassIndexTestCase(String name) {
        super(name);
    }

    /**
     * Ensures that a CP root becomes valid, by creating ClassIndex for it; if TransactionContext
     * does not exist, creates a local one + commits it.
     *
     * @param url the classpath root to make valid
     * @throws IOException propagated from the index / transaction layer
     */
    protected void ensureRootValid(URL url) throws IOException {
        // beginTx() returns null when a transaction was already active on
        // entry (IllegalStateException path inside beginTx).
        TransactionContext testTx = beginTx();
        PersistentClassIndex pi = (PersistentClassIndex)ClassIndexManager.getDefault().createUsagesQuery(url, true);
        pi.setState(State.INITIALIZED);
        TransactionContext.get().commit();
        if (testTx == null) {
            // A transaction existed before this call and was just committed;
            // re-open one so the caller still finds an active transaction.
            beginTx();
        }
    }

    /**
     * Registers the given root with the index manager as a usages query.
     *
     * @param url the classpath root
     */
    protected void createClassIndex(URL url) throws Exception {
        ClassIndexManager.getDefault().createUsagesQuery(url, true);
    }

    /**
     * Starts a test transaction with write-through file management and
     * no-op processor output.
     *
     * @return the new context, or {@code null} when a transaction is
     *         already active (e.g. created by a previous test)
     */
    public static TransactionContext beginTx() throws IOException {
        try {
            return TransactionContext.beginTrans().
                register(ClassIndexEventsTransaction.class,
                ClassIndexEventsTransaction.create(true, ()->true)).
                register(FileManagerTransaction.class, FileManagerTransaction.writeThrough()).
                register(ProcessorGenerated.class, ProcessorGenerated.nullWrite());
        } catch (IllegalStateException ise) {
            // ignore, tx may have been created by a previous test
            return null;
        }
    }

    /**
     * Commits the current transaction, silently ignoring the case where
     * none is active.
     */
    public static void commitTx() throws IOException {
        try {
            TransactionContext.get().commit();
        } catch (IllegalStateException e) {
            // ignore silently
        }
    }
}
| 1,134 |
655 | #include <StdAfx.h>
#include <MrMapi/MMRules.h>
#include <MrMapi/mmcli.h>
#include <MrMapi/MMAcls.h>
// Dumps the folder's rules table (PR_RULES_TABLE) via the shared
// Exchange-table dump helper.
void DoRules(_In_opt_ LPMAPIFOLDER lpFolder)
{
	DumpExchangeTable(PR_RULES_TABLE, lpFolder);
}
| 92 |
1,056 | <filename>enterprise/payara.common/src/org/netbeans/modules/payara/common/actions/AbstractOutputAction.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.payara.common.actions;
import java.awt.event.ActionEvent;
import javax.swing.AbstractAction;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.openide.util.ImageUtilities;
import org.openide.util.Mutex;
import org.openide.util.WeakListeners;
import org.netbeans.modules.payara.spi.PayaraModule;
/**
*
* @author <NAME>
*/
public abstract class AbstractOutputAction extends AbstractAction implements ChangeListener {

    private static final String PROP_ENABLED = "enabled"; // NOI18N

    /** Server support whose state changes drive this action's enablement. */
    protected final PayaraModule commonSupport;

    /**
     * @param commonSupport server support instance; also the change-event source
     * @param localizedName display name of the action
     * @param localizedShortDesc tooltip / short description text
     * @param iconBase resource path of the action's icon
     */
    public AbstractOutputAction(final PayaraModule commonSupport,
            String localizedName, String localizedShortDesc, String iconBase) {
        super(localizedName, ImageUtilities.loadImageIcon(iconBase, false));
        putValue(SHORT_DESCRIPTION, localizedShortDesc);
        this.commonSupport = commonSupport;

        // listen for server state changes; a weak listener avoids leaking
        // this action if the module support outlives it
        commonSupport.addChangeListener(WeakListeners.change(this, commonSupport));
    }

    public abstract void actionPerformed(ActionEvent e);

    @Override
    public abstract boolean isEnabled();

    // --------------------------------------------------------------------
    // ChangeListener interface implementation
    // --------------------------------------------------------------------

    public void stateChanged(ChangeEvent evt) {
        // Recompute enablement on the EDT whenever the server state changes.
        Mutex.EVENT.readAccess(new Runnable() {
            public void run() {
                firePropertyChange(PROP_ENABLED, null, isEnabled() ? Boolean.TRUE : Boolean.FALSE);
            }
        });
    }
}
| 747 |
631 | /**
* Copyright (c) 2011-2017 libbitcoin developers (see AUTHORS)
*
* This file is part of libbitcoin.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef LIBBITCOIN_PNG_HPP
#define LIBBITCOIN_PNG_HPP
#include <cstdint>
#include <istream>
#include <bitcoin/bitcoin/compat.hpp>
#include <bitcoin/bitcoin/define.hpp>
#include <bitcoin/bitcoin/utility/color.hpp>
#include <bitcoin/bitcoin/utility/data.hpp>
#ifdef WITH_PNG
#include <png.h>
namespace libbitcoin {
// Static helpers that render encoded QR-code module data to PNG.
class BC_API png
{
public:
    // Quiet-zone width (in modules) around the QR code.
    static BC_CONSTEXPR uint32_t margin = 2;
    static BC_CONSTEXPR uint32_t dots_per_inch = 72;
    // NOTE(review): 100.0 / 2.54 == 39.37..., truncated to 39 by the
    // integral type -- confirm the truncation is intended.
    static BC_CONSTEXPR uint32_t inches_per_meter = (100.0 / 2.54);

    // Opaque black foreground (RGBA).
    static const color get_default_foreground()
    {
        static BC_CONSTEXPR color default_foreground{ 0, 0, 0, 255 };
        return default_foreground;
    }

    // Opaque white background (RGBA).
    static const color get_default_background()
    {
        static BC_CONSTEXPR color default_background{ 255, 255, 255, 255 };
        return default_background;
    }

    /**
     * A method that takes encoded qrcode as a data chunk and writes it to
     * an output stream in png format with the default parameters. The
     * size parameter specifies the number of dots (pixels) per qr code
     * modules.
     */
    static bool write_png(const data_chunk& data, uint32_t size,
        std::ostream& out);

    /**
     * A method that takes encoded qrcode data as a data chunk and writes
     * it to an output stream in png format with the specified parameters.
     */
    static bool write_png(const data_chunk& data, uint32_t size,
        uint32_t dots_per_inch, uint32_t margin, uint32_t inches_per_meter,
        const color& foreground, const color& background, std::ostream& out);

    /**
     * A method that reads encoded qrcode data via an input stream and
     * writes it to an output stream in png format with the default
     * parameters. The size parameter specifies the number of dots
     * (pixels) per qr code modules.
     */
    static bool write_png(std::istream& in, uint32_t size, std::ostream& out);

    /**
     * A method that reads encoded qrcode data via an input stream and
     * writes it to an output stream in png format with the specified
     * parameters.
     */
    static bool write_png(std::istream& in, uint32_t size,
        uint32_t dots_per_inch, const uint32_t margin,
        uint32_t inches_per_meter, const color& foreground,
        const color& background, std::ostream& out);
};
} // namespace libbitcoin
#endif // WITH_PNG
#endif
| 1,085 |
831 | <reponame>Snehakri022/HackerrankPractice
# Problem: https://www.hackerrank.com/challenges/sherlock-and-cost/problem
# Score: 50
def max_cost(b):
    """Solve one Sherlock-and-Cost instance.

    Choose a[i] in [1, b[i]] maximizing sum(|a[i] - a[i-1]|). An optimal
    choice is always either 1 or b[i], so track two running DP values:
      low  -- best total so far when the current element is 1
      high -- best total so far when the current element is b[i]

    Args:
        b: list of positive integer upper bounds (may be empty).

    Returns:
        int: the maximum achievable sum of adjacent absolute differences.
    """
    if not b:
        return 0
    low = high = 0
    for j in range(1, len(b)):
        # Previous element was 1 (cost 0) or b[j-1] (cost b[j-1]-1).
        new_low = max(low, high + b[j - 1] - 1)
        # Current element is b[j]: come from 1 or from b[j-1].
        new_high = max(low + b[j] - 1, high + abs(b[j] - b[j - 1]))
        low, high = new_low, new_high
    return max(low, high)


def _main():
    # Input format per test case: a line with n, then n upper bounds.
    # (Fixes the original's shadowed loop variable `i` used for both the
    # test-case loop and the DP loop.)
    for _ in range(int(input())):
        input()  # n -- the length is implied by the list itself
        b = list(map(int, input().split()))
        print(max_cost(b))


if __name__ == '__main__':
    _main()
| 273 |
634 | /*
* Copyright 2013-2016 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.ui.desktop.internal;
import com.intellij.ide.IdeEventQueue;
import consulo.components.impl.stores.ComponentStoreImpl;
import consulo.logging.Logger;
import consulo.ui.UIAccess;
import consulo.ui.annotation.RequiredUIAccess;
import consulo.util.concurrent.AsyncResult;
import javax.annotation.Nonnull;
import javax.swing.*;
import java.awt.*;
import java.lang.reflect.InvocationTargetException;
import java.util.function.Supplier;
/**
* @author VISTALL
* @since 11-Jun-16
*/
public class AWTUIAccessImpl implements UIAccess {
  /** Shared singleton; the implementation holds no mutable state. */
  public static UIAccess ourInstance = new AWTUIAccessImpl();
  private static final Logger LOGGER = Logger.getInstance(AWTUIAccessImpl.class);

  /** AWT UI access never expires, so this is always {@code true}. */
  @Override
  public boolean isValid() {
    return true;
  }

  @Override
  public boolean isHeadless() {
    return GraphicsEnvironment.isHeadless();
  }

  /** @return event count of the IDE event queue; must be called on the UI thread */
  @RequiredUIAccess
  @Override
  public int getEventCount() {
    UIAccess.assertIsUIThread();
    return IdeEventQueue.getInstance().getEventCount();
  }

  /**
   * Schedules {@code supplier} on the EDT; the returned result completes
   * with its value, or is rejected (and the error logged) if it throws.
   */
  @Nonnull
  @Override
  public <T> AsyncResult<T> give(@Nonnull Supplier<T> supplier) {
    AsyncResult<T> asyncResult = AsyncResult.undefined();
    SwingUtilities.invokeLater(() -> {
      try {
        T result = supplier.get();
        asyncResult.setDone(result);
      }
      catch (Throwable e) {
        LOGGER.error(e);
        asyncResult.rejectWithThrowable(e);
      }
    });
    return asyncResult;
  }

  /**
   * Runs {@code runnable} on the EDT and blocks until it completes.
   * NOTE(review): InterruptedException is swallowed without restoring the
   * thread's interrupt flag -- confirm callers do not rely on it.
   */
  @Override
  public void giveAndWait(@Nonnull Runnable runnable) {
    ComponentStoreImpl.assertIfInsideSavingSession();
    try {
      SwingUtilities.invokeAndWait(runnable);
    }
    catch (InterruptedException | InvocationTargetException e) {
      //
    }
  }
}
} | 762 |
1,080 | <gh_stars>1000+
# Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from neutron.cmd import ovs_cleanup as util
from neutron.tests import base
class TestOVSCleanup(base.BaseTestCase):

    def test_clean_ovs_bridges(self):
        """do_main() must issue an ovs_cleanup for every discovered bridge."""
        config = mock.Mock()
        config.ovs_all_ports = True
        config.ovs_integration_bridge = 'br-int'
        config.external_network_bridge = 'br-ex'
        expected_bridges = [config.ovs_integration_bridge,
                            config.external_network_bridge]
        with mock.patch('neutron.agent.common.ovs_lib.BaseOVS') as ovs_cls:
            ovs = mock.Mock()
            ovs.get_bridges.return_value = expected_bridges
            ovs_cls.return_value = ovs
            util.do_main(config)
            expected_calls = [mock.call(bridge, True)
                              for bridge in expected_bridges]
            ovs.ovsdb.ovs_cleanup.assert_has_calls(expected_calls,
                                                   any_order=True)
| 616 |
521 | /* $Id: HGCM.h $ */
/** @file
* HGCM - Host-Guest Communication Manager.
*/
/*
* Copyright (C) 2006-2017 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*/
#ifndef __HGCM_h__
#define __HGCM_h__
#include <VBox/cdefs.h>
#include <VBox/types.h>
#include <VBox/vmm/pdmifs.h>
#include <VBox/VMMDev.h>
#include <VBox/hgcmsvc.h>
/* HGCM saved state version */
#define HGCM_SSM_VERSION 2

/* Handle of a HGCM service extension. */
struct _HGCMSVCEXTHANDLEDATA;

typedef struct _HGCMSVCEXTHANDLEDATA *HGCMSVCEXTHANDLE;

RT_C_DECLS_BEGIN

/* Host-side framework lifecycle. */
int HGCMHostInit (void);
int HGCMHostShutdown (void);

int HGCMHostReset (void);

/* Loads a service library and registers it under the given service name. */
int HGCMHostLoad (const char *pszServiceLibrary, const char *pszServiceName);

/* Attaches / detaches a host-side extension callback for a named service. */
int HGCMHostRegisterServiceExtension (HGCMSVCEXTHANDLE *pHandle, const char *pszServiceName, PFNHGCMSVCEXT pfnExtension, void *pvExtension);
void HGCMHostUnregisterServiceExtension (HGCMSVCEXTHANDLE handle);

/* Guest-initiated operations: client connect/disconnect and service calls. */
int HGCMGuestConnect (PPDMIHGCMPORT pHGCMPort, PVBOXHGCMCMD pCmdPtr, const char *pszServiceName, uint32_t *pClientID);

int HGCMGuestDisconnect (PPDMIHGCMPORT pHGCMPort, PVBOXHGCMCMD pCmdPtr, uint32_t clientID);

int HGCMGuestCall (PPDMIHGCMPORT pHGCMPort, PVBOXHGCMCMD pCmdPtr, uint32_t clientID, uint32_t function, uint32_t cParms, VBOXHGCMSVCPARM *paParms);

/* Host-initiated call into a named service. */
int HGCMHostCall (const char *pszServiceName, uint32_t function, uint32_t cParms, VBOXHGCMSVCPARM aParms[]);

#ifdef VBOX_WITH_CRHGSMI
/* Fast asynchronous call path (presumably the Chromium HGSMI 3D path --
 * verify against the CRHGSMI service). */
int HGCMHostSvcHandleCreate (const char *pszServiceName, HGCMCVSHANDLE * phSvc);
int HGCMHostSvcHandleDestroy (HGCMCVSHANDLE hSvc);
int HGCMHostFastCallAsync (HGCMCVSHANDLE hSvc, uint32_t function, PVBOXHGCMSVCPARM pParm, PHGCMHOSTFASTCALLCB pfnCompletion, void *pvCompletion);
#endif

/* Saved-state serialization of the HGCM services. */
int HGCMHostSaveState (PSSMHANDLE pSSM);
int HGCMHostLoadState (PSSMHANDLE pSSM);

RT_C_DECLS_END
#endif /* __HGCM_h__ */
| 872 |
5,133 | /*
* Copyright MapStruct Authors.
*
* Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package org.mapstruct.ap.test.bugs._1453;
import java.util.List;
import java.util.Map;
import org.mapstruct.Mapper;
import org.mapstruct.factory.Mappers;
/**
* @author <NAME>
*/
@Mapper
public interface Issue1453Mapper {

    Issue1453Mapper INSTANCE = Mappers.getMapper( Issue1453Mapper.class );

    /** Maps a single auction to its DTO. */
    AuctionDto map(Auction auction);

    /** List mapping with an upper-bounded ({@code ? extends}) source element. */
    List<AuctionDto> mapExtend(List<? extends Auction> auctions);

    /** List mapping with a lower-bounded ({@code ? super}) target element. */
    List<? super AuctionDto> mapSuper(List<Auction> auctions);

    /** Map mapping with upper-bounded wildcard keys and values. */
    Map<AuctionDto, AuctionDto> mapExtend(Map<? extends Auction, ? extends Auction> auctions);

    /** Map mapping with lower-bounded wildcard keys and values. */
    Map<? super AuctionDto, ? super AuctionDto> mapSuper(Map<Auction, Auction> auctions);
}
| 271 |
4,054 | <filename>config-model/src/test/java/com/yahoo/searchdefinition/derived/CasingTestCase.java<gh_stars>1000+
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.searchdefinition.derived;
import com.yahoo.searchdefinition.Search;
import com.yahoo.searchdefinition.SearchBuilder;
import com.yahoo.searchdefinition.SchemaTestCase;
import com.yahoo.searchdefinition.parser.ParseException;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
/**
* Correct casing for derived attributes
*
* @author vegardh
*/
public class CasingTestCase extends SchemaTestCase {

    /**
     * Verifies that index and attribute names keep their original casing
     * when derived from the schema.
     *
     * Fixed: the original passed arguments to assertEquals in
     * (actual, expected) order; JUnit's convention is (expected, actual),
     * which matters for failure messages.
     */
    @Test
    public void testCasing() throws IOException, ParseException {
        Search search = SearchBuilder.buildFromFile("src/test/examples/casing.sd");
        assertEquals("color", search.getIndex("color").getName());
        assertEquals("Foo", search.getIndex("Foo").getName());
        assertEquals("Price", search.getIndex("Price").getName());
        assertEquals("artist", search.getAttribute("artist").getName());
        assertEquals("Drummer", search.getAttribute("Drummer").getName());
        assertEquals("guitarist", search.getAttribute("guitarist").getName());
        assertEquals("title", search.getAttribute("title").getName());
        assertEquals("Trumpetist", search.getAttribute("Trumpetist").getName());
        assertEquals("Saxophonist", search.getAttribute("Saxophonist").getName());
        assertEquals("TenorSaxophonist", search.getAttribute("TenorSaxophonist").getName());
        assertEquals("Flutist", search.getAttribute("Flutist").getName());
    }
}
| 558 |
575 | #!/usr/bin/env vpython
#
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys

from blinkpy.web_tests.merge_results import main

if __name__ == '__main__':
    # Guard so the module can be imported (e.g. by tools or tests) without
    # immediately running the merge; CLI behavior is unchanged.
    main(sys.argv[1:])
| 85 |
826 | package edu.berkeley.bvlc;
/** JNI facade for global Caffe runtime configuration; not instantiable. */
public final class CAFFE {

    private CAFFE() {}

    static {
        // Load the native library backing the native methods below.
        LibUtils.loadLibrary("caffe");
    }

    /** Sets the compute mode (native enum value; presumably CPU/GPU -- verify against libcaffe). */
    public static native void set_mode(int mode);

    /** Sets the phase (native enum value; presumably TRAIN/TEST -- verify against libcaffe). */
    public static native void set_phase(int phase);

    public static native int get_mode();

    public static native int get_phase();

    /** Selects the active device by index. */
    public static native void set_device(int n);

    /** Prints device properties (native side). */
    public static native void DeviceQuery();
}
| 195 |
626 | """
Anserini: A toolkit for reproducible information retrieval research built on Lucene
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import print_function
import os
import re
import argparse
import logging
import json
logging.basicConfig()
class XFoldValidate(object):
    """
    Perform X-Fold cross validation for various
    parameters and report the average effectiveness
    for each fold. fold_mapping is an optional argument.
    It can be a dictionary {qid:fold_id} that maps
    each qid to its corresponding fold.
    """
    def __init__(self, output_root, collection,
                 fold=5, fold_mapping=None):
        self.logger = logging.getLogger('x_fold_cv.XFlodValidate')
        self.output_root = output_root
        self.eval_files_root = 'eval_files'
        self.collection = collection
        self.fold = fold
        self.fold_mapping = fold_mapping

    def _get_param_average(self):
        """Collect per-fold average effectiveness of every parameter file.

        Returns:
            dict: model -> metric -> fold_id -> {param: average score}
        """
        avg_performances = {}
        eval_root_dir = os.path.join(self.output_root, self.collection, self.eval_files_root)
        # Each sub-directory of eval_root_dir holds eval files for one metric.
        # (Renamed from `metric`: the original shadowed this name with the
        # inner `for metric in param_avg_performances` loop below.)
        for metric_dir in os.listdir(eval_root_dir):
            eval_dir = os.path.join(eval_root_dir, metric_dir)
            if os.path.isfile(eval_dir):
                continue
            # File names are "<model>_<param>".
            for fn in os.listdir(eval_dir):
                model, param = fn.split('_', 1)
                if model not in avg_performances:
                    avg_performances[model] = {}
                param_avg_performances = self._get_param_avg_performances(os.path.join(eval_dir, fn))
                for metric in param_avg_performances:
                    if metric not in avg_performances[model]:
                        avg_performances[model][metric] = {}
                    for fold_id in param_avg_performances[metric]:
                        if fold_id not in avg_performances[model][metric]:
                            avg_performances[model][metric][fold_id] = {}
                        avg_performances[model][metric][fold_id][param] = param_avg_performances[metric][fold_id]
        return avg_performances

    def _compute_fold_id(self, qid):
        """Return the fold id for a query id.

        Uses the explicit fold_mapping when supplied, otherwise assigns
        folds round-robin by qid modulo the fold count.
        """
        if self.fold_mapping:
            # use the fold mapping passed to it
            return self.fold_mapping[qid]
        else:
            # compute the fold id based on qid
            return int(qid) % self.fold

    def tune(self, verbose):
        """Tune parameters with x-fold cross validation.

        For each held-out fold, pick the parameter with the best summed
        training performance on the other x-1 folds (file name breaks
        ties), evaluate it on the held-out fold, and report the average
        over folds.

        Returns:
            dict: model -> metric -> average test effectiveness (4 d.p.)
        """
        avg_performances = self._get_param_average()
        res = {}
        for model in avg_performances:
            res[model] = {}
            for metric in avg_performances[model]:
                if verbose:
                    print('model: {}, metric: {}'.format(model, metric))
                metric_fold_performances = []
                for test_idx in range(self.fold):
                    test_fold_performances = avg_performances[model][metric][test_idx]
                    training_data = {}
                    for train_idx in range(self.fold):
                        if train_idx == test_idx:
                            continue
                        fold_performance = avg_performances[model][metric][train_idx]
                        for param in fold_performance:
                            if param not in training_data:
                                training_data[param] = .0
                            training_data[param] += fold_performance[param]
                    # sort in descending order based on performance first,
                    # then use filenames(x[0]) to break ties
                    sorted_training_performance = sorted(training_data.items(),
                                                         key=lambda x: (x[1], x[0]),
                                                         reverse=True)
                    best_param = sorted_training_performance[0][0]
                    if verbose:
                        print('\tFold: {}'.format(test_idx))
                        print('\t\tBest param: {}'.format(best_param))
                        print('\t\ttest performance: {0:.4f}'.format(test_fold_performances[best_param]))

                    metric_fold_performances.append(test_fold_performances[best_param])
                res[model][metric] = round(sum(metric_fold_performances) / len(metric_fold_performances), 4)
        return res

    def _get_param_avg_performances(self, file_path):
        """Parse one eval file and average per-query scores per fold.

        Lines look like "<metric> <qid> <score>"; the aggregate 'all'
        line is skipped. Rows with a non-numeric score are logged and
        ignored.
        """
        param_performance_list = {}
        for fold_id in range(self.fold):
            param_performance_list[fold_id] = {}
        with open(file_path) as f:
            for line in f:
                line = line.strip()
                if line:
                    row = line.split()
                    metric = row[0]
                    if metric not in param_performance_list[0]:
                        for fold_id in param_performance_list:
                            param_performance_list[fold_id][metric] = []
                    qid = row[1]
                    try:
                        value = float(row[2])
                    # Was a bare `except:`, which also swallowed
                    # KeyboardInterrupt/SystemExit; only a malformed score
                    # should be skipped.
                    except ValueError:
                        self.logger.error( 'Cannot parse %s' %(row[2]) )
                        continue
                    else:
                        if qid != 'all':
                            # compute fold id base on qid
                            fold_id = self._compute_fold_id(qid)
                            param_performance_list[fold_id][metric].append(value)
        param_avg_performances = {}
        # NOTE(review): assumes every fold received at least one score per
        # metric; an empty fold would raise ZeroDivisionError below.
        for metric in param_performance_list[0].keys():
            param_avg_performances[metric] = {}
            for fold_id in param_performance_list:
                param_avg_performances[metric][fold_id] = round(sum(param_performance_list[fold_id][metric]) / len(param_performance_list[fold_id][metric]), 4)
        return param_avg_performances
def main():
    """CLI entry point: parse arguments, optionally load a DRR fold
    mapping, run x-fold cross validation and print the result as JSON."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--output_root', default='fine_tuning_results', help='output directory of all results')
    parser.add_argument('--fold', '-f', default=2, type=int, help='number of fold')
    parser.add_argument('--verbose', '-v', action='store_true', help='output in verbose mode')
    parser.add_argument('--collection', required=True, help='the collection key in yaml')
    parser.add_argument('--fold_dir', help='directory of drr fold files')
    args=parser.parse_args()
    fold_mapping = {}
    if args.fold_dir:
        # Imported lazily: run_batch is only needed when an explicit DRR
        # fold mapping is requested.
        from run_batch import load_drr_fold_mapping
        fold_mapping = load_drr_fold_mapping(args.fold_dir)

    print(json.dumps(XFoldValidate(args.output_root, args.collection, args.fold, fold_mapping).tune(args.verbose), sort_keys=True, indent=2))

if __name__ == '__main__':
    main()
| 3,651 |
348 | {"nom":"Briarres-sur-Essonne","circ":"5ème circonscription","dpt":"Loiret","inscrits":456,"abs":255,"votants":201,"blancs":16,"nuls":5,"exp":180,"res":[{"nuance":"REM","nom":"<NAME>","voix":96},{"nuance":"LR","nom":"<NAME>","voix":84}]} | 96 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Villeneuve","circ":"2ème circonscription","dpt":"Alpes-de-Haute-Provence","inscrits":3181,"abs":1964,"votants":1217,"blancs":95,"nuls":33,"exp":1089,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":652},{"nuance":"FI","nom":"M. <NAME>","voix":437}]} | 123 |
780 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.geektimes.enterprise.inject.standard.event;
import org.geektimes.enterprise.inject.standard.beans.manager.StandardBeanManager;
import javax.enterprise.inject.spi.*;
/**
* The {@link ProcessSyntheticAnnotatedType} event is fired by the container
* If an extension calls {@link BeforeBeanDiscovery#addAnnotatedType(AnnotatedType, String)}
* or {@link AfterTypeDiscovery#addAnnotatedType(AnnotatedType, String)},
* the type passed must be added to the set of discovered types.
*
* @param <X> The class being annotated
* @author <a href="mailto:<EMAIL>">Mercy</a>
* @see BeforeBeanDiscovery#addAnnotatedType(AnnotatedType, String)
* @see AfterTypeDiscovery#addAnnotatedType(AnnotatedType, String)
* @since 1.0.0
*/
public class ProcessSyntheticAnnotatedTypeEvent<X> extends ProcessAnnotatedTypeEvent<X> implements ProcessSyntheticAnnotatedType<X> {

    // Extension whose addAnnotatedType(...) call produced this synthetic type.
    private final Extension source;

    public ProcessSyntheticAnnotatedTypeEvent(AnnotatedType annotatedType, Extension source,
                                              StandardBeanManager standardBeanManager) {
        super(annotatedType, standardBeanManager);
        this.source = source;
    }

    /**
     * @return the extension that registered the synthetic annotated type
     */
    @Override
    public Extension getSource() {
        return source;
    }

    @Override
    public String toString() {
        return "ProcessSyntheticAnnotatedTypeEvent{" +
                " annotatedType=" + getAnnotatedType() +
                ", source=" + getSource() +
                '}';
    }
}
| 751 |
14,668 | // Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// This file declares a helper function that will check to see if a given folder
// is "identical" to another (for some value of identical, see below).
//
#ifndef CHROME_INSTALLER_UTIL_DUPLICATE_TREE_DETECTOR_H_
#define CHROME_INSTALLER_UTIL_DUPLICATE_TREE_DETECTOR_H_
namespace base {
class FilePath;
}
namespace installer {
// Returns true if |dest_path| contains all the files from |src_path| in the
// same directory structure and each of those files is of the same length.
// |src_path| and |dest_path| must either both be files or both be directories.
// Note that THIS IS A WEAK DEFINITION OF IDENTICAL and is intended only to
// catch cases of missing files or obvious modifications.
// It notably DOES NOT CHECKSUM the files.
bool IsIdenticalFileHierarchy(const base::FilePath& src_path,
const base::FilePath& dest_path);
} // namespace installer
#endif // CHROME_INSTALLER_UTIL_DUPLICATE_TREE_DETECTOR_H_
| 366 |
3,705 | import chainer
import chainer.backends
from chainer.backends.cuda import cupy
import chainer.functions as F
import chainer.links as L
import chainer.testing
import chainer.testing.attr
import chainermn
import numpy as np
import pytest
class Model(chainer.Chain):
    """Pipeline-parallel RNN regression model.

    An embedding (l1) feeds a multi-node NStepLSTM whose hidden state is
    exchanged with neighbouring ranks (rank_prev -> rank_next), followed by
    two linear layers producing a scalar prediction; the loss is MSE.
    """

    def __init__(self, n_vocab, n_hid, communicator, rank_next, rank_prev):
        n_layers = 1
        n_rnn_hid = 10
        super(Model, self).__init__()
        with self.init_scope():
            self.l1 = L.EmbedID(n_vocab, n_rnn_hid, ignore_label=-1)
            # Wraps the LSTM so hidden state flows rank_prev -> this -> rank_next.
            self.rnn = chainermn.links.create_multi_node_n_step_rnn(
                L.NStepLSTM(
                    n_layers=n_layers, in_size=n_rnn_hid, out_size=n_rnn_hid,
                    dropout=0.1),
                communicator, rank_in=rank_prev, rank_out=rank_next,
            )
            self.l2 = L.Linear(n_rnn_hid, n_hid)
            self.l3 = L.Linear(n_hid, 1)

    def __call__(self, xs, ts):
        # Embed each variable-length sequence separately.
        h1 = [self.l1(x) for x in xs]
        # MultiNodeNStepRNN returns outputs of actual_rnn + delegate_variable.
        cell1, cell2, os, delegate_variable = self.rnn(h1)
        os = F.concat(os, axis=0)
        h2 = self.l2(os)
        h3 = self.l3(h2)
        ys = F.sum(h3, axis=0)
        err = F.mean_squared_error(ys, ts)
        # Tie the loss to the delegate variable so backward crosses process
        # boundaries in the correct order.
        err, = chainermn.functions.pseudo_connect(delegate_variable, err)
        return err
def setup_communicator(gpu):
    """Create a ChainerMN communicator for the test.

    Returns (communicator, rank_prev, rank_next) where rank_prev/rank_next
    are the neighbouring pipeline ranks, or None at the ends. Skips the
    test when fewer than two processes are available.
    """
    if gpu:
        comm = chainermn.create_communicator('flat')
        chainer.backends.cuda.get_device_from_id(comm.intra_rank).use()
    else:
        comm = chainermn.create_communicator('naive')

    if comm.size < 2:
        pytest.skip('This test is for multinode only')

    rank_prev = comm.rank - 1 if comm.rank > 0 else None
    rank_next = comm.rank + 1 if comm.rank + 1 < comm.size else None
    return comm, rank_prev, rank_next
def check_homogeneous_rnn(gpu, dtype):
    """Run training steps where every rank holds an identically-sized model;
    the test passes if backprop completes without deadlock."""
    communicator, rank_prev, rank_next = setup_communicator(gpu=gpu)

    n, n_vocab, l = 100, 8, 10
    # Number of model parameters are same among processes.
    n_hid = 2

    with chainer.using_config('dtype', dtype):
        # Variable-length integer sequences with scalar regression targets.
        X = [np.random.randint(
            0, n_vocab, size=np.random.randint(l // 2, l + 1),
            dtype=np.int32)
            for _ in range(n)]
        Y = (np.random.rand(n) * 2).astype(dtype)
        model = Model(
            n_vocab, n_hid, communicator, rank_next,
            rank_prev)

        if gpu:
            model.to_device(cupy.cuda.Device())
            X = [chainer.backends.cuda.to_gpu(x) for x in X]
            Y = chainer.backends.cuda.to_gpu(Y)

        for i in range(n):
            err = model(X[i:i + 1], Y[i:i + 1])
            err.backward()

    # Check if backprop finishes without deadlock.
    assert True


@pytest.mark.parametrize('dtype', [np.float16, np.float32])
def test_homogeneous_rnn_cpu(dtype):
    check_homogeneous_rnn(False, dtype)


@chainer.testing.attr.gpu
@pytest.mark.parametrize('dtype', [np.float16, np.float32])
def test_homogeneous_rnn_gpu(dtype):
    check_homogeneous_rnn(True, dtype)
def check_heterogeneous_rnn(gpu, dtype):
    """Run training steps where each rank holds a differently-sized model;
    the test passes if backprop completes without deadlock."""
    communicator, rank_prev, rank_next = setup_communicator(gpu)

    with chainer.using_config('dtype', dtype):
        n, n_vocab, l = 100, 8, 10
        # Number of model parameters are different among processes.
        n_hid = (communicator.rank + 1) * 10

        # Variable-length integer sequences with scalar regression targets.
        X = [np.random.randint(
            0, n_vocab, size=np.random.randint(l // 2, l + 1),
            dtype=np.int32)
            for _ in range(n)]
        Y = (np.random.rand(n) * 2).astype(dtype)
        model = Model(
            n_vocab, n_hid, communicator, rank_next,
            rank_prev)

        if gpu:
            model.to_device(cupy.cuda.Device())
            X = [chainer.backends.cuda.to_gpu(x) for x in X]
            Y = chainer.backends.cuda.to_gpu(Y)

        for i in range(n):
            err = model(X[i:i + 1], Y[i:i + 1])
            err.backward()

    # Check if backprop finishes without deadlock.
    assert True


@pytest.mark.parametrize('dtype', [np.float16, np.float32])
def test_heterogeneous_rnn_cpu(dtype):
    check_heterogeneous_rnn(False, dtype)


@chainer.testing.attr.gpu
@pytest.mark.parametrize('dtype', [np.float16, np.float32])
def test_heterogeneous_rnn_gpu(dtype):
    check_heterogeneous_rnn(True, dtype)
| 2,161 |
676 | package com.alorma.github.ui.utils;
import android.widget.AdapterView;
/**
 * Convenience {@link AdapterView.OnItemSelectedListener} that stubs out
 * {@code onNothingSelected}, so subclasses only implement
 * {@code onItemSelected}.
 */
public abstract class SimpleItemSelectedItemListener implements AdapterView.OnItemSelectedListener {

  @Override
  public void onNothingSelected(AdapterView<?> adapterView) {
    // Intentionally empty: selection clearing is ignored by default.
  }
}
| 71 |
2,434 | <reponame>SergKhram/swagger2markup
/*
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.swagger2markup.internal.utils;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import io.github.swagger2markup.model.SwaggerPathOperation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class RegexUtils {

    // Fixed: the logger was created with TagUtils.class, attributing this
    // class's log output to the wrong logger; also made final.
    private static final Logger logger = LoggerFactory.getLogger(RegexUtils.class);

    /**
     * Alphabetically sort the list of groups
     *
     * @param groups List of available groups
     * @return String[] of sorted groups
     */
    public static String[] toSortedArray(Set<String> groups) {
        //TODO: sort in another way than just alphabetically
        String[] sortedArray = groups.toArray(new String[0]);
        Arrays.sort(sortedArray);
        return sortedArray;
    }

    /**
     * Groups the operations by regex group. The key of the Multimap is the group name.
     * The value of the Multimap is a PathOperation
     *
     * @param allOperations all operations
     * @param headerPattern regex pattern used for determining headers
     * @return Operations grouped by regex
     */
    public static Multimap<String, SwaggerPathOperation> groupOperationsByRegex(List<SwaggerPathOperation> allOperations, Pattern headerPattern) {
        Multimap<String, SwaggerPathOperation> operationsGroupedByRegex = LinkedHashMultimap.create();

        for (SwaggerPathOperation operation : allOperations) {
            String path = operation.getPath();
            Matcher m = headerPattern.matcher(path);
            // Only operations whose path matches the pattern with a non-null
            // first capture group are grouped; the rest are warned about and
            // excluded from the output.
            if (m.matches() && m.group(1) != null) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Added path operation '{}' to header '{}'", operation, m.group(1));
                }
                operationsGroupedByRegex.put(m.group(1), operation);
            } else {
                if (logger.isWarnEnabled()) {
                    logger.warn("Operation '{}' does not match regex '{}' and will not be included in output", operation, headerPattern.toString());
                }
            }
        }
        return operationsGroupedByRegex;
    }
}
| 1,055 |
852 | <reponame>ckamtsikis/cmssw<gh_stars>100-1000
#include "EventFilter/HcalRawToDigi/interface/HcalPacker.h"
#include "EventFilter/HcalRawToDigi/interface/HcalHTRData.h"
#include "EventFilter/HcalRawToDigi/interface/HcalDCCHeader.h"
#include "DataFormats/HcalDetId/interface/HcalGenericDetId.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "DataFormats/FEDRawData/interface/FEDTrailer.h"
#include "FWCore/Utilities/interface/CRC16.h"
// All digi-collection pointers start out unset; the caller wires up only the
// collections that are actually available for this packing pass.
HcalPacker::Collections::Collections() {
  hbhe = nullptr;
  hfCont = nullptr;
  hoCont = nullptr;
  zdcCont = nullptr;
  tpCont = nullptr;
  calibCont = nullptr;
}
// Copy the raw samples of the digi matching `did` out of collection `pt` into
// `buffer`, reporting presamples and the zero-suppression flags. Returns the
// number of samples copied (0 when the collection is absent or has no digi
// for this id; `presamples` is left untouched in that case).
template <class Coll, class DetIdClass>
int process(const Coll* pt, const DetId& did, unsigned short* buffer, int& presamples, bool& isUS, bool& isMP) {
  isUS = false;
  isMP = false;
  if (pt == nullptr) {
    return 0;
  }
  typename Coll::const_iterator digi = pt->find(DetIdClass(did));
  if (digi == pt->end()) {
    return 0;
  }
  isUS = digi->zsUnsuppressed();
  isMP = digi->zsMarkAndPass();
  presamples = digi->presamples();
  const int nsamples = digi->size();
  for (int s = 0; s < nsamples; s++) {
    buffer[s] = (*digi)[s].raw();
  }
  return nsamples;
}
static unsigned char processTrig(const HcalTrigPrimDigiCollection* pt,
const HcalTrigTowerDetId& tid,
unsigned short* buffer) {
if (pt == nullptr) {
return 0;
}
int size = 0;
HcalTrigPrimDigiCollection::const_iterator i = pt->find(tid);
bool any_nonzero = false;
if (i != pt->end()) {
int presamples = i->presamples();
size = i->size();
for (int j = 0; j < size; j++) {
buffer[j] = (*i)[j].raw();
if ((buffer[j] & 0x1FF) != 0)
any_nonzero = true;
if (j == presamples) {
buffer[j] |= 0x0200;
}
}
}
return (any_nonzero) ? (size) : (0);
}
// Dispatch to the right digi collection for this detector id and copy its
// samples into `buffer`. Returns the sample count (0 when the id is not an
// HCAL/ZDC id or no matching digi exists).
int HcalPacker::findSamples(const DetId& did,
                            const Collections& inputs,
                            unsigned short* buffer,
                            int& presamples,
                            bool& isUS,
                            bool& isMP) const {
  const bool isHcalId = (did.det() == DetId::Hcal);
  const bool isZdcId = (did.det() == DetId::Calo && did.subdetId() == HcalZDCDetId::SubdetectorId);
  if (!isHcalId && !isZdcId) {
    return 0;
  }
  HcalGenericDetId genId(did);
  switch (genId.genericSubdet()) {
    case (HcalGenericDetId::HcalGenBarrel):
    case (HcalGenericDetId::HcalGenEndcap):
      return process<HBHEDigiCollection, HcalDetId>(inputs.hbhe, did, buffer, presamples, isUS, isMP);
    case (HcalGenericDetId::HcalGenOuter):
      return process<HODigiCollection, HcalDetId>(inputs.hoCont, did, buffer, presamples, isUS, isMP);
    case (HcalGenericDetId::HcalGenForward):
      return process<HFDigiCollection, HcalDetId>(inputs.hfCont, did, buffer, presamples, isUS, isMP);
    case (HcalGenericDetId::HcalGenZDC):
      return process<ZDCDigiCollection, HcalZDCDetId>(inputs.zdcCont, did, buffer, presamples, isUS, isMP);
    case (HcalGenericDetId::HcalGenCalibration):
      return process<HcalCalibDigiCollection, HcalCalibDetId>(inputs.calibCont, did, buffer, presamples, isUS, isMP);
    default:
      return 0;
  }
}
// Pack all digis/trigger primitives for one DCC into a raw FED buffer:
// fill 15 HTR spigot payloads from the electronics map, then assemble the
// DCC header, spigot data and FED trailer (with CRC) into `output`.
void HcalPacker::pack(int fedid,
                      int dccnumber,
                      int nl1a,
                      int orbitn,
                      int bcn,
                      const Collections& inputs,
                      const HcalElectronicsMap& emap,
                      FEDRawData& output) const {
  // Per-spigot scratch buffers: one sample slot per (channel, sample).
  std::vector<unsigned short> precdata(HcalHTRData::CHANNELS_PER_SPIGOT * HcalHTRData::MAXIMUM_SAMPLES_PER_CHANNEL);
  std::vector<unsigned short> trigdata(HcalHTRData::CHANNELS_PER_SPIGOT * HcalHTRData::MAXIMUM_SAMPLES_PER_CHANNEL);
  std::vector<unsigned char> preclen(HcalHTRData::CHANNELS_PER_SPIGOT);
  std::vector<unsigned char> triglen(HcalHTRData::CHANNELS_PER_SPIGOT);
  static const int HTRFormatVersion = 5;
  bool channelIsMP[HcalHTRData::CHANNELS_PER_SPIGOT];
  HcalHTRData spigots[15];
  // loop over all valid channels in the given dcc, spigot by spigot.
  for (int spigot = 0; spigot < 15; spigot++) {
    spigots[spigot].allocate(HTRFormatVersion);
    HcalElectronicsId exampleEId;  // remembers one real EId to derive the submodule word later
    int npresent = 0, npresenttp = 0;
    int presamples = -1, samples = -1;  // -1 == "not yet established for this HTR"
    bool haveUnsuppressed = false;
    // Precision readout: 8 fibers x 3 fiber channels per spigot.
    for (int fiber = 1; fiber <= 8; fiber++) {
      for (int fiberchan = 0; fiberchan < 3; fiberchan++) {
        int linear = (fiber - 1) * 3 + fiberchan;
        // Channel-id bits (top 5 bits of a QIE sample word) for this fiber/channel.
        HcalQIESample chanSample(0, 0, fiber, fiberchan, false, false);
        unsigned short chanid = chanSample.raw() & 0xF800;
        preclen[linear] = 0;
        channelIsMP[linear] = false;
        HcalElectronicsId partialEid(fiberchan, fiber, spigot, dccnumber);
        // does this partial id exist?
        HcalElectronicsId fullEid;
        HcalGenericDetId genId;
        if (!emap.lookup(partialEid, fullEid, genId)) {
          continue;
        }
        // next, see if there is a digi with this id
        unsigned short* database = &(precdata[linear * HcalHTRData::MAXIMUM_SAMPLES_PER_CHANNEL]);
        int mypresamples = -1;
        bool isUS = false, isMP = false;
        int mysamples = findSamples(genId, inputs, database, mypresamples, isUS, isMP);
        haveUnsuppressed = haveUnsuppressed || isUS;
        channelIsMP[linear] = isMP;
        if (mysamples > 0) {
          // All channels of one HTR must agree on (pre)sample counts; mismatches
          // are logged and the offending channel is dropped.
          if (samples < 0) {
            samples = mysamples;
          } else if (samples != mysamples) {
            edm::LogError("HCAL") << "Mismatch of samples in a single HTR (unsupported) " << mysamples
                                  << " != " << samples;
            continue;
          }
          if (presamples < 0) {
            presamples = mypresamples;
            exampleEId = fullEid;
          } else if (mypresamples != presamples) {
            edm::LogError("HCAL") << "Mismatch of presamples in a single HTR (unsupported) " << mypresamples
                                  << " != " << presamples;
            continue;
          }
          // Stamp the channel id into the top bits of every sample word.
          for (int ii = 0; ii < samples; ii++) {
            database[ii] = (database[ii] & 0x7FF) | chanid;
          }
          preclen[linear] = (unsigned char)(samples);
          npresent++;
        }
      }
    }
    // Trigger primitives: 6 SLBs x 4 SLB channels per spigot.
    for (int slb = 1; slb <= 6; slb++) {
      for (int slbchan = 0; slbchan <= 3; slbchan++) {
        int linear = (slb - 1) * 4 + slbchan;
        HcalTriggerPrimitiveSample idCvt(0, false, slb, slbchan);
        unsigned short chanid = idCvt.raw() & 0xF800;
        triglen[linear] = 0;
        HcalElectronicsId partialEid(slbchan, slb, spigot, dccnumber, 0, 0, 0);
        // does this partial id exist?
        HcalElectronicsId fullEid;
        HcalTrigTowerDetId tid;
        if (!emap.lookup(partialEid, fullEid, tid)) {
          // std::cout << "TPGPACK : no match for " << partialEid << std::endl;
          continue;
        } //else std::cout << "TPGPACK : converted " << partialEid << " to " << fullEid << "/" << tid << std::endl;
        // finally, what about a trigger channel?
        if (!tid.null()) {
          if (presamples < 0) {
            exampleEId = fullEid;
          }
          unsigned short* trigbase = &(trigdata[linear * HcalHTRData::MAXIMUM_SAMPLES_PER_CHANNEL]);
          triglen[linear] = processTrig(inputs.tpCont, tid, trigbase);
          if (triglen[linear]) {
            npresent++;
            npresenttp++;
          }
          // Stamp the SLB channel id into the top bits of each TP word.
          for (unsigned char q = 0; q < triglen[linear]; q++) {
            trigbase[q] = (trigbase[q] & 0x7FF) | chanid;
          }
        }
      }
    }
    /// pack into HcalHTRData
    if (npresent > 0) {
      spigots[spigot].pack(&(preclen[0]), &(precdata[0]), &(triglen[0]), &(trigdata[0]), false);
      static const int pipeline = 0x22;
      static const int firmwareRev = 0;
      // Submodule word: top/bottom bit, then HTR slot, then VME crate id.
      int submodule = exampleEId.htrTopBottom() & 0x1;
      submodule |= (exampleEId.htrSlot() & 0x1F) << 1;
      submodule |= (exampleEId.readoutVMECrateId() & 0x1f) << 6;
      // Samples and Presamples can't be negative, or the HeaderTrailer will
      // generate a large large number using them (unsigned int roll over)
      if (samples < 0) {
        samples = 0;
      }
      if (presamples < 0) {
        presamples = 0;
      }
      spigots[spigot].packHeaderTrailer(nl1a,
                                        bcn,
                                        submodule,
                                        orbitn,
                                        pipeline,
                                        samples,
                                        presamples,
                                        firmwareRev,
                                        1);  // need non-zero falvor
      if (haveUnsuppressed) {
        spigots[spigot].packUnsuppressed(channelIsMP);
      }
    }
  }
  // calculate the total length, and resize the FEDRawData
  int theSize = 0;
  for (int spigot = 0; spigot < 15; spigot++) {
    theSize += spigots[spigot].getRawLength() * sizeof(unsigned short);
  }
  theSize += sizeof(HcalDCCHeader) + 8;  // 8 for trailer
  theSize += (8 - (theSize % 8)) % 8;  // even number of 64-bit words.
  output.resize(theSize);
  // construct the bare DCC Header
  HcalDCCHeader* dcc = (HcalDCCHeader*)(output.data());
  dcc->clear();
  dcc->setHeader(fedid, bcn, nl1a, orbitn);
  // pack the HTR data into the FEDRawData block using HcalDCCHeader
  for (int spigot = 0; spigot < 15; spigot++) {
    if (spigots[spigot].getRawLength() > 0) {
      dcc->copySpigotData(spigot, spigots[spigot], true, 0);
    }
  }
  // trailer
  FEDTrailer fedTrailer(output.data() + (output.size() - 8));
  fedTrailer.set(
      output.data() + (output.size() - 8), output.size() / 8, evf::compute_crc(output.data(), output.size()), 0, 0);
}
| 4,723 |
570 | <filename>picture_library/src/main/java/com/luck/picture/lib/instagram/adapter/FilterItemView.java
package com.luck.picture.lib.instagram.adapter;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.Typeface;
import android.view.Gravity;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import com.luck.picture.lib.config.PictureSelectionConfig;
import com.luck.picture.lib.instagram.InsGallery;
import com.luck.picture.lib.instagram.filter.FilterType;
import com.luck.picture.lib.tools.ScreenUtils;
import androidx.annotation.NonNull;
/**
* ================================================
* Created by JessYan on 2020/6/2 15:35
* <a href="mailto:<EMAIL>">Contact me</a>
* <a href="https://github.com/JessYanCoding">Follow me</a>
* ================================================
*/
/**
 * One cell of the filter strip: a centered label above a square thumbnail.
 * Selecting a cell lifts it slightly and highlights its label.
 */
public class FilterItemView extends FrameLayout {
    private TextView mTitleView;
    private PictureSelectionConfig mConfig;
    private ImageView mImageView;

    public FilterItemView(@NonNull Context context, PictureSelectionConfig config) {
        super(context);
        mConfig = config;

        TextView title = new TextView(context);
        title.setTextColor(Color.parseColor("#999999"));
        title.setTextSize(12);
        title.setTypeface(Typeface.defaultFromStyle(Typeface.BOLD));
        title.setGravity(Gravity.CENTER);
        mTitleView = title;
        addView(mTitleView);

        ImageView thumbnail = new ImageView(context);
        thumbnail.setScaleType(ImageView.ScaleType.CENTER_CROP);
        mImageView = thumbnail;
        addView(mImageView);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Fixed 100dp-wide cell; the thumbnail is a square of the same width.
        int cellWidth = ScreenUtils.dip2px(getContext(), 100);
        int cellHeight = MeasureSpec.getSize(heightMeasureSpec);
        mTitleView.measure(MeasureSpec.makeMeasureSpec(cellWidth, MeasureSpec.AT_MOST), MeasureSpec.makeMeasureSpec(cellHeight, MeasureSpec.AT_MOST));
        mImageView.measure(MeasureSpec.makeMeasureSpec(cellWidth, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(cellWidth, MeasureSpec.EXACTLY));
        setMeasuredDimension(cellWidth, cellHeight);
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        // Center the (title + 5dp gap + thumbnail) column vertically; title is
        // centered horizontally, thumbnail spans the full cell width.
        int childLeft = (getMeasuredWidth() - mTitleView.getMeasuredWidth()) / 2;
        int childTop = (getMeasuredHeight() - mTitleView.getMeasuredHeight() - mImageView.getMeasuredHeight() - ScreenUtils.dip2px(getContext(), 5)) / 2;
        mTitleView.layout(childLeft, childTop, childLeft + mTitleView.getMeasuredWidth(), childTop + mTitleView.getMeasuredHeight());

        childLeft = 0;
        childTop = childTop + ScreenUtils.dip2px(getContext(), 5) + mTitleView.getMeasuredHeight();
        mImageView.layout(childLeft, childTop, childLeft + mImageView.getMeasuredWidth(), childTop + mImageView.getMeasuredHeight());
    }

    public void selection(boolean isSelection) {
        if (!isSelection) {
            setTranslationY(0);
            mTitleView.setTextColor(Color.parseColor("#999999"));
            return;
        }
        boolean defaultTheme = mConfig.instagramSelectionConfig.getCurrentTheme() == InsGallery.THEME_STYLE_DEFAULT;
        mTitleView.setTextColor(Color.parseColor(defaultTheme ? "#262626" : "#fafafa"));
        setTranslationY(-ScreenUtils.dip2px(getContext(), 10));
    }

    public void refreshFilter(FilterType filterType, Bitmap bitmap, int position, int selectionPosition) {
        selection(position == selectionPosition);
        mTitleView.setText(filterType.getName());
        mImageView.setImageBitmap(bitmap);
    }
}
| 1,426 |
2,728 | #ifndef BOOST_SYSTEM_DETAIL_THROWS_HPP_INCLUDED
#define BOOST_SYSTEM_DETAIL_THROWS_HPP_INCLUDED

// Copyright <NAME> 2006, 2007
// Copyright <NAME> 2007
// Copyright <NAME> 2017, 2018
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
// See library home page at http://www.boost.org/libs/system

namespace boost
{

namespace system
{

class error_code;

} // namespace system

// boost::throws()

namespace detail
{

// Misuse of the error_code object is turned into a noisy failure by
// poisoning the reference: the pointer below does not refer to a real
// error_code, so any dereference faults immediately. This particular
// implementation doesn't produce warnings or errors from popular compilers,
// is very efficient (as determined by inspecting generated code), and does
// not suffer from order of initialization problems. In practice, it also
// causes user error-handling implementation mistakes to be detected very
// early in the development cycle.
inline system::error_code* throws()
{
    // See github.com/boostorg/system/pull/12 by visigoth for why the return
    // is poisoned with nonzero rather than (0). A test, test_throws_usage(),
    // has been added to error_code_test.cpp, and as visigoth mentioned it
    // fails on clang for release builds with a return of 0 but works fine
    // with (1).
    // Since the undefined behavior sanitizer (-fsanitize=undefined) does not
    // allow a reference to be formed to the unaligned address of (1), we use
    // (8) instead, which is suitably aligned for error_code.
    return reinterpret_cast<system::error_code*>(8);
}

} // namespace detail

// The sentinel reference callers pass to mean "throw on error" instead of
// reporting through an error_code out-parameter. Never read or write it.
inline system::error_code& throws()
{
    return *detail::throws();
}

} // namespace boost

#endif // #ifndef BOOST_SYSTEM_DETAIL_THROWS_HPP_INCLUDED
380 | from rest_framework import views
from rest_framework.response import Response
import ipaddress
import socket
from .hash_calculator.cryptographic_hash_calculator import CryptographicHashWrapper as CryptoHash
from .hash_calculator.historic_hash_calculator import HistoricHashWrapper as HistoryHash
from .entropy_calculator.shanon_entropy import ShanonEntropy
from .port_scanner import PortScanner
class HashCalcualtorView(views.APIView):
    """Compute the most popular hash values for a given character string."""

    def get(self, request, value):
        """Return the values of various hashes computed from the user-supplied string."""
        payload = {
            "cryptographic_hashes": CryptoHash(value).values,
            "historic_hashes": HistoryHash(value).values,
        }
        return Response(payload)
class EntropyCalculatorView(views.APIView):
    """Compute the entropy of a character sequence using Shannon entropy."""

    def get(self, request, value_sequence):
        entropy = ShanonEntropy.calculate(value_sequence)
        return Response({"shanon_entropy": entropy})
class PortScannerView(views.APIView):
    """Basic host port scanning.

    Warning: this kind of scanning takes a lot of time - the real nmap is a
    much better choice for serious use!
    """

    def is_ipv4(self, host):
        # True when ``host`` already parses as an IPv4 address/network.
        try:
            ipaddress.IPv4Network(host)
            return True
        except ValueError:
            return False

    def change_to_domain_addres(self, host):
        # Resolve a domain name to an IPv4 address; IPv4 input is passed through.
        # NOTE(review): socket.gaierror from an unresolvable name propagates to
        # the caller (it is raised before the try block in get()).
        if self.is_ipv4(host):
            return host
        return socket.gethostbyname(host)

    def get(self, request, host, port):
        """
        Basic port scan of a host.

        :param request: django request object
        :param host: IP address or domain name of the chosen host
        :param port: port (or port range) to scan
        :return: status information for the scanned ports - "open" or "closed"
        """
        ip_address = self.change_to_domain_addres(host)
        try:
            return Response({
                "port_scanner": PortScanner(ip_address, port).scan()
            })
        # Fix: was ``except BaseException``, which also swallowed
        # KeyboardInterrupt/SystemExit; Exception keeps the best-effort
        # behavior without hiding interpreter-level signals.
        except Exception as e:
            print(e)
            print(type(e))
            return Response({
                "PortScanner": f"Unable to scan host={host}"
            })
2,489 | /*
right.h
Right panel of UI utils.
*/
#pragma once
#include "ofMain.h"
#include "header.h"
#include "radarContainer.h"
#include "spikeGraph.h"
#include "animated.h"
// Right-hand UI panel: a header/footer pair framing a radar widget and a
// spike graph, plus animated text and tick lines. Inherits animation
// plumbing from Animated.
class Right : public Animated {
public:
    Right();
    // Renders the panel and all child widgets.
    void draw();
    // Panel rectangle in screen coordinates (set via setPos).
    float x;
    float y;
    float w;
    float h;
    // Moves the panel; child widget positions are derived from (x_, y_).
    void setPos(float x_, float y_);
    // Re-register animation event hooks for this panel's children.
    void updateDependencyEvents();
    // Stagger child animations starting from the given delay.
    void updateDependencyDelays(int delay_);
private:
    vector<AnimatedText> texts;
    Header header;
    RadarContainer radar;
    SpikeGraph sg;
    Header footer;
    AnimatedTickLine tline1;
    AnimatedTickLine tline2;
};
1,288 | // Copyright 2017 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
#ifndef PACKAGER_MEDIA_FORMATS_WEBVTT_WEBVTT_MP4_CUE_HANDLER_H_
#define PACKAGER_MEDIA_FORMATS_WEBVTT_WEBVTT_MP4_CUE_HANDLER_H_
#include <stdint.h>
#include <list>
#include <queue>
#include "packager/media/base/buffer_writer.h"
#include "packager/media/base/media_handler.h"
namespace shaka {
namespace media {
// A media handler that should come after the cue aligner and segmenter and
// should come before the muxer. This handler is to convert text samples
// to media samples so that they can be sent to a mp4 muxer.
// A media handler that should come after the cue aligner and segmenter and
// should come before the muxer. This handler converts text samples to media
// samples so that they can be sent to an mp4 muxer.
class WebVttToMp4Handler : public MediaHandler {
 public:
  WebVttToMp4Handler() = default;
  virtual ~WebVttToMp4Handler() override = default;

 private:
  WebVttToMp4Handler(const WebVttToMp4Handler&) = delete;
  WebVttToMp4Handler& operator=(const WebVttToMp4Handler&) = delete;

  Status InitializeInternal() override;
  // Routes each incoming StreamData to the matching On* handler below.
  Status Process(std::unique_ptr<StreamData> stream_data) override;

  Status OnStreamInfo(std::unique_ptr<StreamData> stream_data);
  Status OnCueEvent(std::unique_ptr<StreamData> stream_data);
  Status OnSegmentInfo(std::unique_ptr<StreamData> stream_data);
  Status OnTextSample(std::unique_ptr<StreamData> stream_data);

  // Emits everything buffered in |current_segment_| for the given window.
  Status DispatchCurrentSegment(int64_t segment_start, int64_t segment_end);
  Status MergeDispatchSamples(int64_t start_in_seconds,
                              int64_t end_in_seconds,
                              const std::list<const TextSample*>& state);

  // Text samples collected for the segment currently being built.
  std::list<std::shared_ptr<const TextSample>> current_segment_;

  // This is the current state of the box we are writing.
  BufferWriter box_writer_;
};
} // namespace media
} // namespace shaka
#endif // PACKAGER_MEDIA_FORMATS_WEBVTT_WEBVTT_MP4_CUE_HANDLER_H_
| 691 |
1,041 | package org.tests.model.inheritmany;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import java.util.Date;
@Entity
@DiscriminatorValue("TWO")
public class IMRootTwo extends IMRoot {

  // Persistent fields mapped by default JPA naming; field and accessor names
  // are part of the persistence contract - do not rename.
  String title;
  Date whenTitle;

  public String getTitle() {
    return title;
  }

  public void setTitle(String title) {
    this.title = title;
  }

  public Date getWhenTitle() {
    return whenTitle;
  }

  public void setWhenTitle(Date whenTitle) {
    this.whenTitle = whenTitle;
  }
}
| 175 |
# Fix: ``List`` was used in the module-level annotation below without being
# imported; annotated assignments evaluate their annotation at runtime, so
# the script crashed with NameError before foo() was ever called.
from typing import List


def foo() -> None:
    """Print the number of entries currently in the module-level text_list."""
    print(len(text_list))


text_list: List[str] = []
foo()
988 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.hudson.tasklist;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.netbeans.api.annotations.common.CheckForNull;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.SourceGroup;
import org.netbeans.modules.hudson.api.ConnectionBuilder;
import org.netbeans.modules.hudson.api.HudsonJob;
import org.netbeans.modules.hudson.api.Utilities;
import org.netbeans.spi.tasklist.Task;
import org.openide.filesystems.FileObject;
import org.openide.util.lookup.ServiceProvider;
import org.openide.xml.XMLUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
 * Connects to the static code analysis plugin suite.
 * Uses exported API mainly from {@code hudson.plugins.analysis.util.model.FileAnnotation}.
 * @see https://wiki.jenkins-ci.org/display/JENKINS/Static+Code+Analysis+Plug-ins#StaticCodeAnalysisPlug-ins-RemoteAPI
 */
@ServiceProvider(service=JobScanner.class)
public class AnalysisPluginImpl implements JobScanner {

    private static final Logger LOG = Logger.getLogger(AnalysisPluginImpl.class.getName());

    /**
     * Exported item names for types of analysis we support.
     * See {@code PluginDescriptor.getPluginName} for syntax.
     * Could also include {@code tasks} but {@code message} is always null, which is not very nice for our purposes.
     */
    private static final String[] PLUGINS = {"checkstyle", "pmd", "warnings", "dry"};

    /**
     * Queries each supported analysis plugin of the given build over its remote XML
     * API and converts every reported warning into a task. Warnings whose file can be
     * resolved locally become file tasks; otherwise a link into the job workspace is
     * used. Checks {@link Thread#interrupted()} between requests so a scan can be
     * cancelled (note: interrupted() also clears the interrupt flag).
     */
    @Override public void findTasks(Project p, HudsonJob job, int buildNumber, TaskAdder callback) throws IOException {
        List<FileObject> roots = new ArrayList<FileObject>();
        roots.add(p.getProjectDirectory());
        // Also add Java source roots; otherwise e.g. /tmp/clover2054820708001846544.tmp/org/apache/hadoop/fs/TestFileSystemCaching.java would never be found:
        for (SourceGroup g : ProjectUtils.getSources(p).getSourceGroups(/* JavaProjectConstants.SOURCES_TYPE_JAVA */"java")) {
            roots.add(g.getRootFolder());
        }
        for (String plugin : PLUGINS) {
            if (Thread.interrupted()) {
                return;
            }
            // Only the fields we need are requested via the tree= filter.
            String url = job.getUrl() + buildNumber + "/" + plugin + "Result/api/xml?tree=warnings[fileName,primaryLineNumber,priority,message]";
            Document doc;
            try {
                HttpURLConnection conn = new ConnectionBuilder().job(job).url(url).httpConnection();
                try {
                    InputSource input = new InputSource(conn.getInputStream());
                    input.setSystemId(url);
                    doc = XMLUtil.parse(input, false, false, XMLUtil.defaultErrorHandler(), null);
                } catch (SAXException x) {
                    // Malformed response: skip this plugin, keep scanning the others.
                    LOG.log(Level.FINE, "parse error for " + url, x);
                    continue;
                } finally {
                    conn.disconnect();
                }
            } catch (FileNotFoundException x) {
                // 404 simply means this plugin is not installed/enabled for the job.
                LOG.log(Level.FINE, "no {0} for {1}", new Object[] {plugin, job});
                continue;
            }
            LOG.log(Level.FINE, "found {0} for {1}", new Object[] {plugin, job});
            for (Element warning : XMLUtil.findSubElements(doc.getDocumentElement())) {
                if (Thread.interrupted()) {
                    return;
                }
                Element warningEl = XMLUtil.findElement(warning, "message", null);
                if (warningEl == null) {
                    // Older plugin versions do not export <message>; give up on this build.
                    LOG.log(Level.FINE, "skipping {0} since it may be pre-1.367", job.getInstance());
                    return;
                }
                String message = XMLUtil.findText(warningEl);
                if (message == null) {
                    LOG.log(Level.WARNING, "no message in <warning> from {0}", url);
                    continue;
                }
                // XXX perhaps create separate groups according to plugin?
                String group = "HIGH".equals(XMLUtil.findText(XMLUtil.findElement(warning, "priority", null))) ? "nb-tasklist-error" : "nb-tasklist-warning"; // else NORMAL or LOW
                String fileName = XMLUtil.findText(XMLUtil.findElement(warning, "fileName", null));
                FileObject f = locate(fileName, roots);
                if (f != null) {
                    LOG.log(Level.FINER, "successfully located {0}", f);
                    int primaryLineNumber = Integer.parseInt(XMLUtil.findText(XMLUtil.findElement(warning, "primaryLineNumber", null)));
                    callback.add(Task.create(f, group, message, primaryLineNumber));
                } else {
                    String workspacePath = workspacePath(fileName, job.getName());
                    if (workspacePath == null) {
                        LOG.log(Level.WARNING, "{0} does not look to be inside {1}", new Object[] {fileName, job});
                        continue;
                    } else {
                        LOG.log(Level.FINE, "did not find any local file for {0}", fileName);
                        callback.add(Task.create(new URL(job.getUrl() + "workspace/" + Utilities.uriEncode(workspacePath)), group, message));
                    }
                }
            }
        }
    }

    /**
     * Tries to resolve a (possibly Windows-style) absolute build path against the
     * local project roots by stripping one leading path segment at a time and
     * looking the remaining suffix up under every root. Returns null when no
     * suffix matches any root.
     */
    static @CheckForNull FileObject locate(String fileName, Collection<FileObject> roots) {
        String fileNameSlashes = fileName.replace('\\', '/');
        int pos = 0;
        while (true) {
            int i = fileNameSlashes.indexOf('/', pos);
            if (i == -1) {
                return null;
            }
            pos = i + 1;
            String path = fileNameSlashes.substring(pos);
            for (FileObject root : roots) {
                FileObject f = root.getFileObject(path);
                if (f != null) {
                    return f;
                }
            }
        }
    }

    /**
     * Extracts the path of a warning's file relative to the job workspace,
     * accepting both ".../workspace/<job>/..." and ".../<job>/workspace/..."
     * layouts; null when the file name contains neither pattern.
     */
    static @CheckForNull String workspacePath(String fileName, String jobName) {
        String fileNameSlashes = fileName.replace('\\', '/');
        String infix = "/workspace/" + jobName + "/";
        int i = fileNameSlashes.indexOf(infix);
        if (i == -1) {
            infix = "/" + jobName + "/workspace/";
            i = fileNameSlashes.indexOf(infix);
        }
        if (i == -1) {
            return null;
        } else {
            return fileNameSlashes.substring(i + infix.length());
        }
    }
}
| 3,215 |
918 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.locks;
import java.io.IOException;
import java.util.Properties;
import org.apache.curator.test.TestingServer;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import com.google.common.io.Closer;
import org.apache.gobblin.configuration.ConfigurationKeys;
@Test(groups = {"gobblin.runtime"})
public class LegacyJobLockFactoryManagerTest {

  // Shuts down the shared Curator client so tests in this class do not leak
  // ZooKeeper connections into other suites.
  @AfterClass
  public void tearDown() throws IOException {
    ZookeeperBasedJobLock.shutdownCuratorFramework();
  }

  // getJobLock must reject null properties up front.
  @Test(expectedExceptions = { NullPointerException.class })
  public void testNullProperties_ThrowsException() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      closer.register(LegacyJobLockFactoryManager.getJobLock(null, new JobLockEventListener()));
    } finally {
      closer.close();
    }
  }

  // getJobLock must reject a null event listener up front.
  @Test(expectedExceptions = { NullPointerException.class })
  public void testNullListener_ThrowsException() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      closer.register(LegacyJobLockFactoryManager.getJobLock(new Properties(), null));
    } finally {
      closer.close();
    }
  }

  // NOTE(review): despite the name, this test sets JOB_LOCK_TYPE explicitly to
  // FileBasedJobLock - confirm whether the intent was to omit the property.
  @Test
  public void testMissingJobLockType_ResultsIn_FileBasedJobLock() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.FS_URI_KEY, "file:///");
      properties.setProperty(FileBasedJobLock.JOB_LOCK_DIR, "JobLockFactoryTest");
      properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, "JobLockFactoryTest-" + System.currentTimeMillis());
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, FileBasedJobLock.class.getName());
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(FileBasedJobLock.class));
    } finally {
      closer.close();
    }
  }

  // An unknown class name in JOB_LOCK_TYPE surfaces as a JobLockException.
  @Test(expectedExceptions = { JobLockException.class })
  public void testInvalidJobLockType_ThrowsException() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, "ThisIsATest");
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(FileBasedJobLock.class));
    } finally {
      closer.close();
    }
  }

  // Happy path: explicitly requesting the file-based lock yields one.
  @Test
  public void testGetFileBasedJobLock() throws JobLockException, IOException {
    Closer closer = Closer.create();
    try {
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.FS_URI_KEY, "file:///");
      properties.setProperty(FileBasedJobLock.JOB_LOCK_DIR, "JobLockFactoryTest");
      properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, "JobLockFactoryTest-" + System.currentTimeMillis());
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, FileBasedJobLock.class.getName());
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(FileBasedJobLock.class));
    } finally {
      closer.close();
    }
  }

  // Happy path: a Zookeeper-based lock built against an in-process test server.
  @Test
  public void testGetZookeeperBasedJobLock() throws Exception {
    Closer closer = Closer.create();
    try {
      // TestingServer(-1) picks a free port; its connect string is passed below.
      TestingServer testingServer = closer.register(new TestingServer(-1));
      Properties properties = new Properties();
      properties.setProperty(ConfigurationKeys.JOB_NAME_KEY, "JobLockFactoryTest-" + System.currentTimeMillis());
      properties.setProperty(ConfigurationKeys.JOB_LOCK_TYPE, ZookeeperBasedJobLock.class.getName());
      properties.setProperty(ZookeeperBasedJobLock.CONNECTION_STRING, testingServer.getConnectString());
      properties.setProperty(ZookeeperBasedJobLock.MAX_RETRY_COUNT, "1");
      properties.setProperty(ZookeeperBasedJobLock.LOCKS_ACQUIRE_TIMEOUT_MILLISECONDS, "1000");
      properties.setProperty(ZookeeperBasedJobLock.RETRY_BACKOFF_SECONDS, "1");
      properties.setProperty(ZookeeperBasedJobLock.SESSION_TIMEOUT_SECONDS, "180");
      properties.setProperty(ZookeeperBasedJobLock.CONNECTION_TIMEOUT_SECONDS, "30");
      JobLock jobLock = closer.register(LegacyJobLockFactoryManager.getJobLock(properties, new JobLockEventListener()));
      MatcherAssert.assertThat(jobLock, Matchers.instanceOf(ZookeeperBasedJobLock.class));
    } finally {
      closer.close();
    }
  }
}
| 1,756 |
635 | <filename>bagua/torch_api/contrib/utils/store.py
from typing import List, Dict, Optional, Union
from collections import defaultdict
__all__ = ["Store", "ClusterStore"]
class Store:
    """
    Base class for key-value store implementations. Entries are added to store with :meth:`set` or :meth:`mset`, and retrieved
    with :meth:`get` or :meth:`mget`.

    All methods here are no-op stubs returning ``None``; concrete stores override them.
    """

    def set(self, key: str, value: Union[str, bytes]):
        """Set a key-value pair."""
        pass

    def get(self, key: str) -> Optional[Union[str, bytes]]:
        """Returns the value associated with :attr:`key`, or ``None`` if the key doesn't exist."""
        pass  # type: ignore

    def num_keys(self) -> int:
        """Returns the number of keys in the current store."""
        pass  # type: ignore

    def clear(self):
        """Delete all keys in the current store."""
        pass

    def mset(self, dictionary: Dict[str, Union[str, bytes]]):
        """
        Set multiple entries at once with a dictionary. Each key-value pair in the :attr:`dictionary` will be set.
        """
        pass

    def mget(self, keys: List[str]) -> List[Optional[Union[str, bytes]]]:
        """
        Retrieve each key's corresponding value and return them in a list with the same order as :attr:`keys`.
        """
        pass  # type: ignore

    def status(self) -> bool:
        """
        Returns ``True`` if the current store is alive.
        """
        pass  # type: ignore

    def shutdown(self):
        """
        Shutdown the managed store instances. Unmanaged instances will not be killed.
        """
        pass


class ClusterStore(Store):
    """
    Base class for distributed key-value stores.

    In cluster store, entries will be sharded equally among multiple store instances
    based on a stable hash of their keys.

    Args:
        stores(List[Store]): A list of stores to shard entries on.
    """

    def __init__(self, stores: List[Store]):
        self.stores = stores
        self.num_stores = len(stores)

        import xxhash

        def xxh64(x):
            return xxhash.xxh64(x).intdigest()

        self.hash_fn = xxh64

    def _hash_key(self, key: str) -> int:
        # Shard index for ``key``: stable 64-bit hash modulo the store count.
        hash_code = self.hash_fn(key.encode())
        return hash_code % self.num_stores

    def route(self, key: str) -> Store:
        # Pick the shard owning ``key`` (single-store clusters skip hashing).
        return (
            self.stores[self._hash_key(key)] if self.num_stores > 1 else self.stores[0]
        )

    def set(self, key: str, value: Union[str, bytes]):
        if self.num_stores == 1:
            return self.stores[0].set(key, value)
        self.route(key).set(key, value)

    def get(self, key: str) -> Optional[Union[str, bytes]]:
        if self.num_stores == 1:
            return self.stores[0].get(key)
        return self.route(key).get(key)

    def num_keys(self) -> int:
        return sum([store.num_keys() for store in self.stores])

    def clear(self):
        for store in self.stores:
            store.clear()

    def mset(self, dictionary: Dict[str, Union[str, bytes]]):
        if self.num_stores == 1:
            return self.stores[0].mset(dictionary)

        # Bucket entries by shard, then issue one bulk write per shard.
        # Fix: the previous version seeded each bucket with defaultdict(dict),
        # which was never used as a defaultdict - a plain dict via setdefault
        # says what is meant.
        route_table: Dict[int, Dict[str, Union[str, bytes]]] = {}
        for k, v in dictionary.items():
            route_table.setdefault(self._hash_key(k), {})[k] = v

        for sid, shard_dict in route_table.items():
            self.stores[sid].mset(shard_dict)

    def mget(self, keys: List[str]) -> List[Optional[Union[str, bytes]]]:
        if self.num_stores == 1:
            return self.stores[0].mget(keys)

        # Bucket keys by shard and issue one bulk read per shard.
        route_table: Dict[int, List[str]] = {}
        for k in keys:
            route_table.setdefault(self._hash_key(k), []).append(k)

        # Fix: results were merged with ``result_map = {**result_map, **m}``,
        # rebuilding the dict on every shard (quadratic); update() is in-place.
        result_map: Dict[str, Optional[Union[str, bytes]]] = {}
        for sid, shard_keys in route_table.items():
            result_map.update(zip(shard_keys, self.stores[sid].mget(shard_keys)))

        # Preserve the caller's key order; missing keys yield None.
        return [result_map.get(k) for k in keys]

    def status(self) -> bool:
        return all([store.status() for store in self.stores])

    def shutdown(self):
        for store in self.stores:
            store.shutdown()
| 1,826 |
package abi42_0_0.expo.modules.interfaces.sensors;

import android.hardware.SensorEventListener2;

/**
 * Abstraction over a device sensor service that hands out one subscription per
 * registered listener.
 */
public interface SensorServiceInterface {
  /**
   * Creates a subscription delivering sensor events to the given listener.
   *
   * @param sensorEventListener receiver of the sensor events
   * @return the subscription handle for this listener (presumably used to
   *         start/stop delivery — confirm against SensorServiceSubscriptionInterface)
   */
  SensorServiceSubscriptionInterface createSubscriptionForListener(SensorEventListener2 sensorEventListener);
}
| 66 |
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by SetPaneSize.rc
//
// NOTE: this file is regenerated by the resource editor; edit resources via
// the IDE rather than by hand. Groupings below follow the ID ranges only.

// Prompt / dialog / string-table resources
#define IDP_OLE_INIT_FAILED             100
#define IDD_SetPaneSize_FORM            101
#define IDS_MAIN_TOOLBAR                101
#define IDR_MAINFRAME                   128
#define IDR_SETBARTYPE                  129
#define IDR_CONTEXT_MENU                130
#define IDR_POPUP_TOOLBAR               131
#define IDB_WORKSPACE                   147
#define IDB_TOOLBAR256                  151
#define IDD_DLG_BAR                     154
#define IDD_ABOUTBOX                    999

// Dialog control IDs
#define IDC_COMPANY_URL                 1041
#define IDC_EDIT_SIZE                   1042
#define IDC_EDIT_WIDTH                  1043
#define IDC_BUTTON_SET_DLGBAR_SIZE      1044
#define IDC_BUTTON_SET_CONTAINER_SIZE   1045
#define IDC_EDIT_WIDTH_IN_PIXELS        1047
#define IDC_EDIT_HEIGHT_IN_PIXELS       1048
#define IDC_BTN_SET_WIDTH_IN_PIXELS     1049
#define IDC_BTN_SET_HEIGHT_IN_PIXELS    1050

// Menu / command IDs
#define ID_VIEW_CUSTOMIZE               32770
#define ID_VIEW_TOOLBARS                32771
#define ID_VIEW_DLGBAR                  32792
#define ID_VIEW_WORKSPACE               32803

// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_3D_CONTROLS                     1
#define _APS_NEXT_RESOURCE_VALUE        155
#define _APS_NEXT_COMMAND_VALUE         32833
#define _APS_NEXT_CONTROL_VALUE         1051
#define _APS_NEXT_SYMED_VALUE           108
#endif
#endif
| 858 |
367 | <filename>solaris/utils/log.py
import logging
def _get_logging_level(level_int):
"""Convert a logging level integer into a log level."""
if isinstance(level_int, bool):
level_int = int(level_int)
if level_int < 0:
return logging.CRITICAL + 1 # silence all possible outputs
elif level_int == 0:
return logging.WARNING
elif level_int == 1:
return logging.INFO
elif level_int == 2:
return logging.DEBUG
elif level_int in [10, 20, 30, 40, 50]: # if user provides the logger int
return level_int
elif isinstance(level_int, int): # if it's an int but not one of the above
return level_int
else:
raise ValueError(f"logging level set to {level_int}, "
"but it must be an integer <= 2.")
| 332 |
344 | /*
MIT License
Copyright (c) 2020 earlygrey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package squidpony.squidai.graph;
import squidpony.squidmath.BinaryHeap;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
/**
* An extended version of {@link squidpony.squidmath.BinaryHeap.Node} that also stores a reference to the parent Graph,
* a vertex object of type {@code V}, a Map of neighbor Nodes to the appropriate {@link Connection} per Node, an extra
* List of those same Connections for faster iteration, and a lot of internal data used by algorithms in this package.
* @param <V> the vertex type; often {@link squidpony.squidmath.Coord}
* @author earlygrey
*/
public class Node<V> extends BinaryHeap.Node implements Serializable {
    private static final long serialVersionUID = 1L;

    //================================================================================
    // Graph structure related members
    //================================================================================

    // Owning graph; used to obtain pooled Connection objects in addEdge().
    protected final Graph<V> graph;
    // Identity hash cached at construction; backs hashCode() so nodes hash by identity.
    protected final int idHash;
    // The user-supplied vertex payload this node wraps.
    protected final V object;
    protected HashMap<Node<V>, Connection<V>> neighbors = new HashMap<>();
    protected ArrayList<Connection<V>> outEdges = new ArrayList<>(); // List for fast iteration

    //================================================================================
    // Constructor
    //================================================================================

    protected Node(V v, Graph<V> graph) {
        super(0.0);
        this.object = v;
        this.graph = graph;
        idHash = System.identityHashCode(this);
    }

    //================================================================================
    // Internal methods
    //================================================================================

    /** Returns the outgoing edge to {@code v}, or null if none exists. */
    protected Connection<V> getEdge(Node<V> v) {
        return neighbors.get(v);
    }

    /**
     * Adds an edge to {@code v} with the given weight, or updates the weight if
     * an edge to {@code v} already exists. The Connection instance comes from
     * the graph's pool ({@link Graph#obtainEdge()}).
     */
    protected Connection<V> addEdge(Node<V> v, float weight) {
        Connection<V> edge = neighbors.get(v);
        if (edge == null) {
            edge = graph.obtainEdge();
            edge.set(this, v, weight);
            neighbors.put(v, edge);
            outEdges.add(edge);
            return edge;
        } else {
            edge.setWeight(weight);
        }
        return edge;
    }

    /**
     * Removes the edge to {@code v} from both the neighbor map and the
     * iteration list; returns the removed edge, or null if there was none.
     */
    protected Connection<V> removeEdge(Node<V> v) {
        Connection<V> edge = neighbors.remove(v);
        if (edge == null) return null;
        // loop backwards to make Graph#removeNode faster
        for (int j = outEdges.size()-1; j >= 0; j--) {
            Connection<V> connection = outEdges.get(j);
            if (connection.equals(edge)) {
                outEdges.remove(j);
                break;
            }
        }
        return edge;
    }

    /** Drops all outgoing edges (map and list) without touching other nodes. */
    protected void disconnect() {
        neighbors.clear();
        outEdges.clear();
    }

    //================================================================================
    // Public Methods
    //================================================================================

    /** All outgoing connections of this node; the live list, not a copy. */
    public Collection<Connection<V>> getConnections() {
        return outEdges;
    }

    /** The vertex payload this node wraps. */
    public V getObject() {
        return object;
    }

    //================================================================================
    // Algorithm fields and methods
    //================================================================================

    /**
     * Internal; tracking bit for whether this Node has already been visited during the current algorithm.
     */
    protected boolean visited;
    /**
     * Internal; tracking bit for whether this Node has been checked during the current algorithm.
     */
    protected boolean seen;
    /**
     * Internal; confirmed distance so far to get to this Node from the start.
     */
    protected double distance;
    /**
     * Internal; estimated distance to get from this Node to the goal.
     */
    protected double estimate;
    /**
     * Internal; a reference to the previous Node in a BinaryHeap.
     */
    protected Node<V> prev;
    /**
     * Internal; a utility field used to store depth in some algorithms.
     */
    protected int i;
    /**
     * Internal; a utility field used to distinguish which algorithm last used this Node.
     */
    protected int lastRunID;

    /**
     * If {@code runID} is not equal to {@link #lastRunID}, this resets the internal fields {@link #visited},
     * {@link #seen}, {@link #distance}, {@link #estimate}, {@link #prev}, and {@link #i}, then sets {@link #lastRunID}
     * to {@code runID}.
     * @param runID an int that identifies which run of an algorithm is currently active
     * @return true if anything was reset, or false if {@code runID} is equal to {@link #lastRunID}
     */
    protected boolean resetAlgorithmAttributes(int runID) {
        if (runID == this.lastRunID) return false;
        visited = false;
        prev = null;
        // NOTE(review): Float.MAX_VALUE is used as the "unreached" sentinel even
        // though distance is a double — presumably intentional; confirm upstream.
        distance = Float.MAX_VALUE;
        estimate = 0;
        i = 0;
        seen = false;
        this.lastRunID = runID;
        return true;
    }

    //================================================================================
    // Misc
    //================================================================================

    // Identity semantics: two nodes are equal only if they are the same object.
    @Override
    public boolean equals(Object o) {
        return o == this;
    }

    @Override
    public int hashCode() {
        return idHash;
    }

    @Override
    public String toString() {
        return "["+object+"]";
    }
}
3,222 | package org.apache.maven.model.building;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.nio.file.Path;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier;
import org.apache.maven.model.Model;
/**
*
* @author <NAME>
* @since 4.0.0
*/
class DefaultTransformerContext implements TransformerContext
{
    final Map<String, String> userProperties = new ConcurrentHashMap<>();

    // Raw models indexed by POM path and by groupId:artifactId.
    final Map<Path, Holder> modelByPath = new ConcurrentHashMap<>();

    final Map<GAKey, Holder> modelByGA = new ConcurrentHashMap<>();

    /**
     * A write-once holder for a {@link Model}. Readers calling {@link #get()}
     * block until some thread publishes the model via
     * {@link #computeIfAbsent(Supplier)}.
     */
    public static class Holder
    {
        private volatile boolean set;
        private volatile Model model;

        Holder()
        {
        }

        /** Null-safe dereference: returns the held model, or null for a null holder. */
        public static Model deref( Holder holder )
        {
            return holder != null ? holder.get() : null;
        }

        /**
         * Returns the held model, blocking until it has been published.
         */
        public Model get()
        {
            if ( !set )
            {
                synchronized ( this )
                {
                    // Wait in a loop: Object.wait() may return spuriously or via
                    // interrupt before the model is published, in which case the
                    // previous implementation could return a null model.
                    boolean interrupted = false;
                    while ( !set )
                    {
                        try
                        {
                            this.wait();
                        }
                        catch ( InterruptedException e )
                        {
                            // Remember the interrupt and keep waiting for the model.
                            interrupted = true;
                        }
                    }
                    if ( interrupted )
                    {
                        // Restore the interrupt status swallowed above.
                        Thread.currentThread().interrupt();
                    }
                }
            }
            return model;
        }

        /**
         * Publishes the model produced by {@code supplier} if none has been set
         * yet and wakes all waiting readers; otherwise returns the existing model.
         */
        public Model computeIfAbsent( Supplier<Model> supplier )
        {
            if ( !set )
            {
                synchronized ( this )
                {
                    if ( !set )
                    {
                        // NOTE(review): the flag is flipped before the model is
                        // written; a concurrent fast-path reader could then observe
                        // set==true with model still null. This ordering may be a
                        // deliberate re-entrancy guard for recursive suppliers —
                        // confirm before reordering.
                        this.set = true;
                        this.model = supplier.get();
                        this.notifyAll();
                    }
                }
            }
            return model;
        }
    }

    @Override
    public String getUserProperty( String key )
    {
        return userProperties.get( key );
    }

    @Override
    public Model getRawModel( Path p )
    {
        return Holder.deref( modelByPath.get( p ) );
    }

    @Override
    public Model getRawModel( String groupId, String artifactId )
    {
        return Holder.deref( modelByGA.get( new GAKey( groupId, artifactId ) ) );
    }

    /**
     * Map key combining groupId and artifactId, with the hash precomputed at
     * construction time.
     */
    static class GAKey
    {
        private final String groupId;
        private final String artifactId;
        private final int hashCode;

        GAKey( String groupId, String artifactId )
        {
            this.groupId = groupId;
            this.artifactId = artifactId;
            this.hashCode = Objects.hash( groupId, artifactId );
        }

        @Override
        public int hashCode()
        {
            return hashCode;
        }

        @Override
        public boolean equals( Object obj )
        {
            if ( this == obj )
            {
                return true;
            }
            if ( !( obj instanceof GAKey ) )
            {
                return false;
            }
            GAKey other = (GAKey) obj;
            return Objects.equals( artifactId, other.artifactId ) && Objects.equals( groupId, other.groupId );
        }
    }
}
| 1,878 |
471 | <reponame>esteinberg/plantuml4idea
package org.plantuml.idea.preview;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.actionSystem.ex.CustomComponentAction;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.ui.JBColor;
import com.intellij.util.Alarm;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
// Toolbar widget showing the status of the last PlantUML rendering run as a
// colored label; updates are versioned so stale results never overwrite newer ones.
public class ExecutionStatusPanel extends DumbAwareAction implements CustomComponentAction {
    private JLabel label;
    // Monotonically increasing id of the most recent render; guards against
    // out-of-order updates from background threads.
    private volatile int version;
    private volatile State state;
    private volatile String message = "---";
    // NOTE(review): static field assigned from an instance initializer below —
    // every instance rebuilds it; presumably only one instance exists. Confirm.
    public static String DESCRIPTION;
    private MyMouseAdapter myMouseAdapter;
    private Runnable mouseOnClickAction;
    // Coalesces UI refreshes onto the Swing thread.
    private Alarm alarm = new Alarm(Alarm.ThreadToUse.SWING_THREAD);

    {
        {
            // Build the tooltip text once, listing the color legend of all states.
            StringBuilder sb = new StringBuilder();
            State[] values = State.values();
            for (State value : values) {
                sb.append("<br>").append(value.description);
            }
            DESCRIPTION = "<html>Last execution time<br>" +
                    "[rendered, refreshed title, not changed] page count" +
                    "<br><br>Colors:" + sb.toString() + "</html>";
        }
    }

    @Override
    public void update(AnActionEvent e) {
        super.update(e);
    }

    @Override
    public void actionPerformed(AnActionEvent anActionEvent) {
    }

    // Builds the toolbar component hosting the status label.
    @Override
    public JComponent createCustomComponent(Presentation presentation) {
        final JPanel panel = new JPanel();
        this.label = createLabel();
        panel.setToolTipText(DESCRIPTION);
        label.setToolTipText(DESCRIPTION);
        panel.add(this.label);
        return panel;
    }

    @NotNull
    public JLabel createLabel() {
        JLabel jLabel = new JLabel("---");
        Font font = jLabel.getFont();
        Font boldFont = new Font(font.getFontName(), Font.BOLD, font.getSize());
        jLabel.setFont(boldFont);
        myMouseAdapter = new MyMouseAdapter();
        jLabel.addMouseListener(myMouseAdapter);
        return jLabel;
    }

    // Unversioned update: change only the state color, keep message/version.
    public synchronized void update(State state) {
        this.state = state;
        updateUiLater();
    }

    // Versioned update: applied only if not older than the current version.
    public synchronized void update(int version, State state) {
        if (this.version <= version) {
            setState(version, state, mouseOnClickAction, message);
            updateUiLater();
        }
    }

    public synchronized void update(int version, State state, String message) {
        if (this.version <= version) {
            setState(version, state, null, message);
            updateUiLater();
        }
    }

    // Synchronous variant: bypasses the alarm and paints immediately.
    public void updateNow(Integer version, State state, Runnable mouseOnClickAction, String message) {
        alarm.cancelAllRequests();
        if (this.version <= version) {
            setState(version, state, mouseOnClickAction, message);
            state.updateUi(label, myMouseAdapter, this.message, this.mouseOnClickAction);
        } else {
            //something else is already running, updateUi all but color
            setState(version, this.state, mouseOnClickAction, message);
            state.updateUi(label, myMouseAdapter, this.message, this.mouseOnClickAction);
        }
    }

    private void setState(int version, State state, Runnable mouseOnClickAction, String message) {
        this.version = version;
        this.message = message;
        this.state = state;
        this.mouseOnClickAction = mouseOnClickAction;
    }

    // Schedules a UI refresh on the Swing thread, dropping any pending one.
    private void updateUiLater() {
        int i = alarm.cancelAllRequests();
        alarm.addRequest(() -> {
            if (state != null) {
                state.updateUi(label, myMouseAdapter, message, mouseOnClickAction);
            }
        }, 0);
    }

    // Rendering lifecycle states and their label colors.
    public enum State {
        WAITING(JBColor.GRAY, "Delay waiting - gray"),
        EXECUTING(new JBColor(Color.green.darker(), new Color(98, 150, 85)), "Executing - green"),
        CANCELLED(JBColor.PINK, "Cancelled - pink"),
        ERROR(JBColor.RED, "Error - red"),
        DONE(JBColor.BLACK, "Done - black/white");

        Color color;
        String description;

        State(Color color, String description) {
            this.color = color;
            this.description = description;
        }

        // Applies message, color and click handler to the label.
        public void updateUi(JLabel comp, MyMouseAdapter myMouseAdapter, String message, Runnable mouseOnClickAction) {
            // ApplicationManager.getApplication().assertIsDispatchThread();
            if (comp != null) { //strange NPE
                comp.setText(message);
                comp.setForeground(this.color);
            }
            if (myMouseAdapter != null) {
                myMouseAdapter.setRunnable(mouseOnClickAction);
            }
        }
    }

    // Click handler whose action can be swapped out on each status update.
    private static class MyMouseAdapter extends MouseAdapter {
        private Runnable runnable;

        @Override
        public void mouseReleased(MouseEvent e) {
            if (runnable != null) {
                runnable.run();
            }
        }

        public void setRunnable(Runnable runnable) {
            this.runnable = runnable;
        }

        public Runnable getRunnable() {
            return runnable;
        }
    }

    @Override
    public String toString() {
        return "ExecutionStatusPanel{" +
                "hash()=" + hashCode() +
                ", label=" + label +
                ", version=" + version +
                ", state=" + state +
                ", message='" + message + '\'' +
                ", myMouseAdapter=" + myMouseAdapter +
                ", mouseOnClickAction=" + mouseOnClickAction +
                ", alarm=" + alarm +
                '}';
    }
}
| 2,500 |
56,632 | // This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "opencv2/core/utils/buffer_area.private.hpp"
#include "opencv2/core/utils/configuration.private.hpp"
#ifndef OPENCV_ENABLE_MEMORY_SANITIZER
// Env override: force per-block ("safe") allocation even when the caller asked
// for the single-buffer fast mode.
static bool CV_BUFFER_AREA_OVERRIDE_SAFE_MODE =
    cv::utils::getConfigurationParameterBool("OPENCV_BUFFER_AREA_ALWAYS_SAFE", false);
#endif

namespace cv { namespace utils {

//==================================================================================================

// One registered buffer request: the caller's pointer slot, the element count,
// element size, and required alignment. Owns its memory only in safe mode.
class BufferArea::Block
{
private:
    // Extra elements reserved so the start of the buffer can be shifted to the
    // requested alignment.
    inline size_t reserve_count() const
    {
        return alignment / type_size - 1;
    }
public:
    Block(void **ptr_, ushort type_size_, size_t count_, ushort alignment_)
        : ptr(ptr_), raw_mem(0), count(count_), type_size(type_size_), alignment(alignment_)
    {
        CV_Assert(ptr && *ptr == NULL);
    }
    // Resets the caller's pointer and frees block-owned memory (safe mode only).
    void cleanup() const
    {
        CV_Assert(ptr && *ptr);
        *ptr = 0;
        if (raw_mem)
            fastFree(raw_mem);
    }
    // Bytes needed for this block including the alignment reserve.
    size_t getByteCount() const
    {
        return type_size * (count + reserve_count());
    }
    // Safe mode: allocate this block's own buffer and align it.
    void real_allocate()
    {
        CV_Assert(ptr && *ptr == NULL);
        const size_t allocated_count = count + reserve_count();
        raw_mem = fastMalloc(type_size * allocated_count);
        if (alignment != type_size)
        {
            *ptr = alignPtr(raw_mem, alignment);
            CV_Assert(reinterpret_cast<size_t>(*ptr) % alignment == 0);
            CV_Assert(static_cast<uchar*>(*ptr) + type_size * count <= static_cast<uchar*>(raw_mem) + type_size * allocated_count);
        }
        else
        {
            *ptr = raw_mem;
        }
    }
#ifndef OPENCV_ENABLE_MEMORY_SANITIZER
    // Fast mode: carve this block out of the shared buffer; returns the first
    // byte past the carved region for the next block.
    void * fast_allocate(void * buf) const
    {
        CV_Assert(ptr && *ptr == NULL);
        buf = alignPtr(buf, alignment);
        CV_Assert(reinterpret_cast<size_t>(buf) % alignment == 0);
        *ptr = buf;
        return static_cast<void*>(static_cast<uchar*>(*ptr) + type_size * count);
    }
#endif
    // Identifies a block by the caller's pointer slot (compares pointees).
    bool operator==(void **other) const
    {
        CV_Assert(ptr && other);
        return *ptr == *other;
    }
    void zeroFill() const
    {
        CV_Assert(ptr && *ptr);
        memset(static_cast<uchar*>(*ptr), 0, count * type_size);
    }
private:
    void **ptr;
    void * raw_mem;
    size_t count;
    ushort type_size;
    ushort alignment;
};

//==================================================================================================

#ifndef OPENCV_ENABLE_MEMORY_SANITIZER
BufferArea::BufferArea(bool safe_) :
    oneBuf(0),
    totalSize(0),
    safe(safe_ || CV_BUFFER_AREA_OVERRIDE_SAFE_MODE)
{
    // nothing
}
#else
BufferArea::BufferArea(bool safe_)
{
    CV_UNUSED(safe_);
}
#endif

BufferArea::~BufferArea()
{
    release();
}

// Registers a buffer request. In fast mode only the size is accumulated here;
// the memory is handed out later in commit(). In safe mode (or under MSan)
// each block allocates immediately.
void BufferArea::allocate_(void **ptr, ushort type_size, size_t count, ushort alignment)
{
    blocks.push_back(Block(ptr, type_size, count, alignment));
#ifndef OPENCV_ENABLE_MEMORY_SANITIZER
    if (!safe)
    {
        totalSize += blocks.back().getByteCount();
    }
    else
#endif
    {
        blocks.back().real_allocate();
    }
}

void BufferArea::zeroFill_(void **ptr)
{
    for(std::vector<Block>::const_iterator i = blocks.begin(); i != blocks.end(); ++i)
    {
        if (*i == ptr)
        {
            i->zeroFill();
            break;
        }
    }
}

void BufferArea::zeroFill()
{
    for(std::vector<Block>::const_iterator i = blocks.begin(); i != blocks.end(); ++i)
    {
        i->zeroFill();
    }
}

// Fast mode: performs the single backing allocation and distributes aligned
// sub-ranges to all registered blocks in registration order.
void BufferArea::commit()
{
#ifndef OPENCV_ENABLE_MEMORY_SANITIZER
    if (!safe)
    {
        CV_Assert(totalSize > 0);
        CV_Assert(oneBuf == NULL);
        CV_Assert(!blocks.empty());
        oneBuf = fastMalloc(totalSize);
        void * ptr = oneBuf;
        for(std::vector<Block>::const_iterator i = blocks.begin(); i != blocks.end(); ++i)
        {
            ptr = i->fast_allocate(ptr);
        }
    }
#endif
}

// Resets all caller pointers and frees both per-block and shared memory.
void BufferArea::release()
{
    for(std::vector<Block>::const_iterator i = blocks.begin(); i != blocks.end(); ++i)
    {
        i->cleanup();
    }
    blocks.clear();
#ifndef OPENCV_ENABLE_MEMORY_SANITIZER
    if (oneBuf)
    {
        fastFree(oneBuf);
        oneBuf = 0;
    }
#endif
}

//==================================================================================================

}} // cv::utils::
| 1,946 |
850 | <filename>libs/models/detectors/two_stage_base_network.py
# -*-coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import os
import tensorflow as tf
import tensorflow.contrib.slim as slim
from libs.models.anchor_heads.generate_anchors import GenerateAnchors
from libs.utils.show_box_in_tensor import DrawBoxTensor
from libs.models.backbones.build_backbone_p2top6 import BuildBackbone
from utils.box_ops import clip_boxes_to_img_boundaries
from libs.utils import bbox_transform
from dataloader.pretrained_weights.pretrain_zoo import PretrainModelZoo
class DetectionNetworkBase(object):
    """Base class for two-stage FPN detectors (TF1 graph mode).

    Provides backbone construction, the shared/per-level RPN head, proposal
    decoding + NMS, FPN-level assignment of ROIs, TensorBoard summaries and
    checkpoint-restore helpers. Subclasses build the second stage.
    """

    def __init__(self, cfgs, is_training):
        # cfgs: global config object; is_training toggles trainable layers and
        # the train/test proposal budgets used below.
        self.cfgs = cfgs
        self.base_network_name = cfgs.NET_NAME
        self.is_training = is_training
        # 'H' = horizontal anchors (scales x ratios); any other mode adds a
        # rotation dimension (x angles).
        if cfgs.ANCHOR_MODE == 'H':
            self.num_anchors_per_location = len(cfgs.ANCHOR_SCALES) * len(cfgs.ANCHOR_RATIOS)
        else:
            self.num_anchors_per_location = len(cfgs.ANCHOR_SCALES) * len(cfgs.ANCHOR_RATIOS) * len(cfgs.ANCHOR_ANGLES)
        self.anchor_mode = cfgs.ANCHOR_MODE
        self.losses_dict = {}
        self.drawer = DrawBoxTensor(cfgs)
        self.backbone = BuildBackbone(cfgs, is_training)
        self.pretrain_zoo = PretrainModelZoo()

    def build_backbone(self, input_img_batch):
        """Runs the configured backbone and returns its feature pyramid."""
        return self.backbone.build_backbone(input_img_batch)

    def make_anchors(self, feature_pyramid):
        """Generates the anchor boxes for every level of the feature pyramid."""
        with tf.variable_scope('make_anchors'):
            anchor = GenerateAnchors(self.cfgs, self.anchor_mode)
            anchor_list = anchor.generate_all_anchor(feature_pyramid)
        return anchor_list

    def rpn(self, feature_pyramid):
        """Builds the RPN head on each FPN level.

        Returns flattened tensors concatenated over all levels:
        box deltas [-1, 4], class scores [-1, 2], and softmax probabilities.
        """
        with tf.variable_scope('build_rpn',
                               regularizer=slim.l2_regularizer(self.cfgs.WEIGHT_DECAY)):

            fpn_cls_score = []
            fpn_box_pred = []
            for level_name in self.cfgs.LEVEL:
                if self.cfgs.SHARE_HEADS:
                    # One head shared across levels: reuse variables after the
                    # first level by using identical scope names.
                    reuse_flag = None if level_name == self.cfgs.LEVEL[0] else True
                    scope_list = ['rpn_conv/3x3', 'rpn_cls_score', 'rpn_bbox_pred']
                else:
                    reuse_flag = None
                    scope_list = ['rpn_conv/3x3_%s' % level_name, 'rpn_cls_score_%s' % level_name,
                                  'rpn_bbox_pred_%s' % level_name]
                rpn_conv3x3 = slim.conv2d(
                    feature_pyramid[level_name], self.cfgs.FPN_CHANNEL, [3, 3],
                    trainable=self.is_training, weights_initializer=self.cfgs.INITIALIZER, padding="SAME",
                    activation_fn=tf.nn.relu,
                    scope=scope_list[0],
                    reuse=reuse_flag)
                rpn_cls_score = slim.conv2d(rpn_conv3x3, self.num_anchors_per_location * 2, [1, 1], stride=1,
                                            trainable=self.is_training, weights_initializer=self.cfgs.INITIALIZER,
                                            activation_fn=None, padding="VALID",
                                            scope=scope_list[1],
                                            reuse=reuse_flag)
                rpn_box_pred = slim.conv2d(rpn_conv3x3, self.num_anchors_per_location * 4, [1, 1], stride=1,
                                           trainable=self.is_training, weights_initializer=self.cfgs.BBOX_INITIALIZER,
                                           activation_fn=None, padding="VALID",
                                           scope=scope_list[2],
                                           reuse=reuse_flag)
                rpn_box_pred = tf.reshape(rpn_box_pred, [-1, 4])
                rpn_cls_score = tf.reshape(rpn_cls_score, [-1, 2])

                fpn_cls_score.append(rpn_cls_score)
                fpn_box_pred.append(rpn_box_pred)

            fpn_cls_score = tf.concat(fpn_cls_score, axis=0, name='fpn_cls_score')
            fpn_box_pred = tf.concat(fpn_box_pred, axis=0, name='fpn_box_pred')
            fpn_cls_prob = slim.softmax(fpn_cls_score, scope='fpn_cls_prob')

        return fpn_box_pred, fpn_cls_score, fpn_cls_prob

    def postprocess_rpn_proposals(self, rpn_bbox_pred, rpn_cls_prob, img_shape, anchors, is_training):
        '''
        Decode RPN outputs into proposals: decode deltas, clip to the image,
        keep the top-K by objectness, then NMS.

        :param rpn_bbox_pred: [-1, 4]
        :param rpn_cls_prob: [-1, 2]
        :param img_shape:
        :param anchors:[-1, 4]
        :param is_training:
        :return: (final_boxes, final_probs) after NMS
        '''
        if is_training:
            pre_nms_topN = self.cfgs.RPN_TOP_K_NMS_TRAIN
            post_nms_topN = self.cfgs.RPN_MAXIMUM_PROPOSAL_TARIN
            # pre_nms_topN = self.cfgs.FPN_TOP_K_PER_LEVEL_TRAIN
            # post_nms_topN = pre_nms_topN
        else:
            pre_nms_topN = self.cfgs.RPN_TOP_K_NMS_TEST
            post_nms_topN = self.cfgs.RPN_MAXIMUM_PROPOSAL_TEST
            # pre_nms_topN = self.cfgs.FPN_TOP_K_PER_LEVEL_TEST
            # post_nms_topN = pre_nms_topN

        nms_thresh = self.cfgs.RPN_NMS_IOU_THRESHOLD

        # Column 1 is the foreground (objectness) probability.
        cls_prob = rpn_cls_prob[:, 1]

        # 1. decode boxes
        decode_boxes = bbox_transform.bbox_transform_inv(boxes=anchors, deltas=rpn_bbox_pred,
                                                         scale_factors=self.cfgs.ANCHOR_SCALE_FACTORS)

        # 2. clip to img boundaries
        decode_boxes = clip_boxes_to_img_boundaries(decode_boxes=decode_boxes,
                                                    img_shape=img_shape)

        # 3. get top N to NMS
        if pre_nms_topN > 0:
            pre_nms_topN = tf.minimum(pre_nms_topN, tf.shape(decode_boxes)[0], name='avoid_unenough_boxes')
            cls_prob, top_k_indices = tf.nn.top_k(cls_prob, k=pre_nms_topN)
            decode_boxes = tf.gather(decode_boxes, top_k_indices)

        # 4. NMS
        keep = tf.image.non_max_suppression(
            boxes=decode_boxes,
            scores=cls_prob,
            max_output_size=post_nms_topN,
            iou_threshold=nms_thresh)

        final_boxes = tf.gather(decode_boxes, keep)
        final_probs = tf.gather(cls_prob, keep)

        return final_boxes, final_probs

    def add_anchor_img_smry(self, img, anchors, labels, method):
        """Adds TensorBoard images of positive (label>=1) and negative
        (label==0) anchors drawn over the input batch."""
        positive_anchor_indices = tf.reshape(tf.where(tf.greater_equal(labels, 1)), [-1])
        negative_anchor_indices = tf.reshape(tf.where(tf.equal(labels, 0)), [-1])

        positive_anchor = tf.gather(anchors, positive_anchor_indices)
        negative_anchor = tf.gather(anchors, negative_anchor_indices)

        pos_in_img = self.drawer.only_draw_boxes(img_batch=img,
                                                 boxes=positive_anchor,
                                                 method=method)
        neg_in_img = self.drawer.only_draw_boxes(img_batch=img,
                                                 boxes=negative_anchor,
                                                 method=method)

        tf.summary.image('positive_anchor', pos_in_img)
        tf.summary.image('negative_anchors', neg_in_img)

    def add_roi_batch_img_smry(self, img, rois, labels, method):
        """Same as add_anchor_img_smry but for the sampled ROI batch."""
        positive_roi_indices = tf.reshape(tf.where(tf.greater_equal(labels, 1)), [-1])
        negative_roi_indices = tf.reshape(tf.where(tf.equal(labels, 0)), [-1])

        pos_roi = tf.gather(rois, positive_roi_indices)
        neg_roi = tf.gather(rois, negative_roi_indices)

        pos_in_img = self.drawer.only_draw_boxes(img_batch=img,
                                                 boxes=pos_roi,
                                                 method=method)
        neg_in_img = self.drawer.only_draw_boxes(img_batch=img,
                                                 boxes=neg_roi,
                                                 method=method)

        tf.summary.image('pos_rois', pos_in_img)
        tf.summary.image('neg_rois', neg_in_img)

    def get_restorer(self):
        """Builds a tf.train.Saver for restoring weights.

        Preference order: latest training checkpoint (optionally RPN-only),
        else the ImageNet-pretrained backbone checkpoint with variable names
        remapped. Returns (restorer, checkpoint_path); (None, None) for
        pytorch-zoo backbones with no checkpoint.
        """
        checkpoint_path = tf.train.latest_checkpoint(os.path.join(self.cfgs.TRAINED_CKPT, self.cfgs.VERSION))

        if checkpoint_path is not None:
            if self.cfgs.RESTORE_FROM_RPN:
                print('___restore from rpn___')
                model_variables = slim.get_model_variables()
                restore_variables = [var for var in model_variables if not var.name.startswith('FastRCNN_Head')] + \
                                    [slim.get_or_create_global_step()]
                for var in restore_variables:
                    print(var.name)
                restorer = tf.train.Saver(restore_variables)
            else:
                restorer = tf.train.Saver()
            print("model restore from :", checkpoint_path)
        else:
            if self.cfgs.NET_NAME in self.pretrain_zoo.pth_zoo:
                return None, None
            checkpoint_path = self.cfgs.PRETRAINED_CKPT
            print("model restore from pretrained mode, path is :", checkpoint_path)

            model_variables = slim.get_model_variables()
            # for var in model_variables:
            #     print(var.name)
            # print(20*"__++__++__")

            def name_in_ckpt_rpn(var):
                # Graph name == checkpoint name for backbone variables.
                return var.op.name

            def name_in_ckpt_fastrcnn_head(var):
                '''
                Fast-RCNN/resnet_v1_50/block4 -->resnet_v1_50/block4
                Fast-RCNN/MobilenetV2/** -- > MobilenetV2 **
                :param var:
                :return:
                '''
                return '/'.join(var.op.name.split('/')[1:])

            nameInCkpt_Var_dict = {}
            for var in model_variables:
                if var.name.startswith('Fast-RCNN/'+self.base_network_name):  # +'/block4'
                    var_name_in_ckpt = name_in_ckpt_fastrcnn_head(var)
                    nameInCkpt_Var_dict[var_name_in_ckpt] = var
                else:
                    if var.name.startswith(self.base_network_name):
                        var_name_in_ckpt = name_in_ckpt_rpn(var)
                        nameInCkpt_Var_dict[var_name_in_ckpt] = var
                    else:
                        continue
            restore_variables = nameInCkpt_Var_dict
            for key, item in restore_variables.items():
                print("var_in_graph: ", item.name)
                print("var_in_ckpt: ", key)
                print(20*"___")
            restorer = tf.train.Saver(restore_variables)
            print(20 * "****")
            print("restore from pretrained_weighs in IMAGE_NET")
        return restorer, checkpoint_path

    def assign_levels(self, all_rois, labels=None, bbox_targets=None):
        '''
        Assign each ROI to an FPN level by its size (the FPN paper heuristic
        level = 4 + log2(sqrt(w*h)/224), clamped to the configured levels) and
        split rois/labels/targets into per-level lists.

        :param all_rois:
        :param labels:
        :param bbox_targets:
        :return: per-level ROI lists (plus concatenated labels/targets when training)
        '''
        with tf.name_scope('assign_levels'):
            # all_rois = tf.Print(all_rois, [tf.shape(all_rois)], summarize=10, message='ALL_ROIS_SHAPE*****')
            xmin, ymin, xmax, ymax = tf.unstack(all_rois, axis=1)

            h = tf.maximum(0., ymax - ymin)
            w = tf.maximum(0., xmax - xmin)

            levels = tf.floor(4. + tf.log(tf.sqrt(w * h + 1e-8) / 224.0) / tf.log(2.))  # 4 + log_2(***)
            # use floor instead of round

            min_level = int(self.cfgs.LEVEL[0][-1])
            max_level = min(5, int(self.cfgs.LEVEL[-1][-1]))
            levels = tf.maximum(levels, tf.ones_like(levels) * min_level)  # level minimum is 2
            levels = tf.minimum(levels, tf.ones_like(levels) * max_level)  # level maximum is 5

            levels = tf.stop_gradient(tf.reshape(levels, [-1]))

            def get_rois(levels, level_i, rois, labels, bbox_targets):
                # Select the subset of rois/labels/targets assigned to level_i.
                level_i_indices = tf.reshape(tf.where(tf.equal(levels, level_i)), [-1])
                tf.summary.scalar('LEVEL/LEVEL_%d_rois_NUM' % level_i, tf.shape(level_i_indices)[0])
                level_i_rois = tf.gather(rois, level_i_indices)

                if self.is_training:
                    if not self.cfgs.CUDA8:
                        # Note: for > cuda 9
                        level_i_rois = tf.stop_gradient(level_i_rois)
                        level_i_labels = tf.gather(labels, level_i_indices)

                        level_i_targets = tf.gather(bbox_targets, level_i_indices)
                    else:

                        # Note: for cuda 8
                        level_i_rois = tf.stop_gradient(tf.concat([level_i_rois, [[0, 0, 0., 0.]]], axis=0))
                        # to avoid the num of level i rois is 0.0, which will broken the BP in tf

                        level_i_labels = tf.gather(labels, level_i_indices)
                        level_i_labels = tf.stop_gradient(tf.concat([level_i_labels, [0]], axis=0))

                        level_i_targets = tf.gather(bbox_targets, level_i_indices)
                        level_i_targets = tf.stop_gradient(tf.concat([level_i_targets,
                                                                      tf.zeros(shape=(1, 5 * (self.cfgs.CLASS_NUM + 1)),
                                                                               dtype=tf.float32)], axis=0))

                    return level_i_rois, level_i_labels, level_i_targets
                else:
                    if self.cfgs.CUDA8:
                        # Note: for cuda 8
                        level_i_rois = tf.concat([level_i_rois, [[0, 0, 0., 0.]]], axis=0)
                    return level_i_rois, None, None

            rois_list = []
            labels_list = []
            targets_list = []
            for i in range(min_level, max_level + 1):
                P_i_rois, P_i_labels, P_i_targets = get_rois(levels, level_i=i, rois=all_rois,
                                                             labels=labels,
                                                             bbox_targets=bbox_targets)
                rois_list.append(P_i_rois)
                labels_list.append(P_i_labels)
                targets_list.append(P_i_targets)

            if self.is_training:
                all_labels = tf.concat(labels_list, axis=0)
                all_targets = tf.concat(targets_list, axis=0)

                return rois_list, all_labels, all_targets
            else:
                return rois_list  # [P2_rois, P3_rois, P4_rois, P5_rois] Note: P6 do not assign rois
| 8,070 |
2,032 | /*
Tencent is pleased to support the open source community by making PhxQueue available.
Copyright (C) 2017 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
<https://opensource.org/licenses/BSD-3-Clause>
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
/* phxrpc_lock_dispatcher.h
Generated by phxrpc_pb2service from lock.proto
Please DO NOT edit unless you know exactly what you are doing.
*/
#pragma once
#include "phxrpc/http.h"
#include "phxrpc/rpc.h"
class LockService;
// Generated phxrpc dispatcher: routes incoming BaseRequests (by URI) to the
// matching LockService handler; one method per RPC declared in lock.proto.
class LockDispatcher {
  public:
    // URI -> handler table consumed by phxrpc::BaseDispatcher.
    static const phxrpc::BaseDispatcher<LockDispatcher>::URIFuncMap &GetURIFuncMap();

    LockDispatcher(LockService &service, phxrpc::DispatcherArgs_t *dispatcher_args);

    virtual ~LockDispatcher();

    // Each handler deserializes req, invokes the service, and fills resp;
    // the int return is the phxrpc status code.
    int PHXEcho(const phxrpc::BaseRequest &req, phxrpc::BaseResponse *const resp);

    int GetString(const phxrpc::BaseRequest &req, phxrpc::BaseResponse *const resp);

    int SetString(const phxrpc::BaseRequest &req, phxrpc::BaseResponse *const resp);

    int DeleteString(const phxrpc::BaseRequest &req, phxrpc::BaseResponse *const resp);

    int GetLockInfo(const phxrpc::BaseRequest &req, phxrpc::BaseResponse *const resp);

    int AcquireLock(const phxrpc::BaseRequest &req, phxrpc::BaseResponse *const resp);

  private:
    LockService &service_;
    phxrpc::DispatcherArgs_t *dispatcher_args_;
};
| 565 |
10,225 | package io.quarkus.it.websocket;
import javax.websocket.OnMessage;
import javax.websocket.server.ServerEndpoint;
@ServerEndpoint("/echo")
public class EchoSocket {

    /** Echoes every incoming text frame back to the sender unchanged. */
    @OnMessage
    String echo(String message) {
        return message;
    }
}
| 97 |
1,016 | <reponame>peter-ls/kylo<filename>services/feed-manager-service/feed-manager-core/src/main/java/com/thinkbiganalytics/feedmgr/service/category/SimpleCategoryCache.java
package com.thinkbiganalytics.feedmgr.service.category;
/*-
* #%L
* thinkbig-feed-manager-core
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.thinkbiganalytics.cluster.ClusterMessage;
import com.thinkbiganalytics.cluster.ClusterServiceMessageReceiver;
import com.thinkbiganalytics.feedmgr.rest.model.FeedCategory;
import com.thinkbiganalytics.metadata.api.MetadataAccess;
import com.thinkbiganalytics.metadata.api.category.Category;
import com.thinkbiganalytics.metadata.api.category.CategoryProvider;
import com.thinkbiganalytics.metadata.api.event.MetadataChange;
import com.thinkbiganalytics.metadata.api.event.MetadataEventListener;
import com.thinkbiganalytics.metadata.api.event.MetadataEventService;
import com.thinkbiganalytics.metadata.api.event.category.CategoryChange;
import com.thinkbiganalytics.metadata.api.event.category.CategoryChangeEvent;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.inject.Inject;
/**
 * Lazily populated, event-refreshed cache of {@link FeedCategory} objects,
 * keyed both by category id and by category system name. Kept in sync with the
 * metadata store through {@link CategoryChangeEvent}s and, across a cluster,
 * through {@link ClusterMessage}s.
 */
public class SimpleCategoryCache implements ClusterServiceMessageReceiver {
    @Inject
    MetadataAccess metadataAccess;
    @Inject
    private CategoryProvider categoryProvider;
    @Inject
    private SimpleCategoryModelTransform categoryModelTransform;
    // Flipped to true after the first bulk load of both caches.
    private AtomicBoolean populated = new AtomicBoolean(false);
    @Inject
    private MetadataEventService metadataEventService;
    private CategoryChangeListener categoryChangeListener = new CategoryChangeListener();
    /**
     * Adds listeners for transferring events.
     */
    @PostConstruct
    public void addEventListener() {
        metadataEventService.addListener(categoryChangeListener);
    }
    /**
     * Removes listeners and stops transferring events.
     */
    @PreDestroy
    public void removeEventListener() {
        metadataEventService.removeListener(categoryChangeListener);
    }
    // Loads a category by id on a cache miss; also primes the name cache as a
    // side effect. NOTE(review): Guava's getUnchecked throws if a CacheLoader
    // returns null — presumably ids passed in always resolve; confirm callers.
    private LoadingCache<String, FeedCategory> categoryIdCache = CacheBuilder.newBuilder().build(new CacheLoader<String, FeedCategory>() {
        @Override
        public FeedCategory load(String id) throws Exception {
            FeedCategory feedCategory = metadataAccess.read(() -> {
                Category category = categoryProvider.findById(categoryProvider.resolveId(id));
                if (category != null) {
                    FeedCategory c = categoryModelTransform.domainToFeedCategorySimple(category, false, false);
                    categoryNameCache.put(c.getSystemName(), c);
                    return c;
                } else {
                    return null;
                }
            }, MetadataAccess.SERVICE);
            return feedCategory;
        }
    });
    // Loads a category by system name on a cache miss.
    private LoadingCache<String, FeedCategory> categoryNameCache = CacheBuilder.newBuilder().build(new CacheLoader<String, FeedCategory>() {
        @Override
        public FeedCategory load(String name) throws Exception {
            FeedCategory feedCategory = metadataAccess.read(() -> {
                Category category = categoryProvider.findBySystemName(name);
                if (category != null) {
                    return categoryModelTransform.domainToFeedCategorySimple(category, false, false);
                } else {
                    return null;
                }
            }, MetadataAccess.SERVICE);
            return feedCategory;
        }
    });
    // Bulk-loads every category into both caches; synchronized so concurrent
    // first readers do not duplicate the load.
    private synchronized void populate() {
        List<FeedCategory> allCategories = metadataAccess.read(() -> {
            List<FeedCategory> list = categoryProvider.findAll().stream().map(c -> categoryModelTransform.domainToFeedCategorySimple(c, false, false)).collect(Collectors.toList());
            return list;
        });
        Map<String, FeedCategory> idMap = allCategories.stream().collect(Collectors.toMap(c -> c.getId(), c -> c));
        Map<String, FeedCategory> nameMap = allCategories.stream().collect(Collectors.toMap(c -> c.getSystemName(), c -> c));
        categoryIdCache.putAll(idMap);
        categoryNameCache.putAll(nameMap);
        populated.set(true);
    }
    /**
     * @param id the category id
     * @return the cached category, loading it from the metadata store on a miss
     */
    public FeedCategory getFeedCategoryById(String id) {
        if (!populated.get()) {
            populate();
        }
        return categoryIdCache.getUnchecked(id);
    }
    /**
     * @param name the category system name
     * @return the cached category, loading it from the metadata store on a miss
     */
    public FeedCategory getFeedCategoryByName(String name) {
        if (!populated.get()) {
            populate();
        }
        return categoryNameCache.getUnchecked(name);
    }
    // Live view of the name-keyed cache (populates on first access).
    public Map<String, FeedCategory> getCategoriesByName() {
        if (!populated.get()) {
            populate();
        }
        return categoryNameCache.asMap();
    }
    // Live view of the id-keyed cache (populates on first access).
    public Map<String, FeedCategory> getCategoriesById() {
        if (!populated.get()) {
            populate();
        }
        return categoryIdCache.asMap();
    }
    // Applies a change notification: evict both keys on delete, refresh the id
    // entry otherwise. NOTE(review): a non-delete change only refreshes the id
    // cache; after a rename the name cache may keep the stale key — confirm.
    private void onCategoryChange(CategoryChange change) {
        if (change != null) {
            if (change.getChange() == MetadataChange.ChangeType.DELETE) {
                categoryIdCache.invalidate(change.getCategoryId().toString());
                if (change.getCategoryName().isPresent()) {
                    categoryNameCache.invalidate(change.getCategoryName().get());
                }
            } else {
                categoryIdCache.refresh(change.getCategoryId().toString());
            }
        }
    }
    // Bridges local metadata events into onCategoryChange.
    private class CategoryChangeListener implements MetadataEventListener<CategoryChangeEvent> {
        public void notify(@Nonnull final CategoryChangeEvent metadataEvent) {
            CategoryChange change = metadataEvent.getData();
            onCategoryChange(change);
        }
    }
    // Bridges category-change messages received from other cluster nodes.
    @Override
    public void onMessageReceived(String from, ClusterMessage message) {
        if (CategoryChange.CLUSTER_EVENT_TYPE.equals(message.getType())) {
            CategoryChange change = (CategoryChange) message.getMessage();
            onCategoryChange(change);
        }
    }
}
| 2,547 |
380 | import numpy as np
from hls4ml.model.optimizer import OptimizerPass
from hls4ml.model.layers import Conv1D, Conv2D, Dense, SeparableConv1D, SeparableConv2D
class ApplyResourceStrategy(OptimizerPass):
    '''Transposes the weights to use the dense_resource matrix multiply routine.

    Matches Dense/Conv/SeparableConv layers configured with the "resource"
    strategy whose weights have not yet been transposed, and permutes each
    weight tensor so the filter dimension comes first.
    '''

    def match(self, node):
        # Only layer types with a resource implementation are candidates.
        node_matches = isinstance(node, (Dense, Conv1D, SeparableConv1D, Conv2D, SeparableConv2D))
        is_resource_strategy = node.get_attr('strategy', '').lower() == 'resource'
        # transform() only ever stores the bool True here, so plain truthiness
        # is sufficient (avoids the non-idiomatic `== True` comparison).
        already_transformed = bool(node.get_attr('_weights_transposed', False))
        return node_matches and is_resource_strategy and not already_transformed

    def transform(self, model, node):
        # Move the filter axis to the front; comments give (source) => (dest).
        if isinstance(node, Dense):
            node.weights['weight'].data = np.transpose(node.weights['weight'].data)
        elif isinstance(node, Conv1D):
            node.weights['weight'].data = np.transpose(node.weights['weight'].data, axes=[2, 0, 1]) #(W,C,F) => (F,W,C)
        elif isinstance(node, SeparableConv1D):
            node.weights['depthwise'].data = np.transpose(node.weights['depthwise'].data, axes=[2, 0, 1]) #(W,C,F) => (F,W,C)
            node.weights['pointwise'].data = np.transpose(node.weights['pointwise'].data, axes=[2, 0, 1]) #(W,C,F) => (F,W,C)
        elif isinstance(node, Conv2D):
            node.weights['weight'].data = np.transpose(node.weights['weight'].data, axes=[3, 0, 1, 2]) #(H,W,C,F) => (F,H,W,C)
        elif isinstance(node, SeparableConv2D):
            node.weights['depthwise'].data = np.transpose(node.weights['depthwise'].data, axes=[3, 0, 1, 2]) #(H,W,C,F) => (F,H,W,C)
            node.weights['pointwise'].data = np.transpose(node.weights['pointwise'].data, axes=[3, 0, 1, 2]) #(H,W,C,F) => (F,H,W,C)
        else:
            raise Exception('Unexpected layer {} with resource strategy'.format(node.class_name))
        # Mark the node so match() does not pick it up a second time.
        node.set_attr('_weights_transposed', True)
        return False
1,831 | /**
* Copyright (c) 2017-present, Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "logdevice/server/ServerSSLFetcher.h"
#include <wangle/ssl/TLSCredProcessor.h>
#include <wangle/ssl/TLSTicketKeySeeds.h>
#include "logdevice/common/stats/Stats.h"
namespace facebook { namespace logdevice {
/* static */ std::unique_ptr<ServerSSLFetcher>
ServerSSLFetcher::create(const std::string& cert_path,
const std::string& key_path,
const std::string& ca_path,
bool use_tls_ticket_seeds,
const std::string& tls_ticket_seeds_path,
StatsHolder* stats) {
std::unique_ptr<ServerSSLFetcher> fetcher{new ServerSSLFetcher(
cert_path, key_path, ca_path, use_tls_ticket_seeds, stats)};
fetcher->reloadSSLContext();
if (use_tls_ticket_seeds) {
auto seeds =
wangle::TLSCredProcessor::processTLSTickets(tls_ticket_seeds_path);
if (!seeds.has_value()) {
RATELIMIT_ERROR(
std::chrono::seconds(10),
1,
"Failed to load TLS tickets seeds while --use-tls-ticket-seeds "
"is set. TLS session resumption will effectivly be disabled.");
return fetcher;
}
fetcher->reloadTLSTicketSeed(std::move(seeds).value());
}
return fetcher;
}
// Constructor only records configuration; create() performs the initial
// context load (and optional ticket-seed load) afterwards.
ServerSSLFetcher::ServerSSLFetcher(const std::string& cert_path,
                                   const std::string& key_path,
                                   const std::string& ca_path,
                                   bool use_tls_ticket_seeds,
                                   StatsHolder* stats)
    : SSLFetcher(cert_path, key_path, ca_path, true, stats),
      use_tls_ticket_seeds_(use_tls_ticket_seeds) {}
// Rebuilds the SSL context while carrying the current TLS ticket seeds over
// to a fresh TLSTicketKeyManager bound to the new context. The 1-4 sequence
// below is order-sensitive.
void ServerSSLFetcher::reloadSSLContext() {
  // TLSTicketKeyManager should have a 1:1 relationship with the SSL context, so
  // when the context is resetted, the ticket manager should be resetted as
  // well.
  // 1. Save the seeds from the current ticket manager.
  wangle::TLSTicketKeySeeds seeds;
  if (ticket_manager_) {
    ticket_manager_->getTLSTicketKeySeeds(
        seeds.oldSeeds, seeds.currentSeeds, seeds.newSeeds);
    ticket_manager_.reset();
  }
  // 2. Reset the SSL context.
  SSLFetcher::reloadSSLContext();
  // Nothing more to do if the reload failed or tickets are disabled.
  if (!context_ || !use_tls_ticket_seeds_) {
    return;
  }
  // 3. Recreate the ticket manager and attach it to the new context.
  ticket_manager_ =
      std::make_unique<wangle::TLSTicketKeyManager>(context_.get(), nullptr);
  if (seeds.isEmpty()) {
    return;
  }
  // 4. Reload the saved seeds into the ticket manager.
  reloadTLSTicketSeed(std::move(seeds));
}
// Installs the given ticket seeds into the ticket manager (no-op when no
// manager exists) and bumps the reload stat when stats are enabled.
void ServerSSLFetcher::reloadTLSTicketSeed(wangle::TLSTicketKeySeeds seeds) {
  if (ticket_manager_ != nullptr) {
    ticket_manager_->setTLSTicketKeySeeds(
        seeds.oldSeeds, seeds.currentSeeds, seeds.newSeeds);
    if (stats_ != nullptr) {
      STAT_INCR(stats_, tls_ticket_seeds_reloaded);
    }
  }
}
}} // namespace facebook::logdevice
| 1,327 |
3,730 | <reponame>jackywyz/jvm.go
package jvm.lambda;
/** Minimal demo: builds a Runnable from a lambda expression and invokes it. */
public class LambdaTest {
    public static void main(String[] args) {
        final Runnable greeting = () -> {
            System.out.println("Hello, World!");
        };
        greeting.run();
    }
}
| 99 |
4,606 | import uuid
from unittest import mock
import pytest
from botocore import exceptions
from dagster import DagsterResourceFunctionError, In, Out, build_op_context, configured, job, op
from dagster_aws.s3 import S3FileHandle, S3FileManager, s3_file_manager, s3_resource
def test_s3_file_manager_write(mock_s3_resource, mock_s3_bucket):
    """Written data lands in the bucket, with and without a key extension."""
    payload = b"foo"
    manager = S3FileManager(mock_s3_resource.meta.client, mock_s3_bucket.name, "some-key")

    # Plain write: the stored object matches the payload byte for byte.
    handle = manager.write_data(payload)
    assert mock_s3_bucket.Object(handle.s3_key).get()["Body"].read() == payload

    # Write with an extension: the key gains the suffix, contents are unchanged.
    handle = manager.write_data(payload, ext="foo")
    assert handle.s3_key.endswith(".foo")
    assert mock_s3_bucket.Object(handle.s3_key).get()["Body"].read() == payload
def test_s3_file_manager_read(mock_s3_resource, mock_s3_bucket):
    """Reads return the remote bytes and are served from cache afterwards."""
    payload = b"bar"
    remote_s3_object = mock_s3_bucket.Object("some-key/foo")
    remote_s3_object.put(Body=payload)

    manager = S3FileManager(mock_s3_resource.meta.client, mock_s3_bucket.name, "some-key")
    handle = S3FileHandle(mock_s3_bucket.name, "some-key/foo")

    with manager.read(handle) as file_obj:
        assert file_obj.read() == payload

    # Delete the remote object: the second read must be satisfied by the cache.
    remote_s3_object.delete()
    with manager.read(handle) as file_obj:
        assert file_obj.read() == payload
def test_depends_on_s3_resource_file_manager(mock_s3_bucket):
    """Round-trips a payload through the S3-backed file manager inside a job.

    emit_file writes bytes and passes the handle to accept_file, which copies
    the object to a local temp file and verifies its contents; afterwards the
    bucket must hold exactly one object under the configured prefix, named
    with a UUID.
    """
    bar_bytes = b"bar"

    @op(out=Out(S3FileHandle), required_resource_keys={"file_manager"})
    def emit_file(context):
        # Write the payload and hand the resulting handle downstream.
        return context.resources.file_manager.write_data(bar_bytes)

    @op(
        ins={"file_handle": In(S3FileHandle)},
        required_resource_keys={"file_manager"},
    )
    def accept_file(context, file_handle):
        local_path = context.resources.file_manager.copy_handle_to_local_temp(file_handle)
        assert isinstance(local_path, str)
        # Use a context manager so the temp file is closed deterministically
        # (the original leaked the file object).
        with open(local_path, "rb") as local_file:
            assert local_file.read() == bar_bytes

    @job(resource_defs={"s3": s3_resource, "file_manager": s3_file_manager})
    def s3_file_manager_test():
        accept_file(emit_file())

    result = s3_file_manager_test.execute_in_process(
        run_config={
            "resources": {
                "file_manager": {
                    "config": {"s3_bucket": mock_s3_bucket.name, "s3_prefix": "some-prefix"}
                }
            },
        },
    )
    assert result.success

    keys_in_bucket = [obj.key for obj in mock_s3_bucket.objects.all()]
    assert len(keys_in_bucket) == 1

    # keys_in_bucket is already a list; no need to re-materialize it.
    file_key = keys_in_bucket[0]
    comps = file_key.split("/")
    assert "/".join(comps[:-1]) == "some-prefix"
    assert uuid.UUID(comps[-1])
# mock.patch decorators apply bottom-up: the first parameter receives the
# S3FileManager patch, the second the boto3 Session.resource patch.
@mock.patch("boto3.session.Session.resource")
@mock.patch("dagster_aws.s3.resources.S3FileManager")
def test_s3_file_manager_resource(MockS3FileManager, mock_boto3_resource):
    # Mutable flag so the outer test can verify the op body actually ran.
    did_it_run = dict(it_ran=False)
    resource_config = {
        "use_unsigned_session": True,
        "region_name": "us-west-1",
        "endpoint_url": "http://alternate-s3-host.io",
        "s3_bucket": "some-bucket",
        "s3_prefix": "some-prefix",
    }
    # The client object the patched boto3 session hands to S3FileManager.
    mock_s3_session = mock_boto3_resource.return_value.meta.client
    @op(required_resource_keys={"file_manager"})
    def test_op(context):
        # test that we got back a S3FileManager
        assert context.resources.file_manager == MockS3FileManager.return_value
        # make sure the file manager was initalized with the config we are supplying
        MockS3FileManager.assert_called_once_with(
            s3_session=mock_s3_session,
            s3_bucket=resource_config["s3_bucket"],
            s3_base_key=resource_config["s3_prefix"],
        )
        # The botocore Config object is opaque, so reuse the recorded kwarg in
        # the call assertion and inspect its retry policy separately.
        _, call_kwargs = mock_boto3_resource.call_args
        mock_boto3_resource.assert_called_once_with(
            "s3",
            region_name=resource_config["region_name"],
            endpoint_url=resource_config["endpoint_url"],
            use_ssl=True,
            config=call_kwargs["config"],
        )
        assert call_kwargs["config"].retries["max_attempts"] == 5
        did_it_run["it_ran"] = True
    context = build_op_context(
        resources={"file_manager": configured(s3_file_manager)(resource_config)}
    )
    test_op(context)
    # Guard against the op silently never executing.
    assert did_it_run["it_ran"]
def test_s3_file_manager_resource_with_profile():
    """Resource init surfaces ProfileNotFound for an unknown AWS profile."""
    resource_config = {
        "use_unsigned_session": True,
        "region_name": "us-west-1",
        "endpoint_url": "http://alternate-s3-host.io",
        "s3_bucket": "some-bucket",
        "s3_prefix": "some-prefix",
        "profile_name": "some-profile",
    }

    @op(required_resource_keys={"file_manager"})
    def test_op(context):
        # placeholder function to test resource initialization
        return context.log.info("return from test_solid")

    with pytest.raises(DagsterResourceFunctionError) as e:
        context = build_op_context(
            resources={"file_manager": configured(s3_file_manager)(resource_config)},
        )
        test_op(context)

    underlying = e.value.user_exception
    assert isinstance(underlying, exceptions.ProfileNotFound)
    assert str(underlying) == "The config profile (some-profile) could not be found"
| 2,236 |
2,002 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
#include "StdAfx.h"
namespace Microsoft::Resources
{
    /*!
     * An atom pool initialized directly from an array of static strings.
     * Uses the supplied strings directly, so they must remain valid for
     * the life of the pool. Unless NoValidation is set, every string (except
     * atom 0 when AllowNullForAtom0 is set) must be non-NULL and non-empty.
     */
    HRESULT StaticAtomPool::CreateInstance(
        _In_reads_(numStrings) const PCWSTR* ppStrings,
        _In_ int numStrings,
        _In_opt_ PCWSTR pDescription,
        _In_ StaticAtomPoolFlags flags,
        _Outptr_ StaticAtomPool** pool)
    {
        *pool = nullptr;
        RETURN_HR_IF(E_INVALIDARG, numStrings < 0);
        RETURN_HR_IF(E_INVALIDARG, (ppStrings == nullptr) && (numStrings != 0));
        // Fail if any of the strings are NULL or empty
        if ((flags & NoValidation) == 0)
        {
            // if AllowNullForAtom0 is set, don't validate the first atom
            for (int i = ((flags & AllowNullForAtom0) ? 1 : 0); i < numStrings; i++)
            {
                if ((ppStrings[i] == NULL) || (ppStrings[i][0] == L'\0'))
                {
                    RETURN_HR(E_DEF_ATOM_BAD_STRING);
                }
            }
        }
        // CaseSensitive flag selects exact comparison; the default is case-insensitive.
        DEFCOMPAREOPTIONS compareOptions = ((flags & CaseSensitive) ? DefCompare_Default : DefCompare_CaseInsensitive);
        StaticAtomPool* pRtrn = new StaticAtomPool(ppStrings, numStrings, pDescription, compareOptions);
        RETURN_IF_NULL_ALLOC(pRtrn);
        *pool = pRtrn;
        return S_OK;
    }
    // True when the pool contains the given string (index itself is discarded).
    bool StaticAtomPool::Contains(__in PCWSTR pString) const { return TryGetIndex(pString, NULL); }
    // True when the atom belongs to this pool and its index is in range.
    bool StaticAtomPool::Contains(__in Atom atom) const
    {
        return ((atom.GetPoolIndex() == m_poolIndex) && (atom.GetIndex() >= 0) && (atom.GetIndex() < m_numStrings));
    }
bool StaticAtomPool::Equals(__in Atom atom, __in PCWSTR pString) const
{
if (DefString_IsEmpty(pString))
{
return false;
}
if (atom.GetPoolIndex() != m_poolIndex)
{
return false;
}
if ((atom.GetIndex() < 0) || (atom.GetIndex() > m_numStrings - 1))
{
return false;
}
return (DefString_CompareWithOptions(m_ppStrings[atom.GetIndex()], pString, m_compareOptions) == Def_Equal);
}
    // Simple accessors for the pool's group, description, and identity.
    AtomPoolGroup* StaticAtomPool::GetAtomPoolGroup() const { return m_pAtoms; }
    // Never returns NULL; an unset description reads as the empty string.
    PCWSTR StaticAtomPool::GetDescription() const { return ((m_pDescription != NULL) ? m_pDescription : L""); }
    bool StaticAtomPool::GetIsCaseInsensitive() const { return ((m_compareOptions & DefCompare_CaseInsensitive) != 0); }
    Atom::Index StaticAtomPool::GetNumAtoms() const { return m_numStrings; }
    Atom::PoolIndex StaticAtomPool::GetPoolIndex() const { return m_poolIndex; }
    void StaticAtomPool::SetAtomPoolGroup(AtomPoolGroup* pGroup) { m_pAtoms = pGroup; }
    void StaticAtomPool::SetPoolIndex(Atom::PoolIndex index) { m_poolIndex = index; }
bool StaticAtomPool::TryGetAtom(__in PCWSTR pString, __out_opt Atom* pAtomOut) const
{
Atom::Index index = Atom::NullAtomIndex;
if (TryGetIndex(pString, &index))
{
if (pAtomOut != NULL)
{
pAtomOut->Set(index, m_poolIndex);
}
return true;
}
return false;
}
bool StaticAtomPool::TryGetIndex(__in PCWSTR pString, __out_opt Atom::Index* pIndexOut) const
{
if (pIndexOut != NULL)
{
*pIndexOut = Atom::NullAtomIndex;
}
if (DefString_IsEmpty(pString))
{
return false;
}
for (int i = 0; i < m_numStrings; i++)
{
if (DefString_CompareWithOptions(m_ppStrings[i], pString, m_compareOptions) == Def_Equal)
{
if (pIndexOut != NULL)
{
*pIndexOut = i;
}
return true;
}
}
return false;
}
bool StaticAtomPool::TryGetString(__in Atom atom, __inout_opt StringResult* pStringOut) const
{
if (atom.GetPoolIndex() != m_poolIndex)
{
return false;
}
if ((atom.GetIndex() < 0) || (atom.GetIndex() > m_numStrings - 1))
{
return false;
}
if (pStringOut != NULL)
{
if (FAILED(pStringOut->SetRef(m_ppStrings[atom.GetIndex()])))
{
return false;
}
}
return true;
}
    // Looks up the string at the given index. Returns false when the index is
    // out of range; on success pStringOut (if supplied) is pointed at the
    // pool's string via SetRef (no copy is made).
    bool StaticAtomPool::TryGetString(__in Atom::Index index, __inout_opt StringResult* pStringOut) const
    {
        if ((index < 0) || (index > m_numStrings - 1))
        {
            return false;
        }
        if (pStringOut != NULL)
        {
            if (FAILED(pStringOut->SetRef(m_ppStrings[index])))
            {
                return false;
            }
        }
        return true;
    }
} // namespace Microsoft::Resources | 2,134 |
372 | <reponame>noisecode3/DPF
/*
* DISTRHO Plugin Framework (DPF)
* Copyright (C) 2012-2021 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any purpose with
* or without fee is hereby granted, provided that the above copyright notice and this
* permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
* TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN
* NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL
* DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "../ImageBase.hpp"
START_NAMESPACE_DGL
// --------------------------------------------------------------------------------------------------------------------
// protected constructors
// Default: a null image (no data, zero size, null format).
ImageBase::ImageBase()
    : rawData(nullptr),
      size(0, 0),
      format(kImageFormatNull) {}
// Wraps existing pixel data; the pointer is stored as-is (no copy is made).
ImageBase::ImageBase(const char* const rdata, const uint width, const uint height, const ImageFormat fmt)
    : rawData(rdata),
      size(width, height),
      format(fmt) {}
ImageBase::ImageBase(const char* const rdata, const Size<uint>& s, const ImageFormat fmt)
    : rawData(rdata),
      size(s),
      format(fmt) {}
// Shallow copy: both images reference the same pixel data.
ImageBase::ImageBase(const ImageBase& image)
    : rawData(image.rawData),
      size(image.size),
      format(image.format) {}
// --------------------------------------------------------------------------------------------------------------------
// public methods
ImageBase::~ImageBase() {}
// An image is valid when it has pixel data and a valid (per Size) size.
bool ImageBase::isValid() const noexcept
{
    return (rawData != nullptr && size.isValid());
}
// Exact logical negation of isValid().
bool ImageBase::isInvalid() const noexcept
{
    return (rawData == nullptr || size.isInvalid());
}
uint ImageBase::getWidth() const noexcept
{
    return size.getWidth();
}
uint ImageBase::getHeight() const noexcept
{
    return size.getHeight();
}
const Size<uint>& ImageBase::getSize() const noexcept
{
    return size;
}
// Non-owning pointer to the wrapped pixel data.
const char* ImageBase::getRawData() const noexcept
{
    return rawData;
}
ImageFormat ImageBase::getFormat() const noexcept
{
    return format;
}
// Convenience overload: packs width/height into a Size and forwards.
void ImageBase::loadFromMemory(const char* const rdata,
                               const uint width,
                               const uint height,
                               const ImageFormat fmt) noexcept
{
    loadFromMemory(rdata, Size<uint>(width, height), fmt);
}
// Points this image at new pixel data; previous data is simply forgotten
// (it was never owned, so nothing is freed).
void ImageBase::loadFromMemory(const char* const rdata, const Size<uint>& s, const ImageFormat fmt) noexcept
{
    rawData = rdata;
    size = s;
    format = fmt;
}
// Draws the image at the origin.
void ImageBase::draw(const GraphicsContext& context)
{
    drawAt(context, Point<int>(0, 0));
}
// Convenience overload: forwards to the Point-based drawAt.
void ImageBase::drawAt(const GraphicsContext& context, const int x, const int y)
{
    drawAt(context, Point<int>(x, y));
}
// --------------------------------------------------------------------------------------------------------------------
// public operators
// Shallow assignment: copies the data pointer, size, and format.
ImageBase& ImageBase::operator=(const ImageBase& image) noexcept
{
    rawData = image.rawData;
    size = image.size;
    format = image.format;
    return *this;
}
// Two images are equal when they reference the same data with the same
// size and format.
bool ImageBase::operator==(const ImageBase& image) const noexcept
{
    const bool sameData = (rawData == image.rawData);
    const bool sameSize = (size == image.size);
    const bool sameFormat = (format == image.format);
    return sameData && sameSize && sameFormat;
}
bool ImageBase::operator!=(const ImageBase& image) const noexcept
{
    return !(*this == image);
}
// --------------------------------------------------------------------------------------------------------------------
END_NAMESPACE_DGL
| 1,168 |
440 | /*========================== begin_copyright_notice ============================
Copyright (C) 2018-2021 Intel Corporation
SPDX-License-Identifier: MIT
============================= end_copyright_notice ===========================*/
#include "PacketBuilder.h"
#include <cstdarg>
#include "Probe/Assertion.h"
namespace pktz
{
    // Sanity check for the memory helpers below: an i64-typed address looks
    // like a GFX access, which must go through BuilderGfxMem instead.
    void PacketBuilder::AssertMemoryUsageParams(Value* ptr, JIT_MEM_CLIENT usage)
    {
        IGC_ASSERT_MESSAGE(ptr->getType() != mInt64Ty,
                           "Address appears to be GFX access. Requires translation through BuilderGfxMem.");
    }
    // Thin wrappers over IRBuilder::CreateGEP for a single index.
    // Note: Ty is accepted for signature compatibility but unused here.
    Value* PacketBuilder::GEP(Value* Ptr, Value* Idx, Type* Ty, const Twine& Name)
    {
        return IRB()->CreateGEP(Ptr, Idx, Name);
    }
    Value* PacketBuilder::GEP(Type* Ty, Value* Ptr, Value* Idx, const Twine& Name)
    {
        return IRB()->CreateGEP(Ty, Ptr, Idx, Name);
    }
Value* PacketBuilder::GEP(Value* ptr, const std::initializer_list<Value*>& indexList, Type* Ty)
{
std::vector<Value*> indices;
for (auto i : indexList)
indices.push_back(i);
return GEPA(ptr, indices);
}
Value* PacketBuilder::GEP(Value* ptr, const std::initializer_list<uint32_t>& indexList, Type* Ty)
{
std::vector<Value*> indices;
for (auto i : indexList)
indices.push_back(C(i));
return GEPA(ptr, indices);
}
    // Array-of-indices GEP forms, forwarding to IRBuilder::CreateGEP.
    Value* PacketBuilder::GEPA(Value* Ptr, ArrayRef<Value*> IdxList, const Twine& Name)
    {
        return IRB()->CreateGEP(Ptr, IdxList, Name);
    }
    Value* PacketBuilder::GEPA(Type* Ty, Value* Ptr, ArrayRef<Value*> IdxList, const Twine& Name)
    {
        return IRB()->CreateGEP(Ty, Ptr, IdxList, Name);
    }
Value* PacketBuilder::IN_BOUNDS_GEP(Value* ptr, const std::initializer_list<Value*>& indexList)
{
std::vector<Value*> indices;
for (auto i : indexList)
indices.push_back(i);
return IN_BOUNDS_GEP(ptr, indices);
}
Value* PacketBuilder::IN_BOUNDS_GEP(Value* ptr, const std::initializer_list<uint32_t>& indexList)
{
std::vector<Value*> indices;
for (auto i : indexList)
indices.push_back(C(i));
return IN_BOUNDS_GEP(ptr, indices);
}
    // The LOAD overloads below validate the address via AssertMemoryUsageParams
    // and then forward to IRBuilder::CreateLoad. Ty is unused in the Ptr-first
    // forms (signature compatibility).
    LoadInst* PacketBuilder::LOAD(Value* Ptr, const char* Name, Type* Ty, JIT_MEM_CLIENT usage)
    {
        AssertMemoryUsageParams(Ptr, usage);
        return IRB()->CreateLoad(Ptr, Name);
    }
    LoadInst* PacketBuilder::LOAD(Value* Ptr, const Twine& Name, Type* Ty, JIT_MEM_CLIENT usage)
    {
        AssertMemoryUsageParams(Ptr, usage);
        return IRB()->CreateLoad(Ptr, Name);
    }
    LoadInst* PacketBuilder::LOAD(Type* Ty, Value* Ptr, const Twine& Name, JIT_MEM_CLIENT usage)
    {
        AssertMemoryUsageParams(Ptr, usage);
        return IRB()->CreateLoad(Ty, Ptr, Name);
    }
    // Volatile variant.
    LoadInst*
    PacketBuilder::LOAD(Value* Ptr, bool isVolatile, const Twine& Name, Type* Ty, JIT_MEM_CLIENT usage)
    {
        AssertMemoryUsageParams(Ptr, usage);
        return IRB()->CreateLoad(Ptr, isVolatile, Name);
    }
LoadInst* PacketBuilder::LOAD(Value* basePtr,
const std::initializer_list<uint32_t>& indices,
const llvm::Twine& name,
Type* Ty,
JIT_MEM_CLIENT usage)
{
std::vector<Value*> valIndices;
for (auto i : indices)
valIndices.push_back(C(i));
return PacketBuilder::LOAD(GEPA(basePtr, valIndices), name);
}
LoadInst* PacketBuilder::LOADV(Value* basePtr,
const std::initializer_list<Value*>& indices,
const llvm::Twine& name)
{
std::vector<Value*> valIndices;
for (auto i : indices)
valIndices.push_back(i);
return LOAD(GEPA(basePtr, valIndices), name);
}
StoreInst*
PacketBuilder::STORE(Value* val, Value* basePtr, const std::initializer_list<uint32_t>& indices)
{
std::vector<Value*> valIndices;
for (auto i : indices)
valIndices.push_back(C(i));
return STORE(val, GEPA(basePtr, valIndices));
}
StoreInst*
PacketBuilder::STOREV(Value* val, Value* basePtr, const std::initializer_list<Value*>& indices)
{
std::vector<Value*> valIndices;
for (auto i : indices)
valIndices.push_back(i);
return STORE(val, GEPA(basePtr, valIndices));
}
    // Advances a pointer by a constant element offset (plain single-index GEP).
    Value* PacketBuilder::OFFSET_TO_NEXT_COMPONENT(Value* base, Constant* offset)
    {
        return GEP(base, offset);
    }
Value* PacketBuilder::MEM_ADD(Value* i32Incr,
Value* basePtr,
const std::initializer_list<uint32_t>& indices,
const llvm::Twine& name)
{
Value* i32Value = LOAD(GEP(basePtr, indices), name);
Value* i32Result = ADD(i32Value, i32Incr);
return STORE(i32Result, GEP(basePtr, indices));
}
}
| 2,568 |
335 | <reponame>Safal08/Hacktoberfest-1
{
"word": "Bay",
"definitions": [
"(of a dog, especially a large one) bark or howl loudly.",
"(of a group of people) shout loudly, typically to demand something.",
"Bay at."
],
"parts-of-speech": "Verb"
} | 118 |
3,139 | <reponame>supertick/jmonkeyengine<filename>jme3-core/src/main/java/com/jme3/animation/SpatialTrack.java
/*
* Copyright (c) 2009-2020 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme3.animation;
import com.jme3.export.*;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.scene.Spatial;
import com.jme3.util.TempVars;
import com.jme3.util.clone.Cloner;
import com.jme3.util.clone.JmeCloneable;
import java.io.IOException;
/**
* This class represents the track for spatial animation.
*
* @author <NAME> (Kaelthas)
*/
@Deprecated
public class SpatialTrack implements JmeCloneable, Track {
    /**
     * Translations of the track, one entry per keyframe (compact storage).
     */
    private CompactVector3Array translations;
    /**
     * Rotations of the track, one entry per keyframe (compact storage).
     */
    private CompactQuaternionArray rotations;
    /**
     * Scales of the track, one entry per keyframe (compact storage).
     */
    private CompactVector3Array scales;
    /**
     * The spatial to which this track applies.
     * Note that this is optional, if no spatial is defined, the AnimControl's Spatial will be used.
     */
    private Spatial trackSpatial;
    /**
     * The time of each animation frame, parallel to the arrays above.
     */
    private float[] times;
    /**
     * No-arg constructor: creates an empty track. Keyframes must be supplied
     * via {@link #setKeyframes} before the track is played (setTime reads
     * the times array).
     */
    public SpatialTrack() {
    }
    /**
     * Creates a spatial track for the given track data.
     *
     * @param times
     *            a float array with the time of each frame
     * @param translations
     *            the translation of the bone for each frame
     * @param rotations
     *            the rotation of the bone for each frame
     * @param scales
     *            the scale of the bone for each frame
     */
    public SpatialTrack(float[] times, Vector3f[] translations,
                        Quaternion[] rotations, Vector3f[] scales) {
        setKeyframes(times, translations, rotations, scales);
    }
    /**
     * Modify the spatial which this track modifies, interpolating translation,
     * rotation and scale between the two keyframes bracketing the given time.
     *
     * @param time
     *            the current time of the animation
     * @param weight
     *            blend weight (not used by this track)
     * @param control
     *            supplies the animated spatial when no track spatial was set
     * @param channel
     *            the channel running the animation (not used by this track)
     * @param vars
     *            scratch storage for the interpolation temporaries
     */
    @Override
    public void setTime(float time, float weight, AnimControl control, AnimChannel channel, TempVars vars) {
        Spatial spatial = trackSpatial;
        if (spatial == null) {
            spatial = control.getSpatial();
        }
        Vector3f tempV = vars.vect1;
        Vector3f tempS = vars.vect2;
        Quaternion tempQ = vars.quat1;
        Vector3f tempV2 = vars.vect3;
        Vector3f tempS2 = vars.vect4;
        Quaternion tempQ2 = vars.quat2;
        int lastFrame = times.length - 1;
        // Before the first keyframe (or a single-frame track): clamp to frame 0.
        if (time < 0 || lastFrame == 0) {
            if (rotations != null)
                rotations.get(0, tempQ);
            if (translations != null)
                translations.get(0, tempV);
            if (scales != null) {
                scales.get(0, tempS);
            }
        // At or past the last keyframe: clamp to the final frame.
        } else if (time >= times[lastFrame]) {
            if (rotations != null)
                rotations.get(lastFrame, tempQ);
            if (translations != null)
                translations.get(lastFrame, tempV);
            if (scales != null) {
                scales.get(lastFrame, tempS);
            }
        } else {
            // Otherwise find the bracketing frames and interpolate between them.
            int startFrame = 0;
            int endFrame = 1;
            // use lastFrame so we never overflow the array
            for (int i = 0; i < lastFrame && times[i] < time; ++i) {
                startFrame = i;
                endFrame = i + 1;
            }
            float blend = (time - times[startFrame]) / (times[endFrame] - times[startFrame]);
            if (rotations != null)
                rotations.get(startFrame, tempQ);
            if (translations != null)
                translations.get(startFrame, tempV);
            if (scales != null) {
                scales.get(startFrame, tempS);
            }
            if (rotations != null)
                rotations.get(endFrame, tempQ2);
            if (translations != null)
                translations.get(endFrame, tempV2);
            if (scales != null) {
                scales.get(endFrame, tempS2);
            }
            // nlerp for rotations, linear interpolation for vectors.
            tempQ.nlerp(tempQ2, blend);
            tempV.interpolateLocal(tempV2, blend);
            tempS.interpolateLocal(tempS2, blend);
        }
        if (translations != null) {
            spatial.setLocalTranslation(tempV);
        }
        if (rotations != null) {
            spatial.setLocalRotation(tempQ);
        }
        if (scales != null) {
            spatial.setLocalScale(tempS);
        }
    }
/**
* Set the translations, rotations and scales for this track.
*
* @param times
* a float array with the time of each frame
* @param translations
* the translation of the bone for each frame
* @param rotations
* the rotation of the bone for each frame
* @param scales
* the scale of the bone for each frame
*/
public void setKeyframes(float[] times, Vector3f[] translations,
Quaternion[] rotations, Vector3f[] scales) {
if (times.length == 0) {
throw new RuntimeException("BoneTrack with no keyframes!");
}
this.times = times;
if (translations != null) {
assert times.length == translations.length;
this.translations = new CompactVector3Array();
this.translations.add(translations);
this.translations.freeze();
}
if (rotations != null) {
assert times.length == rotations.length;
this.rotations = new CompactQuaternionArray();
this.rotations.add(rotations);
this.rotations.freeze();
}
if (scales != null) {
assert times.length == scales.length;
this.scales = new CompactVector3Array();
this.scales.add(scales);
this.scales.freeze();
}
}
/**
* @return the array of rotations of this track
*/
public Quaternion[] getRotations() {
return rotations == null ? null : rotations.toObjectArray();
}
/**
* @return the array of scales for this track
*/
public Vector3f[] getScales() {
return scales == null ? null : scales.toObjectArray();
}
    /**
     * @return the keyframe time stamps of this track (the internal array,
     *         not a copy)
     */
    public float[] getTimes() {
        return times;
    }
/**
* @return the array of translations of this track
*/
public Vector3f[] getTranslations() {
return translations == null ? null : translations.toObjectArray();
}
/**
* @return the length of the track
*/
@Override
public float getLength() {
return times == null ? 0 : times[times.length - 1] - times[0];
}
    /**
     * Create a deep clone that shares the same track spatial.
     *
     * @return a new track
     */
    @Override
    public SpatialTrack clone() {
        Cloner cloner = new Cloner();
        // Map the track spatial onto itself so the cloned track aliases it
        // instead of receiving a deep copy.
        cloner.setClonedValue(trackSpatial, trackSpatial);
        return cloner.clone(this);
    }
    /**
     * @return the keyframe time stamps of this track (the internal array,
     *         not a copy)
     */
    @Override
    public float[] getKeyFrameTimes() {
        return times;
    }
    /**
     * Assign the spatial this track should animate (alias created).
     *
     * @param trackSpatial the spatial to animate, or null to fall back to the
     *                     control's spatial at playback time
     */
    public void setTrackSpatial(Spatial trackSpatial) {
        this.trackSpatial = trackSpatial;
    }
    /**
     * @return the spatial this track animates, or null if none was assigned
     */
    public Spatial getTrackSpatial() {
        return trackSpatial;
    }
    /**
     * Create a shallow clone for the JME cloner.
     *
     * @return a new track
     */
    @Override
    public SpatialTrack jmeClone() {
        try {
            return (SpatialTrack) super.clone();
        } catch (CloneNotSupportedException exception) {
            // Object.clone() cannot fail here since this class is Cloneable;
            // rethrow as unchecked to keep the signature clean.
            throw new RuntimeException("Can't clone track", exception);
        }
    }
    /**
     * Callback from {@link com.jme3.util.clone.Cloner} to convert this
     * shallow-cloned track into a deep-cloned one, using the specified cloner
     * to resolve copied fields.
     *
     * @param cloner the cloner currently cloning this control (not null)
     * @param original the track from which this track was shallow-cloned
     * (unused)
     */
    @Override
    public void cloneFields(Cloner cloner, Object original) {
        // Deep-clone every mutable field so the clone is fully independent
        // (trackSpatial may have been aliased via setClonedValue upstream).
        translations = cloner.clone(translations);
        rotations = cloner.clone(rotations);
        scales = cloner.clone(scales);
        trackSpatial = cloner.clone(trackSpatial);
        times = cloner.clone(times);
    }
    /**
     * Serialize this track to the given exporter.
     *
     * @param ex the exporter to write to (not null)
     * @throws IOException from the exporter
     */
    @Override
    public void write(JmeExporter ex) throws IOException {
        OutputCapsule oc = ex.getCapsule(this);
        oc.write(translations, "translations", null);
        oc.write(rotations, "rotations", null);
        oc.write(times, "times", null);
        oc.write(scales, "scales", null);
        oc.write(trackSpatial, "trackSpatial", null);
    }
    /**
     * De-serialize this track from the given importer.
     *
     * @param im the importer to read from (not null)
     * @throws IOException from the importer
     */
    @Override
    public void read(JmeImporter im) throws IOException {
        InputCapsule ic = im.getCapsule(this);
        translations = (CompactVector3Array) ic.readSavable("translations", null);
        rotations = (CompactQuaternionArray) ic.readSavable("rotations", null);
        times = ic.readFloatArray("times", null);
        scales = (CompactVector3Array) ic.readSavable("scales", null);
        trackSpatial = (Spatial) ic.readSavable("trackSpatial", null);
    }
}
| 4,861 |
2,338 | // RUN: %clang_cc1 -triple thumbv8m.base-eabi -fsyntax-only -ffreestanding %s -verify -mcmse
// RUN: %clang_cc1 -triple thumbv8m.base-eabi -fsyntax-only -ffreestanding -x c++ %s -verify -mcmse
// expected-no-diagnostics
#include <arm_cmse.h>
typedef void (*callback_t)(void);
/* Exercise every CMSE TT intrinsic from <arm_cmse.h> on a data pointer and
 * a function pointer. This is a compile-only test (see the RUN lines and
 * "expected-no-diagnostics" above); the intrinsic results are discarded. */
void func(callback_t fptr, void *p)
{
  cmse_TT(p);
  cmse_TTT(p);
  cmse_TTA(p);
  cmse_TTAT(p);
  cmse_TT_fptr(fptr);
  cmse_TTT_fptr(fptr);
  cmse_TTA_fptr(fptr);
  cmse_TTAT_fptr(fptr);
}
| 243 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Calenzana","circ":"2ème circonscription","dpt":"Haute-Corse","inscrits":1660,"abs":793,"votants":867,"blancs":36,"nuls":8,"exp":823,"res":[{"nuance":"REG","nom":"<NAME>","voix":681},{"nuance":"REM","nom":"<NAME>","voix":142}]} | 113 |
323 | package org.xbib.io.compress.bzip2;
import static org.junit.Assert.*;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import org.junit.Test;
/**
 * Tests BZip2BitInputStream: boolean (single-bit), unary, fixed-width bit
 * and 32-bit integer reads, plus end-of-stream error behaviour for each.
 */
public class BZip2BitInputStreamTests {
    // Boolean
    /**
     * Test reading 8 zeroes
     * @throws java.io.IOException
     */
    @Test
    public void testBooleanFalse8() throws IOException {
        byte[] testData = { 0 };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        for (int i = 0; i < 8; i++) {
            assertFalse (inputStream.readBoolean());
        }
    }
    /**
     * Test reading 8 ones
     * @throws java.io.IOException
     */
    @Test
    public void testBooleanTrue8() throws IOException {
        byte[] testData = { (byte)0xff };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        for (int i = 0; i < 8; i++) {
            assertTrue (inputStream.readBoolean());
        }
    }
    /**
     * Test reading a single 1 in any position as a boolean
     * @throws java.io.IOException
     */
    @Test
    public void testBooleanSingleOne() throws IOException {
        // Bits are read most-significant first, so bit i of the byte is the
        // i-th boolean returned.
        for (int i = 0; i < 8; i++) {
            byte[] testData = { (byte)(1 << (7 - i)) };
            BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
            for (int j = 0; j < 8; j++) {
                if (j == i) {
                    assertTrue (inputStream.readBoolean());
                } else {
                    assertFalse (inputStream.readBoolean());
                }
            }
        }
    }
    /**
     * Test reaching the end of the stream reading a boolean
     * @throws java.io.IOException
     */
    @Test(expected=IOException.class)
    public void testBooleanEndOfStream() throws IOException {
        byte[] testData = { };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        inputStream.readBoolean();
    }
    // Unary
    /**
     * Test reading unary 0 (a leading 0 bit)
     * @throws java.io.IOException
     */
    @Test
    public void testUnaryZero() throws IOException {
        byte[] testData = { 0x00 };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        assertEquals (0, inputStream.readUnary());
    }
    /**
     * Test reading unary 1 (one 1 bit, then a 0 terminator)
     * @throws java.io.IOException
     */
    @Test
    public void testUnaryOne() throws IOException {
        byte[] testData = { (byte)(1 << 7) };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        assertEquals (1, inputStream.readUnary());
    }
    /**
     * Test reading unary 31 (thirty-one 1 bits, then a 0 terminator)
     * @throws java.io.IOException
     */
    @Test
    public void testUnary31() throws IOException {
        byte[] testData = { (byte)0xff, (byte)0xff, (byte)0xff, (byte)0xfe };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        assertEquals (31, inputStream.readUnary());
    }
    /**
     * Test reaching the end of the stream reading a unary number
     * @throws java.io.IOException
     */
    @Test(expected=IOException.class)
    public void testUnaryEndOfStream() throws IOException {
        byte[] testData = { };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        inputStream.readUnary();
    }
    // Bits
    /**
     * Test reading a single 0 as bits
     * @throws java.io.IOException
     */
    @Test
    public void testBits1_0() throws IOException {
        byte[] testData = { (byte)0x00 };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        assertEquals (0, inputStream.readBits(1));
    }
    /**
     * Test reading a single 1 as bits
     * @throws java.io.IOException
     */
    @Test
    public void testBits1_1() throws IOException {
        byte[] testData = { (byte)(1 << 7) };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        assertEquals (1, inputStream.readBits(1));
    }
    /**
     * Test reading 23 bits
     * @throws java.io.IOException
     */
    @Test
    public void testBits23() throws IOException {
        byte[] testData = { 0x02, 0x03, 0x04 };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        // 23 of the 24 bits are consumed, so the result is the 24-bit value
        // shifted right by the one unread (least significant) bit.
        assertEquals (0x020304 >> 1, inputStream.readBits(23));
    }
    /**
     * Test reaching the end of the stream reading bits
     * @throws java.io.IOException
     */
    @Test(expected=IOException.class)
    public void testBitsEndOfStream() throws IOException {
        byte[] testData = { };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        inputStream.readBits(1);
    }
    // Integer
    /**
     * Test reading an integer
     * @throws java.io.IOException
     */
    @Test
    public void testInteger() throws IOException {
        byte[] testData = { 0x12, 0x34, 0x56, 0x78 };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        assertEquals (0x12345678, inputStream.readInteger());
    }
    /**
     * Test reaching the end of the stream reading an integer
     * @throws java.io.IOException
     */
    @Test(expected=IOException.class)
    public void testIntegerEndOfStream() throws IOException {
        byte[] testData = { };
        BZip2BitInputStream inputStream = new BZip2BitInputStream (new ByteArrayInputStream (testData));
        inputStream.readInteger();
    }
}
30,785 | package jadx.tests.integration.enums;
import org.junit.jupiter.api.Test;
import jadx.tests.api.IntegrationTest;
import static jadx.tests.api.utils.assertj.JadxAssertions.assertThat;
import static jadx.tests.integration.enums.TestEnums2a.TestCls.DoubleOperations.DIVIDE;
import static jadx.tests.integration.enums.TestEnums2a.TestCls.DoubleOperations.TIMES;
/**
 * Integration test: verifies that jadx decompiles enum constants that carry
 * constant-specific class bodies (TIMES/DIVIDE overriding apply) back into
 * the "CONSTANT(args) { ... }" form.
 *
 * NOTE: the assertions in test() match the decompiled output of TestCls, so
 * TestCls must not be reformatted or restructured.
 */
public class TestEnums2a extends IntegrationTest {
    public static class TestCls {
        public interface IOps {
            double apply(double x, double y);
        }
        public enum DoubleOperations implements IOps {
            TIMES("*") {
                @Override
                public double apply(double x, double y) {
                    return x * y;
                }
            },
            DIVIDE("/") {
                @Override
                public double apply(double x, double y) {
                    return x / y;
                }
            };
            private final String op;
            DoubleOperations(String op) {
                this.op = op;
            }
            public String getOp() {
                return op;
            }
        }
        public void check() {
            assertThat(TIMES.getOp()).isEqualTo("*");
            assertThat(DIVIDE.getOp()).isEqualTo("/");
            assertThat(TIMES.apply(2, 3)).isEqualTo(6);
            assertThat(DIVIDE.apply(10, 5)).isEqualTo(2);
        }
    }
    @Test
    public void test() {
        assertThat(getClassNode(TestCls.class))
                .code()
                .containsOne("TIMES(\"*\") {")
                .containsOne("DIVIDE(\"/\")");
    }
}
| 553 |
336 | """
File name: fcn
Author: <NAME>
Date created: 17.02.2019
Date last modified: 16:59 17.02.2019
Python Version: "3.6"
Copyright = "Copyright (C) 2018-2019 of Packt"
Credits = ["<NAME>, <NAME>"]
License = "MIT"
Version = "1.0.0"
Maintainer = "non"
Status = "Prototype" # "Prototype", "Development", or "Production"
"""
#==============================================================================
# Imported Modules
#==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.keras.models import Model
from tensorflow.keras.layers import *
from tensorflow.keras.applications.vgg16 import *
#==============================================================================
# Constant Definitions
#==============================================================================
#==============================================================================
# Function Definitions
#==============================================================================
def fcn_8s(image_size, ch_in=3, ch_out=3):
    """
    Build a FCN-8s Keras model, with the VGG-16 layers pretrained on ImageNet.
    :param image_size: Image size (H x W)
    :param ch_in: Number of input channels
    :param ch_out: Number of output channels
    :return: Keras model
    """
    inputs = Input(shape=(*image_size, ch_in), name='input')
    # Pre-trained VGG-16 backbone, classification head removed:
    backbone = VGG16(include_top=False, weights='imagenet', input_tensor=inputs)
    # Feature maps from the three deepest pooling stages:
    pool3 = backbone.get_layer('block3_pool').output  # shape: (28, 28, 256)
    pool4 = backbone.get_layer('block4_pool').output  # shape: (14, 14, 512)
    pool5 = backbone.get_layer('block5_pool').output  # shape: ( 7,  7, 512)
    # The VGG dense head is replaced by equivalent convolutions:
    x = Conv2D(filters=4086, kernel_size=7, padding='same',
               activation='relu')(pool5)
    x = Dropout(0.5)(x)
    x = Conv2D(filters=4086, kernel_size=1, padding='same',
               activation='relu')(x)
    x = Dropout(0.5)(x)
    score5 = Conv2D(filters=ch_out, kernel_size=1, padding='same',
                    activation=None)(x)
    # Up-scale the 7x7 score map to 14x14 (stride-2 transposed conv) so it
    # can be fused with the per-class scores computed from `pool4`:
    score5_up = Conv2DTranspose(filters=ch_out, kernel_size=4, strides=2,
                                use_bias=False, padding='same', activation='relu')(score5)
    score4 = Conv2D(filters=ch_out, kernel_size=1, padding='same',
                    activation=None)(pool4)
    # Merge the two score maps (element-wise addition):
    fused4 = add([score4, score5_up])
    # Repeat once more to fuse with the 28x28 scores from `pool3`:
    fused4_up = Conv2DTranspose(filters=ch_out, kernel_size=4, strides=2,
                                use_bias=False, padding='same', activation='relu')(fused4)
    score3 = Conv2D(filters=ch_out, kernel_size=1, padding='same',
                    activation=None)(pool3)
    fused3 = add([score3, fused4_up])
    # A final stride-8 transposed conv decodes back to the input resolution
    # (28 x 28 -> 224 x 224 for the default VGG input size):
    outputs = Conv2DTranspose(filters=ch_out, kernel_size=16, strides=8,
                              padding='same', activation=None, name='predictions')(fused3)
    return Model(inputs, outputs)
| 1,426 |
542 | <reponame>vlad-roid/OneSignal-Android-SDK
package com.onesignal;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteException;
import org.robolectric.annotation.Implements;
@Implements(OneSignalDbHelper.class)
public class ShadowOneSignalDbHelper {
   public static int DATABASE_VERSION;
   public static boolean ignoreDuplicatedFieldsOnUpgrade;
   private static OneSignalDbHelper sInstance;
   /** Reset all static state between tests. */
   public static void restSetStaticFields() {
      ignoreDuplicatedFieldsOnUpgrade = false;
      sInstance = null;
      DATABASE_VERSION = OneSignalDbHelper.DATABASE_VERSION;
   }
   /** @return the DB version the shadow reports (reset via restSetStaticFields). */
   public static int getDbVersion() {
      return DATABASE_VERSION;
   }
   /** Create the tables and indexes exactly as the real helper would. */
   public void onCreate(SQLiteDatabase db) {
      db.execSQL(OneSignalDbHelper.SQL_CREATE_ENTRIES);
      for (String ind : OneSignalDbHelper.SQL_INDEX_ENTRIES) {
         db.execSQL(ind);
      }
   }
   /** Lazily create and return the singleton helper for the application context. */
   public static synchronized OneSignalDbHelper getInstance(Context context) {
      if (sInstance == null)
         sInstance = new OneSignalDbHelper(context.getApplicationContext());
      return sInstance;
   }
   // Suppress errors related to duplicates when testing DB data migrations
   public static void safeExecSQL(SQLiteDatabase db, String sql) {
      try {
         db.execSQL(sql);
      } catch (SQLiteException e) {
         if (!ignoreDuplicatedFieldsOnUpgrade)
            throw e;
         // Fix: e.getCause() (and its message) may be null, which previously
         // produced a NullPointerException here instead of the SQLiteException.
         // Fall back to the exception's own message, and rethrow when no
         // duplicate-related text can be found.
         Throwable cause = e.getCause();
         String causeMsg = cause != null ? cause.getMessage() : e.getMessage();
         if (causeMsg == null
               || (!causeMsg.contains("duplicate") && !causeMsg.contains("already exists")))
            throw e;
      }
   }
}
| 601 |
1,190 | <filename>configs/repvgg/repvgg-B3g4_4xb64-autoaug-lbs-mixup-coslr-200e_in1k.py<gh_stars>1000+
# Inherit the full RepVGG-B3 training recipe and only swap the backbone
# architecture to the grouped-convolution variant B3g4.
_base_ = './repvgg-B3_4xb64-autoaug-lbs-mixup-coslr-200e_in1k.py'
model = dict(backbone=dict(arch='B3g4'))
| 107 |
5,937 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//+-----------------------------------------------------------------------------
//
//
// $TAG ENGR
// $Module: win_mil_graphics_camera
// $Keywords:
//
// $ENDTAG
//
//------------------------------------------------------------------------------
#include "precomp.hpp"
#include <cfenv>
using namespace dxlayer;
MtDefine(OrthographicCameraResource, MILRender, "OrthographicCamera Resource");
MtDefine(CMilOrthographicCameraDuce, OrthographicCameraResource, "CMilOrthographicCameraDuce");
// Destructor: unhook this camera from any resources it registered
// notifications with.
CMilOrthographicCameraDuce::~CMilOrthographicCameraDuce()
{
    UnRegisterNotifiers();
}
//+-----------------------------------------------------------------------------
//
//  Member:
//      CMilOrthographicCameraDuce::GetProjectionTransform
//
//  Synopsis:
//      Gets the projection matrix for this orthographic
//      camera.
//      NOTE: Uses near and far plane values given in arguments
//      rather than the ones stored in the camera data.
//
//  Returns:
//      Success if the matrix was retrieved successfully
//
//  NOTE assumes that the camera data structure is already synchronized with
//  any camera animations.
//
//------------------------------------------------------------------------------
/* override */ HRESULT CMilOrthographicCameraDuce::GetProjectionTransform(
    const double aspectRatio,
    const float flNearPlaneDistance,
    const float flFarPlaneDistance,
    __out_ecount(1) CMILMatrix *pProjectionMatrixOut) const
{
    HRESULT hr = S_OK;
    Assert(pProjectionMatrixOut);
    // The viewing-volume height follows from the authored camera width and
    // the viewport aspect ratio (width / height).
    double height = m_data.m_width/aspectRatio;
    *pProjectionMatrixOut =
        matrix::get_ortho_rh(
            static_cast<float>(m_data.m_width),
            static_cast<float>(height),
            flNearPlaneDistance,
            flFarPlaneDistance);
    RRETURN(hr);
}
//+-----------------------------------------------------------------------------
//
//  Member:
//      CMilOrthographicCameraDuce::GetViewTransform
//
//  Synopsis:
//      Gets the view matrix.
//
//  Returns:
//      Success if the matrix was retrieved successfully
//
//  NOTE: Assumes that the camera data structure is already
//  synchronized with any camera animations.
//
//  NOTE: We consider the Camera.Transform to be part of the
//  camera's World-to-View transform here.
//
//------------------------------------------------------------------------------
/* override */ HRESULT CMilOrthographicCameraDuce::GetViewTransform(
    __out_ecount(1) CMILMatrix *pViewMatrixOut) const
{
    HRESULT hr = S_OK;
    Assert(pViewMatrixOut);
    // NOTE(review): these reinterpret_casts assume the camera-data 3D point
    // types are layout-compatible with dxlayer's vector3 — confirm that
    // invariant is asserted where the types are declared.
    const vector3& position = *reinterpret_cast<const basetypes<dx_apiset>::vector3_base_t*>(&m_data.m_position);
    const vector3& look_direction = *reinterpret_cast<const basetypes<dx_apiset>::vector3_base_t*>(&m_data.m_lookDirection);
    const vector3& up = *reinterpret_cast<const basetypes<dx_apiset>::vector3_base_t*>(&m_data.m_upDirection);
    // The look-at target is one look-direction step from the camera position.
    vector3 lookAtPoint = position + look_direction;
    *pViewMatrixOut = matrix::get_lookat_rh(position, lookAtPoint, up);
    // Fold the authored Camera.Transform into the view matrix by prepending
    // its inverse.
    IFC(PrependInverseTransform(m_data.m_pTransform, pViewMatrixOut));
Cleanup:
    RRETURN(hr);
}
//+-----------------------------------------------------------------------------
//
//  Member:
//      CMilOrthographicCameraDuce::EnsureClippingPlaneDistance
//
//  Synopsis:
//      This method widens the given near and far planes to ensure that geometry
//      right on the clipping planes still renders.  It also enforces a
//      numerically stable minimal distance between the planes to handle edge
//      cases like the scene being entirely in a plane (i.e., nearPlane ==
//      farPlane)
//
//------------------------------------------------------------------------------
/* override */ HRESULT CMilOrthographicCameraDuce::EnsureClippingPlaneDistance(
    __inout_ecount(1) float &flNearPlane,
    __inout_ecount(1) float &flFarPlane
    ) const
{
    // If the near plane is farther than the far plane we consider the entire scene
    // to be clipped.  ApplyToContextState should have early exited.
    Assert(flNearPlane <= flFarPlane);
    // We need to do two adjustments to the scene depth
    // span before we can use it.
    // 1. We need to widen it if it is too small (like the
    //    scene is at one depth.)  Too small will cause
    //    the camera matrix to overflow and step 2 to
    //    fail.
    const float gamma = FLT_EPSILON * (fabs(flFarPlane) + fabs(flNearPlane));
    if (fabs(flFarPlane - flNearPlane) < 2 * gamma)
    {
        flNearPlane -= gamma;
        flFarPlane += gamma;
    }
    // 2. We need to widen it (regardless of size) so that
    //    geometry EXACTLY at the near and far renders.
    //    This step is different for PerspectiveCamera vs
    //    OrthographicCamera
    // First we compute the delta required to expand the planes so
    // they are at least FLT_EPSILON away from the geometry in
    // float percision on our CPU.  Because FLT_EPSILON is computed
    // at 1.0f we need to scale this by the magnitude of the near or
    // far plane, whichever is larger.  (Note that we want the
    // larger magnitude, not the magnitude of the larger value.)
    const float fpDelta = FLT_EPSILON * max(fabs(flNearPlane), fabs(flFarPlane));
    // Next we compute the delta required to expand the planes so
    // that geometry is projected to be at least FIXED_24_EPSILON
    // inside the 0..1 range in the 24-bit fixed point Z-Buffer.
    const float fixDelta = (flFarPlane - flNearPlane) * FIXED_24_EPSILON / (1 - 2 * FIXED_24_EPSILON);
    // We then use the larger of the deltas to extend our planes.
    //
    // NOTE: flNearPlane may end up slightly negative but that is fine in
    // an orthographic projection and it'll produce more predictable results
    // for items on the same plane as the camera position.
    const float delta = max(fpDelta, fixDelta);
    flNearPlane -= delta;
    flFarPlane += delta;
#if defined(DIRECTXMATH)
    // DirectXMath library requires that the distance between near and far
    // planes be at least 0.00001f.
    const float dxmath_epsilon = 0.00001f;
    if (std::abs(flFarPlane - flNearPlane) <= dxmath_epsilon)
    {
        // This is the value by which we'd want to advance the 'mid' point
        // in either direction to ensure that the condition
        // | flFarPlane - flNearPlane | <= 0.00001f is satisfied.
        const float dxmath_delta =
            0.000005f + std::numeric_limits<float>::epsilon();
        static_assert(std::numeric_limits<float>::is_iec559,
            "floating point assumptions here depend on conformance with the IEC 559 standard");
        // Calculate the next representable floating point value in each direction by
        // calling into std::nextafter.
        //
        // From the 'next' value, calculate the 'gap size'.This 'gap size' represents the
        // minimum noticeable floating-point change that can be made in either direction. Trivially,
        // std::numeric_limits<float>::epsilon() would be the 'gap size' for values in
        // the range [1.0f, 2.0f), and this gap-size for a given range would grow (exponentially)
        // with the magnitude of the values bracketing that range.
        // First, ensure that the values are not +/- infinity
        // This will ensure that we do not have to deal with overflow/underflow
        // conditions
        flFarPlane =
            ClampValue<float>(flFarPlane, std::numeric_limits<float>::lowest(), std::numeric_limits<float>::max());
        flNearPlane =
            ClampValue<float>(flNearPlane, std::numeric_limits<float>::lowest(), std::numeric_limits<float>::max());
        float mid = (flFarPlane / 2.0f + flNearPlane / 2.0f);
        float next_value_after_mid = std::nextafter(mid, std::numeric_limits<float>::max());
        float prev_value_before_mid = std::nextafter(mid, std::numeric_limits<float>::lowest());
        // if the 'gap size' is larger than our preferred delta (dxmath_delta), then
        // use the 'next' value obtained from std::nextafter to widen the distance between
        // the near and the far planes.Otherwise, use dxmath_delta to widen that distance.
        //
        // IF (dxmath_delta <= 'gap size')
        //    * dxmath_delta is too small to be perceptible in add/subtract *
        //    * operations. use the nextafter value                         *
        //    SET near/far plane = nextafter value
        // ELSE
        //    * dxmath_delta is sufficiently large to be perceptible in *
        //    * add/subtract operations                                 *
        //    SET near/far plane = near/far plane +/- dxmath_delta
        // ENDIF
        //
        // This can be implemented in a simplified manner as follows:
        flFarPlane = std::max(mid + dxmath_delta, next_value_after_mid);
        flNearPlane = std::min(mid - dxmath_delta, prev_value_before_mid);
        assert(std::abs(flFarPlane - flNearPlane) > dxmath_epsilon);
    }
#endif
    RRETURN(S_OK);
}
// Applies this camera to the render context state by delegating to the shared
// projection-camera implementation, supplying this camera's authored
// near/far plane distances alongside the (optionally) computed ones.
/* override */ HRESULT CMilOrthographicCameraDuce::ApplyToContextState(
    __inout_ecount(1) CContextState *pCtxState,     // Context state to modify
    const float flViewportWidth,
    const float flViewportHeight,
    const bool fUseComputedPlanes,
    const float flComputedNearPlane,
    const float flComputedFarPlane,
    __out_ecount(1) bool &fRenderRequiredOut
    ) const
{
    RRETURN(CMilProjectionCameraDuce::ApplyToContextState(
        pCtxState,
        flViewportWidth,
        flViewportHeight,
        static_cast<float>(m_data.m_nearPlaneDistance),
        static_cast<float>(m_data.m_farPlaneDistance),
        fUseComputedPlanes,
        flComputedNearPlane,
        flComputedFarPlane,
        /* out */ fRenderRequiredOut
        ));
}
| 3,600 |
415 | <filename>cli/src/pcluster/aws/sts.py<gh_stars>100-1000
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
from pcluster.aws.common import AWSExceptionHandler, Boto3Client, Cache
class StsClient(Boto3Client):
    """STS Boto3 client."""
    def __init__(self):
        super().__init__("sts")
    @AWSExceptionHandler.handle_client_exception
    @Cache.cached
    def get_account_id(self):
        """Return the AWS account id of the current caller via get_caller_identity.

        The result is cached, and AWS ClientErrors are translated by the
        AWSExceptionHandler decorator.
        """
        return self._client.get_caller_identity().get("Account")
| 326 |
364 | package com.jslsolucoes.nginx.admin.agent.model.response.virtual.host;
import com.jslsolucoes.nginx.admin.agent.model.FileObject;
import com.jslsolucoes.nginx.admin.agent.model.response.NginxResponse;
/**
 * Agent response carrying the content of a virtual-host configuration file
 * that was read from disk.
 */
public class NginxVirtualHostReadResponse implements NginxResponse {
    private FileObject fileObject;
    /** No-arg constructor for serialization frameworks. */
    public NginxVirtualHostReadResponse() {
    }
    public NginxVirtualHostReadResponse(FileObject fileObject) {
        this.fileObject = fileObject;
    }
    public FileObject getFileObject() {
        return fileObject;
    }
    public void setFileObject(FileObject fileObject) {
        this.fileObject = fileObject;
    }
}
| 226 |
406 | <filename>src/main/java/org/broad/igv/feature/PSLRecord.java
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.broad.igv.feature;
import org.broad.igv.feature.*;
/**
 * A single alignment record of a PSL (BLAT) file, exposed as a BasicFeature
 * together with the PSL-specific statistics (match/mismatch counts, gap
 * counts and sizes, plus the original record text).
 *
 * NOTE(review): the fields tSize, qNumInsert and tNumInsert have getters but
 * are never assigned in this class — confirm they are populated elsewhere or
 * remove them. Also, setQGapCount/setTGapCount take parameters named
 * QNumInsert/TNumInsert yet assign qGapCount/tGapCount; verify this mapping
 * is intended.
 *
 * @author jrobinso
 * Date: 11/29/12
 * Time: 6:59 PM
 */
public class PSLRecord extends BasicFeature {
    private int tSize;
    private int match;
    private int misMatch;
    private int repMatch;
    private int qNumInsert;
    private int tNumInsert;
    private int qGapCount;
    private int tGapCount;
    private int qSize;
    private int ns;
    private int qGapBases;
    private int tGapBases;
    // Original PSL line this record was parsed from.
    private String text;
    public void setMatch(int match) {
        this.match = match;
    }
    public void setMisMatch(int misMatch) {
        this.misMatch = misMatch;
    }
    public void setRepMatch(int repMatch) {
        this.repMatch = repMatch;
    }
    public void setQGapCount(int QNumInsert) {
        this.qGapCount = QNumInsert;
    }
    public void setTGapCount(int TNumInsert) {
        this.tGapCount = TNumInsert;
    }
    public void setQSize(int qSize) {
        this.qSize = qSize;
    }
    public void setNs(int ns) {
        this.ns = ns;
    }
    public void setQGapBases(int qGapBases) {
        this.qGapBases = qGapBases;
    }
    public void setTGapBases(int tGapBases) {
        this.tGapBases = tGapBases;
    }
    public int getTSize() {
        return tSize;
    }
    public int getMatch() {
        return match;
    }
    public int getMisMatch() {
        return misMatch;
    }
    public int getRepMatch() {
        return repMatch;
    }
    public int getQNumInsert() {
        return qNumInsert;
    }
    public int getTNumInsert() {
        return tNumInsert;
    }
    public int getQGapCount() {
        return qGapCount;
    }
    public int getTGapCount() {
        return tGapCount;
    }
    public int getqSize() {
        return qSize;
    }
    public int getNs() {
        return ns;
    }
    public int getQGapBases() {
        return qGapBases;
    }
    public int getTGapBases() {
        return tGapBases;
    }
    public void setText(String text) {
        this.text = text;
    }
    public String getText() {
        return text;
    }
}
| 1,296 |
1,281 | <reponame>mohitktanwr/toolkits
from .common import GenericTimmEncoder, make_n_channel_input_std_conv
# Public encoder classes exported by this module (NFNet F0..F7 backbones).
__all__ = [
    "NFNetF0Encoder",
    "NFNetF1Encoder",
    "NFNetF2Encoder",
    "NFNetF3Encoder",
    "NFNetF4Encoder",
    "NFNetF5Encoder",
    "NFNetF6Encoder",
    "NFNetF7Encoder",
]
class NFNetF0Encoder(GenericTimmEncoder):
    """NFNet-F0 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f0

        super().__init__(nfnet_f0(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF1Encoder(GenericTimmEncoder):
    """NFNet-F1 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f1

        super().__init__(nfnet_f1(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF2Encoder(GenericTimmEncoder):
    """NFNet-F2 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f2

        super().__init__(nfnet_f2(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF3Encoder(GenericTimmEncoder):
    """NFNet-F3 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f3

        super().__init__(nfnet_f3(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF4Encoder(GenericTimmEncoder):
    """NFNet-F4 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f4

        super().__init__(nfnet_f4(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF5Encoder(GenericTimmEncoder):
    """NFNet-F5 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f5

        super().__init__(nfnet_f5(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF6Encoder(GenericTimmEncoder):
    """NFNet-F6 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f6

        super().__init__(nfnet_f6(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
class NFNetF7Encoder(GenericTimmEncoder):
    """NFNet-F7 backbone wrapped as a generic timm feature encoder."""

    def __init__(self, pretrained=True, layers=None):
        from timm.models.nfnet import nfnet_f7

        super().__init__(nfnet_f7(pretrained=pretrained, features_only=True), layers)

    def change_input_channels(self, input_channels: int, mode="auto", **kwargs):
        """Rebuild the stem convolution to accept `input_channels` channels."""
        stem = make_n_channel_input_std_conv(self.encoder.stem_conv1, input_channels, mode, **kwargs)
        self.encoder.stem_conv1 = stem
        return self
| 1,871 |
#ifndef PYTHONIC_INCLUDE_NUMPY_ROT90_HPP
#define PYTHONIC_INCLUDE_NUMPY_ROT90_HPP

#include "pythonic/include/utils/functor.hpp"
#include "pythonic/include/utils/numpy_conversion.hpp"
#include "pythonic/include/types/ndarray.hpp"
#include "pythonic/include/numpy/copy.hpp"

PYTHONIC_NS_BEGIN

namespace numpy
{
  /// Declaration of the pythonic counterpart of numpy.rot90: rotate `expr`
  /// by 90 degrees `k` times (numpy semantics: in the plane of the first
  /// two axes). The return type preserves the input's rank, as encoded by
  /// the array<long, tuple_size<pS>> shape parameter.
  template <class T, class pS>
  types::ndarray<T, types::array<long, std::tuple_size<pS>::value>>
  rot90(types::ndarray<T, pS> const &expr, int k = 1);

  // Accept arbitrary numpy expressions by materializing them as ndarray first.
  NUMPY_EXPR_TO_NDARRAY0_DECL(rot90)
  // Expose the callable as pythonic::numpy::functor::rot90.
  DEFINE_FUNCTOR(pythonic::numpy, rot90);
}
PYTHONIC_NS_END

#endif
| 261 |
/**
 * Copyright 2019 The JoyQueue Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.joyqueue.service;

import org.joyqueue.model.domain.BaseModel;
import org.joyqueue.model.exception.RepositoryException;
import org.joyqueue.model.ListQuery;
import org.joyqueue.model.PageResult;
import org.joyqueue.model.QPageQuery;
import org.joyqueue.model.Query;

import java.util.List;

/**
 * Service interface supporting paged and list queries.
 * Created by chenyanying3 on 2018-10-15.
 *
 * @param <M> entity type handled by this service
 * @param <Q> query-condition type
 */
public interface PageService<M extends BaseModel, Q extends Query> extends Service<M> {

    /**
     * Paged query.
     *
     * @param query paging query conditions
     * @return one page of matching entities
     * @throws RepositoryException if the underlying repository access fails
     */
    PageResult<M> findByQuery(QPageQuery<Q> query) throws RepositoryException;

    /**
     * List query.
     *
     * @param query query conditions
     * @return all matching entities
     */
    List<M> findByQuery(ListQuery<Q> query);
}
| 515 |
380 | <reponame>corriganjeff/oxAuth
/*
* oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
*
* Copyright (c) 2014, Gluu
*/
package org.gluu.oxauth.interop;
import static org.testng.Assert.assertTrue;
import org.apache.commons.lang.StringUtils;
import org.gluu.oxauth.BaseTest;
import org.testng.annotations.Test;
/**
* OC5:FeatureTest-Support WebFinger Discovery
*
* @author <NAME>
* @version 0.9, 06/09/2014
*/
public class SupportWebFingerDiscovery extends BaseTest {
@Test
public void supportWebFingerDiscovery() {
showTitle("OC5:FeatureTest-Support WebFinger Discovery");
assertTrue(StringUtils.isNotBlank(configurationEndpoint));
}
} | 258 |
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.xml.schema.model.impl;

import org.netbeans.modules.xml.schema.model.ReferenceableSchemaComponent;
import org.netbeans.modules.xml.xam.dom.AbstractNamedComponentReference;
import org.netbeans.modules.xml.xam.dom.NamedComponentReference;

/**
 *
 * @author <NAME>
 * @author rico
 *
 * Represents global references. Provides additional information for referenced elements
 * such as its broken state and its changeability.
 */
public class GlobalReferenceImpl<T extends ReferenceableSchemaComponent> extends AbstractNamedComponentReference<T>
implements NamedComponentReference<T> {

    // Constructor used by the factory when the target component is already known.
    //factory uses this
    public GlobalReferenceImpl(T target, Class<T> cType, SchemaComponentImpl parent) {
        super(target, cType, parent);
    }

    // Constructor used when only the textual reference (QName string) is known;
    // the target is resolved lazily in get().
    //used by resolve methods
    public GlobalReferenceImpl(Class<T> classType, SchemaComponentImpl parent, String refString){
        super(classType, parent, refString);
    }

    /**
     * Resolves and returns the referenced component, caching the result.
     * Resolution asks the owning schema model for a component matching the
     * reference's QName and expected type.
     */
    public T get() {
        if (getReferenced() == null) {
            String namespace = getQName().getNamespaceURI();
            // QName reports "" for "no namespace"; the model lookup expects null.
            namespace = namespace.length() == 0 ? null : namespace;
            String localName = getLocalName();
            T target = ((SchemaComponentImpl)getParent()).getModel().resolve(namespace, localName, getType());
            // Cache the resolution; if it failed (null), the next call retries.
            setReferenced(target);
        }
        return getReferenced();
    }

    // Covariant override: the parent of a global reference is always a
    // SchemaComponentImpl in this implementation.
    public SchemaComponentImpl getParent() {
        return (SchemaComponentImpl) super.getParent();
    }

    // Effective namespace of the resolved target as seen by this schema model.
    // NOTE: triggers resolution via get().
    public String getEffectiveNamespace() {
        return getParent().getModel().getEffectiveNamespace(get());
    }
}
| 766 |
package com.xamarin.android;

/**
 * Derived class overriding {@code CallNonvirtualBase.method()}.
 * Records each invocation in {@link #methodInvoked} so callers (presumably
 * the JNI CallNonvirtual*Method test harness) can detect whether the
 * virtual or the non-virtual dispatch path ran.
 */
public class CallNonvirtualDerived extends CallNonvirtualBase {
    /** Set to true once {@link #method()} has executed on this instance. */
    public boolean methodInvoked;

    /** Logs the call, then flags that the derived override was invoked. */
    public void method () {
        System.out.println ("CallNonvirtualDerived.method() invoked!");
        methodInvoked = true;
    }
}
| 74 |
965 | <reponame>bobbrow/cpp-docs
// int nInitialWidth
// CString strCaption
if (!m_wndShortcutsBar.Create(strCaption, this,
CRect(0, 0, nInitialWidth, nInitialWidth),
ID_VIEW_OUTLOOKBAR, WS_CHILD | WS_VISIBLE | CBRS_LEFT))
{
TRACE0("Failed to create outlook bar\n");
return FALSE; // fail to create
} | 175 |
package org.assertj.android.support.v4.api.print;

import android.support.annotation.IntDef;
import android.support.v4.print.PrintHelper;

import java.lang.annotation.Retention;

import static java.lang.annotation.RetentionPolicy.SOURCE;

/**
 * Source-retention IntDef restricting an int parameter to the two
 * {@link PrintHelper} scale-mode constants, for lint-checked AssertJ APIs.
 */
@IntDef({
    PrintHelper.SCALE_MODE_FILL,
    PrintHelper.SCALE_MODE_FIT
})
@Retention(SOURCE)
@interface PrintHelperScaleMode {
}
| 128 |
#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreLocation/CoreLocation.h>

#import "MGLFoundation.h"

NS_ASSUME_NONNULL_BEGIN

/**
 An `MGLMapCamera` object represents a viewpoint from which the user observes
 some point on an `MGLMapView`.

 #### Related examples
 See the <a href="https://docs.mapbox.com/ios/maps/examples/camera-animation/">
 Camera animation</a> example to learn how to create a camera that rotates
 around a central point. See the <a href="https://docs.mapbox.com/ios/maps/examples/constraining-gestures/">
 Restrict map panning to an area</a> example to learn how to restrict map
 panning using `MGLMapViewDelegate`'s
 `-mapView:shouldChangeFromCamera:toCamera:` method.
 */
MGL_EXPORT
@interface MGLMapCamera : NSObject <NSSecureCoding, NSCopying>

// MARK: Configuring the viewpoint

/** Coordinate at the center of the map view. */
@property (nonatomic) CLLocationCoordinate2D centerCoordinate;

/** Heading measured in degrees clockwise from true north. */
@property (nonatomic) CLLocationDirection heading;

/**
 Pitch toward the horizon measured in degrees, with 0 degrees resulting in a
 two-dimensional map.
 */
@property (nonatomic) CGFloat pitch;

/**
 The altitude (measured in meters) above the map at which the camera is
 situated.

 The altitude is the distance from the viewpoint to the map, perpendicular to
 the map plane. This property does not account for physical elevation.

 This property’s value may be less than that of the `viewingDistance` property.
 Setting this property automatically updates the `viewingDistance` property
 based on the `pitch` property’s current value.
 */
@property (nonatomic) CLLocationDistance altitude;

/**
 The straight-line distance from the viewpoint to the `centerCoordinate`.

 Setting this property automatically updates the `altitude` property based on
 the `pitch` property’s current value.
 */
@property (nonatomic) CLLocationDistance viewingDistance;

// MARK: Creating camera objects

/** Returns a new camera with all properties set to 0. */
+ (instancetype)camera;

/**
 Returns a new camera based on information about the camera’s viewpoint
 and focus point.

 @param centerCoordinate The geographic coordinate on which the map should be
    centered.
 @param eyeCoordinate The geometric coordinate at which the camera should be
    situated.
 @param eyeAltitude The altitude (measured in meters) above the map at which the
    camera should be situated. The altitude may be less than the distance from
    the camera’s viewpoint to the camera’s focus point.
 */
+ (instancetype)cameraLookingAtCenterCoordinate:(CLLocationCoordinate2D)centerCoordinate
                              fromEyeCoordinate:(CLLocationCoordinate2D)eyeCoordinate
                                    eyeAltitude:(CLLocationDistance)eyeAltitude;

/**
 Returns a new camera with the given distance, pitch, and heading.

 This method interprets the distance as a straight-line distance from the
 viewpoint to the center coordinate. To specify the altitude of the viewpoint,
 use the `-cameraLookingAtCenterCoordinate:altitude:pitch:heading:` method.

 @param centerCoordinate The geographic coordinate on which the map should be
    centered.
 @param distance The straight-line distance from the viewpoint to the
    `centerCoordinate`.
 @param pitch The viewing angle of the camera, measured in degrees. A value of
    `0` results in a camera pointed straight down at the map. Angles greater
    than `0` result in a camera angled toward the horizon.
 @param heading The camera’s heading, measured in degrees clockwise from true
    north. A value of `0` means that the top edge of the map view corresponds to
    true north. The value `90` means the top of the map is pointing due east.
    The value `180` means the top of the map points due south, and so on.
 */
+ (instancetype)cameraLookingAtCenterCoordinate:(CLLocationCoordinate2D)centerCoordinate
                                 acrossDistance:(CLLocationDistance)distance
                                          pitch:(CGFloat)pitch
                                        heading:(CLLocationDirection)heading;

/**
 Returns a new camera with the given altitude, pitch, and heading.

 @param centerCoordinate The geographic coordinate on which the map should be
    centered.
 @param altitude The altitude (measured in meters) above the map at which the
    camera should be situated. The altitude may be less than the distance from
    the camera’s viewpoint to the camera’s focus point.
 @param pitch The viewing angle of the camera, measured in degrees. A value of
    `0` results in a camera pointed straight down at the map. Angles greater
    than `0` result in a camera angled toward the horizon.
 @param heading The camera’s heading, measured in degrees clockwise from true
    north. A value of `0` means that the top edge of the map view corresponds to
    true north. The value `90` means the top of the map is pointing due east.
    The value `180` means the top of the map points due south, and so on.
 */
+ (instancetype)cameraLookingAtCenterCoordinate:(CLLocationCoordinate2D)centerCoordinate
                                       altitude:(CLLocationDistance)altitude
                                          pitch:(CGFloat)pitch
                                        heading:(CLLocationDirection)heading;

/**
 @note This initializer incorrectly interprets the `distance` parameter. To
    specify the straight-line distance from the viewpoint to `centerCoordinate`,
    use the `-cameraLookingAtCenterCoordinate:acrossDistance:pitch:heading:`
    method. To specify the altitude of the viewpoint, use the
    `-cameraLookingAtCenterCoordinate:altitude:pitch:heading:` method, which has
    the same behavior as this initializer.
 */
+ (instancetype)cameraLookingAtCenterCoordinate:(CLLocationCoordinate2D)centerCoordinate
                                   fromDistance:(CLLocationDistance)distance
                                          pitch:(CGFloat)pitch
                                        heading:(CLLocationDirection)heading
__attribute__((deprecated("Use -cameraLookingAtCenterCoordinate:acrossDistance:pitch:heading: "
                          "or -cameraLookingAtCenterCoordinate:altitude:pitch:heading:.")));

// MARK: Comparing cameras

/**
 Returns a Boolean value indicating whether the given camera is functionally
 equivalent to the receiver.

 Unlike `-isEqual:`, this method returns `YES` if the difference between the
 coordinates, altitudes, pitches, or headings of the two camera objects is
 negligible.

 @param otherCamera The camera with which to compare the receiver.
 @return A Boolean value indicating whether the two cameras are functionally
    equivalent.
 */
- (BOOL)isEqualToMapCamera:(MGLMapCamera *)otherCamera;

@end

NS_ASSUME_NONNULL_END
| 2,162 |
945 | <reponame>0x17FEFE/tensorflow-ue4
#pragma once

#include "IAudioCaptureInterface.h"

// Windows implementation of IAudioCaptureInterface: captures audio on a
// background loop and streams raw byte chunks to registered callbacks.
class FWindowsAudioCapture : public IAudioCaptureInterface
{
public:
    FWindowsAudioCapture();

    // Begin capturing. OnAudioData receives each raw chunk plus a float level
    // (presumably the max amplitude — see CalculateMaxAudioLevel; confirm in
    // the .cpp); OnCaptureFinished fires once when the capture stops.
    virtual void StartCapture(TFunction<void(const TArray<uint8>&, float)> OnAudioData = nullptr, TFunction<void(const TArray<uint8>&, float)> OnCaptureFinished = nullptr) override;

    // Request the capture loop to stop (see bRunLoopActive).
    virtual void StopCapture() override;

    // Replace the current capture options.
    virtual void SetOptions(const FAudioCaptureOptions& InOptions) override;

    // Active capture configuration.
    FAudioCaptureOptions Options;

private:
    // Computes the peak level of a PCM buffer given its sample bit width.
    float CalculateMaxAudioLevel(TArray<uint8>& Buffer, int32 BitsPerSample);

    // Cleared by StopCapture to make the background loop exit.
    FThreadSafeBool bRunLoopActive;
};
507 | # tests/test_provider_TomTucka_circleci.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:13:55 UTC)
def test_provider_import():
    """Smoke test: the generated provider module is importable."""
    import terrascript.provider.TomTucka.circleci
def test_resource_import():
    """Smoke test: the generated resource classes are importable."""
    from terrascript.resource.TomTucka.circleci import circleci_environment_variable
    from terrascript.resource.TomTucka.circleci import circleci_project
# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
# def test_version_source():
#
# import terrascript.provider.TomTucka.circleci
#
# t = terrascript.provider.TomTucka.circleci.circleci()
# s = str(t)
#
# assert 'https://github.com/TomTucka/terraform-provider-circleci' in s
# assert '0.4.0' in s
| 287 |
353 | CSEXPORT void CSCONV Export_FAssetData_Get_TagsAndValues(FAssetData* instance, TArray<FName>& outTags, TArray<FString>& outValues)
{
for(const TPair<FName, FString>& Pair : instance->TagsAndValues)
{
outTags.Add(Pair.Key);
outValues.Add(Pair.Value);
}
}
// Thin one-line wrappers: each forwards to the corresponding FAssetData
// member so the managed runtime can bind it by name (see registration below).

CSEXPORT csbool CSCONV Export_FAssetData_IsValid(FAssetData* instance)
{
    return instance->IsValid();
}

CSEXPORT csbool CSCONV Export_FAssetData_IsUAsset(FAssetData* instance)
{
    return instance->IsUAsset();
}

CSEXPORT csbool CSCONV Export_FAssetData_IsRedirector(FAssetData* instance)
{
    return instance->IsRedirector();
}

// String results are returned through an out-parameter rather than by value.
CSEXPORT void CSCONV Export_FAssetData_GetFullName(FAssetData* instance, FString& result)
{
    result = instance->GetFullName();
}

CSEXPORT void CSCONV Export_FAssetData_GetExportTextName(FAssetData* instance, FString& result)
{
    result = instance->GetExportTextName();
}

// Registers every exported function above with the managed side.
CSEXPORT void CSCONV Export_FAssetData(RegisterFunc registerFunc)
{
    REGISTER_FUNC(Export_FAssetData_Get_TagsAndValues);
    REGISTER_FUNC(Export_FAssetData_IsValid);
    REGISTER_FUNC(Export_FAssetData_IsUAsset);
    REGISTER_FUNC(Export_FAssetData_IsRedirector);
    REGISTER_FUNC(Export_FAssetData_GetFullName);
    REGISTER_FUNC(Export_FAssetData_GetExportTextName);
}
1,014 | <filename>firmware/fifo.c
#include <fx2regs.h>
#include <fx2delay.h>
#include "glasgow.h"
// Bring the FX2 slave-FIFO interface into a known baseline state:
// enhanced packet handling on, interface bus disabled, all FIFOs NAKing,
// flag routing/polarity set, and an 8-bit FIFO data bus selected.
// Every register write is separated by SYNCDELAY because the FIFO/endpoint
// registers need settling time between accesses.
void fifo_init() {
  // Use newest chip features.
  SYNCDELAY;
  REVCTL = _ENH_PKT|_DYN_OUT;

  // Disable all FIFOs and bus.
  // The FIFO clock must stay enabled for FIFO registers to work.
  SYNCDELAY;
  IFCONFIG = _IFCLKSRC;
  SYNCDELAY;
  FIFORESET = _NAKALL;

  // Configure strobes and flags.
  // All flags are configured as RDY; this means ~EF for OUT endpoints and ~FF for IN endpoints.
  // SLRD and SLWR *must* be configured as active low; otherwise, when the FPGA I/Os are
  // internally pulled up during reset, spurious reads and writes will happen.
  SYNCDELAY;
  FIFOPINPOLAR = 0;
  SYNCDELAY;
  PINFLAGSAB = 0b10011000; // FLAGA = EP2 ~EF, FLAGB = EP4 ~EF
  SYNCDELAY;
  PINFLAGSCD = 0b11111110; // FLAGC = EP6 ~FF, FLAGD = EP8 ~FF
  SYNCDELAY;
  PORTACFG |= _FLAGD; // PA7 is FLAGD

  // Use 8-bit wide bus.
  SYNCDELAY;
  EP2FIFOCFG &= ~_WORDWIDE;
  SYNCDELAY;
  EP4FIFOCFG &= ~_WORDWIDE;
  SYNCDELAY;
  EP6FIFOCFG &= ~_WORDWIDE;
  SYNCDELAY;
  EP8FIFOCFG &= ~_WORDWIDE;
}
// Configure the endpoint layout.
// two_ep == true:  high-throughput mode — EP2 (OUT) and EP6 (IN) get all
//                  four buffers (quad buffered) and EP4/EP8 are disabled.
// two_ep == false: four-endpoint mode — EP2/EP6 double buffered, EP4/EP8
//                  enabled with their default buffering.
void fifo_configure(bool two_ep) {
  uint8_t ep26buf, ep48valid;
  if(two_ep) {
    ep26buf   = 0;      // quad buffered
    ep48valid = 0;      // invalid
  } else {
    ep26buf   = _BUF1;  // double buffered
    ep48valid = _VALID; // valid
  }

  // Disable all FIFOs while reconfiguring; _NAKALL stays set until the end.
  SYNCDELAY;
  FIFORESET = _NAKALL;

  // Configure EP2.
  SYNCDELAY;
  EP2CFG = _VALID|_TYPE1|ep26buf; // OUT BULK 512B

  // Configure EP4.
  SYNCDELAY;
  EP4CFG = ep48valid|_TYPE1; // OUT BULK 512B

  // Configure EP6.
  SYNCDELAY;
  EP6CFG = _VALID|_DIR|_TYPE1|ep26buf; // IN BULK 512B

  // Configure EP8.
  SYNCDELAY;
  EP8CFG = ep48valid|_DIR|_TYPE1; // IN BULK 512B

  // Reset and configure endpoints. In two-endpoint mode only interface 0
  // (EP2/EP6) exists; otherwise reset both interface pairs.
  fifo_reset(two_ep, two_ep ? 0x1 : 0x3);

  // Enable FIFOs (clears _NAKALL).
  SYNCDELAY;
  FIFORESET = 0;
}
// Reset the FIFOs of the selected interface pair(s).
// `interfaces` is a bitmask: bit 0 = EP2(OUT)/EP6(IN), bit 1 = EP4(OUT)/EP8(IN).
// In two-endpoint mode EP2 is quad buffered, so four stale packets must be
// skipped instead of two.
// The `FIFORESET |= n` writes deliberately use OR so that the _NAKALL bit
// set by the caller (see fifo_configure) stays asserted while an endpoint
// number is selected for reset.
void fifo_reset(bool two_ep, uint8_t interfaces) {
  // For the following code, note that for FIFORESET and OUTPKTEND to do anything,
  // the endpoints *must* be in manual mode (_AUTOIN/_AUTOOUT bits cleared).

  if(interfaces & (1 << 0)) {
    // Reset EP2OUT.
    SYNCDELAY;
    EP2FIFOCFG = 0;          // manual mode so FIFORESET/OUTPKTEND take effect
    SYNCDELAY;
    FIFORESET |= 2;
    SYNCDELAY;
    OUTPKTEND = _SKIP|2;     // discard any packets buffered before the reset
    SYNCDELAY;
    OUTPKTEND = _SKIP|2;
    if(two_ep) {
      // Quad buffering: two more buffers to skip.
      SYNCDELAY;
      OUTPKTEND = _SKIP|2;
      SYNCDELAY;
      OUTPKTEND = _SKIP|2;
    }
    SYNCDELAY;
    EP2FIFOCFG = _AUTOOUT;   // re-arm automatic OUT commit

    // Reset EP6IN.
    SYNCDELAY;
    EP6FIFOCFG = 0;
    SYNCDELAY;
    FIFORESET |= 6;
    SYNCDELAY;
    EP6FIFOCFG = _ZEROLENIN;
  }

  if(interfaces & (1 << 1)) {
    // Reset EP4OUT.
    SYNCDELAY;
    EP4FIFOCFG = 0;
    SYNCDELAY;
    FIFORESET |= 4;
    SYNCDELAY;
    OUTPKTEND = _SKIP|4;
    SYNCDELAY;
    OUTPKTEND = _SKIP|4;
    SYNCDELAY;
    EP4FIFOCFG = _AUTOOUT;

    // Reset EP8IN.
    SYNCDELAY;
    EP8FIFOCFG = 0;
    SYNCDELAY;
    FIFORESET |= 8;
    SYNCDELAY;
    EP8FIFOCFG = _ZEROLENIN;
  }
}
| 1,460 |
1,085 | <filename>dac/backend/src/main/java/com/dremio/dac/model/common/EnumTypeIdResolver.java
/*
 * Copyright (C) 2017-2019 Dremio Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.dremio.dac.model.common;

import static java.lang.String.format;

import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.databind.DatabindContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.jsontype.TypeIdResolver;
import com.fasterxml.jackson.databind.type.TypeFactory;

/**
 * Custom subtype mapping for types annotated with TypesEnum.
 *
 * Builds a bidirectional mapping between enum constant names and the concrete
 * subtype classes they denote, so Jackson can serialize a subtype as the enum
 * name and resolve it back during deserialization. Type ids are matched
 * case-insensitively.
 */
public class EnumTypeIdResolver implements TypeIdResolver {

  // Lowercased enum name -> Jackson type of the concrete subtype.
  private final Map<String, JavaType> nameToType = new HashMap<>();
  // Concrete subtype class -> enum name (serialization direction).
  private final Map<Class<?>, String> typeToName = new HashMap<>();
  // Human-readable dump of all mappings, used in error messages.
  private String description;
  private JavaType baseType;

  @Override
  public void init(JavaType baseType) {
    this.baseType = baseType;
    // Locate the TypesEnum annotation on the base class or its nearest
    // annotated ancestor. Guard against running off the top of the class
    // hierarchy: previously this loop dereferenced a null superclass and
    // threw a bare NullPointerException instead of the diagnostic below.
    Class<?> baseClass = baseType.getRawClass();
    TypesEnum typesEnum = baseClass.getAnnotation(TypesEnum.class);
    while (typesEnum == null && baseClass != null) {
      baseClass = baseClass.getSuperclass();
      typesEnum = (baseClass == null) ? null : baseClass.getAnnotation(TypesEnum.class);
    }
    if (typesEnum == null) {
      throw new NullPointerException("Missing annotation TypesEnum on " + baseType.getRawClass());
    }
    SubTypeMapping mapping = new SubTypeMapping(typesEnum);
    TypeFactory defaultInstance = TypeFactory.defaultInstance();
    StringBuilder sb = new StringBuilder();
    for (Enum<?> e : mapping.getEnumConstants()) {
      String name = e.name();
      String className = mapping.getClassName(e);
      try {
        // Load without initializing: we only need the Class object for typing.
        Class<?> c = Class.forName(className, false, this.getClass().getClassLoader());
        JavaType type = defaultInstance.uncheckedSimpleType(c);
        this.nameToType.put(name.toLowerCase(), type);
        this.typeToName.put(c, name);
        sb.append(name).append(" => ").append(c.getName()).append('\n');
      } catch (ClassNotFoundException e1) {
        throw new RuntimeException(String.format(
            "class not found %s for enum value %s for base type %s",
            className, name, baseType
        ) , e1);
      }
    }
    this.description = sb.toString();
  }

  @Override
  public String idFromValue(Object value) {
    return idFromValueAndType(value, value.getClass());
  }

  /**
   * Maps a concrete subtype class to its enum-name type id.
   * Throws (with the full mapping table) if the class was never registered.
   */
  @Override
  public String idFromValueAndType(Object value, Class<?> suggestedType) {
    String name = typeToName.get(suggestedType);
    if (name == null) {
      throw new NullPointerException(suggestedType + " " + String.valueOf(value) + "\n" + description);
    }
    return name;
  }

  @Override
  public String idFromBaseType() {
    throw new IllegalArgumentException("base type not serializable: " + baseType);
  }

  /** Resolves a (case-insensitive) enum-name id back to the concrete type. */
  @Override
  public JavaType typeFromId(DatabindContext context, String id) {
    JavaType type = nameToType.get(id.toLowerCase());
    if (type == null) {
      throw new NullPointerException(
          format("no subtype of %s found for enum value %s. existing mappings:\n%s",
              baseType, id, description));
    }
    return type;
  }

  @Override
  public String getDescForKnownTypeIds() {
    return description;
  }

  @Override
  public Id getMechanism() {
    return Id.CUSTOM;
  }

  @Override
  public String toString() {
    return "EnumTypeIdResolver{\n" + description + "}";
  }
}
| 1,380 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-m495-pg93-8h47",
"modified": "2022-05-13T01:48:38Z",
"published": "2022-05-13T01:48:38Z",
"aliases": [
"CVE-2018-1000511"
],
"details": "WP ULike version 2.8.1, 3.1 contains a Incorrect Access Control vulnerability in AJAX that can result in allows anybody to delete any row in certain tables. This attack appear to be exploitable via Attacker must make AJAX request. This vulnerability appears to have been fixed in 3.2.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N"
}
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2018-1000511"
},
{
"type": "WEB",
"url": "https://advisories.dxw.com/advisories/wp-ulike-delete-rows/"
}
],
"database_specific": {
"cwe_ids": [
"CWE-732"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 450 |
/**********************************************************
 * Copyright 1998-2009 VMware, Inc.  All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use, copy,
 * modify, merge, publish, distribute, sublicense, and/or sell copies
 * of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 **********************************************************/

/*
 * backdoor_def.h --
 *
 * This contains constants that define the backdoor I/O port, a
 * simple hypercall mechanism that is used by VMware Tools.
 */

#ifndef _BACKDOOR_DEF_H_
#define _BACKDOOR_DEF_H_

/* Magic value placed in EAX to identify a backdoor hypercall. */
#define BDOOR_MAGIC 0x564D5868

/* Low-bandwidth backdoor I/O port. */
#define BDOOR_PORT 0x5658

/* Command numbers (placed in the low 16 bits of ECX). */
#define BDOOR_CMD_GETMHZ 1
#define BDOOR_CMD_APMFUNCTION 2
#define BDOOR_CMD_GETDISKGEO 3
#define BDOOR_CMD_GETPTRLOCATION 4
#define BDOOR_CMD_SETPTRLOCATION 5
#define BDOOR_CMD_GETSELLENGTH 6
#define BDOOR_CMD_GETNEXTPIECE 7
#define BDOOR_CMD_SETSELLENGTH 8
#define BDOOR_CMD_SETNEXTPIECE 9
#define BDOOR_CMD_GETVERSION 10
#define BDOOR_CMD_GETDEVICELISTELEMENT 11
#define BDOOR_CMD_TOGGLEDEVICE 12
#define BDOOR_CMD_GETGUIOPTIONS 13
#define BDOOR_CMD_SETGUIOPTIONS 14
#define BDOOR_CMD_GETSCREENSIZE 15
#define BDOOR_CMD_MONITOR_CONTROL 16
#define BDOOR_CMD_GETHWVERSION 17
#define BDOOR_CMD_OSNOTFOUND 18
#define BDOOR_CMD_GETUUID 19
#define BDOOR_CMD_GETMEMSIZE 20
#define BDOOR_CMD_HOSTCOPY 21 /* Devel only */
#define BDOOR_CMD_SERVICE_VM 22 /* prototype only */
#define BDOOR_CMD_GETTIME 23 /* Deprecated. Use GETTIMEFULL. */
#define BDOOR_CMD_STOPCATCHUP 24
#define BDOOR_CMD_PUTCHR 25 /* Devel only */
#define BDOOR_CMD_ENABLE_MSG 26 /* Devel only */
#define BDOOR_CMD_GOTO_TCL 27 /* Devel only */
#define BDOOR_CMD_INITPCIOPROM 28
#define BDOOR_CMD_INT13 29
#define BDOOR_CMD_MESSAGE 30
#define BDOOR_CMD_RSVD0 31
#define BDOOR_CMD_RSVD1 32
#define BDOOR_CMD_RSVD2 33
#define BDOOR_CMD_ISACPIDISABLED 34
#define BDOOR_CMD_TOE 35 /* Not in use */
#define BDOOR_CMD_ISMOUSEABSOLUTE 36
#define BDOOR_CMD_PATCH_SMBIOS_STRUCTS 37
#define BDOOR_CMD_MAPMEM 38 /* Devel only */
#define BDOOR_CMD_ABSPOINTER_DATA 39
#define BDOOR_CMD_ABSPOINTER_STATUS 40
#define BDOOR_CMD_ABSPOINTER_COMMAND 41
#define BDOOR_CMD_TIMER_SPONGE 42
#define BDOOR_CMD_PATCH_ACPI_TABLES 43
#define BDOOR_CMD_DEVEL_FAKEHARDWARE 44 /* Debug only - needed in beta */
#define BDOOR_CMD_GETHZ 45
#define BDOOR_CMD_GETTIMEFULL 46
#define BDOOR_CMD_STATELOGGER 47
#define BDOOR_CMD_CHECKFORCEBIOSSETUP 48
#define BDOOR_CMD_LAZYTIMEREMULATION 49
#define BDOOR_CMD_BIOSBBS 50
#define BDOOR_CMD_VASSERT 51
#define BDOOR_CMD_ISGOSDARWIN 52
#define BDOOR_CMD_DEBUGEVENT 53
#define BDOOR_CMD_OSNOTMACOSXSERVER 54
#define BDOOR_CMD_GETTIMEFULL_WITH_LAG 55
#define BDOOR_CMD_ACPI_HOTPLUG_DEVICE 56
#define BDOOR_CMD_ACPI_HOTPLUG_MEMORY 57
#define BDOOR_CMD_ACPI_HOTPLUG_CBRET 58
#define BDOOR_CMD_GET_HOST_VIDEO_MODES 59
#define BDOOR_CMD_ACPI_HOTPLUG_CPU 60
#define BDOOR_CMD_MAX 61 /* One past the last valid command. */

/*
 * High-bandwidth backdoor port.
 */

#define BDOORHB_PORT 0x5659

#define BDOORHB_CMD_MESSAGE 0
#define BDOORHB_CMD_VASSERT 1
#define BDOORHB_CMD_MAX 2

/*
 * There is another backdoor which allows access to certain TSC-related
 * values using otherwise illegal PMC indices when the pseudo_perfctr
 * control flag is set.
 */

#define BDOOR_PMC_HW_TSC 0x10000
#define BDOOR_PMC_REAL_NS 0x10001
#define BDOOR_PMC_APPARENT_NS 0x10002

/* Matches 0x10000-0x10003: the three pseudo-PMCs above plus one spare slot. */
#define IS_BDOOR_PMC(index) (((index) | 3) == 0x10003)

/* Extract the 16-bit command number from ECX. */
#define BDOOR_CMD(ecx) ((ecx) & 0xffff)

#endif /* _BACKDOOR_DEF_H_ */
| 2,275 |
package net.ripe.db.whois.scheduler.task.export;

import net.ripe.db.whois.common.rpsl.ObjectType;
import org.junit.jupiter.api.Test;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.MatcherAssert.assertThat;

/** Unit tests for the export filename strategies. */
public class FilenameStrategyTest {
    // Single-file exports always use the bare database name.
    @Test
    public void getFilename_SingleFile() {
        final FilenameStrategy subject = new FilenameStrategy.SingleFile();
        assertThat(subject.getFilename(ObjectType.MNTNER), is("ripe.db"));
    }

    // Split-file exports append the object type to the database name.
    @Test
    public void getFilename_SplitFile() {
        final FilenameStrategy subject = new FilenameStrategy.SplitFile();
        assertThat(subject.getFilename(ObjectType.MNTNER), is("ripe.db.mntner"));
    }
}
| 263 |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "third_party/blink/renderer/core/paint/svg_foreign_object_painter.h"

#include "base/optional.h"
#include "third_party/blink/renderer/core/layout/svg/layout_svg_foreign_object.h"
#include "third_party/blink/renderer/core/paint/block_painter.h"
#include "third_party/blink/renderer/core/paint/paint_info.h"
#include "third_party/blink/renderer/core/paint/paint_layer.h"
#include "third_party/blink/renderer/core/paint/paint_layer_painter.h"
#include "third_party/blink/renderer/core/paint/paint_timing.h"
#include "third_party/blink/renderer/core/paint/scoped_svg_paint_state.h"
#include "third_party/blink/renderer/core/svg/svg_element.h"
#include "third_party/blink/renderer/platform/graphics/paint/display_item_cache_skipper.h"

namespace blink {

// Paints the <foreignObject>'s self-painting layer. Only runs for the
// foreground and selection-drag-image phases; all other phases are no-ops.
void SVGForeignObjectPainter::PaintLayer(const PaintInfo& paint_info) {
  if (paint_info.phase != PaintPhase::kForeground &&
      paint_info.phase != PaintPhase::kSelectionDragImage)
    return;

  // Early out in the case of trying to paint an image filter before
  // pre-paint has finished.
  if (!layout_svg_foreign_object_.FirstFragment().HasLocalBorderBoxProperties())
    return;

  // TODO(crbug.com/797779): For now foreign object contents don't know whether
  // they are painted in a fragmented context and may do something bad in a
  // fragmented context, e.g. creating subsequences. Skip cache to avoid that.
  // This will be unnecessary when the contents are fragment aware.
  base::Optional<DisplayItemCacheSkipper> cache_skipper;
  if (layout_svg_foreign_object_.Layer()->Parent()->EnclosingPaginationLayer())
    cache_skipper.emplace(paint_info.context);

  // <foreignObject> is a replaced normal-flow stacking element.
  // See IsReplacedNormalFlowStacking in paint_layer_painter.cc.
  PaintLayerPaintingInfo layer_painting_info(
      layout_svg_foreign_object_.Layer(),
      // Reset to an infinite cull rect, for simplicity. Otherwise
      // an adjustment would be needed for ancestor scrolling, and any
      // SVG transforms would have to be taken into account. Further,
      // cull rects under transform are intentionally reset to infinity,
      // to improve cache invalidation performance in the pre-paint tree
      // walk (see https://http://crrev.com/482854).
      CullRect::Infinite(), paint_info.GetGlobalPaintFlags(), PhysicalOffset());
  PaintLayerPainter(*layout_svg_foreign_object_.Layer())
      .Paint(paint_info.context, layer_painting_info, paint_info.PaintFlags());
}

// Paints the foreign object's block contents (masks/clips applied via
// ScopedSVGPaintState) and records first-contentful-paint timing.
void SVGForeignObjectPainter::Paint(const PaintInfo& paint_info) {
  // ScopedSVGPaintState only applies masks (and clips-within-clips) here.
  ScopedSVGPaintState paint_state(layout_svg_foreign_object_, paint_info);
  PaintTiming& timing =
      PaintTiming::From(layout_svg_foreign_object_.GetDocument());
  timing.MarkFirstContentfulPaint();
  BlockPainter(layout_svg_foreign_object_).Paint(paint_info);
}

}  // namespace blink
| 1,017 |
15,666 | <reponame>FeryET/pytorch-lightning<gh_stars>1000+
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import operator
from pytorch_lightning.utilities import _module_available
from pytorch_lightning.utilities.imports import _compare_version
def test_module_exists():
    """Check that `_module_available` detects present and absent modules."""
    # Top-level and nested modules that exist.
    assert _module_available("torch")
    assert _module_available("torch.nn.parallel")
    # Missing submodule of an existing package, and fully missing packages.
    assert not _module_available("torch.nn.asdf")
    assert not _module_available("asdf")
    assert not _module_available("asdf.bla.asdf")
def test_compare_version(monkeypatch):
    """Check `_compare_version` against patched torch version strings."""
    from pytorch_lightning.utilities.imports import torch

    # Plain release version: ordinary comparisons apply.
    monkeypatch.setattr(torch, "__version__", "1.8.9")
    assert not _compare_version("torch", operator.ge, "1.10.0")
    assert _compare_version("torch", operator.lt, "1.10.0")

    # Dev suffix: exact comparison is suffix-sensitive unless
    # use_base_version strips the ".devN" part first.
    monkeypatch.setattr(torch, "__version__", "1.10.0.dev123")
    assert _compare_version("torch", operator.ge, "1.10.0.dev123")
    assert not _compare_version("torch", operator.ge, "1.10.0.dev124")

    assert _compare_version("torch", operator.ge, "1.10.0.dev123", use_base_version=True)
    assert _compare_version("torch", operator.ge, "1.10.0.dev124", use_base_version=True)

    # Alpha build (dev version before rc): only comparable as a base version.
    monkeypatch.setattr(torch, "__version__", "1.10.0a0+0aef44c")  # dev version before rc
    assert _compare_version("torch", operator.ge, "1.10.0.rc0", use_base_version=True)
    assert not _compare_version("torch", operator.ge, "1.10.0.rc0")
    assert _compare_version("torch", operator.ge, "1.10.0", use_base_version=True)
    assert not _compare_version("torch", operator.ge, "1.10.0")
| 755 |
2,603 | <gh_stars>1000+
#include <errno.h>
/*
 * Stub: faccessat(2) is not provided on this platform. Always fails and
 * reports ENOSYS so callers can detect the missing system call.
 */
int _faccessat(int dirfd, const char *file, int mode, int flags) {
  (void)dirfd; /* unused */
  (void)file;  /* unused */
  (void)mode;  /* unused */
  (void)flags; /* unused */
  errno = ENOSYS;
  return -1;
}
| 62 |
5,169 | {
"name": "ExternalLib",
"version": "0.0.1",
"summary": "A collection of iOS Libs",
  "description": "A collection of iOS Libs. For code management.",
"homepage": "https://bitbucket.org/APPLYD/podtest",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"APPLYD": "<EMAIL>"
},
"source": {
"git": "https://bitbucket.org/APPLYD/podtest.git",
"tag": "0.0.1"
},
"platforms": {
"ios": null
},
"source_files": "ExternalLib/MBProgressHUD/*.{h,m}",
"frameworks": "CoreGraphics",
"requires_arc": true
}
| 236 |
656 | <reponame>AlexDLSy/canmatrix
#!/usr/bin/env python3
# Decode one CAN frame using signal definitions loaded from a CAN matrix file.
import canmatrix.formats
import sys
# command line options...
usage = """
%prog [options] matrix frame
matrixX can be any of *.dbc|*.dbf|*.kcd|*.arxml
frame is AAA#YYYYYYYYYYYYYYYY or
BBBBB#YYYYYYYYYYYYYYYY or
where AAA is standard ID and BBBBB is extended ID
"""
# Require both a matrix file path and a frame argument.
if len(sys.argv) < 3:
    print(usage)
    sys.exit(1)
# load matrix
db = canmatrix.formats.loadp_flat(sys.argv[1])
# load frame data from argv
# Frame argument has the candump-style form <hex id>#<hex payload>.
frame_string = sys.argv[2]
(arbitration_id_string, hexdata) = frame_string.split('#')
# set arbitration_id
# IDs of up to 3 hex digits (<= 11 bits) are treated as standard frames,
# longer IDs as extended frames.
if len(arbitration_id_string) <= 3:
    arbitration_id = canmatrix.ArbitrationId(int(arbitration_id_string, 16), extended = False)
else:
    # extended frame
    arbitration_id = canmatrix.ArbitrationId(int(arbitration_id_string, 16), extended = True)
# find frame to given arbitration_id
frame = db.frame_by_id(arbitration_id)
can_data = bytearray.fromhex(hexdata)
# decode frame
decoded = frame.decode(can_data)
#print decoded signals
# One line per signal: name, raw value in hex, and physical value in parens.
for (signal, value) in decoded.items():
    print (signal + "\t" + hex(value.raw_value) + "\t(" + str(value.phys_value)+ ")")
| 451 |
372 | <filename>clients/google-api-services-apigee/v1/1.31.0/com/google/api/services/apigee/v1/model/GoogleCloudApigeeV1DeveloperSubscription.java
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.apigee.v1.model;
/**
 * Structure of a DeveloperSubscription.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Apigee API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class GoogleCloudApigeeV1DeveloperSubscription extends com.google.api.client.json.GenericJson {
  // NOTE(review): generated code -- regenerate rather than hand-editing.
  // All *At/*Time fields are milliseconds since the Unix epoch (per their
  // javadoc) and are serialized as JSON strings (see @JsonString annotations).
  /**
   * Name of the API product for which the developer is purchasing a subscription.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String apiproduct;
  /**
   * Output only. Time when the API product subscription was created in milliseconds since epoch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long createdAt;
  /**
   * Time when the API product subscription ends in milliseconds since epoch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long endTime;
  /**
   * Output only. Time when the API product subscription was last modified in milliseconds since
   * epoch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long lastModifiedAt;
  /**
   * Output only. Name of the API product subscription.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;
  /**
   * Time when the API product subscription starts in milliseconds since epoch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long startTime;
  /**
   * Name of the API product for which the developer is purchasing a subscription.
   * @return value or {@code null} for none
   */
  public java.lang.String getApiproduct() {
    return apiproduct;
  }
  /**
   * Name of the API product for which the developer is purchasing a subscription.
   * @param apiproduct apiproduct or {@code null} for none
   */
  public GoogleCloudApigeeV1DeveloperSubscription setApiproduct(java.lang.String apiproduct) {
    this.apiproduct = apiproduct;
    return this;
  }
  /**
   * Output only. Time when the API product subscription was created in milliseconds since epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getCreatedAt() {
    return createdAt;
  }
  /**
   * Output only. Time when the API product subscription was created in milliseconds since epoch.
   * @param createdAt createdAt or {@code null} for none
   */
  public GoogleCloudApigeeV1DeveloperSubscription setCreatedAt(java.lang.Long createdAt) {
    this.createdAt = createdAt;
    return this;
  }
  /**
   * Time when the API product subscription ends in milliseconds since epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getEndTime() {
    return endTime;
  }
  /**
   * Time when the API product subscription ends in milliseconds since epoch.
   * @param endTime endTime or {@code null} for none
   */
  public GoogleCloudApigeeV1DeveloperSubscription setEndTime(java.lang.Long endTime) {
    this.endTime = endTime;
    return this;
  }
  /**
   * Output only. Time when the API product subscription was last modified in milliseconds since
   * epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getLastModifiedAt() {
    return lastModifiedAt;
  }
  /**
   * Output only. Time when the API product subscription was last modified in milliseconds since
   * epoch.
   * @param lastModifiedAt lastModifiedAt or {@code null} for none
   */
  public GoogleCloudApigeeV1DeveloperSubscription setLastModifiedAt(java.lang.Long lastModifiedAt) {
    this.lastModifiedAt = lastModifiedAt;
    return this;
  }
  /**
   * Output only. Name of the API product subscription.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }
  /**
   * Output only. Name of the API product subscription.
   * @param name name or {@code null} for none
   */
  public GoogleCloudApigeeV1DeveloperSubscription setName(java.lang.String name) {
    this.name = name;
    return this;
  }
  /**
   * Time when the API product subscription starts in milliseconds since epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getStartTime() {
    return startTime;
  }
  /**
   * Time when the API product subscription starts in milliseconds since epoch.
   * @param startTime startTime or {@code null} for none
   */
  public GoogleCloudApigeeV1DeveloperSubscription setStartTime(java.lang.Long startTime) {
    this.startTime = startTime;
    return this;
  }
  @Override
  public GoogleCloudApigeeV1DeveloperSubscription set(String fieldName, Object value) {
    return (GoogleCloudApigeeV1DeveloperSubscription) super.set(fieldName, value);
  }
  @Override
  public GoogleCloudApigeeV1DeveloperSubscription clone() {
    return (GoogleCloudApigeeV1DeveloperSubscription) super.clone();
  }
}
| 1,928 |
1,056 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.core.startup.preferences;
import java.util.prefs.Preferences;
import java.util.prefs.PreferencesFactory;
/**
 * Factory registered with {@code java.util.prefs} so the JDK Preferences API
 * is backed by the NetBeans implementation ({@link NbPreferences}).
 *
 * @author <NAME>
 */
public class NbPreferencesFactory implements PreferencesFactory {
    // System property consulted by java.util.prefs when selecting a factory.
    private static final String FACTORY = "java.util.prefs.PreferencesFactory";//NOI18N

    /** Creates a new instance */
    public NbPreferencesFactory() {}

    @Override
    public Preferences userRoot() {
        return NbPreferences.userRootImpl();
    }

    @Override
    public Preferences systemRoot() {
        return NbPreferences.systemRootImpl();
    }

    /**
     * Installs this factory via the system property, unless another
     * PreferencesFactory was already configured.
     */
    public static void doRegistration() {
        if (System.getProperty(FACTORY) == null) {
            System.setProperty(FACTORY, NbPreferencesFactory.class.getName());
        }
    }
}
| 484 |
1,029 | <reponame>suzhenyu006/sonic-agent<filename>src/main/java/org/cloud/sonic/agent/tools/file/FileTool.java<gh_stars>1000+
package org.cloud.sonic.agent.tools.file;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* @author ZhouYiXun
* @des 压缩文件
* @date 2022/3/29 23:38
*/
public class FileTool {
public static void zip(File result, File inputFile) throws IOException {
ZipOutputStream out = new ZipOutputStream(new FileOutputStream(
result.getAbsoluteFile()));
zip(out, inputFile, "");
out.close();
}
public static void zip(ZipOutputStream out, File f, String base) throws IOException {
if (f.isDirectory()) {
File[] fl = f.listFiles();
out.putNextEntry(new ZipEntry(base + "/"));
base = base.length() == 0 ? "" : base + "/";
for (int i = 0; i < fl.length; i++) {
zip(out, fl[i], base + fl[i]);
}
} else {
out.putNextEntry(new ZipEntry(base));
FileInputStream in = new FileInputStream(f);
int b;
while ((b = in.read()) != -1) {
out.write(b);
}
in.close();
}
}
public static void deleteDir(File file) {
if (!file.exists()) {
return;
}
File[] files = file.listFiles();
for (File f : files) {
if (f.isDirectory()) {
deleteDir(f);
} else {
f.delete();
}
}
file.delete();
}
}
| 823 |
14,668 | <gh_stars>1000+
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_INSTALLER_GCAPI_MAC_GCAPI_H_
#define CHROME_INSTALLER_GCAPI_MAC_GCAPI_H_
// Error conditions for GoogleChromeCompatibilityCheck().
// The |reasons| out-parameter is a bitmask; several bits may be set at once.
#define GCCC_ERROR_ALREADYPRESENT (1 << 0)  // Chrome is already installed.
#define GCCC_ERROR_ACCESSDENIED (1 << 1)    // access denied
#define GCCC_ERROR_OSNOTSUPPORTED (1 << 2)  // OS not supported
#define GCCC_ERROR_ALREADYOFFERED (1 << 3)  // Chrome was already offered.
#define GCCC_ERROR_INTEGRITYLEVEL (1 << 4)  // integrity-level problem
#ifdef __cplusplus
extern "C" {
#endif
// This function returns nonzero if Google Chrome should be offered.
// If the return value is 0, |reasons| explains why. If you don't care for the
// reason, you can pass nullptr for |reasons|.
int GoogleChromeCompatibilityCheck(unsigned* reasons);
// This function installs Google Chrome in the application folder and optionally
// sets up the brand code and master prefs.
// |source_path| Path to an uninstalled Google Chrome.app directory, for example
//               in a mounted dmg, in file system representation.
// |brand_code| If not nullptr, a string containing the brand code Google Chrome
//              should use. Has no effect if Google Chrome has an embedded brand
//              code. Overwrites existing brand files.
// |master_prefs_contents| If not nullptr, the _contents_ of a master prefs file
//                         Google Chrome should use. This is not a path.
//                         Overwrites existing master pref files.
// Returns nonzero if Google Chrome was successfully copied. If copying
// succeeded but writing of master prefs, brand code, or other noncrucial
// setup tasks fail, this still returns nonzero.
// Returns 0 if the installation failed, for example if Google Chrome was
// already installed, or no disk space was left.
int InstallGoogleChrome(const char* source_path,
                        const char* brand_code,
                        const char* master_prefs_contents,
                        unsigned master_prefs_contents_size);
// This function launches Google Chrome after a successful install, or it does
// a best-effort search to launch an existing installation if
// InstallGoogleChrome() returned GCCC_ERROR_ALREADYPRESENT.
int LaunchGoogleChrome();
#ifdef __cplusplus
}  // extern "C"
#endif
#endif  // CHROME_INSTALLER_GCAPI_MAC_GCAPI_H_
| 813 |
346 | <reponame>rhyep/Python_tutorials
# Fixed: the Python-2-only print *statement* is a SyntaxError under Python 3.
# Calling print() with a single argument behaves identically on both versions.
print("Time to conquer the variance!")
| 24 |
488 | /*
*
* Copyright (c) International Business Machines Corp., 2001
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* Test Name: lseek10
*
* Test Description:
* Verify that,
* 1. lseek() returns -1 and sets errno to ESPIPE, if the file handle of
* the specified file is associated with a pipe, socket, or FIFO.
* 2. lseek() returns -1 and sets errno to EINVAL, if the 'Whence' argument
* is not a proper value.
* 3. lseek() returns -1 and sets errno to EBADF, if the file handle of
* the specified file is not valid.
*
* Expected Result:
* lseek() should fail with return value -1 and set expected errno.
*
* Algorithm:
* Setup:
* Setup signal handling.
* Create temporary directory.
* Pause for SIGUSR1 if option specified.
*
* Test:
* Loop if the proper options are given.
* Execute system call
* Check return code, if system call failed (return=-1)
* if errno set == expected errno
* Issue sys call fails with expected return value and errno.
* Otherwise,
* Issue sys call fails with unexpected errno.
* Otherwise,
* Issue sys call returns unexpected value.
*
* Cleanup:
* Print errno log and/or timing stats if options given
* Delete the temporary directory(s)/file(s) created.
*
* Usage: <for command-line>
* lseek10 [-c n] [-e] [-i n] [-I x] [-p x] [-t]
* where, -c n : Run n copies concurrently.
* -e : Turn on errno logging.
* -i n : Execute test n times.
* -I x : Execute test for x seconds.
* -P x : Pause for x seconds between iterations.
* -t : Turn on syscall timing.
*
* HISTORY
* 07/2001 Ported by <NAME>
*
* RESTRICTIONS:
* None.
*/
#include <stdio.h>
#include <unistd.h>
#include <sys/types.h>
#include <errno.h>
#include <unistd.h>
#include <fcntl.h>
#include <utime.h>
#include <string.h>
#include <sys/stat.h>
#include <signal.h>
#include "test.h"
#include "usctest.h"
#define TEMP_FILE1 "tmp_file1"
#define TEMP_FILE2 "tmp_file2"
#define TEMP_FILE3 "tmp_file3"
#define FILE_MODE S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH
#define PIPE_MODE S_IFIFO | S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH
#define SEEK_TOP 10
char *TCID = "lseek10"; /* Test program identifier. */
int TST_TOTAL = 3; /* Total number of test cases. */
extern int Tst_count; /* Test Case counter for tst_* routines */
int exp_enos[] = { ESPIPE, EINVAL, EBADF, 0 };
int no_setup();
int setup1(); /* setup function to test lseek() for ESPIPE */
int setup2(); /* setup function to test lseek() for EINVAL */
int setup3(); /* setup function to test lseek() for EBADF */
int fd1; /* file handle for testfile1 */
int fd2; /* file handle for testfile2 */
int fd3; /* file handle for testfile3 */
struct test_case_t {		/* test case struct. to hold ref. test cond's */
	int fd;			/* symbolic id 1..3, mapped to fd1/fd2/fd3 in main() */
	int Whence;		/* 'whence' value handed to lseek(2) */
	char *desc;		/* human-readable description of the condition */
	int exp_errno;		/* errno lseek(2) is expected to set */
	int (*setupfunc) ();	/* per-case setup run once from setup() */
} Test_cases[] = {
	/* terminated by the all-zero / NULL-desc sentinel entry */
	{
	1, SEEK_SET, "'fd' associated with a pipe/fifo", ESPIPE, setup1}, {
	2, SEEK_TOP, "'whence' argument is not valid", EINVAL, setup2}, {
	3, SEEK_SET, "'fd' is not an open file descriptor", EBADF, setup3},
	{
	0, 0, NULL, 0, no_setup}
};
void setup(); /* Main setup function of test */
void cleanup(); /* cleanup function for the test */
/* Drives every entry of Test_cases: calls lseek(2) under the failure
 * condition and checks both the -1 return and the expected errno. */
int main(int ac, char **av)
{
	int lc;			/* loop counter */
	char *msg;		/* message returned from parse_opts */
	int fildes;		/* file handle for testfile */
	int whence;		/* position of file handle in the file */
	char *test_desc;	/* test specific error message */
	int ind;		/* counter to test different test conditions */
	/* Parse standard options given to run the test. */
	msg = parse_opts(ac, av, (option_t *) NULL, NULL);
	if (msg != (char *)NULL) {
		tst_brkm(TBROK, NULL, "OPTION PARSING ERROR - %s", msg);
		tst_exit();
	}
	/* Perform global setup for test */
	setup();
	/* set the expected errnos... */
	TEST_EXP_ENOS(exp_enos);
	/* Check looping state if -i option given */
	for (lc = 0; TEST_LOOPING(lc); lc++) {
		/* Reset Tst_count in case we are looping. */
		Tst_count = 0;
		for (ind = 0; Test_cases[ind].desc != NULL; ind++) {
			fildes = Test_cases[ind].fd;
			test_desc = Test_cases[ind].desc;
			whence = Test_cases[ind].Whence;
			/* Assign the 'fd' values appropriatly */
			/* (map symbolic id 1/2/3 to the real fds from setup) */
			if (fildes == 1) {
				fildes = fd1;
			} else if (fildes == 2) {
				fildes = fd2;
			} else {
				fildes = fd3;
			}
			/*
			 * Invoke lseek(2) to test different test conditions.
			 * Verify that it fails with -1 return value and
			 * sets appropriate errno.
			 */
			TEST(lseek(fildes, 0, whence));
			/* check return code of lseek(2) */
			if (TEST_RETURN != (off_t) - 1) {
				tst_resm(TFAIL, "lseek() returned %ld, expected "
					 "-1, errno:%d", TEST_RETURN,
					 Test_cases[ind].exp_errno);
				continue;
			}
			TEST_ERROR_LOG(TEST_ERRNO);
			if (TEST_ERRNO == Test_cases[ind].exp_errno) {
				tst_resm(TPASS, "lseek() fails, %s, errno:%d",
					 test_desc, TEST_ERRNO);
			} else {
				tst_resm(TFAIL, "lseek() fails, %s, errno:%d, "
					 "expected errno:%d", test_desc,
					 TEST_ERRNO, Test_cases[ind].exp_errno);
			}
		}
	}
	/* Call cleanup() to undo setup done for the test. */
	cleanup();
	/*NOTREACHED*/ return 0;
}				/* End main */
/*
* setup() - performs all ONE TIME setup for this test.
* Create a temporary directory and change directory to it.
* Invoke individual test setup functions according to the order
* set in test struct. definition.
*/
void setup()
{
	int ind;		/* counter for test setup function */
	/* capture signals */
	tst_sig(NOFORK, DEF_HANDLER, cleanup);
	/* Pause if that option was specified */
	TEST_PAUSE;
	/* make a temp directory and cd to it */
	tst_tmpdir();
	/* call individual setup functions */
	/* (runs setup1..setup3 in Test_cases order; each opens its own fd) */
	for (ind = 0; Test_cases[ind].desc != NULL; ind++) {
		Test_cases[ind].setupfunc();
	}
}
/*
* no_setup() - This is a dummy function which simply returns 0.
*/
int no_setup()
{
	/* Placeholder setupfunc for the terminating Test_cases entry. */
	return 0;
}
/*
* setup1() - setup function for a test condition for which lseek(2)
* returns -1 and sets errno to ESPIPE.
* Creat a named pipe/fifo using mknod() and open it for
* reading/writing.
* This function returns 0 on success.
*/
/*
 * setup1() - setup for the ESPIPE case: creates a FIFO and opens it
 * read/write into the global fd1. Returns 0 on success.
 *
 * Fix: the mknod() failure message previously printed FILE_MODE although
 * the call uses PIPE_MODE; the diagnostic now reports the mode actually
 * passed to mknod().
 */
int setup1()
{
	/* Creat a named pipe/fifo using mknod() */
	if (mknod(TEMP_FILE1, PIPE_MODE, 0) < 0) {
		tst_brkm(TBROK, cleanup,
			 "mknod(%s, %#o, 0) Failed, errno=%d :%s",
			 TEMP_FILE1, PIPE_MODE, errno, strerror(errno));
	}
	/* Open the named pipe/fifo for reading/writing */
	if ((fd1 = open(TEMP_FILE1, O_RDWR)) < 0) {
		tst_brkm(TBROK, cleanup,
			 "open(%s, O_RDWR) Failed, errno=%d, :%s",
			 TEMP_FILE1, errno, strerror(errno));
	}
	return 0;
}
/*
* setup2() - setup function for a test condition for which lseek(2)
* returns -1 and sets errno to EINVAL.
* Creat a temporary file for reading/writing and write some data
* into it.
* This function returns 0 on success.
*/
int setup2()
{
	char write_buff[BUFSIZ];	/* buffer to hold data */
	/* Get the data to be written to temporary file */
	strcpy(write_buff, "abcdefg");
	/* Creat/open a temporary file under above directory */
	if ((fd2 = open(TEMP_FILE2, O_RDWR | O_CREAT, FILE_MODE)) == -1) {
		tst_brkm(TBROK, cleanup,
			 "open(%s, O_RDWR|O_CREAT, %#o) Failed, errno=%d :%s",
			 TEMP_FILE2, FILE_MODE, errno, strerror(errno));
	}
	/* Write data into temporary file */
	/* NOTE: writes the whole BUFSIZ buffer, not just "abcdefg". */
	if (write(fd2, write_buff, sizeof(write_buff)) <= 0) {
		tst_brkm(TBROK, cleanup,
			 "write(2) on %s Failed, errno=%d : %s",
			 TEMP_FILE2, errno, strerror(errno));
	}
	return 0;
}
/*
* setup3() - setup function for a test condition for which lseek(2)
* returns -1 and sets errno to EBADF.
* Creat a temporary file for reading/writing and close it.
* This function returns 0 on success.
*/
int setup3()
{
	/* Creat/open a temporary file under above directory */
	if ((fd3 = open(TEMP_FILE3, O_RDWR | O_CREAT, FILE_MODE)) == -1) {
		tst_brkm(TBROK, cleanup,
			 "open(%s, O_RDWR|O_CREAT, %#o) Failed, errno=%d :%s",
			 TEMP_FILE3, FILE_MODE, errno, strerror(errno));
	}
	/* Close the temporary file created above */
	/* (fd3 is deliberately left closed: the EBADF case needs a stale fd) */
	if (close(fd3) < 0) {
		tst_brkm(TBROK, cleanup,
			 "close(%s) Failed, errno=%d : %s:",
			 TEMP_FILE3, errno, strerror(errno));
	}
	return 0;
}
/*
* cleanup() - performs all ONE TIME cleanup for this test at
* completion or premature exit.
* Remove the test directory and testfile(s) created in the setup.
*/
void cleanup()
{
	/*
	 * print timing stats if that option was specified.
	 * print errno log if that option was specified.
	 */
	TEST_CLEANUP;
	/* Close the temporary file(s) created in setup1/setup2 */
	/* (fd3 is not closed here: setup3 already closed it on purpose) */
	if (close(fd1) < 0) {
		tst_brkm(TFAIL, NULL,
			 "close(%s) Failed, errno=%d : %s:",
			 TEMP_FILE1, errno, strerror(errno));
	}
	if (close(fd2) < 0) {
		tst_brkm(TFAIL, NULL,
			 "close(%s) Failed, errno=%d : %s:",
			 TEMP_FILE2, errno, strerror(errno));
	}
	/* Remove tmp dir and all files in it */
	tst_rmdir();
	/* exit with return code appropriate for results */
	tst_exit();
}
| 3,837 |
8,586 | /* Copyright (c) 2020 vesoft inc. All rights reserved.
*
* This source code is licensed under Apache 2.0 License.
*/
#pragma once
#include <boost/endian/conversion.hpp>
#include "common/base/Base.h"
namespace nebula {
namespace geo {
// Byte-order tag. The numeric values intentionally match what
// ByteOrderData::getMachineByteOrder() reads from memory (0 / 1).
enum class ByteOrder : uint8_t {
  BigEndian = 0,
  LittleEndian = 1,
};
// Helpers for reading/writing fixed-width integers and doubles from a raw
// byte buffer in an explicitly chosen byte order.
struct ByteOrderData {
  // Returns the machine's byte order by inspecting the lowest-addressed byte
  // of an int with value 1 (0 on big-endian, 1 on little-endian).
  static ByteOrder getMachineByteOrder() {
    static int endianCheck = 1;
    return static_cast<ByteOrder>(
        *(reinterpret_cast<char *>(&endianCheck)));  // 0 for BigEndian, 1 for LittleEndian
  }

  // Reads a 32-bit unsigned integer from |buf| honoring |byteOrder|.
  static uint32_t getUint32(const uint8_t *buf, ByteOrder byteOrder) {
    if (byteOrder == ByteOrder::BigEndian) {
      return boost::endian::load_big_u32(buf);
    } else {
      DCHECK(byteOrder == ByteOrder::LittleEndian);
      return boost::endian::load_little_u32(buf);
    }
  }

  // Reads a 64-bit unsigned integer from |buf| honoring |byteOrder|.
  static uint64_t getUint64(const uint8_t *buf, ByteOrder byteOrder) {
    if (byteOrder == ByteOrder::BigEndian) {
      return boost::endian::load_big_u64(buf);
    } else {
      DCHECK(byteOrder == ByteOrder::LittleEndian);
      return boost::endian::load_little_u64(buf);
    }
  }

  // Reads an IEEE-754 double from |buf| honoring |byteOrder|.
  static double getDouble(const uint8_t *buf, ByteOrder byteOrder) {
    uint64_t v = getUint64(buf, byteOrder);
    double ret;
    std::memcpy(&ret, &v, sizeof(double));
    return ret;
  }

  // Writes a 32-bit unsigned integer into |buf| honoring |byteOrder|.
  static void putUint32(uint8_t *buf, ByteOrder byteOrder, uint32_t v) {
    if (byteOrder == ByteOrder::BigEndian) {
      boost::endian::store_big_u32(buf, v);
    } else {
      DCHECK(byteOrder == ByteOrder::LittleEndian);
      boost::endian::store_little_u32(buf, v);
    }
  }

  // Writes a 64-bit unsigned integer into |buf| honoring |byteOrder|.
  static void putUint64(uint8_t *buf, ByteOrder byteOrder, uint64_t v) {
    if (byteOrder == ByteOrder::BigEndian) {
      boost::endian::store_big_u64(buf, v);
    } else {
      DCHECK(byteOrder == ByteOrder::LittleEndian);
      boost::endian::store_little_u64(buf, v);
    }
  }

  // Writes an IEEE-754 double into |buf| honoring |byteOrder|.
  static void putDouble(uint8_t *buf, ByteOrder byteOrder, double v) {
    // Fix: obtain the double's bit pattern with std::memcpy, mirroring
    // getDouble(). The previous reinterpret_cast of the double's storage to
    // const uint64_t* violated strict aliasing (undefined behavior).
    uint64_t bits;
    std::memcpy(&bits, &v, sizeof(double));
    putUint64(buf, byteOrder, bits);
  }
};
} // namespace geo
} // namespace nebula
| 861 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.